file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
tgsrv.go | package main
import (
"bytes"
"crypto/md5"
"encoding/hex"
"encoding/json"
"io/ioutil"
"log"
"net/url"
"strconv"
"strings"
"sort"
"golang.org/x/net/html"
"golang.org/x/net/publicsuffix"
"github.com/azhinu/Telefeed/httputils"
"github.com/azhinu/Telefeed/params"
"github.com/azhinu/Telefeed/vkapi"
"github.com/mmcdole/gofeed"
"github.com/go-telegram-bot-api/telegram-bot-api"
)
var (
bot *tgbotapi.BotAPI
)
func catch(e error) {
if e != nil {
log.Println(e.Error)
}
}
func main() |
func subs2cmds(subs map[string]bool) map[string]string {
var cmds = make(map[string]string)
for k, _ := range subs {
log.Println(k)
if strings.Contains(k, params.PubNames) {
cmd := "delete https://vk.com/" + strings.Replace(k, params.PubNames, "", -1)
key := "delete" + k
cmds[key] = cmd
}
if strings.Contains(k, params.Feed) {
b := httputils.HttpGet(params.Api+k, nil)
if b != nil {
cmd := "delete " + string(b)
key := "delete" + k
cmds[key] = cmd
}
}
}
log.Println("cmds:", cmds)
return cmds
}
func createButtons(buttonsCmds map[string]string) tgbotapi.InlineKeyboardMarkup {
var buttons [][]tgbotapi.InlineKeyboardButton
var keys []string
for k := range buttonsCmds {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
choice := buttonsCmds[k]
cleanedChoice := strings.TrimSpace(choice)
cleanedChoice = strings.Replace(cleanedChoice, "\n", "", -1)
button := tgbotapi.NewInlineKeyboardButtonData(cleanedChoice, k)
buttons = append(buttons, tgbotapi.NewInlineKeyboardRow(button))
}
buttonCancel := tgbotapi.NewInlineKeyboardButtonData("Cancel", "cancel")
buttons = append(buttons, tgbotapi.NewInlineKeyboardRow(buttonCancel))
buttonsRow := tgbotapi.NewInlineKeyboardMarkup(buttons...)
return buttonsRow
}
func userNew(user *tgbotapi.User) bool {
urlUsr := params.Users + strconv.Itoa(user.ID)
log.Println("userNew", urlUsr)
b, _ := json.Marshal(user)
httputils.HttpPut(params.UserName+user.UserName, nil, b)
res := httputils.HttpPut(urlUsr, nil, b)
//telefeedbot
if user.ID > 0 {
pubSubTgAdd(146445941, "telefeedbot", nil, false, int64(user.ID))
}
return res
}
func channelNew(chat *tgbotapi.Chat) bool {
url := params.Users + strconv.FormatInt(chat.ID, 10)
log.Println("channelNew", url)
b, _ := json.Marshal(chat)
httputils.HttpPut(params.UserName+chat.UserName, nil, b)
return httputils.HttpPut(url, nil, b)
}
func pubFind(msg *tgbotapi.Message, txt string, userid int64) {
log.Println("pubFind")
var delete = false
var tmp = strings.Replace(txt, "\n", " ", -1)
tmp = strings.Replace(tmp, "\r", "", -1)
tmp = strings.TrimSpace(tmp)
words := strings.Split(tmp, " ")
for i := range words {
var word = strings.TrimSpace(words[i])
if word == "delete" || word == "Delete" {
delete = true
continue
}
if strings.HasPrefix(word, "@") {
chanName := strings.Replace(word, "@", "", -1)
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
userChannelsUrl := params.Channels + strconv.FormatInt(userid, 10)
userChannelsbody := httputils.HttpGet(userChannelsUrl, nil)
userChannels := make(map[int64]*tgbotapi.Chat)
json.Unmarshal(userChannelsbody, &userChannels)
if userChannels[chat.ID] != nil {
userid = chat.ID
} else {
bot.Send(tgbotapi.NewMessage(userid, chanName+" not yours"))
}
}
continue
}
if strings.HasPrefix(word, "http") == false {
//default sheme is https
word = "https://" + word
}
urls, err := url.Parse(word)
if err != nil {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Rss feed on domain:'"+word+"'\n"+params.NotFound+params.Example))
return
}
mainDomain, _ := publicsuffix.EffectiveTLDPlusOne(urls.Host)
switch mainDomain {
case "t.me":
parts := strings.Split(urls.Path, "/")
if len(parts) > 1 {
channelName := "@" + parts[len(parts)-1]
m := tgbotapi.NewMessageToChannel(channelName, "Ok")
m.DisableWebPagePreview = true
reply, err := bot.Send(m)
if err != nil {
s := err.Error()
if strings.Contains(s, "orbidden") {
m := tgbotapi.NewMessage(msg.Chat.ID, "Add @telefeedbot as admin 2 channel: "+channelName)
bot.Send(m)
} else {
m := tgbotapi.NewMessage(msg.Chat.ID, s)
bot.Send(m)
}
} else {
channel := reply.Chat
addChannel(msg.Chat.ID, channel, false)
}
}
case "twitter.com":
parts := strings.Split(urls.Path, "/")
for _, part := range parts {
if part != "" {
findFeed("https://twitrss.me/twitter_user_to_rss/?user="+part, msg, delete, userid)
}
}
case "instagram.com":
parts := strings.Split(urls.Path, "/")
for _, part := range parts {
if part != "" {
findFeed("https://web.stagram.com/rss/n/"+part, msg, delete, userid)
}
}
case "vk.com":
parts := strings.Split(urls.Path, "/")
for j := range parts {
if parts[j] != "" {
domain := parts[j]
log.Println(domain)
groupDb := pubDbGet(domain)
if groupDb.Gid == 0 {
// public not found
groups := vkapi.GroupsGetById(domain)
if len(groups) > 0 {
// we have group
groupVk := groups[0]
// save group to DB
if pubDbSet(groupVk) {
// new group set
pubSubTgAdd(groupVk.Gid, groupVk.ScreenName, msg, delete, userid)
} else {
// group not set
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Error create domain:'"+domain+"'"))
}
} else {
// group not found
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Error vk domain:'"+domain+"'"+" not found"))
}
} else {
// public exists
pubSubTgAdd(groupDb.Gid, groupDb.ScreenName, msg, delete, userid)
}
}
}
default:
findFeed(word, msg, delete, userid)
}
}
}
func addChannel(userId int64, channel *tgbotapi.Chat, isDelete bool) {
if channel == nil {
return
}
url := params.Channels + strconv.FormatInt(userId, 10)
body := httputils.HttpGet(url, nil)
channels := make(map[int64]*tgbotapi.Chat)
json.Unmarshal(body, &channels)
channels[channel.ID] = channel
delete(channels, channel.ID)
if !isDelete {
channels[channel.ID] = channel
}
log.Println("channels ", channels)
data, err := json.Marshal(channels)
if err == nil {
result := httputils.HttpPut(url, nil, data)
if result == true {
if isDelete {
bot.Send(tgbotapi.NewMessage(userId, "👍 Removed: "+channel.UserName+"\n\n"))
} else {
//add channel as User
if channelNew(channel) {
bot.Send(tgbotapi.NewMessage(userId, channel.UserName+" 👍\n\nUse /channels for list of channels\n\nSend @"+
channel.UserName+" http://url for add url 2 channel"))
}
}
}
}
}
func findFeed(word string, msg *tgbotapi.Message, isDelete bool, userid int64) {
log.Println("word", word)
var feedlink = getFeedLink(word)
if feedlink == "" {
log.Println("feedlink", feedlink)
rss := rssExtract(word)
if rss != "" {
log.Println("rss", rss)
feedlink = getFeedLink(rss)
log.Println("feedlink", feedlink)
}
}
if feedlink != "" {
feedkey := GetMD5Hash(feedlink)
//create feed or overwrite
httputils.HttpPut(params.Feeds+feedkey, nil, []byte(feedlink))
feedSubTgAdd(feedlink, msg, isDelete, userid)
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, word+"\n"+params.NotFound))
}
}
func feedSubTgAdd(feedlink string, msg *tgbotapi.Message, isDelete bool, userid int64) {
url := params.FeedSubs + GetMD5Hash(feedlink)
log.Println("feedSubTgAdd", url)
body := httputils.HttpGet(url, nil)
users := make(map[int64]bool)
json.Unmarshal(body, &users)
delete(users, userid)
if !isDelete {
users[userid] = true
}
log.Println("feedSubTgAdd users ", users)
//user subs
usersub(params.Feed+GetMD5Hash(feedlink), userid, isDelete)
data, err := json.Marshal(users)
if err == nil {
log.Println("feedSubTgAdd data ", string(data))
result := httputils.HttpPut(url, nil, data)
if result == true {
if isDelete {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 Removed: "+feedlink+"\n\n"))
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, feedlink+" 👍\n\n"+
params.Psst))
}
}
}
}
func usersub(url string, userid int64, isDelete bool) map[string]bool {
suburl := params.UserSubs + strconv.FormatInt(userid, 10)
bodysub := httputils.HttpGet(suburl, nil)
subs := make(map[string]bool)
json.Unmarshal(bodysub, &subs)
delete(subs, url)
if !isDelete {
subs[url] = true
}
if url == "" {
return subs
}
bsubs, _ := json.Marshal(subs)
httputils.HttpPut(suburl, nil, bsubs)
return subs
}
func GetMD5Hash(text string) string {
hash := md5.Sum([]byte(strings.TrimSpace(text)))
return hex.EncodeToString(hash[:])
}
func getFeedLink(link string) (feedlink string) {
var defHeaders = make(map[string]string)
defHeaders["User-Agent"] = "script::recoilme:v1"
defHeaders["Authorization"] = "Client-ID 4191ffe3736cfcb"
b := httputils.HttpGet(link, defHeaders)
if b == nil {
return feedlink
}
fp := gofeed.NewParser()
feed, err := fp.Parse(bytes.NewReader(b))
if err != nil {
return feedlink
}
if len(feed.Items) > 0 {
feedlink = link
}
return feedlink
}
func pubDbGet(domain string) (group vkapi.Group) {
log.Println("pubDbGet")
url := params.Publics + domain
body := httputils.HttpGet(url, nil)
if body != nil {
json.Unmarshal(body, &group)
}
return
}
func pubDbSet(group vkapi.Group) bool {
log.Println("pubDbSet")
domain := group.ScreenName
b, err := json.Marshal(group)
if err != nil {
return false
}
return httputils.HttpPut(params.Publics+domain, nil, b)
}
func pubSubTgAdd(gId int, screenName string, msg *tgbotapi.Message, isDelete bool, userid int64) {
gid := strconv.Itoa(gId)
url := params.Subs + gid
log.Println("pubSubTgAdd", url)
body := httputils.HttpGet(url, nil)
users := make(map[int64]bool)
json.Unmarshal(body, &users)
delete(users, userid)
if !isDelete {
users[userid] = true
}
log.Println("pubSubTgAdd users ", users)
data, err := json.Marshal(users)
if err == nil {
log.Println("pubSubTgAdd data ", string(data))
result := httputils.HttpPut(url, nil, data)
if result == true {
if msg != nil {
if isDelete {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 Removed: https://vk.com/"+screenName+"\n"))
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 https://vk.com/"+screenName+"\n"+
params.Psst))
}
}
usersub(params.PubNames+screenName, userid, isDelete)
}
}
}
func rssExtract(link string) string {
var rss string
var defHeaders = make(map[string]string)
defHeaders["User-Agent"] = "script::recoilme:v1"
defHeaders["Authorization"] = "Client-ID 4191ffe3736cfcb"
b := httputils.HttpGet(link, defHeaders)
if b == nil {
return rss
}
doc, err := html.Parse(bytes.NewReader(b)) //strings.NewReader(s))
if err != nil {
log.Fatal(err)
}
var f func(*html.Node)
f = func(n *html.Node) {
if n.Type == html.ElementNode && n.Data == "link" {
var isRss bool
for _, a := range n.Attr {
if a.Key == "type" {
if a.Val == "application/rss+xml" || a.Val == "application/atom+xml" {
isRss = true
break
}
}
}
if isRss {
for _, a := range n.Attr {
if a.Key == "href" {
rss = a.Val
break
}
}
}
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
f(c)
if rss != "" {
break
}
}
}
f(doc)
return rss
}
| {
var err error
tlgrmtoken, err := ioutil.ReadFile(params.Telefeedfile)
catch(err)
tgtoken := strings.Replace(strings.Replace(string(tlgrmtoken), "\n", "", -1), "\r", "", -1)
bot, err = tgbotapi.NewBotAPI(tgtoken)
catch(err)
bot.Debug = false
log.Printf("Authorized on account %s", bot.Self.UserName)
u := tgbotapi.NewUpdate(0)
u.Timeout = 60
updates, err := bot.GetUpdatesChan(u)
for update := range updates {
if update.CallbackQuery != nil && update.CallbackQuery.Message != nil {
data := update.CallbackQuery.Data
msgCancel := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"ZzZzZzzz ...")
if strings.HasPrefix(data, "delete"+params.Feed) {
feed := strings.Replace(data, "delete"+params.Feed, "", -1)
b := httputils.HttpGet(params.Feeds+feed, nil)
if b != nil {
url := string(b)
log.Println("delete " + url)
pubFind(update.CallbackQuery.Message, "delete "+url, int64(update.CallbackQuery.From.ID))
bot.Send(msgCancel)
}
} else {
if strings.HasPrefix(data, "delete"+params.PubNames) {
screenname := strings.Replace(data, "delete"+params.PubNames, "", -1)
pubFind(update.CallbackQuery.Message, "delete https://vk.com/"+screenname, int64(update.CallbackQuery.From.ID))
log.Println("update.CallbackQuery.From.ID", update.CallbackQuery.From.ID)
bot.Send(msgCancel)
} else {
if strings.Contains(data, "_!_") {
parts := strings.Split(data, "_!_")
cmd := parts[0]
cmdval := parts[1]
switch cmd {
case "channel":
switch cmdval {
case "new":
msgNewCh := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
params.NewChannel)
bot.Send(msgNewCh)
case "delete":
msgDel := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"delete")
if len(parts) > 2 {
chanName := parts[2]
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
subs := usersub("", chat.ID, true)
if len(subs) > 0 {
msgDel.Text = "Channel @" + chanName + " have subscriptions\nDelete urls before delete channel!"
} else {
addChannel(update.CallbackQuery.Message.Chat.ID, chat, true)
msgDel.Text = "deleted @" + chanName
}
} else {
msgDel.Text = "@" + chanName + " not found("
}
}
bot.Send(msgDel)
case "list":
msgList := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"list\n")
msgList.DisableWebPagePreview = true
if len(parts) > 2 {
chanName := parts[2]
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
subs := usersub("", chat.ID, true)
cmds := subs2cmds(subs)
var txt = strings.Replace(params.SubsHelp, "channelname", chanName, -1) + "\n\nList of urls of @" + chanName + ":\n\n"
for _, v := range cmds {
txt = txt + strings.Replace(v, "delete ", "", -1) + "\n"
}
msgList.Text = txt + "\n"
}
bot.Send(msgList)
}
}
default:
bot.Send(msgCancel)
}
} else {
//unknown cmd
bot.Send(msgCancel)
}
}
}
} else {
if update.Message == nil {
continue
}
switch update.Message.Text {
case "/start":
user := update.Message.From
if userNew(user) {
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.Hello)
m.DisableWebPagePreview = true
bot.Send(m)
} else {
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.SomeErr))
}
case "/top":
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.TopLinks)
m.DisableWebPagePreview = true
bot.Send(m)
case "/rateme":
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.Rate)
m.DisableWebPagePreview = true
bot.Send(m)
case "/help":
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.Help))
case "/donate":
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.Donate))
case "/channels":
var cmds = make(map[string]string)
cmds["channel_!_new"] = "new channel"
url := params.Channels + strconv.FormatInt(update.Message.Chat.ID, 10)
body := httputils.HttpGet(url, nil)
channels := make(map[int64]*tgbotapi.Chat)
json.Unmarshal(body, &channels)
for _, channel := range channels {
cmds["channel_!_delete_!_"+channel.UserName] = "delete @" + channel.UserName
cmds["channel_!_list_!_"+channel.UserName] = "list of urls of @" + channel.UserName
}
msg := tgbotapi.NewMessage(update.Message.Chat.ID, "Instruction: http://telegra.ph/telefeedbot-05-12\n\nYour channels:\n")
msg.DisableWebPagePreview = true
msg.ReplyMarkup = createButtons(cmds)
msg.ReplyToMessageID = update.Message.MessageID
bot.Send(msg)
case "/list":
//botYa.Track(update.Message.From.ID, nil, "list")
//var cmds = make(map[string]string)
//fmt.Printf("fromid:%d: %d\n", update.Message.From.ID, update.Message.Chat.ID)
subs := usersub("", int64(update.Message.From.ID), true)
//var s = "Subscriptions (send 'delete http://..' - for unsubscribe):\n"
cmds := subs2cmds(subs)
if len(cmds) == 0 {
m := tgbotapi.NewMessage(update.Message.Chat.ID, "No feeds..\n\n"+params.Hello)
m.DisableWebPagePreview = true
bot.Send(m)
} else {
msg := tgbotapi.NewMessage(update.Message.Chat.ID, "Subscriptions (press button bellow for unsubscribe):\n")
msg.ReplyMarkup = createButtons(cmds)
msg.ReplyToMessageID = update.Message.MessageID
bot.Send(msg)
}
case "/subs":
subs := usersub("", int64(update.Message.From.ID), true)
cmds := subs2cmds(subs)
msgList := tgbotapi.NewMessage(update.Message.Chat.ID, "")
var txt = "List of urls:\nSend delete url(s) for unsubscribe\n\n"
for _, v := range cmds {
txt = txt + strings.Replace(v, "delete ", "", -1) + "\n"
}
msgList.Text = txt + "\n"
bot.Send(msgList)
default:
msg := update.Message.Text
pubFind(update.Message, msg, int64(update.Message.From.ID))
}
}
}
} | identifier_body |
tgsrv.go | package main
import (
"bytes"
"crypto/md5"
"encoding/hex"
"encoding/json"
"io/ioutil"
"log"
"net/url"
"strconv"
"strings"
"sort"
"golang.org/x/net/html"
"golang.org/x/net/publicsuffix"
"github.com/azhinu/Telefeed/httputils"
"github.com/azhinu/Telefeed/params"
"github.com/azhinu/Telefeed/vkapi"
"github.com/mmcdole/gofeed"
"github.com/go-telegram-bot-api/telegram-bot-api"
)
var (
bot *tgbotapi.BotAPI
)
func catch(e error) {
if e != nil {
log.Println(e.Error)
}
}
func main() {
var err error
tlgrmtoken, err := ioutil.ReadFile(params.Telefeedfile)
catch(err)
tgtoken := strings.Replace(strings.Replace(string(tlgrmtoken), "\n", "", -1), "\r", "", -1)
bot, err = tgbotapi.NewBotAPI(tgtoken)
catch(err)
bot.Debug = false
log.Printf("Authorized on account %s", bot.Self.UserName)
u := tgbotapi.NewUpdate(0)
u.Timeout = 60
updates, err := bot.GetUpdatesChan(u)
for update := range updates {
if update.CallbackQuery != nil && update.CallbackQuery.Message != nil {
data := update.CallbackQuery.Data
msgCancel := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"ZzZzZzzz ...")
if strings.HasPrefix(data, "delete"+params.Feed) {
feed := strings.Replace(data, "delete"+params.Feed, "", -1)
b := httputils.HttpGet(params.Feeds+feed, nil)
if b != nil {
url := string(b)
log.Println("delete " + url)
pubFind(update.CallbackQuery.Message, "delete "+url, int64(update.CallbackQuery.From.ID))
bot.Send(msgCancel)
}
} else {
if strings.HasPrefix(data, "delete"+params.PubNames) {
screenname := strings.Replace(data, "delete"+params.PubNames, "", -1)
pubFind(update.CallbackQuery.Message, "delete https://vk.com/"+screenname, int64(update.CallbackQuery.From.ID))
log.Println("update.CallbackQuery.From.ID", update.CallbackQuery.From.ID)
bot.Send(msgCancel)
} else {
if strings.Contains(data, "_!_") {
parts := strings.Split(data, "_!_")
cmd := parts[0]
cmdval := parts[1]
switch cmd {
case "channel":
switch cmdval {
case "new":
msgNewCh := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
params.NewChannel)
bot.Send(msgNewCh)
case "delete":
msgDel := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"delete")
if len(parts) > 2 {
chanName := parts[2]
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
subs := usersub("", chat.ID, true)
if len(subs) > 0 {
msgDel.Text = "Channel @" + chanName + " have subscriptions\nDelete urls before delete channel!"
} else {
addChannel(update.CallbackQuery.Message.Chat.ID, chat, true)
msgDel.Text = "deleted @" + chanName
}
} else {
msgDel.Text = "@" + chanName + " not found("
}
}
bot.Send(msgDel)
case "list":
msgList := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"list\n")
msgList.DisableWebPagePreview = true
if len(parts) > 2 {
chanName := parts[2]
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
subs := usersub("", chat.ID, true)
cmds := subs2cmds(subs)
var txt = strings.Replace(params.SubsHelp, "channelname", chanName, -1) + "\n\nList of urls of @" + chanName + ":\n\n"
for _, v := range cmds {
txt = txt + strings.Replace(v, "delete ", "", -1) + "\n"
}
msgList.Text = txt + "\n"
}
bot.Send(msgList)
}
}
default:
bot.Send(msgCancel)
}
} else {
//unknown cmd
bot.Send(msgCancel)
}
}
}
} else {
if update.Message == nil {
continue
}
switch update.Message.Text {
case "/start":
user := update.Message.From
if userNew(user) {
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.Hello)
m.DisableWebPagePreview = true
bot.Send(m)
} else {
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.SomeErr))
}
case "/top":
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.TopLinks)
m.DisableWebPagePreview = true
bot.Send(m)
case "/rateme":
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.Rate)
m.DisableWebPagePreview = true
bot.Send(m)
case "/help":
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.Help))
case "/donate":
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.Donate))
case "/channels":
var cmds = make(map[string]string)
cmds["channel_!_new"] = "new channel"
url := params.Channels + strconv.FormatInt(update.Message.Chat.ID, 10)
body := httputils.HttpGet(url, nil)
channels := make(map[int64]*tgbotapi.Chat) | json.Unmarshal(body, &channels)
for _, channel := range channels {
cmds["channel_!_delete_!_"+channel.UserName] = "delete @" + channel.UserName
cmds["channel_!_list_!_"+channel.UserName] = "list of urls of @" + channel.UserName
}
msg := tgbotapi.NewMessage(update.Message.Chat.ID, "Instruction: http://telegra.ph/telefeedbot-05-12\n\nYour channels:\n")
msg.DisableWebPagePreview = true
msg.ReplyMarkup = createButtons(cmds)
msg.ReplyToMessageID = update.Message.MessageID
bot.Send(msg)
case "/list":
//botYa.Track(update.Message.From.ID, nil, "list")
//var cmds = make(map[string]string)
//fmt.Printf("fromid:%d: %d\n", update.Message.From.ID, update.Message.Chat.ID)
subs := usersub("", int64(update.Message.From.ID), true)
//var s = "Subscriptions (send 'delete http://..' - for unsubscribe):\n"
cmds := subs2cmds(subs)
if len(cmds) == 0 {
m := tgbotapi.NewMessage(update.Message.Chat.ID, "No feeds..\n\n"+params.Hello)
m.DisableWebPagePreview = true
bot.Send(m)
} else {
msg := tgbotapi.NewMessage(update.Message.Chat.ID, "Subscriptions (press button bellow for unsubscribe):\n")
msg.ReplyMarkup = createButtons(cmds)
msg.ReplyToMessageID = update.Message.MessageID
bot.Send(msg)
}
case "/subs":
subs := usersub("", int64(update.Message.From.ID), true)
cmds := subs2cmds(subs)
msgList := tgbotapi.NewMessage(update.Message.Chat.ID, "")
var txt = "List of urls:\nSend delete url(s) for unsubscribe\n\n"
for _, v := range cmds {
txt = txt + strings.Replace(v, "delete ", "", -1) + "\n"
}
msgList.Text = txt + "\n"
bot.Send(msgList)
default:
msg := update.Message.Text
pubFind(update.Message, msg, int64(update.Message.From.ID))
}
}
}
}
func subs2cmds(subs map[string]bool) map[string]string {
var cmds = make(map[string]string)
for k, _ := range subs {
log.Println(k)
if strings.Contains(k, params.PubNames) {
cmd := "delete https://vk.com/" + strings.Replace(k, params.PubNames, "", -1)
key := "delete" + k
cmds[key] = cmd
}
if strings.Contains(k, params.Feed) {
b := httputils.HttpGet(params.Api+k, nil)
if b != nil {
cmd := "delete " + string(b)
key := "delete" + k
cmds[key] = cmd
}
}
}
log.Println("cmds:", cmds)
return cmds
}
func createButtons(buttonsCmds map[string]string) tgbotapi.InlineKeyboardMarkup {
var buttons [][]tgbotapi.InlineKeyboardButton
var keys []string
for k := range buttonsCmds {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
choice := buttonsCmds[k]
cleanedChoice := strings.TrimSpace(choice)
cleanedChoice = strings.Replace(cleanedChoice, "\n", "", -1)
button := tgbotapi.NewInlineKeyboardButtonData(cleanedChoice, k)
buttons = append(buttons, tgbotapi.NewInlineKeyboardRow(button))
}
buttonCancel := tgbotapi.NewInlineKeyboardButtonData("Cancel", "cancel")
buttons = append(buttons, tgbotapi.NewInlineKeyboardRow(buttonCancel))
buttonsRow := tgbotapi.NewInlineKeyboardMarkup(buttons...)
return buttonsRow
}
func userNew(user *tgbotapi.User) bool {
urlUsr := params.Users + strconv.Itoa(user.ID)
log.Println("userNew", urlUsr)
b, _ := json.Marshal(user)
httputils.HttpPut(params.UserName+user.UserName, nil, b)
res := httputils.HttpPut(urlUsr, nil, b)
//telefeedbot
if user.ID > 0 {
pubSubTgAdd(146445941, "telefeedbot", nil, false, int64(user.ID))
}
return res
}
func channelNew(chat *tgbotapi.Chat) bool {
url := params.Users + strconv.FormatInt(chat.ID, 10)
log.Println("channelNew", url)
b, _ := json.Marshal(chat)
httputils.HttpPut(params.UserName+chat.UserName, nil, b)
return httputils.HttpPut(url, nil, b)
}
func pubFind(msg *tgbotapi.Message, txt string, userid int64) {
log.Println("pubFind")
var delete = false
var tmp = strings.Replace(txt, "\n", " ", -1)
tmp = strings.Replace(tmp, "\r", "", -1)
tmp = strings.TrimSpace(tmp)
words := strings.Split(tmp, " ")
for i := range words {
var word = strings.TrimSpace(words[i])
if word == "delete" || word == "Delete" {
delete = true
continue
}
if strings.HasPrefix(word, "@") {
chanName := strings.Replace(word, "@", "", -1)
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
userChannelsUrl := params.Channels + strconv.FormatInt(userid, 10)
userChannelsbody := httputils.HttpGet(userChannelsUrl, nil)
userChannels := make(map[int64]*tgbotapi.Chat)
json.Unmarshal(userChannelsbody, &userChannels)
if userChannels[chat.ID] != nil {
userid = chat.ID
} else {
bot.Send(tgbotapi.NewMessage(userid, chanName+" not yours"))
}
}
continue
}
if strings.HasPrefix(word, "http") == false {
//default sheme is https
word = "https://" + word
}
urls, err := url.Parse(word)
if err != nil {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Rss feed on domain:'"+word+"'\n"+params.NotFound+params.Example))
return
}
mainDomain, _ := publicsuffix.EffectiveTLDPlusOne(urls.Host)
switch mainDomain {
case "t.me":
parts := strings.Split(urls.Path, "/")
if len(parts) > 1 {
channelName := "@" + parts[len(parts)-1]
m := tgbotapi.NewMessageToChannel(channelName, "Ok")
m.DisableWebPagePreview = true
reply, err := bot.Send(m)
if err != nil {
s := err.Error()
if strings.Contains(s, "orbidden") {
m := tgbotapi.NewMessage(msg.Chat.ID, "Add @telefeedbot as admin 2 channel: "+channelName)
bot.Send(m)
} else {
m := tgbotapi.NewMessage(msg.Chat.ID, s)
bot.Send(m)
}
} else {
channel := reply.Chat
addChannel(msg.Chat.ID, channel, false)
}
}
case "twitter.com":
parts := strings.Split(urls.Path, "/")
for _, part := range parts {
if part != "" {
findFeed("https://twitrss.me/twitter_user_to_rss/?user="+part, msg, delete, userid)
}
}
case "instagram.com":
parts := strings.Split(urls.Path, "/")
for _, part := range parts {
if part != "" {
findFeed("https://web.stagram.com/rss/n/"+part, msg, delete, userid)
}
}
case "vk.com":
parts := strings.Split(urls.Path, "/")
for j := range parts {
if parts[j] != "" {
domain := parts[j]
log.Println(domain)
groupDb := pubDbGet(domain)
if groupDb.Gid == 0 {
// public not found
groups := vkapi.GroupsGetById(domain)
if len(groups) > 0 {
// we have group
groupVk := groups[0]
// save group to DB
if pubDbSet(groupVk) {
// new group set
pubSubTgAdd(groupVk.Gid, groupVk.ScreenName, msg, delete, userid)
} else {
// group not set
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Error create domain:'"+domain+"'"))
}
} else {
// group not found
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Error vk domain:'"+domain+"'"+" not found"))
}
} else {
// public exists
pubSubTgAdd(groupDb.Gid, groupDb.ScreenName, msg, delete, userid)
}
}
}
default:
findFeed(word, msg, delete, userid)
}
}
}
func addChannel(userId int64, channel *tgbotapi.Chat, isDelete bool) {
if channel == nil {
return
}
url := params.Channels + strconv.FormatInt(userId, 10)
body := httputils.HttpGet(url, nil)
channels := make(map[int64]*tgbotapi.Chat)
json.Unmarshal(body, &channels)
channels[channel.ID] = channel
delete(channels, channel.ID)
if !isDelete {
channels[channel.ID] = channel
}
log.Println("channels ", channels)
data, err := json.Marshal(channels)
if err == nil {
result := httputils.HttpPut(url, nil, data)
if result == true {
if isDelete {
bot.Send(tgbotapi.NewMessage(userId, "👍 Removed: "+channel.UserName+"\n\n"))
} else {
//add channel as User
if channelNew(channel) {
bot.Send(tgbotapi.NewMessage(userId, channel.UserName+" 👍\n\nUse /channels for list of channels\n\nSend @"+
channel.UserName+" http://url for add url 2 channel"))
}
}
}
}
}
func findFeed(word string, msg *tgbotapi.Message, isDelete bool, userid int64) {
log.Println("word", word)
var feedlink = getFeedLink(word)
if feedlink == "" {
log.Println("feedlink", feedlink)
rss := rssExtract(word)
if rss != "" {
log.Println("rss", rss)
feedlink = getFeedLink(rss)
log.Println("feedlink", feedlink)
}
}
if feedlink != "" {
feedkey := GetMD5Hash(feedlink)
//create feed or overwrite
httputils.HttpPut(params.Feeds+feedkey, nil, []byte(feedlink))
feedSubTgAdd(feedlink, msg, isDelete, userid)
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, word+"\n"+params.NotFound))
}
}
func feedSubTgAdd(feedlink string, msg *tgbotapi.Message, isDelete bool, userid int64) {
url := params.FeedSubs + GetMD5Hash(feedlink)
log.Println("feedSubTgAdd", url)
body := httputils.HttpGet(url, nil)
users := make(map[int64]bool)
json.Unmarshal(body, &users)
delete(users, userid)
if !isDelete {
users[userid] = true
}
log.Println("feedSubTgAdd users ", users)
//user subs
usersub(params.Feed+GetMD5Hash(feedlink), userid, isDelete)
data, err := json.Marshal(users)
if err == nil {
log.Println("feedSubTgAdd data ", string(data))
result := httputils.HttpPut(url, nil, data)
if result == true {
if isDelete {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 Removed: "+feedlink+"\n\n"))
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, feedlink+" 👍\n\n"+
params.Psst))
}
}
}
}
func usersub(url string, userid int64, isDelete bool) map[string]bool {
suburl := params.UserSubs + strconv.FormatInt(userid, 10)
bodysub := httputils.HttpGet(suburl, nil)
subs := make(map[string]bool)
json.Unmarshal(bodysub, &subs)
delete(subs, url)
if !isDelete {
subs[url] = true
}
if url == "" {
return subs
}
bsubs, _ := json.Marshal(subs)
httputils.HttpPut(suburl, nil, bsubs)
return subs
}
func GetMD5Hash(text string) string {
hash := md5.Sum([]byte(strings.TrimSpace(text)))
return hex.EncodeToString(hash[:])
}
func getFeedLink(link string) (feedlink string) {
var defHeaders = make(map[string]string)
defHeaders["User-Agent"] = "script::recoilme:v1"
defHeaders["Authorization"] = "Client-ID 4191ffe3736cfcb"
b := httputils.HttpGet(link, defHeaders)
if b == nil {
return feedlink
}
fp := gofeed.NewParser()
feed, err := fp.Parse(bytes.NewReader(b))
if err != nil {
return feedlink
}
if len(feed.Items) > 0 {
feedlink = link
}
return feedlink
}
func pubDbGet(domain string) (group vkapi.Group) {
log.Println("pubDbGet")
url := params.Publics + domain
body := httputils.HttpGet(url, nil)
if body != nil {
json.Unmarshal(body, &group)
}
return
}
func pubDbSet(group vkapi.Group) bool {
log.Println("pubDbSet")
domain := group.ScreenName
b, err := json.Marshal(group)
if err != nil {
return false
}
return httputils.HttpPut(params.Publics+domain, nil, b)
}
func pubSubTgAdd(gId int, screenName string, msg *tgbotapi.Message, isDelete bool, userid int64) {
gid := strconv.Itoa(gId)
url := params.Subs + gid
log.Println("pubSubTgAdd", url)
body := httputils.HttpGet(url, nil)
users := make(map[int64]bool)
json.Unmarshal(body, &users)
delete(users, userid)
if !isDelete {
users[userid] = true
}
log.Println("pubSubTgAdd users ", users)
data, err := json.Marshal(users)
if err == nil {
log.Println("pubSubTgAdd data ", string(data))
result := httputils.HttpPut(url, nil, data)
if result == true {
if msg != nil {
if isDelete {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 Removed: https://vk.com/"+screenName+"\n"))
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 https://vk.com/"+screenName+"\n"+
params.Psst))
}
}
usersub(params.PubNames+screenName, userid, isDelete)
}
}
}
func rssExtract(link string) string {
var rss string
var defHeaders = make(map[string]string)
defHeaders["User-Agent"] = "script::recoilme:v1"
defHeaders["Authorization"] = "Client-ID 4191ffe3736cfcb"
b := httputils.HttpGet(link, defHeaders)
if b == nil {
return rss
}
doc, err := html.Parse(bytes.NewReader(b)) //strings.NewReader(s))
if err != nil {
log.Fatal(err)
}
var f func(*html.Node)
f = func(n *html.Node) {
if n.Type == html.ElementNode && n.Data == "link" {
var isRss bool
for _, a := range n.Attr {
if a.Key == "type" {
if a.Val == "application/rss+xml" || a.Val == "application/atom+xml" {
isRss = true
break
}
}
}
if isRss {
for _, a := range n.Attr {
if a.Key == "href" {
rss = a.Val
break
}
}
}
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
f(c)
if rss != "" {
break
}
}
}
f(doc)
return rss
} | random_line_split | |
tgsrv.go | package main
import (
"bytes"
"crypto/md5"
"encoding/hex"
"encoding/json"
"io/ioutil"
"log"
"net/url"
"strconv"
"strings"
"sort"
"golang.org/x/net/html"
"golang.org/x/net/publicsuffix"
"github.com/azhinu/Telefeed/httputils"
"github.com/azhinu/Telefeed/params"
"github.com/azhinu/Telefeed/vkapi"
"github.com/mmcdole/gofeed"
"github.com/go-telegram-bot-api/telegram-bot-api"
)
var (
bot *tgbotapi.BotAPI
)
func catch(e error) {
if e != nil {
log.Println(e.Error)
}
}
func main() {
var err error
tlgrmtoken, err := ioutil.ReadFile(params.Telefeedfile)
catch(err)
tgtoken := strings.Replace(strings.Replace(string(tlgrmtoken), "\n", "", -1), "\r", "", -1)
bot, err = tgbotapi.NewBotAPI(tgtoken)
catch(err)
bot.Debug = false
log.Printf("Authorized on account %s", bot.Self.UserName)
u := tgbotapi.NewUpdate(0)
u.Timeout = 60
updates, err := bot.GetUpdatesChan(u)
for update := range updates {
if update.CallbackQuery != nil && update.CallbackQuery.Message != nil {
data := update.CallbackQuery.Data
msgCancel := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"ZzZzZzzz ...")
if strings.HasPrefix(data, "delete"+params.Feed) {
feed := strings.Replace(data, "delete"+params.Feed, "", -1)
b := httputils.HttpGet(params.Feeds+feed, nil)
if b != nil {
url := string(b)
log.Println("delete " + url)
pubFind(update.CallbackQuery.Message, "delete "+url, int64(update.CallbackQuery.From.ID))
bot.Send(msgCancel)
}
} else {
if strings.HasPrefix(data, "delete"+params.PubNames) {
screenname := strings.Replace(data, "delete"+params.PubNames, "", -1)
pubFind(update.CallbackQuery.Message, "delete https://vk.com/"+screenname, int64(update.CallbackQuery.From.ID))
log.Println("update.CallbackQuery.From.ID", update.CallbackQuery.From.ID)
bot.Send(msgCancel)
} else {
if strings.Contains(data, "_!_") {
parts := strings.Split(data, "_!_")
cmd := parts[0]
cmdval := parts[1]
switch cmd {
case "channel":
switch cmdval {
case "new":
msgNewCh := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
params.NewChannel)
bot.Send(msgNewCh)
case "delete":
msgDel := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"delete")
if len(parts) > 2 {
chanName := parts[2]
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
subs := usersub("", chat.ID, true)
if len(subs) > 0 {
msgDel.Text = "Channel @" + chanName + " have subscriptions\nDelete urls before delete channel!"
} else {
addChannel(update.CallbackQuery.Message.Chat.ID, chat, true)
msgDel.Text = "deleted @" + chanName
}
} else {
msgDel.Text = "@" + chanName + " not found("
}
}
bot.Send(msgDel)
case "list":
msgList := tgbotapi.NewEditMessageText(update.CallbackQuery.Message.Chat.ID,
update.CallbackQuery.Message.MessageID,
"list\n")
msgList.DisableWebPagePreview = true
if len(parts) > 2 {
chanName := parts[2]
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
subs := usersub("", chat.ID, true)
cmds := subs2cmds(subs)
var txt = strings.Replace(params.SubsHelp, "channelname", chanName, -1) + "\n\nList of urls of @" + chanName + ":\n\n"
for _, v := range cmds {
txt = txt + strings.Replace(v, "delete ", "", -1) + "\n"
}
msgList.Text = txt + "\n"
}
bot.Send(msgList)
}
}
default:
bot.Send(msgCancel)
}
} else {
//unknown cmd
bot.Send(msgCancel)
}
}
}
} else {
if update.Message == nil {
continue
}
switch update.Message.Text {
case "/start":
user := update.Message.From
if userNew(user) {
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.Hello)
m.DisableWebPagePreview = true
bot.Send(m)
} else {
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.SomeErr))
}
case "/top":
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.TopLinks)
m.DisableWebPagePreview = true
bot.Send(m)
case "/rateme":
m := tgbotapi.NewMessage(update.Message.Chat.ID, params.Rate)
m.DisableWebPagePreview = true
bot.Send(m)
case "/help":
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.Help))
case "/donate":
bot.Send(tgbotapi.NewMessage(update.Message.Chat.ID, params.Donate))
case "/channels":
var cmds = make(map[string]string)
cmds["channel_!_new"] = "new channel"
url := params.Channels + strconv.FormatInt(update.Message.Chat.ID, 10)
body := httputils.HttpGet(url, nil)
channels := make(map[int64]*tgbotapi.Chat)
json.Unmarshal(body, &channels)
for _, channel := range channels {
cmds["channel_!_delete_!_"+channel.UserName] = "delete @" + channel.UserName
cmds["channel_!_list_!_"+channel.UserName] = "list of urls of @" + channel.UserName
}
msg := tgbotapi.NewMessage(update.Message.Chat.ID, "Instruction: http://telegra.ph/telefeedbot-05-12\n\nYour channels:\n")
msg.DisableWebPagePreview = true
msg.ReplyMarkup = createButtons(cmds)
msg.ReplyToMessageID = update.Message.MessageID
bot.Send(msg)
case "/list":
//botYa.Track(update.Message.From.ID, nil, "list")
//var cmds = make(map[string]string)
//fmt.Printf("fromid:%d: %d\n", update.Message.From.ID, update.Message.Chat.ID)
subs := usersub("", int64(update.Message.From.ID), true)
//var s = "Subscriptions (send 'delete http://..' - for unsubscribe):\n"
cmds := subs2cmds(subs)
if len(cmds) == 0 {
m := tgbotapi.NewMessage(update.Message.Chat.ID, "No feeds..\n\n"+params.Hello)
m.DisableWebPagePreview = true
bot.Send(m)
} else {
msg := tgbotapi.NewMessage(update.Message.Chat.ID, "Subscriptions (press button bellow for unsubscribe):\n")
msg.ReplyMarkup = createButtons(cmds)
msg.ReplyToMessageID = update.Message.MessageID
bot.Send(msg)
}
case "/subs":
subs := usersub("", int64(update.Message.From.ID), true)
cmds := subs2cmds(subs)
msgList := tgbotapi.NewMessage(update.Message.Chat.ID, "")
var txt = "List of urls:\nSend delete url(s) for unsubscribe\n\n"
for _, v := range cmds {
txt = txt + strings.Replace(v, "delete ", "", -1) + "\n"
}
msgList.Text = txt + "\n"
bot.Send(msgList)
default:
msg := update.Message.Text
pubFind(update.Message, msg, int64(update.Message.From.ID))
}
}
}
}
func subs2cmds(subs map[string]bool) map[string]string {
var cmds = make(map[string]string)
for k, _ := range subs {
log.Println(k)
if strings.Contains(k, params.PubNames) {
cmd := "delete https://vk.com/" + strings.Replace(k, params.PubNames, "", -1)
key := "delete" + k
cmds[key] = cmd
}
if strings.Contains(k, params.Feed) {
b := httputils.HttpGet(params.Api+k, nil)
if b != nil {
cmd := "delete " + string(b)
key := "delete" + k
cmds[key] = cmd
}
}
}
log.Println("cmds:", cmds)
return cmds
}
func createButtons(buttonsCmds map[string]string) tgbotapi.InlineKeyboardMarkup {
var buttons [][]tgbotapi.InlineKeyboardButton
var keys []string
for k := range buttonsCmds {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
choice := buttonsCmds[k]
cleanedChoice := strings.TrimSpace(choice)
cleanedChoice = strings.Replace(cleanedChoice, "\n", "", -1)
button := tgbotapi.NewInlineKeyboardButtonData(cleanedChoice, k)
buttons = append(buttons, tgbotapi.NewInlineKeyboardRow(button))
}
buttonCancel := tgbotapi.NewInlineKeyboardButtonData("Cancel", "cancel")
buttons = append(buttons, tgbotapi.NewInlineKeyboardRow(buttonCancel))
buttonsRow := tgbotapi.NewInlineKeyboardMarkup(buttons...)
return buttonsRow
}
func userNew(user *tgbotapi.User) bool {
urlUsr := params.Users + strconv.Itoa(user.ID)
log.Println("userNew", urlUsr)
b, _ := json.Marshal(user)
httputils.HttpPut(params.UserName+user.UserName, nil, b)
res := httputils.HttpPut(urlUsr, nil, b)
//telefeedbot
if user.ID > 0 {
pubSubTgAdd(146445941, "telefeedbot", nil, false, int64(user.ID))
}
return res
}
func channelNew(chat *tgbotapi.Chat) bool {
url := params.Users + strconv.FormatInt(chat.ID, 10)
log.Println("channelNew", url)
b, _ := json.Marshal(chat)
httputils.HttpPut(params.UserName+chat.UserName, nil, b)
return httputils.HttpPut(url, nil, b)
}
func pubFind(msg *tgbotapi.Message, txt string, userid int64) {
log.Println("pubFind")
var delete = false
var tmp = strings.Replace(txt, "\n", " ", -1)
tmp = strings.Replace(tmp, "\r", "", -1)
tmp = strings.TrimSpace(tmp)
words := strings.Split(tmp, " ")
for i := range words {
var word = strings.TrimSpace(words[i])
if word == "delete" || word == "Delete" {
delete = true
continue
}
if strings.HasPrefix(word, "@") {
chanName := strings.Replace(word, "@", "", -1)
url := params.UserName + chanName
b := httputils.HttpGet(url, nil)
var chat *tgbotapi.Chat
json.Unmarshal(b, &chat)
if chat != nil {
userChannelsUrl := params.Channels + strconv.FormatInt(userid, 10)
userChannelsbody := httputils.HttpGet(userChannelsUrl, nil)
userChannels := make(map[int64]*tgbotapi.Chat)
json.Unmarshal(userChannelsbody, &userChannels)
if userChannels[chat.ID] != nil {
userid = chat.ID
} else {
bot.Send(tgbotapi.NewMessage(userid, chanName+" not yours"))
}
}
continue
}
if strings.HasPrefix(word, "http") == false {
//default sheme is https
word = "https://" + word
}
urls, err := url.Parse(word)
if err != nil {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Rss feed on domain:'"+word+"'\n"+params.NotFound+params.Example))
return
}
mainDomain, _ := publicsuffix.EffectiveTLDPlusOne(urls.Host)
switch mainDomain {
case "t.me":
parts := strings.Split(urls.Path, "/")
if len(parts) > 1 {
channelName := "@" + parts[len(parts)-1]
m := tgbotapi.NewMessageToChannel(channelName, "Ok")
m.DisableWebPagePreview = true
reply, err := bot.Send(m)
if err != nil {
s := err.Error()
if strings.Contains(s, "orbidden") {
m := tgbotapi.NewMessage(msg.Chat.ID, "Add @telefeedbot as admin 2 channel: "+channelName)
bot.Send(m)
} else {
m := tgbotapi.NewMessage(msg.Chat.ID, s)
bot.Send(m)
}
} else {
channel := reply.Chat
addChannel(msg.Chat.ID, channel, false)
}
}
case "twitter.com":
parts := strings.Split(urls.Path, "/")
for _, part := range parts {
if part != "" {
findFeed("https://twitrss.me/twitter_user_to_rss/?user="+part, msg, delete, userid)
}
}
case "instagram.com":
parts := strings.Split(urls.Path, "/")
for _, part := range parts {
if part != "" {
findFeed("https://web.stagram.com/rss/n/"+part, msg, delete, userid)
}
}
case "vk.com":
parts := strings.Split(urls.Path, "/")
for j := range parts {
if parts[j] != "" {
domain := parts[j]
log.Println(domain)
groupDb := pubDbGet(domain)
if groupDb.Gid == 0 {
// public not found
groups := vkapi.GroupsGetById(domain)
if len(groups) > 0 {
// we have group
groupVk := groups[0]
// save group to DB
if pubDbSet(groupVk) {
// new group set
pubSubTgAdd(groupVk.Gid, groupVk.ScreenName, msg, delete, userid)
} else {
// group not set
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Error create domain:'"+domain+"'"))
}
} else {
// group not found
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "Error vk domain:'"+domain+"'"+" not found"))
}
} else {
// public exists
pubSubTgAdd(groupDb.Gid, groupDb.ScreenName, msg, delete, userid)
}
}
}
default:
findFeed(word, msg, delete, userid)
}
}
}
func addChannel(userId int64, channel *tgbotapi.Chat, isDelete bool) {
if channel == nil {
return
}
url := params.Channels + strconv.FormatInt(userId, 10)
body := httputils.HttpGet(url, nil)
channels := make(map[int64]*tgbotapi.Chat)
json.Unmarshal(body, &channels)
channels[channel.ID] = channel
delete(channels, channel.ID)
if !isDelete {
channels[channel.ID] = channel
}
log.Println("channels ", channels)
data, err := json.Marshal(channels)
if err == nil {
result := httputils.HttpPut(url, nil, data)
if result == true {
if isDelete {
bot.Send(tgbotapi.NewMessage(userId, "👍 Removed: "+channel.UserName+"\n\n"))
} else {
//add channel as User
if channelNew(channel) {
bot.Send(tgbotapi.NewMessage(userId, channel.UserName+" 👍\n\nUse /channels for list of channels\n\nSend @"+
channel.UserName+" http://url for add url 2 channel"))
}
}
}
}
}
func findFeed(word string, msg *tgbotapi.Message, isDelete bool, userid int64) {
log.Println("word", word)
var feedlink = getFeedLink(word)
if feedlink == "" {
log.Println("feedlink", feedlink)
rss := rssExtract(word)
if rss != "" {
log.Println("rss", rss)
feedlink = getFeedLink(rss)
log.Println("feedlink", feedlink)
}
}
if feedlink != "" {
feedkey := GetMD5Hash(feedlink)
//create feed or overwrite
httputils.HttpPut(params.Feeds+feedkey, nil, []byte(feedlink))
feedSubTgAdd(feedlink, msg, isDelete, userid)
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, word+"\n"+params.NotFound))
}
}
func feedSubTgAdd(feedlink string, msg *tgbotapi.Message, isDelete bool, userid int64) {
url := params.FeedSubs + GetMD5Hash(feedlink)
log.Println("feedSubTgAdd", url)
body := httputils.HttpGet(url, nil)
users := make(map[int64]bool)
json.Unmarshal(body, &users)
delete(users, userid)
if !isDelete {
users[userid] = true
}
log.Println("feedSubTgAdd users ", users)
//user subs
usersub(params.Feed+GetMD5Hash(feedlink), userid, isDelete)
data, err := json.Marshal(users)
if err == nil {
log.Println("feedSubTgAdd data ", string(data))
result := httputils.HttpPut(url, nil, data)
if result == true {
if isDelete {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 Removed: "+feedlink+"\n\n"))
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, feedlink+" 👍\n\n"+
params.Psst))
}
}
}
}
func usersub(url string, userid int64, isDelete bool) map[string]bool {
suburl := params.UserSubs + strconv.FormatInt(userid, 10)
bodysub := httputils.HttpGet(suburl, nil)
subs := make(map[string]bool)
json.Unmarshal(bodysub, &subs)
delete(subs, url)
if !isDelete {
subs[url] = true
}
if url == "" {
return subs
}
bsubs, _ := json.Marshal(subs)
httputils.HttpPut(suburl, nil, bsubs)
return subs
}
func GetMD5Hash(t | ) string {
hash := md5.Sum([]byte(strings.TrimSpace(text)))
return hex.EncodeToString(hash[:])
}
func getFeedLink(link string) (feedlink string) {
var defHeaders = make(map[string]string)
defHeaders["User-Agent"] = "script::recoilme:v1"
defHeaders["Authorization"] = "Client-ID 4191ffe3736cfcb"
b := httputils.HttpGet(link, defHeaders)
if b == nil {
return feedlink
}
fp := gofeed.NewParser()
feed, err := fp.Parse(bytes.NewReader(b))
if err != nil {
return feedlink
}
if len(feed.Items) > 0 {
feedlink = link
}
return feedlink
}
func pubDbGet(domain string) (group vkapi.Group) {
log.Println("pubDbGet")
url := params.Publics + domain
body := httputils.HttpGet(url, nil)
if body != nil {
json.Unmarshal(body, &group)
}
return
}
func pubDbSet(group vkapi.Group) bool {
log.Println("pubDbSet")
domain := group.ScreenName
b, err := json.Marshal(group)
if err != nil {
return false
}
return httputils.HttpPut(params.Publics+domain, nil, b)
}
func pubSubTgAdd(gId int, screenName string, msg *tgbotapi.Message, isDelete bool, userid int64) {
gid := strconv.Itoa(gId)
url := params.Subs + gid
log.Println("pubSubTgAdd", url)
body := httputils.HttpGet(url, nil)
users := make(map[int64]bool)
json.Unmarshal(body, &users)
delete(users, userid)
if !isDelete {
users[userid] = true
}
log.Println("pubSubTgAdd users ", users)
data, err := json.Marshal(users)
if err == nil {
log.Println("pubSubTgAdd data ", string(data))
result := httputils.HttpPut(url, nil, data)
if result == true {
if msg != nil {
if isDelete {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 Removed: https://vk.com/"+screenName+"\n"))
} else {
bot.Send(tgbotapi.NewMessage(msg.Chat.ID, "👍 https://vk.com/"+screenName+"\n"+
params.Psst))
}
}
usersub(params.PubNames+screenName, userid, isDelete)
}
}
}
func rssExtract(link string) string {
var rss string
var defHeaders = make(map[string]string)
defHeaders["User-Agent"] = "script::recoilme:v1"
defHeaders["Authorization"] = "Client-ID 4191ffe3736cfcb"
b := httputils.HttpGet(link, defHeaders)
if b == nil {
return rss
}
doc, err := html.Parse(bytes.NewReader(b)) //strings.NewReader(s))
if err != nil {
log.Fatal(err)
}
var f func(*html.Node)
f = func(n *html.Node) {
if n.Type == html.ElementNode && n.Data == "link" {
var isRss bool
for _, a := range n.Attr {
if a.Key == "type" {
if a.Val == "application/rss+xml" || a.Val == "application/atom+xml" {
isRss = true
break
}
}
}
if isRss {
for _, a := range n.Attr {
if a.Key == "href" {
rss = a.Val
break
}
}
}
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
f(c)
if rss != "" {
break
}
}
}
f(doc)
return rss
}
| ext string | identifier_name |
cfg.rs | use anyhow::{bail, Context, Result};
use expanduser::expanduser;
use log::{debug, warn};
use std::collections::HashMap;
use std::default::Default;
use std::fmt::Display;
use std::fs::{read_dir, read_to_string};
use std::path::PathBuf;
use yaml_rust::{yaml::Hash, Yaml, YamlLoader};
use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
const CONTROLS_ENHANCED: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`');
use crate::util::*;
/// The main configuration
#[derive(Debug)]
pub struct Config {
/// Authentication settings to use if no host-specific authentication settings specified.
pub auth: Auth,
/// Default host to upload to.
default_host: Option<String>,
/// List of all configured hosts.
hosts: HashMap<String, Host>,
/// Expire the uploaded file after the given amount of time via `at`-scheduled remote job.
///
/// Select files newer than the given duration. Durations can be: seconds (sec, s), minutes
/// (min, m), days (d), weeks (w), months (M) or years (y).
///
/// Mininum time till expiration is a minute.
pub expire: Option<String>,
/// Length of prefix to use unless overwritten in host
pub prefix_length: u8,
/// Compute hash on remote side after upload to verify.
pub verify_via_hash: bool,
}
/// Authentication configuration
#[derive(Debug, Clone)]
pub struct Auth {
/// Try to use auth information for the given host from openssh settings
pub from_openssh: bool,
/// Perform interactive authentication (if private key is set password will be used for private
/// key instead).
pub interactive: bool,
/// Perform authentication via explicit private key
pub private_key_file: Option<String>,
/// Explicit password for private key (unsafe)
pub private_key_file_password: Option<String>,
/// Perform agent authentication
pub use_agent: bool,
}
/// A host entry
#[derive(Debug)]
pub struct Host {
/// Alias under which the host is known
pub alias: String,
/// Overwrite global authentication settings for this host.
pub auth: Auth,
/// Expire the uploaded file after the given amount of time via `at`-scheduled remote job.
///
/// Select files newer than the given duration. Durations can be: seconds (sec, s), minutes
/// (min, m), days (d), weeks (w), months (M) or years (y).
///
/// Mininum time till expiration is a minute.
///
/// Overrides the global setting.
pub expire: Option<String>,
/// In which folder do we store files on the host.
pub folder: PathBuf,
/// In case files on the remote site need to have a special group setting in order to be
/// readable by the webserver.
pub group: Option<String>,
/// Self-explanatory (if not set alias will be used)
pub hostname: Option<String>,
/// If the user REALLY REALLY wants to, a plaintext password can be provided (but it is not
/// recommended!).
pub password: Option<String>,
/// Length of prefix to use
pub prefix_length: u8,
/// url-prefix to apply to file link
pub url: String,
/// The user to sign in, otherwise ssh config will be used.
pub user: Option<String>,
}
fn default_config_directories() -> Vec<&'static str> {
vec!["~/.config/asfa", "/etc/asfa"]
}
pub fn load<T: AsRef<str> + Display>(path: &Option<T>) -> Result<Config> {
let possible_paths: Vec<&str> = match path {
Some(path) => vec![path.as_ref()],
None => default_config_directories(),
};
for path in possible_paths.iter() {
match Config::load(path)? {
None => continue,
Some(cfg) => return Ok(cfg),
}
}
bail!("Did not find valid configuration!");
}
#[allow(dead_code)]
pub fn dummy_host_str() -> &'static str {
include_str!("dummy_host.yml")
}
#[allow(dead_code)]
pub fn dummy_host() -> Host {
Host::from_yaml(
"dummy_host".to_string(),
&YamlLoader::load_from_str(dummy_host_str()).unwrap()[0],
)
.unwrap()
}
impl Default for Config {
fn default() -> Self {
Config {
auth: Auth::default(),
default_host: None,
expire: None,
hosts: HashMap::new(),
prefix_length: 32,
verify_via_hash: true,
}
}
}
impl Config {
pub fn load<T: AsRef<str> + Display>(dir: T) -> Result<Option<Config>> {
let config_dir = match expanduser(dir.as_ref()) {
Ok(p) => p,
Err(e) => {
bail!("Error when expanding path to config file: {}", e);
}
};
let global = {
let mut global = config_dir.clone();
global.push("config.yaml");
global
};
let raw: String = match read_to_string(&global) {
Err(e) => {
debug!(
"Could not read configuration file '{}', error: {}",
global.to_str().unwrap_or("invalid"),
e
);
return Ok(None);
}
Ok(raw) => raw,
};
let mut config = Self::from_yaml(&raw)?;
let hosts_dir = {
let mut hosts_dir = config_dir;
hosts_dir.push("hosts");
hosts_dir
};
if hosts_dir.is_dir() {
for entry in read_dir(&hosts_dir)? {
let possible_host = entry?.path();
match possible_host.extension() {
None => {
continue;
}
Some(ext) => {
if ext != "yaml" {
continue;
}
}
};
let alias = match possible_host.file_stem() {
None => {
warn!(
"Could not extract file stem for: {}",
possible_host.display()
);
continue;
}
Some(alias) => alias
.to_str()
.context("Could not convert host file name to String.")?
.to_string(),
};
if config.hosts.contains_key(&alias) {
bail!("Host {} configured in config.yaml and as host-file.", alias);
};
let host_yaml = YamlLoader::load_from_str(&read_to_string(&possible_host)?)?;
let error = format!("Invalid host-file for host {}", &alias);
let host =
Host::from_yaml_with_config(alias, &host_yaml[0], &config).context(error)?;
config.hosts.insert(host.alias.clone(), host);
}
}
Ok(Some(config))
}
pub fn from_yaml(input: &str) -> Result<Config> {
let documents = match YamlLoader::load_from_str(input) {
Ok(data) => data,
Err(e) => {
bail!("Error while loading config file: {}", e);
}
};
let mut config = Config::default();
let config_yaml = match &documents[0] {
Yaml::Hash(h) => h,
_ => {
bail!("Root object in configuration file is no dictionary!");
}
};
config.prefix_length = {
let length = get_int_from(config_yaml, "prefix_length")?
.cloned()
.unwrap_or(config.prefix_length as i64);
check_prefix_length(length)?;
length as u8
};
config.auth = if let Some(Yaml::Hash(auth)) = config_yaml.get(&yaml_string("auth")) {
match Auth::from_yaml(&auth, None) {
Ok(auth) => auth,
Err(e) => {
bail!("Could not read global authentication settings: {}", e);
}
}
} else {
config.auth
};
config.default_host =
std::env::var("ASFA_HOST")
.ok()
.or(get_string_from(config_yaml, "default_host")?.cloned());
config.expire = get_string_from(config_yaml, "expire")?.cloned();
config.verify_via_hash = get_bool_from(config_yaml, "verify_via_hash")?
.cloned()
.unwrap_or(config.verify_via_hash);
match config_yaml.get(&yaml_string("hosts")) {
Some(Yaml::Hash(dict)) => {
for entry in dict.clone().entries() {
let alias = match entry.key() {
Yaml::String(alias) => alias.to_string(),
invalid => {
warn!("Found invalid alias for host entry: {:?}", invalid);
continue;
}
};
let host_yaml = entry.get();
let host = Host::from_yaml_with_config(alias.clone(), host_yaml, &config)?;
config.hosts.insert(alias, host);
}
}
// Some(Yaml::Array(a)) => a,
Some(_) => {
bail!("'hosts' entry in config file needs to be dictionary mapping host-alias to configuration!");
}
None => {
debug!("No 'hosts'-entry in config file.");
}
};
Ok(config)
}
pub fn get_host<T: AsRef<str>>(&self, alias: Option<T>) -> Result<&Host> {
match alias
.as_ref()
.map(|a| a.as_ref())
.or_else(|| self.default_host.as_deref())
{
None => match self.hosts.len() {
0 => {
bail!("No hosts configured, define some!");
}
1 => Ok(self.hosts.values().next().unwrap()),
_ => {
bail!("More than one host entry defined but neither `default_host` set in config or --config given via command line.");
}
},
Some(alias) => Ok(self
.hosts
.get(alias)
.with_context(|| format!("Did not find alias: {}", alias))?),
}
}
}
impl Host {
fn from_yaml(alias: String, input: &Yaml) -> Result<Host> {
Self::from_yaml_with_config(alias, input, &Config::default())
}
fn from_yaml_with_config(alias: String, input: &Yaml, config: &Config) -> Result<Host> {
log::trace!("Reading host: {}", alias);
if let Yaml::Hash(dict) = input {
let url = get_required(dict, "url", get_string_from)?.clone();
let hostname = get_string_from(dict, "hostname")?.cloned();
let user = get_string_from(dict, "user")?.cloned();
let expire = get_string_from(dict, "expire")?
.cloned()
.or_else(|| config.expire.clone());
let folder = expanduser(get_required(dict, "folder", get_string_from)?)?;
let group = get_string_from(dict, "group")?.cloned();
let auth = match get_dict_from(dict, "auth")? {
Some(auth) => Auth::from_yaml(auth, Some(&config.auth))?,
None => config.auth.clone(),
};
let prefix_length = match get_int_from(dict, "prefix_length")? {
Some(prefix) => {
check_prefix_length(*prefix)?;
*prefix as u8
}
None => config.prefix_length,
};
let password = get_string_from(dict, "password")?.cloned();
Ok(Host {
alias,
auth,
expire,
folder,
group,
hostname,
password,
prefix_length,
url,
user,
})
} else {
bail!("Invalid yaml data for Host-alias '{}'", alias);
}
}
pub fn get_url(&self, file: &str) -> Result<String> {
Ok(format!(
"{}/{}",
&self.url,
utf8_percent_encode(file, CONTROLS_ENHANCED)
))
}
}
impl Auth {
fn from_yaml(dict: &Hash, default: Option<&Auth>) -> Result<Auth, InvalidYamlTypeError> {
let auth_default = Self::default();
let default = default.unwrap_or(&auth_default);
let use_agent = get_bool_from(dict, "use_agent")?
.cloned()
.unwrap_or(default.use_agent);
let interactive = get_bool_from(dict, "interactive")?
.cloned()
.unwrap_or(default.interactive);
let private_key_file = get_string_from(dict, "private_key_file")?
.cloned()
.or_else(|| default.private_key_file.clone());
let private_key_file_password = get_string_from(dict, "private_key_file_password")?
.cloned()
.or_else(|| default.private_key_file_password.clone());
let from_openssh = get_bool_from(dict, "from_openssh")?
.cloned()
.unwrap_or(default.from_openssh);
Ok(Auth {
from_openssh,
interactive,
private_key_file,
private_key_file_password,
use_agent,
})
}
}
impl Default for Auth {
fn default() -> Self {
Auth {
from_openssh: true,
interactive: true,
private_key_file: None,
private_key_file_password: None,
use_agent: true,
}
}
}
fn check_prefix_length(length: i64) -> Result<()> {
if !(8..=128).contains(&length) { | }
Ok(())
}
#[cfg(test)]
mod tests {
use crate::util;
#[test]
fn load_example_config() {
util::test::init().unwrap();
let cfg = crate::cfg::Config::load("example-config/asfa")
.unwrap()
.unwrap();
log::debug!("Loaded: {:?}", cfg);
assert_eq!(&cfg.hosts.len(), &2);
assert_eq!(&cfg.default_host.clone().unwrap(), &"my-remote-site");
assert_eq!(
&cfg.get_host(Some("my-remote-site-2")).unwrap().hostname,
&Some("my-hostname-2.eu".to_string())
);
}
} | bail! {"Prefix needs to be between 8 and 128 characters."}; | random_line_split |
cfg.rs | use anyhow::{bail, Context, Result};
use expanduser::expanduser;
use log::{debug, warn};
use std::collections::HashMap;
use std::default::Default;
use std::fmt::Display;
use std::fs::{read_dir, read_to_string};
use std::path::PathBuf;
use yaml_rust::{yaml::Hash, Yaml, YamlLoader};
use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
const CONTROLS_ENHANCED: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`');
use crate::util::*;
/// The main configuration
#[derive(Debug)]
pub struct Config {
/// Authentication settings to use if no host-specific authentication settings specified.
pub auth: Auth,
/// Default host to upload to.
default_host: Option<String>,
/// List of all configured hosts.
hosts: HashMap<String, Host>,
/// Expire the uploaded file after the given amount of time via `at`-scheduled remote job.
///
/// Select files newer than the given duration. Durations can be: seconds (sec, s), minutes
/// (min, m), days (d), weeks (w), months (M) or years (y).
///
/// Mininum time till expiration is a minute.
pub expire: Option<String>,
/// Length of prefix to use unless overwritten in host
pub prefix_length: u8,
/// Compute hash on remote side after upload to verify.
pub verify_via_hash: bool,
}
/// Authentication configuration
#[derive(Debug, Clone)]
pub struct Auth {
/// Try to use auth information for the given host from openssh settings
pub from_openssh: bool,
/// Perform interactive authentication (if private key is set password will be used for private
/// key instead).
pub interactive: bool,
/// Perform authentication via explicit private key
pub private_key_file: Option<String>,
/// Explicit password for private key (unsafe)
pub private_key_file_password: Option<String>,
/// Perform agent authentication
pub use_agent: bool,
}
/// A host entry
#[derive(Debug)]
pub struct Host {
/// Alias under which the host is known
pub alias: String,
/// Overwrite global authentication settings for this host.
pub auth: Auth,
/// Expire the uploaded file after the given amount of time via `at`-scheduled remote job.
///
/// Select files newer than the given duration. Durations can be: seconds (sec, s), minutes
/// (min, m), days (d), weeks (w), months (M) or years (y).
///
/// Mininum time till expiration is a minute.
///
/// Overrides the global setting.
pub expire: Option<String>,
/// In which folder do we store files on the host.
pub folder: PathBuf,
/// In case files on the remote site need to have a special group setting in order to be
/// readable by the webserver.
pub group: Option<String>,
/// Self-explanatory (if not set alias will be used)
pub hostname: Option<String>,
/// If the user REALLY REALLY wants to, a plaintext password can be provided (but it is not
/// recommended!).
pub password: Option<String>,
/// Length of prefix to use
pub prefix_length: u8,
/// url-prefix to apply to file link
pub url: String,
/// The user to sign in, otherwise ssh config will be used.
pub user: Option<String>,
}
fn default_config_directories() -> Vec<&'static str> {
vec!["~/.config/asfa", "/etc/asfa"]
}
pub fn load<T: AsRef<str> + Display>(path: &Option<T>) -> Result<Config> {
let possible_paths: Vec<&str> = match path {
Some(path) => vec![path.as_ref()],
None => default_config_directories(),
};
for path in possible_paths.iter() {
match Config::load(path)? {
None => continue,
Some(cfg) => return Ok(cfg),
}
}
bail!("Did not find valid configuration!");
}
#[allow(dead_code)]
pub fn dummy_host_str() -> &'static str {
include_str!("dummy_host.yml")
}
#[allow(dead_code)]
pub fn dummy_host() -> Host {
Host::from_yaml(
"dummy_host".to_string(),
&YamlLoader::load_from_str(dummy_host_str()).unwrap()[0],
)
.unwrap()
}
impl Default for Config {
fn default() -> Self |
}
impl Config {
pub fn load<T: AsRef<str> + Display>(dir: T) -> Result<Option<Config>> {
let config_dir = match expanduser(dir.as_ref()) {
Ok(p) => p,
Err(e) => {
bail!("Error when expanding path to config file: {}", e);
}
};
let global = {
let mut global = config_dir.clone();
global.push("config.yaml");
global
};
let raw: String = match read_to_string(&global) {
Err(e) => {
debug!(
"Could not read configuration file '{}', error: {}",
global.to_str().unwrap_or("invalid"),
e
);
return Ok(None);
}
Ok(raw) => raw,
};
let mut config = Self::from_yaml(&raw)?;
let hosts_dir = {
let mut hosts_dir = config_dir;
hosts_dir.push("hosts");
hosts_dir
};
if hosts_dir.is_dir() {
for entry in read_dir(&hosts_dir)? {
let possible_host = entry?.path();
match possible_host.extension() {
None => {
continue;
}
Some(ext) => {
if ext != "yaml" {
continue;
}
}
};
let alias = match possible_host.file_stem() {
None => {
warn!(
"Could not extract file stem for: {}",
possible_host.display()
);
continue;
}
Some(alias) => alias
.to_str()
.context("Could not convert host file name to String.")?
.to_string(),
};
if config.hosts.contains_key(&alias) {
bail!("Host {} configured in config.yaml and as host-file.", alias);
};
let host_yaml = YamlLoader::load_from_str(&read_to_string(&possible_host)?)?;
let error = format!("Invalid host-file for host {}", &alias);
let host =
Host::from_yaml_with_config(alias, &host_yaml[0], &config).context(error)?;
config.hosts.insert(host.alias.clone(), host);
}
}
Ok(Some(config))
}
pub fn from_yaml(input: &str) -> Result<Config> {
let documents = match YamlLoader::load_from_str(input) {
Ok(data) => data,
Err(e) => {
bail!("Error while loading config file: {}", e);
}
};
let mut config = Config::default();
let config_yaml = match &documents[0] {
Yaml::Hash(h) => h,
_ => {
bail!("Root object in configuration file is no dictionary!");
}
};
config.prefix_length = {
let length = get_int_from(config_yaml, "prefix_length")?
.cloned()
.unwrap_or(config.prefix_length as i64);
check_prefix_length(length)?;
length as u8
};
config.auth = if let Some(Yaml::Hash(auth)) = config_yaml.get(&yaml_string("auth")) {
match Auth::from_yaml(&auth, None) {
Ok(auth) => auth,
Err(e) => {
bail!("Could not read global authentication settings: {}", e);
}
}
} else {
config.auth
};
config.default_host =
std::env::var("ASFA_HOST")
.ok()
.or(get_string_from(config_yaml, "default_host")?.cloned());
config.expire = get_string_from(config_yaml, "expire")?.cloned();
config.verify_via_hash = get_bool_from(config_yaml, "verify_via_hash")?
.cloned()
.unwrap_or(config.verify_via_hash);
match config_yaml.get(&yaml_string("hosts")) {
Some(Yaml::Hash(dict)) => {
for entry in dict.clone().entries() {
let alias = match entry.key() {
Yaml::String(alias) => alias.to_string(),
invalid => {
warn!("Found invalid alias for host entry: {:?}", invalid);
continue;
}
};
let host_yaml = entry.get();
let host = Host::from_yaml_with_config(alias.clone(), host_yaml, &config)?;
config.hosts.insert(alias, host);
}
}
// Some(Yaml::Array(a)) => a,
Some(_) => {
bail!("'hosts' entry in config file needs to be dictionary mapping host-alias to configuration!");
}
None => {
debug!("No 'hosts'-entry in config file.");
}
};
Ok(config)
}
pub fn get_host<T: AsRef<str>>(&self, alias: Option<T>) -> Result<&Host> {
match alias
.as_ref()
.map(|a| a.as_ref())
.or_else(|| self.default_host.as_deref())
{
None => match self.hosts.len() {
0 => {
bail!("No hosts configured, define some!");
}
1 => Ok(self.hosts.values().next().unwrap()),
_ => {
bail!("More than one host entry defined but neither `default_host` set in config or --config given via command line.");
}
},
Some(alias) => Ok(self
.hosts
.get(alias)
.with_context(|| format!("Did not find alias: {}", alias))?),
}
}
}
impl Host {
fn from_yaml(alias: String, input: &Yaml) -> Result<Host> {
Self::from_yaml_with_config(alias, input, &Config::default())
}
fn from_yaml_with_config(alias: String, input: &Yaml, config: &Config) -> Result<Host> {
log::trace!("Reading host: {}", alias);
if let Yaml::Hash(dict) = input {
let url = get_required(dict, "url", get_string_from)?.clone();
let hostname = get_string_from(dict, "hostname")?.cloned();
let user = get_string_from(dict, "user")?.cloned();
let expire = get_string_from(dict, "expire")?
.cloned()
.or_else(|| config.expire.clone());
let folder = expanduser(get_required(dict, "folder", get_string_from)?)?;
let group = get_string_from(dict, "group")?.cloned();
let auth = match get_dict_from(dict, "auth")? {
Some(auth) => Auth::from_yaml(auth, Some(&config.auth))?,
None => config.auth.clone(),
};
let prefix_length = match get_int_from(dict, "prefix_length")? {
Some(prefix) => {
check_prefix_length(*prefix)?;
*prefix as u8
}
None => config.prefix_length,
};
let password = get_string_from(dict, "password")?.cloned();
Ok(Host {
alias,
auth,
expire,
folder,
group,
hostname,
password,
prefix_length,
url,
user,
})
} else {
bail!("Invalid yaml data for Host-alias '{}'", alias);
}
}
pub fn get_url(&self, file: &str) -> Result<String> {
Ok(format!(
"{}/{}",
&self.url,
utf8_percent_encode(file, CONTROLS_ENHANCED)
))
}
}
impl Auth {
fn from_yaml(dict: &Hash, default: Option<&Auth>) -> Result<Auth, InvalidYamlTypeError> {
let auth_default = Self::default();
let default = default.unwrap_or(&auth_default);
let use_agent = get_bool_from(dict, "use_agent")?
.cloned()
.unwrap_or(default.use_agent);
let interactive = get_bool_from(dict, "interactive")?
.cloned()
.unwrap_or(default.interactive);
let private_key_file = get_string_from(dict, "private_key_file")?
.cloned()
.or_else(|| default.private_key_file.clone());
let private_key_file_password = get_string_from(dict, "private_key_file_password")?
.cloned()
.or_else(|| default.private_key_file_password.clone());
let from_openssh = get_bool_from(dict, "from_openssh")?
.cloned()
.unwrap_or(default.from_openssh);
Ok(Auth {
from_openssh,
interactive,
private_key_file,
private_key_file_password,
use_agent,
})
}
}
impl Default for Auth {
fn default() -> Self {
Auth {
from_openssh: true,
interactive: true,
private_key_file: None,
private_key_file_password: None,
use_agent: true,
}
}
}
fn check_prefix_length(length: i64) -> Result<()> {
if !(8..=128).contains(&length) {
bail! {"Prefix needs to be between 8 and 128 characters."};
}
Ok(())
}
#[cfg(test)]
mod tests {
use crate::util;
#[test]
fn load_example_config() {
util::test::init().unwrap();
let cfg = crate::cfg::Config::load("example-config/asfa")
.unwrap()
.unwrap();
log::debug!("Loaded: {:?}", cfg);
assert_eq!(&cfg.hosts.len(), &2);
assert_eq!(&cfg.default_host.clone().unwrap(), &"my-remote-site");
assert_eq!(
&cfg.get_host(Some("my-remote-site-2")).unwrap().hostname,
&Some("my-hostname-2.eu".to_string())
);
}
}
| {
Config {
auth: Auth::default(),
default_host: None,
expire: None,
hosts: HashMap::new(),
prefix_length: 32,
verify_via_hash: true,
}
} | identifier_body |
cfg.rs | use anyhow::{bail, Context, Result};
use expanduser::expanduser;
use log::{debug, warn};
use std::collections::HashMap;
use std::default::Default;
use std::fmt::Display;
use std::fs::{read_dir, read_to_string};
use std::path::PathBuf;
use yaml_rust::{yaml::Hash, Yaml, YamlLoader};
use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};
const CONTROLS_ENHANCED: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`');
use crate::util::*;
/// The main configuration
#[derive(Debug)]
pub struct Config {
/// Authentication settings to use if no host-specific authentication settings specified.
pub auth: Auth,
/// Default host to upload to.
default_host: Option<String>,
/// List of all configured hosts.
hosts: HashMap<String, Host>,
/// Expire the uploaded file after the given amount of time via `at`-scheduled remote job.
///
/// Select files newer than the given duration. Durations can be: seconds (sec, s), minutes
/// (min, m), days (d), weeks (w), months (M) or years (y).
///
/// Mininum time till expiration is a minute.
pub expire: Option<String>,
/// Length of prefix to use unless overwritten in host
pub prefix_length: u8,
/// Compute hash on remote side after upload to verify.
pub verify_via_hash: bool,
}
/// Authentication configuration
#[derive(Debug, Clone)]
pub struct Auth {
/// Try to use auth information for the given host from openssh settings
pub from_openssh: bool,
/// Perform interactive authentication (if private key is set password will be used for private
/// key instead).
pub interactive: bool,
/// Perform authentication via explicit private key
pub private_key_file: Option<String>,
/// Explicit password for private key (unsafe)
pub private_key_file_password: Option<String>,
/// Perform agent authentication
pub use_agent: bool,
}
/// A host entry
#[derive(Debug)]
pub struct Host {
/// Alias under which the host is known
pub alias: String,
/// Overwrite global authentication settings for this host.
pub auth: Auth,
/// Expire the uploaded file after the given amount of time via `at`-scheduled remote job.
///
/// Select files newer than the given duration. Durations can be: seconds (sec, s), minutes
/// (min, m), days (d), weeks (w), months (M) or years (y).
///
/// Mininum time till expiration is a minute.
///
/// Overrides the global setting.
pub expire: Option<String>,
/// In which folder do we store files on the host.
pub folder: PathBuf,
/// In case files on the remote site need to have a special group setting in order to be
/// readable by the webserver.
pub group: Option<String>,
/// Self-explanatory (if not set alias will be used)
pub hostname: Option<String>,
/// If the user REALLY REALLY wants to, a plaintext password can be provided (but it is not
/// recommended!).
pub password: Option<String>,
/// Length of prefix to use
pub prefix_length: u8,
/// url-prefix to apply to file link
pub url: String,
/// The user to sign in, otherwise ssh config will be used.
pub user: Option<String>,
}
fn default_config_directories() -> Vec<&'static str> {
vec!["~/.config/asfa", "/etc/asfa"]
}
pub fn load<T: AsRef<str> + Display>(path: &Option<T>) -> Result<Config> {
let possible_paths: Vec<&str> = match path {
Some(path) => vec![path.as_ref()],
None => default_config_directories(),
};
for path in possible_paths.iter() {
match Config::load(path)? {
None => continue,
Some(cfg) => return Ok(cfg),
}
}
bail!("Did not find valid configuration!");
}
#[allow(dead_code)]
pub fn dummy_host_str() -> &'static str {
include_str!("dummy_host.yml")
}
#[allow(dead_code)]
pub fn dummy_host() -> Host {
Host::from_yaml(
"dummy_host".to_string(),
&YamlLoader::load_from_str(dummy_host_str()).unwrap()[0],
)
.unwrap()
}
impl Default for Config {
fn default() -> Self {
Config {
auth: Auth::default(),
default_host: None,
expire: None,
hosts: HashMap::new(),
prefix_length: 32,
verify_via_hash: true,
}
}
}
impl Config {
pub fn load<T: AsRef<str> + Display>(dir: T) -> Result<Option<Config>> {
let config_dir = match expanduser(dir.as_ref()) {
Ok(p) => p,
Err(e) => {
bail!("Error when expanding path to config file: {}", e);
}
};
let global = {
let mut global = config_dir.clone();
global.push("config.yaml");
global
};
let raw: String = match read_to_string(&global) {
Err(e) => {
debug!(
"Could not read configuration file '{}', error: {}",
global.to_str().unwrap_or("invalid"),
e
);
return Ok(None);
}
Ok(raw) => raw,
};
let mut config = Self::from_yaml(&raw)?;
let hosts_dir = {
let mut hosts_dir = config_dir;
hosts_dir.push("hosts");
hosts_dir
};
if hosts_dir.is_dir() {
for entry in read_dir(&hosts_dir)? {
let possible_host = entry?.path();
match possible_host.extension() {
None => {
continue;
}
Some(ext) => {
if ext != "yaml" {
continue;
}
}
};
let alias = match possible_host.file_stem() {
None => {
warn!(
"Could not extract file stem for: {}",
possible_host.display()
);
continue;
}
Some(alias) => alias
.to_str()
.context("Could not convert host file name to String.")?
.to_string(),
};
if config.hosts.contains_key(&alias) {
bail!("Host {} configured in config.yaml and as host-file.", alias);
};
let host_yaml = YamlLoader::load_from_str(&read_to_string(&possible_host)?)?;
let error = format!("Invalid host-file for host {}", &alias);
let host =
Host::from_yaml_with_config(alias, &host_yaml[0], &config).context(error)?;
config.hosts.insert(host.alias.clone(), host);
}
}
Ok(Some(config))
}
pub fn from_yaml(input: &str) -> Result<Config> {
let documents = match YamlLoader::load_from_str(input) {
Ok(data) => data,
Err(e) => {
bail!("Error while loading config file: {}", e);
}
};
let mut config = Config::default();
let config_yaml = match &documents[0] {
Yaml::Hash(h) => h,
_ => {
bail!("Root object in configuration file is no dictionary!");
}
};
config.prefix_length = {
let length = get_int_from(config_yaml, "prefix_length")?
.cloned()
.unwrap_or(config.prefix_length as i64);
check_prefix_length(length)?;
length as u8
};
config.auth = if let Some(Yaml::Hash(auth)) = config_yaml.get(&yaml_string("auth")) {
match Auth::from_yaml(&auth, None) {
Ok(auth) => auth,
Err(e) => {
bail!("Could not read global authentication settings: {}", e);
}
}
} else {
config.auth
};
config.default_host =
std::env::var("ASFA_HOST")
.ok()
.or(get_string_from(config_yaml, "default_host")?.cloned());
config.expire = get_string_from(config_yaml, "expire")?.cloned();
config.verify_via_hash = get_bool_from(config_yaml, "verify_via_hash")?
.cloned()
.unwrap_or(config.verify_via_hash);
match config_yaml.get(&yaml_string("hosts")) {
Some(Yaml::Hash(dict)) => {
for entry in dict.clone().entries() {
let alias = match entry.key() {
Yaml::String(alias) => alias.to_string(),
invalid => {
warn!("Found invalid alias for host entry: {:?}", invalid);
continue;
}
};
let host_yaml = entry.get();
let host = Host::from_yaml_with_config(alias.clone(), host_yaml, &config)?;
config.hosts.insert(alias, host);
}
}
// Some(Yaml::Array(a)) => a,
Some(_) => {
bail!("'hosts' entry in config file needs to be dictionary mapping host-alias to configuration!");
}
None => {
debug!("No 'hosts'-entry in config file.");
}
};
Ok(config)
}
pub fn get_host<T: AsRef<str>>(&self, alias: Option<T>) -> Result<&Host> {
match alias
.as_ref()
.map(|a| a.as_ref())
.or_else(|| self.default_host.as_deref())
{
None => match self.hosts.len() {
0 => {
bail!("No hosts configured, define some!");
}
1 => Ok(self.hosts.values().next().unwrap()),
_ => {
bail!("More than one host entry defined but neither `default_host` set in config or --config given via command line.");
}
},
Some(alias) => Ok(self
.hosts
.get(alias)
.with_context(|| format!("Did not find alias: {}", alias))?),
}
}
}
impl Host {
fn | (alias: String, input: &Yaml) -> Result<Host> {
Self::from_yaml_with_config(alias, input, &Config::default())
}
fn from_yaml_with_config(alias: String, input: &Yaml, config: &Config) -> Result<Host> {
log::trace!("Reading host: {}", alias);
if let Yaml::Hash(dict) = input {
let url = get_required(dict, "url", get_string_from)?.clone();
let hostname = get_string_from(dict, "hostname")?.cloned();
let user = get_string_from(dict, "user")?.cloned();
let expire = get_string_from(dict, "expire")?
.cloned()
.or_else(|| config.expire.clone());
let folder = expanduser(get_required(dict, "folder", get_string_from)?)?;
let group = get_string_from(dict, "group")?.cloned();
let auth = match get_dict_from(dict, "auth")? {
Some(auth) => Auth::from_yaml(auth, Some(&config.auth))?,
None => config.auth.clone(),
};
let prefix_length = match get_int_from(dict, "prefix_length")? {
Some(prefix) => {
check_prefix_length(*prefix)?;
*prefix as u8
}
None => config.prefix_length,
};
let password = get_string_from(dict, "password")?.cloned();
Ok(Host {
alias,
auth,
expire,
folder,
group,
hostname,
password,
prefix_length,
url,
user,
})
} else {
bail!("Invalid yaml data for Host-alias '{}'", alias);
}
}
pub fn get_url(&self, file: &str) -> Result<String> {
Ok(format!(
"{}/{}",
&self.url,
utf8_percent_encode(file, CONTROLS_ENHANCED)
))
}
}
impl Auth {
fn from_yaml(dict: &Hash, default: Option<&Auth>) -> Result<Auth, InvalidYamlTypeError> {
let auth_default = Self::default();
let default = default.unwrap_or(&auth_default);
let use_agent = get_bool_from(dict, "use_agent")?
.cloned()
.unwrap_or(default.use_agent);
let interactive = get_bool_from(dict, "interactive")?
.cloned()
.unwrap_or(default.interactive);
let private_key_file = get_string_from(dict, "private_key_file")?
.cloned()
.or_else(|| default.private_key_file.clone());
let private_key_file_password = get_string_from(dict, "private_key_file_password")?
.cloned()
.or_else(|| default.private_key_file_password.clone());
let from_openssh = get_bool_from(dict, "from_openssh")?
.cloned()
.unwrap_or(default.from_openssh);
Ok(Auth {
from_openssh,
interactive,
private_key_file,
private_key_file_password,
use_agent,
})
}
}
impl Default for Auth {
fn default() -> Self {
Auth {
from_openssh: true,
interactive: true,
private_key_file: None,
private_key_file_password: None,
use_agent: true,
}
}
}
fn check_prefix_length(length: i64) -> Result<()> {
if !(8..=128).contains(&length) {
bail! {"Prefix needs to be between 8 and 128 characters."};
}
Ok(())
}
#[cfg(test)]
mod tests {
use crate::util;
#[test]
fn load_example_config() {
util::test::init().unwrap();
let cfg = crate::cfg::Config::load("example-config/asfa")
.unwrap()
.unwrap();
log::debug!("Loaded: {:?}", cfg);
assert_eq!(&cfg.hosts.len(), &2);
assert_eq!(&cfg.default_host.clone().unwrap(), &"my-remote-site");
assert_eq!(
&cfg.get_host(Some("my-remote-site-2")).unwrap().hostname,
&Some("my-hostname-2.eu".to_string())
);
}
}
| from_yaml | identifier_name |
error.rs | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use core::{convert::TryInto, fmt, ptr::NonNull, task::Poll};
use errno::{errno, Errno};
use libc::c_char;
use s2n_tls_sys::*;
use std::{convert::TryFrom, ffi::CStr};
#[non_exhaustive]
#[derive(Debug, PartialEq)]
pub enum ErrorType {
UnknownErrorType,
NoError,
IOError,
ConnectionClosed,
Blocked,
Alert,
ProtocolError,
InternalError,
UsageError,
Application,
}
#[non_exhaustive]
#[derive(Debug, PartialEq)]
pub enum ErrorSource {
Library,
Bindings,
Application,
}
impl From<libc::c_int> for ErrorType {
fn from(input: libc::c_int) -> Self {
match input as s2n_error_type::Type {
s2n_error_type::OK => ErrorType::NoError,
s2n_error_type::IO => ErrorType::IOError,
s2n_error_type::CLOSED => ErrorType::ConnectionClosed,
s2n_error_type::BLOCKED => ErrorType::Blocked,
s2n_error_type::ALERT => ErrorType::Alert,
s2n_error_type::PROTO => ErrorType::ProtocolError,
s2n_error_type::INTERNAL => ErrorType::InternalError,
s2n_error_type::USAGE => ErrorType::UsageError,
_ => ErrorType::UnknownErrorType,
}
}
}
enum Context {
InvalidInput,
MissingWaker,
Code(s2n_status_code::Type, Errno),
Application(Box<dyn std::error::Error + Send + Sync + 'static>),
}
pub struct Error(Context);
pub trait Fallible {
type Output;
fn into_result(self) -> Result<Self::Output, Error>;
}
impl Fallible for s2n_status_code::Type {
type Output = s2n_status_code::Type;
fn into_result(self) -> Result<Self::Output, Error> {
if self >= s2n_status_code::SUCCESS {
Ok(self)
} else {
Err(Error::capture())
}
}
}
impl Fallible for isize {
type Output = usize;
fn into_result(self) -> Result<Self::Output, Error> {
// Negative values can't be converted to a real size
// and instead indicate an error.
self.try_into().map_err(|_| Error::capture())
}
}
impl Fallible for u64 {
type Output = Self;
/// Converts a u64 to a Result by checking for u64::MAX.
///
/// If a method that returns an unsigned int is fallible,
/// then the -1 error result wraps around to u64::MAX.
///
/// For a u64 to be Fallible, a result of u64::MAX must not be
/// possible without an error. For example, [`s2n_connection_get_delay`]
/// can't return u64::MAX as a valid result because
/// s2n-tls blinding delays are limited to 30s, or a return value of 3^10 ns,
/// which is significantly less than u64::MAX. [`s2n_connection_get_delay`]
/// would therefore only return u64::MAX for a -1 error result.
fn into_result(self) -> Result<Self::Output, Error> {
if self != Self::MAX {
Ok(self)
} else {
Err(Error::capture())
}
}
}
impl<T> Fallible for *mut T {
type Output = NonNull<T>;
fn into_result(self) -> Result<Self::Output, Error> {
if let Some(value) = NonNull::new(self) {
Ok(value)
} else {
Err(Error::capture())
}
}
}
impl<T> Fallible for *const T {
type Output = *const T;
fn into_result(self) -> Result<Self::Output, Error> {
if !self.is_null() {
Ok(self)
} else {
Err(Error::capture())
}
}
}
pub trait Pollable {
type Output;
fn into_poll(self) -> Poll<Result<Self::Output, Error>>;
}
impl<T: Fallible> Pollable for T {
type Output = T::Output;
fn into_poll(self) -> Poll<Result<Self::Output, Error>> {
match self.into_result() {
Ok(r) => Ok(r).into(),
Err(err) if err.is_retryable() => Poll::Pending,
Err(err) => Err(err).into(),
}
}
}
impl Error {
pub(crate) const INVALID_INPUT: Error = Self(Context::InvalidInput);
pub(crate) const MISSING_WAKER: Error = Self(Context::MissingWaker);
/// Converts an io::Error into an s2n-tls Error
pub fn io_error(err: std::io::Error) -> Error {
let errno = err.raw_os_error().unwrap_or(1);
errno::set_errno(errno::Errno(errno));
s2n_status_code::FAILURE.into_result().unwrap_err()
}
/// An error occurred while running application code.
///
/// Can be emitted from [`crate::callbacks::ConnectionFuture::poll()`] to indicate
/// async task failure.
pub fn application(error: Box<dyn std::error::Error + Send + Sync + 'static>) -> Self {
Self(Context::Application(error))
}
fn capture() -> Self {
unsafe {
let s2n_errno = s2n_errno_location();
let code = *s2n_errno;
// https://github.com/aws/s2n-tls/blob/main/docs/USAGE-GUIDE.md#error-handling
//# To avoid possible confusion, s2n_errno should be cleared after processing
//# an error: s2n_errno = S2N_ERR_T_OK
*s2n_errno = s2n_error_type::OK as _;
Self(Context::Code(code, errno()))
}
}
pub fn name(&self) -> &'static str {
match self.0 {
Context::InvalidInput => "InvalidInput",
Context::MissingWaker => "MissingWaker",
Context::Application(_) => "ApplicationError",
Context::Code(code, _) => unsafe {
// Safety: we assume the string has a valid encoding coming from s2n
cstr_to_str(s2n_strerror_name(code))
},
}
}
pub fn message(&self) -> &'static str {
match self.0 {
Context::InvalidInput => "A parameter was incorrect",
Context::MissingWaker => {
"Tried to perform an asynchronous operation without a configured waker"
}
Context::Application(_) => "An error occurred while executing application code",
Context::Code(code, _) => unsafe {
// Safety: we assume the string has a valid encoding coming from s2n
cstr_to_str(s2n_strerror(code, core::ptr::null()))
},
}
}
pub fn debug(&self) -> Option<&'static str> {
match self.0 {
Context::InvalidInput | Context::MissingWaker | Context::Application(_) => None,
Context::Code(code, _) => unsafe {
let debug_info = s2n_strerror_debug(code, core::ptr::null());
// The debug string should be set to a constant static string
// when an error occurs, but because it starts out as NULL
// we should defend against mistakes.
if debug_info.is_null() {
None
} else {
// If the string is not null, then we can assume that
// it is constant and static.
Some(cstr_to_str(debug_info))
}
},
}
} | match self.0 {
Context::InvalidInput | Context::MissingWaker => ErrorType::UsageError,
Context::Application(_) => ErrorType::Application,
Context::Code(code, _) => unsafe { ErrorType::from(s2n_error_get_type(code)) },
}
}
pub fn source(&self) -> ErrorSource {
match self.0 {
Context::InvalidInput | Context::MissingWaker => ErrorSource::Bindings,
Context::Application(_) => ErrorSource::Application,
Context::Code(_, _) => ErrorSource::Library,
}
}
#[allow(clippy::borrowed_box)]
/// Returns an [`std::error::Error`] if the error source was [`ErrorSource::Application`],
/// otherwise returns None.
pub fn application_error(&self) -> Option<&Box<dyn std::error::Error + Send + Sync + 'static>> {
if let Self(Context::Application(err)) = self {
Some(err)
} else {
None
}
}
pub fn is_retryable(&self) -> bool {
matches!(self.kind(), ErrorType::Blocked)
}
}
#[cfg(feature = "quic")]
impl Error {
/// s2n-tls does not send specific errors.
///
/// However, we can attempt to map local errors into the alerts
/// that we would have sent if we sent alerts.
///
/// This API is currently incomplete and should not be relied upon.
pub fn alert(&self) -> Option<u8> {
match self.0 {
Context::InvalidInput | Context::MissingWaker | Context::Application(_) => None,
Context::Code(code, _) => {
let mut alert = 0;
let r = unsafe { s2n_error_get_alert(code, &mut alert) };
match r.into_result() {
Ok(_) => Some(alert),
Err(_) => None,
}
}
}
}
}
/// # Safety
///
/// The caller must ensure the char pointer must contain a valid
/// UTF-8 string from a trusted source
unsafe fn cstr_to_str(v: *const c_char) -> &'static str {
let slice = CStr::from_ptr(v);
let bytes = slice.to_bytes();
core::str::from_utf8_unchecked(bytes)
}
impl TryFrom<std::io::Error> for Error {
type Error = Error;
fn try_from(value: std::io::Error) -> Result<Self, Self::Error> {
let io_inner = value.into_inner().ok_or(Error::INVALID_INPUT)?;
io_inner
.downcast::<Self>()
.map(|error| *error)
.map_err(|_| Error::INVALID_INPUT)
}
}
impl From<Error> for std::io::Error {
fn from(input: Error) -> Self {
if let Context::Code(_, errno) = input.0 {
if ErrorType::IOError == input.kind() {
let bare = std::io::Error::from_raw_os_error(errno.0);
return std::io::Error::new(bare.kind(), input);
}
}
std::io::Error::new(std::io::ErrorKind::Other, input)
}
}
impl fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut s = f.debug_struct("Error");
if let Context::Code(code, _) = self.0 {
s.field("code", &code);
}
s.field("name", &self.name());
s.field("message", &self.message());
s.field("kind", &self.kind());
s.field("source", &self.source());
if let Some(debug) = self.debug() {
s.field("debug", &debug);
}
// "errno" is only known to be meaningful for IOErrors.
// However, it has occasionally proved useful for debugging
// other errors, so include it for all errors.
if let Context::Code(_, errno) = self.0 {
s.field("errno", &errno.to_string());
}
s.finish()
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Self(Context::Application(err)) = self {
err.fmt(f)
} else {
f.write_str(self.message())
}
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
// implement `source` in the same way `std::io::Error` implements it:
// https://doc.rust-lang.org/std/io/struct.Error.html#method.source
if let Self(Context::Application(err)) = self {
err.source()
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{enums::Version, testing::client_hello::CustomError};
use errno::set_errno;
const FAILURE: isize = -1;
// This relies on an implementation detail of s2n-tls errors,
// and could make these tests brittle. However, the alternative
// is a real handshake producing a real IO error, so just updating
// this value if the definition of an IO error changes might be easier.
const S2N_IO_ERROR_CODE: s2n_status_code::Type = 1 << 26;
#[test]
fn s2n_io_error_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
set_errno(Errno(libc::ECONNRESET));
unsafe {
let s2n_errno_ptr = s2n_errno_location();
*s2n_errno_ptr = S2N_IO_ERROR_CODE;
}
let s2n_error = FAILURE.into_result().unwrap_err();
assert_eq!(ErrorType::IOError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::ConnectionReset, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn s2n_error_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
set_errno(Errno(libc::ECONNRESET));
unsafe {
let s2n_errno_ptr = s2n_errno_location();
*s2n_errno_ptr = S2N_IO_ERROR_CODE - 1;
}
let s2n_error = FAILURE.into_result().unwrap_err();
assert_ne!(ErrorType::IOError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::Other, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn invalid_input_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
let s2n_error = Version::try_from(0).unwrap_err();
assert_eq!(ErrorType::UsageError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::Other, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn error_source() -> Result<(), Box<dyn std::error::Error>> {
let bindings_error = Version::try_from(0).unwrap_err();
assert_eq!(ErrorSource::Bindings, bindings_error.source());
let library_error = FAILURE.into_result().unwrap_err();
assert_eq!(ErrorSource::Library, library_error.source());
Ok(())
}
#[test]
fn application_error() {
// test single level errors
{
let error = Error::application(Box::new(CustomError));
let app_error = error.application_error().unwrap();
let _custom_error = app_error.downcast_ref::<CustomError>().unwrap();
}
// make sure nested errors work
{
let io_error = std::io::Error::new(std::io::ErrorKind::Other, CustomError);
let error = Error::application(Box::new(io_error));
let app_error = error.application_error().unwrap();
let io_error = app_error.downcast_ref::<std::io::Error>().unwrap();
let _custom_error = io_error
.get_ref()
.unwrap()
.downcast_ref::<CustomError>()
.unwrap();
}
}
} |
pub fn kind(&self) -> ErrorType { | random_line_split |
error.rs | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use core::{convert::TryInto, fmt, ptr::NonNull, task::Poll};
use errno::{errno, Errno};
use libc::c_char;
use s2n_tls_sys::*;
use std::{convert::TryFrom, ffi::CStr};
#[non_exhaustive]
#[derive(Debug, PartialEq)]
pub enum ErrorType {
UnknownErrorType,
NoError,
IOError,
ConnectionClosed,
Blocked,
Alert,
ProtocolError,
InternalError,
UsageError,
Application,
}
#[non_exhaustive]
#[derive(Debug, PartialEq)]
pub enum ErrorSource {
Library,
Bindings,
Application,
}
impl From<libc::c_int> for ErrorType {
fn from(input: libc::c_int) -> Self |
}
enum Context {
InvalidInput,
MissingWaker,
Code(s2n_status_code::Type, Errno),
Application(Box<dyn std::error::Error + Send + Sync + 'static>),
}
pub struct Error(Context);
pub trait Fallible {
type Output;
fn into_result(self) -> Result<Self::Output, Error>;
}
impl Fallible for s2n_status_code::Type {
type Output = s2n_status_code::Type;
fn into_result(self) -> Result<Self::Output, Error> {
if self >= s2n_status_code::SUCCESS {
Ok(self)
} else {
Err(Error::capture())
}
}
}
impl Fallible for isize {
type Output = usize;
fn into_result(self) -> Result<Self::Output, Error> {
// Negative values can't be converted to a real size
// and instead indicate an error.
self.try_into().map_err(|_| Error::capture())
}
}
impl Fallible for u64 {
type Output = Self;
/// Converts a u64 to a Result by checking for u64::MAX.
///
/// If a method that returns an unsigned int is fallible,
/// then the -1 error result wraps around to u64::MAX.
///
/// For a u64 to be Fallible, a result of u64::MAX must not be
/// possible without an error. For example, [`s2n_connection_get_delay`]
/// can't return u64::MAX as a valid result because
/// s2n-tls blinding delays are limited to 30s, or a return value of 3^10 ns,
/// which is significantly less than u64::MAX. [`s2n_connection_get_delay`]
/// would therefore only return u64::MAX for a -1 error result.
fn into_result(self) -> Result<Self::Output, Error> {
if self != Self::MAX {
Ok(self)
} else {
Err(Error::capture())
}
}
}
impl<T> Fallible for *mut T {
type Output = NonNull<T>;
fn into_result(self) -> Result<Self::Output, Error> {
if let Some(value) = NonNull::new(self) {
Ok(value)
} else {
Err(Error::capture())
}
}
}
impl<T> Fallible for *const T {
type Output = *const T;
fn into_result(self) -> Result<Self::Output, Error> {
if !self.is_null() {
Ok(self)
} else {
Err(Error::capture())
}
}
}
pub trait Pollable {
type Output;
fn into_poll(self) -> Poll<Result<Self::Output, Error>>;
}
impl<T: Fallible> Pollable for T {
type Output = T::Output;
fn into_poll(self) -> Poll<Result<Self::Output, Error>> {
match self.into_result() {
Ok(r) => Ok(r).into(),
Err(err) if err.is_retryable() => Poll::Pending,
Err(err) => Err(err).into(),
}
}
}
impl Error {
pub(crate) const INVALID_INPUT: Error = Self(Context::InvalidInput);
pub(crate) const MISSING_WAKER: Error = Self(Context::MissingWaker);
/// Converts an io::Error into an s2n-tls Error
pub fn io_error(err: std::io::Error) -> Error {
let errno = err.raw_os_error().unwrap_or(1);
errno::set_errno(errno::Errno(errno));
s2n_status_code::FAILURE.into_result().unwrap_err()
}
/// An error occurred while running application code.
///
/// Can be emitted from [`crate::callbacks::ConnectionFuture::poll()`] to indicate
/// async task failure.
pub fn application(error: Box<dyn std::error::Error + Send + Sync + 'static>) -> Self {
Self(Context::Application(error))
}
fn capture() -> Self {
unsafe {
let s2n_errno = s2n_errno_location();
let code = *s2n_errno;
// https://github.com/aws/s2n-tls/blob/main/docs/USAGE-GUIDE.md#error-handling
//# To avoid possible confusion, s2n_errno should be cleared after processing
//# an error: s2n_errno = S2N_ERR_T_OK
*s2n_errno = s2n_error_type::OK as _;
Self(Context::Code(code, errno()))
}
}
pub fn name(&self) -> &'static str {
match self.0 {
Context::InvalidInput => "InvalidInput",
Context::MissingWaker => "MissingWaker",
Context::Application(_) => "ApplicationError",
Context::Code(code, _) => unsafe {
// Safety: we assume the string has a valid encoding coming from s2n
cstr_to_str(s2n_strerror_name(code))
},
}
}
pub fn message(&self) -> &'static str {
match self.0 {
Context::InvalidInput => "A parameter was incorrect",
Context::MissingWaker => {
"Tried to perform an asynchronous operation without a configured waker"
}
Context::Application(_) => "An error occurred while executing application code",
Context::Code(code, _) => unsafe {
// Safety: we assume the string has a valid encoding coming from s2n
cstr_to_str(s2n_strerror(code, core::ptr::null()))
},
}
}
pub fn debug(&self) -> Option<&'static str> {
match self.0 {
Context::InvalidInput | Context::MissingWaker | Context::Application(_) => None,
Context::Code(code, _) => unsafe {
let debug_info = s2n_strerror_debug(code, core::ptr::null());
// The debug string should be set to a constant static string
// when an error occurs, but because it starts out as NULL
// we should defend against mistakes.
if debug_info.is_null() {
None
} else {
// If the string is not null, then we can assume that
// it is constant and static.
Some(cstr_to_str(debug_info))
}
},
}
}
pub fn kind(&self) -> ErrorType {
match self.0 {
Context::InvalidInput | Context::MissingWaker => ErrorType::UsageError,
Context::Application(_) => ErrorType::Application,
Context::Code(code, _) => unsafe { ErrorType::from(s2n_error_get_type(code)) },
}
}
pub fn source(&self) -> ErrorSource {
match self.0 {
Context::InvalidInput | Context::MissingWaker => ErrorSource::Bindings,
Context::Application(_) => ErrorSource::Application,
Context::Code(_, _) => ErrorSource::Library,
}
}
#[allow(clippy::borrowed_box)]
/// Returns an [`std::error::Error`] if the error source was [`ErrorSource::Application`],
/// otherwise returns None.
pub fn application_error(&self) -> Option<&Box<dyn std::error::Error + Send + Sync + 'static>> {
if let Self(Context::Application(err)) = self {
Some(err)
} else {
None
}
}
pub fn is_retryable(&self) -> bool {
matches!(self.kind(), ErrorType::Blocked)
}
}
#[cfg(feature = "quic")]
impl Error {
/// s2n-tls does not send specific errors.
///
/// However, we can attempt to map local errors into the alerts
/// that we would have sent if we sent alerts.
///
/// This API is currently incomplete and should not be relied upon.
pub fn alert(&self) -> Option<u8> {
match self.0 {
Context::InvalidInput | Context::MissingWaker | Context::Application(_) => None,
Context::Code(code, _) => {
let mut alert = 0;
let r = unsafe { s2n_error_get_alert(code, &mut alert) };
match r.into_result() {
Ok(_) => Some(alert),
Err(_) => None,
}
}
}
}
}
/// # Safety
///
/// The caller must ensure the char pointer must contain a valid
/// UTF-8 string from a trusted source
unsafe fn cstr_to_str(v: *const c_char) -> &'static str {
let slice = CStr::from_ptr(v);
let bytes = slice.to_bytes();
core::str::from_utf8_unchecked(bytes)
}
impl TryFrom<std::io::Error> for Error {
type Error = Error;
fn try_from(value: std::io::Error) -> Result<Self, Self::Error> {
let io_inner = value.into_inner().ok_or(Error::INVALID_INPUT)?;
io_inner
.downcast::<Self>()
.map(|error| *error)
.map_err(|_| Error::INVALID_INPUT)
}
}
impl From<Error> for std::io::Error {
fn from(input: Error) -> Self {
if let Context::Code(_, errno) = input.0 {
if ErrorType::IOError == input.kind() {
let bare = std::io::Error::from_raw_os_error(errno.0);
return std::io::Error::new(bare.kind(), input);
}
}
std::io::Error::new(std::io::ErrorKind::Other, input)
}
}
impl fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut s = f.debug_struct("Error");
if let Context::Code(code, _) = self.0 {
s.field("code", &code);
}
s.field("name", &self.name());
s.field("message", &self.message());
s.field("kind", &self.kind());
s.field("source", &self.source());
if let Some(debug) = self.debug() {
s.field("debug", &debug);
}
// "errno" is only known to be meaningful for IOErrors.
// However, it has occasionally proved useful for debugging
// other errors, so include it for all errors.
if let Context::Code(_, errno) = self.0 {
s.field("errno", &errno.to_string());
}
s.finish()
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Self(Context::Application(err)) = self {
err.fmt(f)
} else {
f.write_str(self.message())
}
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
// implement `source` in the same way `std::io::Error` implements it:
// https://doc.rust-lang.org/std/io/struct.Error.html#method.source
if let Self(Context::Application(err)) = self {
err.source()
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{enums::Version, testing::client_hello::CustomError};
use errno::set_errno;
const FAILURE: isize = -1;
// This relies on an implementation detail of s2n-tls errors,
// and could make these tests brittle. However, the alternative
// is a real handshake producing a real IO error, so just updating
// this value if the definition of an IO error changes might be easier.
const S2N_IO_ERROR_CODE: s2n_status_code::Type = 1 << 26;
#[test]
fn s2n_io_error_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
set_errno(Errno(libc::ECONNRESET));
unsafe {
let s2n_errno_ptr = s2n_errno_location();
*s2n_errno_ptr = S2N_IO_ERROR_CODE;
}
let s2n_error = FAILURE.into_result().unwrap_err();
assert_eq!(ErrorType::IOError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::ConnectionReset, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn s2n_error_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
set_errno(Errno(libc::ECONNRESET));
unsafe {
let s2n_errno_ptr = s2n_errno_location();
*s2n_errno_ptr = S2N_IO_ERROR_CODE - 1;
}
let s2n_error = FAILURE.into_result().unwrap_err();
assert_ne!(ErrorType::IOError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::Other, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn invalid_input_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
let s2n_error = Version::try_from(0).unwrap_err();
assert_eq!(ErrorType::UsageError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::Other, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn error_source() -> Result<(), Box<dyn std::error::Error>> {
let bindings_error = Version::try_from(0).unwrap_err();
assert_eq!(ErrorSource::Bindings, bindings_error.source());
let library_error = FAILURE.into_result().unwrap_err();
assert_eq!(ErrorSource::Library, library_error.source());
Ok(())
}
#[test]
fn application_error() {
// test single level errors
{
let error = Error::application(Box::new(CustomError));
let app_error = error.application_error().unwrap();
let _custom_error = app_error.downcast_ref::<CustomError>().unwrap();
}
// make sure nested errors work
{
let io_error = std::io::Error::new(std::io::ErrorKind::Other, CustomError);
let error = Error::application(Box::new(io_error));
let app_error = error.application_error().unwrap();
let io_error = app_error.downcast_ref::<std::io::Error>().unwrap();
let _custom_error = io_error
.get_ref()
.unwrap()
.downcast_ref::<CustomError>()
.unwrap();
}
}
}
| {
match input as s2n_error_type::Type {
s2n_error_type::OK => ErrorType::NoError,
s2n_error_type::IO => ErrorType::IOError,
s2n_error_type::CLOSED => ErrorType::ConnectionClosed,
s2n_error_type::BLOCKED => ErrorType::Blocked,
s2n_error_type::ALERT => ErrorType::Alert,
s2n_error_type::PROTO => ErrorType::ProtocolError,
s2n_error_type::INTERNAL => ErrorType::InternalError,
s2n_error_type::USAGE => ErrorType::UsageError,
_ => ErrorType::UnknownErrorType,
}
} | identifier_body |
error.rs | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use core::{convert::TryInto, fmt, ptr::NonNull, task::Poll};
use errno::{errno, Errno};
use libc::c_char;
use s2n_tls_sys::*;
use std::{convert::TryFrom, ffi::CStr};
#[non_exhaustive]
#[derive(Debug, PartialEq)]
pub enum ErrorType {
UnknownErrorType,
NoError,
IOError,
ConnectionClosed,
Blocked,
Alert,
ProtocolError,
InternalError,
UsageError,
Application,
}
#[non_exhaustive]
#[derive(Debug, PartialEq)]
pub enum ErrorSource {
Library,
Bindings,
Application,
}
impl From<libc::c_int> for ErrorType {
fn from(input: libc::c_int) -> Self {
match input as s2n_error_type::Type {
s2n_error_type::OK => ErrorType::NoError,
s2n_error_type::IO => ErrorType::IOError,
s2n_error_type::CLOSED => ErrorType::ConnectionClosed,
s2n_error_type::BLOCKED => ErrorType::Blocked,
s2n_error_type::ALERT => ErrorType::Alert,
s2n_error_type::PROTO => ErrorType::ProtocolError,
s2n_error_type::INTERNAL => ErrorType::InternalError,
s2n_error_type::USAGE => ErrorType::UsageError,
_ => ErrorType::UnknownErrorType,
}
}
}
enum Context {
InvalidInput,
MissingWaker,
Code(s2n_status_code::Type, Errno),
Application(Box<dyn std::error::Error + Send + Sync + 'static>),
}
pub struct Error(Context);
pub trait Fallible {
type Output;
fn into_result(self) -> Result<Self::Output, Error>;
}
impl Fallible for s2n_status_code::Type {
type Output = s2n_status_code::Type;
fn into_result(self) -> Result<Self::Output, Error> {
if self >= s2n_status_code::SUCCESS {
Ok(self)
} else {
Err(Error::capture())
}
}
}
impl Fallible for isize {
type Output = usize;
fn into_result(self) -> Result<Self::Output, Error> {
// Negative values can't be converted to a real size
// and instead indicate an error.
self.try_into().map_err(|_| Error::capture())
}
}
impl Fallible for u64 {
type Output = Self;
/// Converts a u64 to a Result by checking for u64::MAX.
///
/// If a method that returns an unsigned int is fallible,
/// then the -1 error result wraps around to u64::MAX.
///
/// For a u64 to be Fallible, a result of u64::MAX must not be
/// possible without an error. For example, [`s2n_connection_get_delay`]
/// can't return u64::MAX as a valid result because
/// s2n-tls blinding delays are limited to 30s, or a return value of 3^10 ns,
/// which is significantly less than u64::MAX. [`s2n_connection_get_delay`]
/// would therefore only return u64::MAX for a -1 error result.
fn into_result(self) -> Result<Self::Output, Error> {
if self != Self::MAX {
Ok(self)
} else {
Err(Error::capture())
}
}
}
impl<T> Fallible for *mut T {
type Output = NonNull<T>;
fn into_result(self) -> Result<Self::Output, Error> {
if let Some(value) = NonNull::new(self) {
Ok(value)
} else {
Err(Error::capture())
}
}
}
impl<T> Fallible for *const T {
type Output = *const T;
fn into_result(self) -> Result<Self::Output, Error> {
if !self.is_null() {
Ok(self)
} else {
Err(Error::capture())
}
}
}
pub trait Pollable {
type Output;
fn into_poll(self) -> Poll<Result<Self::Output, Error>>;
}
impl<T: Fallible> Pollable for T {
type Output = T::Output;
fn into_poll(self) -> Poll<Result<Self::Output, Error>> {
match self.into_result() {
Ok(r) => Ok(r).into(),
Err(err) if err.is_retryable() => Poll::Pending,
Err(err) => Err(err).into(),
}
}
}
impl Error {
pub(crate) const INVALID_INPUT: Error = Self(Context::InvalidInput);
pub(crate) const MISSING_WAKER: Error = Self(Context::MissingWaker);
/// Converts an io::Error into an s2n-tls Error
pub fn io_error(err: std::io::Error) -> Error {
let errno = err.raw_os_error().unwrap_or(1);
errno::set_errno(errno::Errno(errno));
s2n_status_code::FAILURE.into_result().unwrap_err()
}
/// An error occurred while running application code.
///
/// Can be emitted from [`crate::callbacks::ConnectionFuture::poll()`] to indicate
/// async task failure.
pub fn application(error: Box<dyn std::error::Error + Send + Sync + 'static>) -> Self {
Self(Context::Application(error))
}
fn capture() -> Self {
unsafe {
let s2n_errno = s2n_errno_location();
let code = *s2n_errno;
// https://github.com/aws/s2n-tls/blob/main/docs/USAGE-GUIDE.md#error-handling
//# To avoid possible confusion, s2n_errno should be cleared after processing
//# an error: s2n_errno = S2N_ERR_T_OK
*s2n_errno = s2n_error_type::OK as _;
Self(Context::Code(code, errno()))
}
}
pub fn name(&self) -> &'static str {
match self.0 {
Context::InvalidInput => "InvalidInput",
Context::MissingWaker => "MissingWaker",
Context::Application(_) => "ApplicationError",
Context::Code(code, _) => unsafe {
// Safety: we assume the string has a valid encoding coming from s2n
cstr_to_str(s2n_strerror_name(code))
},
}
}
pub fn message(&self) -> &'static str {
match self.0 {
Context::InvalidInput => "A parameter was incorrect",
Context::MissingWaker => {
"Tried to perform an asynchronous operation without a configured waker"
}
Context::Application(_) => "An error occurred while executing application code",
Context::Code(code, _) => unsafe {
// Safety: we assume the string has a valid encoding coming from s2n
cstr_to_str(s2n_strerror(code, core::ptr::null()))
},
}
}
pub fn debug(&self) -> Option<&'static str> {
match self.0 {
Context::InvalidInput | Context::MissingWaker | Context::Application(_) => None,
Context::Code(code, _) => unsafe {
let debug_info = s2n_strerror_debug(code, core::ptr::null());
// The debug string should be set to a constant static string
// when an error occurs, but because it starts out as NULL
// we should defend against mistakes.
if debug_info.is_null() {
None
} else {
// If the string is not null, then we can assume that
// it is constant and static.
Some(cstr_to_str(debug_info))
}
},
}
}
pub fn kind(&self) -> ErrorType {
match self.0 {
Context::InvalidInput | Context::MissingWaker => ErrorType::UsageError,
Context::Application(_) => ErrorType::Application,
Context::Code(code, _) => unsafe { ErrorType::from(s2n_error_get_type(code)) },
}
}
pub fn source(&self) -> ErrorSource {
match self.0 {
Context::InvalidInput | Context::MissingWaker => ErrorSource::Bindings,
Context::Application(_) => ErrorSource::Application,
Context::Code(_, _) => ErrorSource::Library,
}
}
#[allow(clippy::borrowed_box)]
/// Returns an [`std::error::Error`] if the error source was [`ErrorSource::Application`],
/// otherwise returns None.
pub fn application_error(&self) -> Option<&Box<dyn std::error::Error + Send + Sync + 'static>> {
if let Self(Context::Application(err)) = self {
Some(err)
} else {
None
}
}
pub fn | (&self) -> bool {
matches!(self.kind(), ErrorType::Blocked)
}
}
#[cfg(feature = "quic")]
impl Error {
/// s2n-tls does not send specific errors.
///
/// However, we can attempt to map local errors into the alerts
/// that we would have sent if we sent alerts.
///
/// This API is currently incomplete and should not be relied upon.
pub fn alert(&self) -> Option<u8> {
match self.0 {
Context::InvalidInput | Context::MissingWaker | Context::Application(_) => None,
Context::Code(code, _) => {
let mut alert = 0;
let r = unsafe { s2n_error_get_alert(code, &mut alert) };
match r.into_result() {
Ok(_) => Some(alert),
Err(_) => None,
}
}
}
}
}
/// # Safety
///
/// The caller must ensure the char pointer must contain a valid
/// UTF-8 string from a trusted source
unsafe fn cstr_to_str(v: *const c_char) -> &'static str {
let slice = CStr::from_ptr(v);
let bytes = slice.to_bytes();
core::str::from_utf8_unchecked(bytes)
}
impl TryFrom<std::io::Error> for Error {
type Error = Error;
fn try_from(value: std::io::Error) -> Result<Self, Self::Error> {
let io_inner = value.into_inner().ok_or(Error::INVALID_INPUT)?;
io_inner
.downcast::<Self>()
.map(|error| *error)
.map_err(|_| Error::INVALID_INPUT)
}
}
impl From<Error> for std::io::Error {
fn from(input: Error) -> Self {
if let Context::Code(_, errno) = input.0 {
if ErrorType::IOError == input.kind() {
let bare = std::io::Error::from_raw_os_error(errno.0);
return std::io::Error::new(bare.kind(), input);
}
}
std::io::Error::new(std::io::ErrorKind::Other, input)
}
}
impl fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut s = f.debug_struct("Error");
if let Context::Code(code, _) = self.0 {
s.field("code", &code);
}
s.field("name", &self.name());
s.field("message", &self.message());
s.field("kind", &self.kind());
s.field("source", &self.source());
if let Some(debug) = self.debug() {
s.field("debug", &debug);
}
// "errno" is only known to be meaningful for IOErrors.
// However, it has occasionally proved useful for debugging
// other errors, so include it for all errors.
if let Context::Code(_, errno) = self.0 {
s.field("errno", &errno.to_string());
}
s.finish()
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Self(Context::Application(err)) = self {
err.fmt(f)
} else {
f.write_str(self.message())
}
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
// implement `source` in the same way `std::io::Error` implements it:
// https://doc.rust-lang.org/std/io/struct.Error.html#method.source
if let Self(Context::Application(err)) = self {
err.source()
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{enums::Version, testing::client_hello::CustomError};
use errno::set_errno;
const FAILURE: isize = -1;
// This relies on an implementation detail of s2n-tls errors,
// and could make these tests brittle. However, the alternative
// is a real handshake producing a real IO error, so just updating
// this value if the definition of an IO error changes might be easier.
const S2N_IO_ERROR_CODE: s2n_status_code::Type = 1 << 26;
#[test]
fn s2n_io_error_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
set_errno(Errno(libc::ECONNRESET));
unsafe {
let s2n_errno_ptr = s2n_errno_location();
*s2n_errno_ptr = S2N_IO_ERROR_CODE;
}
let s2n_error = FAILURE.into_result().unwrap_err();
assert_eq!(ErrorType::IOError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::ConnectionReset, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn s2n_error_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
set_errno(Errno(libc::ECONNRESET));
unsafe {
let s2n_errno_ptr = s2n_errno_location();
*s2n_errno_ptr = S2N_IO_ERROR_CODE - 1;
}
let s2n_error = FAILURE.into_result().unwrap_err();
assert_ne!(ErrorType::IOError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::Other, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn invalid_input_to_std_io_error() -> Result<(), Box<dyn std::error::Error>> {
let s2n_error = Version::try_from(0).unwrap_err();
assert_eq!(ErrorType::UsageError, s2n_error.kind());
let io_error = std::io::Error::from(s2n_error);
assert_eq!(std::io::ErrorKind::Other, io_error.kind());
assert!(io_error.into_inner().is_some());
Ok(())
}
#[test]
fn error_source() -> Result<(), Box<dyn std::error::Error>> {
let bindings_error = Version::try_from(0).unwrap_err();
assert_eq!(ErrorSource::Bindings, bindings_error.source());
let library_error = FAILURE.into_result().unwrap_err();
assert_eq!(ErrorSource::Library, library_error.source());
Ok(())
}
#[test]
fn application_error() {
// test single level errors
{
let error = Error::application(Box::new(CustomError));
let app_error = error.application_error().unwrap();
let _custom_error = app_error.downcast_ref::<CustomError>().unwrap();
}
// make sure nested errors work
{
let io_error = std::io::Error::new(std::io::ErrorKind::Other, CustomError);
let error = Error::application(Box::new(io_error));
let app_error = error.application_error().unwrap();
let io_error = app_error.downcast_ref::<std::io::Error>().unwrap();
let _custom_error = io_error
.get_ref()
.unwrap()
.downcast_ref::<CustomError>()
.unwrap();
}
}
}
| is_retryable | identifier_name |
_utils.py | """
Various internal utilities for Pheres
"""
from __future__ import annotations
import functools
import inspect
import json
import re
import types
import typing
from contextlib import contextmanager
from types import FrameType
from typing import (
Annotated,
Any,
Callable,
Generic,
Iterable,
List,
Literal,
Optional,
Tuple,
Type,
TypeVar,
Union,
overload,
)
# Type Aliases
AnyClass = TypeVar("AnyClass", bound=type)
TypeHint = Union[ # pylint: disable=unsubscriptable-object
Type[type],
Type[Any],
Type[TypeVar],
# Type[Generic],
Type[Annotated],
Type[Tuple],
Type[Callable],
]
Namespace = dict[str, Any]
TypeT = TypeVar("TypeT", *typing.get_args(TypeHint))
U = TypeVar("U")
V = TypeVar("V")
class Virtual:
|
class Subscriptable(Generic[U, V]):
"""
Decorator to make a subscriptable instance from a __getitem__ function
Usage:
@Subscriptable
def my_subscriptable(key):
return key
assert my_subscriptable[8] == 8
"""
__slots__ = ("_func",)
def __init__(self, func: Callable[[U], V]) -> None:
self._func = func
# self.__doc__ = func.__doc__
def __call__(self):
raise SyntaxError("Use brackets '[]' instead")
def __getitem__(self, arg: U) -> V:
return self._func(arg)
def docstring(
docstring: str = None, *, pre: str = None, post: str = None
) -> Callable[[U], U]:
"""
Decorator to modify the docstring of an object.
For all provided strings, unused empty lines are removed, and the indentation
of the first non-empty line is removed from all lines if possible. This allows
better indentation when used as a decorator.
Unused empty lines means initial enpty lines for ``pre``, and final empty lines
for ``post``.
Arguments:
docstring: replaces the docstring of the object
pre: adds the string at the start of the object original docstring
post: adds the strings at the end of the object original docstring
"""
def edit_docstring(obj: U) -> U:
obj.__doc__ = "".join(
(
clean_docstring(pre or "", unused="pre"),
clean_docstring(docstring or (obj.__doc__ or "")),
clean_docstring(post or "", unused="post"),
)
)
return obj
return edit_docstring
# from https://stackoverflow.com/questions/5189699/how-to-make-a-class-property
class ClassPropertyDescriptor:
"""
Descriptor for class properties
"""
__slots__ = ("fget", "fset")
def __init__(
self,
fget: Union[classmethod, staticmethod],
fset: Union[classmethod, staticmethod] = None,
):
self.fget = fget
self.fset = fset
def __get__(self, obj: U, cls: Type[U] = None) -> V:
if cls is None:
cls = type(obj)
return self.fget.__get__(obj, cls)()
def __set__(self, obj: U, value: V):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
@property
def __isabstractmethod__(self):
return any(
getattr(f, "__isabstractmethod__", False) for f in (self.fget, self.fset)
)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(
func: Union[Callable, classmethod, staticmethod]
) -> ClassPropertyDescriptor:
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@overload
def autoformat(
cls: None, /, params: Union[str, Iterable[str]] = ("message", "msg")
) -> Callable[[Type[U]], Type[U]]:
...
@overload
def autoformat(
cls: Type[U], /, params: Union[str, Iterable[str]] = ("message", "msg")
) -> Type[U]:
...
def autoformat(
cls: Type[U] = None,
/,
params: Union[str, Iterable[str]] = ( # pylint: disable=unsubscriptable-object
"message",
"msg",
),
):
"""
Class decorator to autoformat string arguments in the __init__ method
Modify the class __init__ method in place by wrapping it. The wrapped class
will call the format() method of arguments specified in `params` that exist
in the original signature, passing all other arguments are dictionary to
str.format()
Arguments:
params -- names of the arguments to autoformats
Usage:
@autoformat
class MyException(Exception):
def __init__(self, elem, msg="{elem} is invalid"):
super().__init__(msg)
self.msg = msg
self.elem = elem
assert MyException(8).msg == "8 is invalid"
"""
if isinstance(params, str):
params = (params,)
if cls is None:
return functools.partial(autoformat, params=params)
orig_init = cls.__init__
signature = inspect.signature(orig_init)
params = signature.parameters.keys() & set(params)
@functools.wraps(orig_init)
def init(*args, **kwargs):
bounds = signature.bind(*args, **kwargs)
bounds.apply_defaults()
pre_formatted = {
name: bounds.arguments.pop(name)
for name in params
if name in bounds.arguments
}
formatted = {
name: string.format(**bounds.arguments)
for name, string in pre_formatted.items()
}
for name, arg in formatted.items():
bounds.arguments[name] = arg
return orig_init(*bounds.args, **bounds.kwargs)
# init.__signature__ = signature
setattr(cls, "__init__", init)
return cls
class Variable(str):
def __repr__(self) -> str:
return self
def __str__(self) -> str:
return self
def _sig_without(sig: inspect.Signature, param: Union[int, str]) -> inspect.Signature:
"""Removes a parameter from a Signature object
If param is an int, remove the parameter at that position, else
remove any paramater with that name
"""
if isinstance(param, int):
params = list(sig.parameters.values())
params.pop(param)
else:
params = [p for name, p in sig.parameters.items() if name != param]
return sig.replace(parameters=params)
def _sig_merge(lsig: inspect.Signature, rsig: inspect.Signature) -> inspect.Signature:
"""Merges two signature object, dropping the return annotations"""
return inspect.Signature(
sorted(
list(lsig.parameters.values()) + list(rsig.parameters.values()),
key=lambda param: param.kind,
)
)
def _sig_to_def(sig: inspect.Signature) -> str:
return str(sig).split("->", 1)[0].strip()[1:-1]
def _sig_to_call(sig: inspect.Signature) -> str:
l = []
for p in sig.parameters.values():
if p.kind is inspect.Parameter.KEYWORD_ONLY:
l.append(f"{p.name}={p.name}")
else:
l.append(p.name)
return ", ".join(l)
def post_init(cls: Type[U]) -> Type[U]:
"""
Class decorator to automatically support __post_init__() on classes
This is useful for @attr.s decorated classes, because __attr_post_init__() doesn't
support additional arguments.
This decorators wraps the class __init__ in a new function that accept merged arguments,
and dispatch them to __init__ and then __post_init__()
"""
if not isinstance(cls, type):
raise TypeError("Can only decorate classes")
if not hasattr(cls, "__post_init__"):
raise TypeError("The class must have a __post_init__() method")
# Ignore the first argument which is the "self" argument
sig = init_sig = _sig_without(inspect.signature(cls.__init__), 0)
previous = [(cls, "__init__", sig)]
for parent in reversed(cls.__mro__):
if hasattr(parent, "__post_init__"):
post_sig = _sig_without(
inspect.signature(getattr(parent, "__post_init__")), 0
)
try:
sig = _sig_merge(sig, post_sig)
except Exception as err:
# find the incompatibility
for parent, method, psig in previous:
try:
_sig_merge(psig, post_sig)
except Exception:
break
else:
raise TypeError(
"__post_init__ signature is incompatible with the class"
) from err
raise TypeError(
f"__post_init__() is incompatible with {parent.__qualname__}{method}()"
) from err
# No exception
previous.append((parent, "__post_init__", post_sig))
# handles type annotations and defaults
# inspired by the dataclasses modules
params = list(sig.parameters.values())
localns = (
{
f"__type_{p.name}": p.annotation
for p in params
if p.annotation is not inspect.Parameter.empty
}
| {
f"__default_{p.name}": p.default
for p in params
if p.default is not inspect.Parameter.empty
}
| cls.__dict__
)
for i, p in enumerate(params):
if p.default is not inspect.Parameter.empty:
p = p.replace(default=Variable(f"__default_{p.name}"))
if p.annotation is not inspect.Parameter.empty:
p = p.replace(annotation=f"__type_{p.name}")
params[i] = p
new_sig = inspect.Signature(params)
# Build the new __init__ source code
self_ = "self" if "self" not in sig.parameters else "__post_init_self"
init_lines = [
f"def __init__({self_}, {_sig_to_def(new_sig)}) -> None:",
f"__original_init({self_}, {_sig_to_call(init_sig)})",
]
for parent, method, psig in previous[1:]:
if hasattr(parent, "__post_init__"):
if parent is not cls:
init_lines.append(
f"super({parent.__qualname__}, {self_}).{method}({_sig_to_call(psig)})"
)
else:
init_lines.append(f"{self_}.{method}({_sig_to_call(psig)})")
init_src = "\n ".join(init_lines)
# Build the factory function source code
local_vars = ", ".join(localns.keys())
factory_src = (
f"def __make_init__(__original_init, {local_vars}):\n"
f" {init_src}\n"
" return __init__"
)
# Create new __init__ with the factory
globalns = inspect.getmodule(cls).__dict__
ns: dict[str, Any] = {}
exec(factory_src, globalns, ns)
init = ns["__make_init__"](cls.__init__, **localns)
self_param = inspect.Parameter(self_, inspect.Parameter.POSITIONAL_ONLY)
init.__signature__ = inspect.Signature(
parameters=[self_param] + list(sig.parameters.values()), return_annotation=None
)
setattr(cls, "__init__", init)
return cls
@contextmanager
def on_error(func, *args, yield_=None, **kwargs):
"""
Context manager that calls a function if the managed code doesn't raise
"""
try:
yield yield_
except Exception:
func(*args, **kwargs)
raise
@contextmanager
def on_success(func, *args, yield_=None, **kwargs):
"""
Context manager that calls a function if the managed code raises an Exception
"""
try:
yield yield_
except Exception:
raise
else:
func(*args, **kwargs)
def clean_docstring(doc: str, unused: Literal["pre", "post"] = None) -> str:
"""
Removes initial empty lines and shared indentation
Arguments:
doc: docstring to clean up
unused: whether to remove statring or endind empty lines
Returns:
The cleaned docstring
"""
doc = doc.split("\n")
if unused == "pre":
try:
index = next(i for i, l in enumerate(doc) if l.strip())
doc = doc[index:]
except StopIteration:
doc = []
elif unused == "post":
try:
index = next(i for i, l in enumerate(reversed(doc)) if l.strip())
doc = doc[: len(doc) - index]
except StopIteration:
doc = []
if doc:
first_line = doc[0]
index = len(first_line) - len(first_line.lstrip())
indent = first_line[:index]
if all(l.startswith(indent) for l in doc if l.strip()):
doc = [(l[index:] if l.strip() else l) for l in doc]
return "\n".join(doc)
def split(func, iterable):
"""split an iterable based on the truth value of the function for element
Arguments
func -- a callable to apply to each element in the iterable
iterable -- an iterable of element to split
Returns
falsy, truthy - two tuple, the first with element e of the itrable where
func(e) return false, the second with element of the iterable that are True
"""
falsy, truthy = [], []
for e in iterable:
if func(e):
truthy.append(e)
else:
falsy.append(e)
return tuple(falsy), tuple(truthy)
def sync_filter(func, *iterables):
"""
Filter multiple iterable at once, selecting values at index i
such that func(iterables[0][i], iterables[1][i], ...) is True
"""
return tuple(zip(*tuple(i for i in zip(*iterables) if func(*i)))) or ((),) * len(
iterables
)
def get_outer_frame() -> Optional[FrameType]:
frame = inspect.currentframe()
if frame is not None:
frame = frame.f_back
if frame is not None:
return frame.f_back
def get_outer_namespaces() -> Tuple[Namespace, Namespace]:
"""
Get the globals and locals from the context that called the function
calling this utility
Returns:
globals, locals
"""
frame = inspect.currentframe()
if frame:
frame = frame.f_back
if frame:
frame = frame.f_back
if frame:
return frame.f_globals or {}, frame.f_locals or {}
return {}, {}
def get_args(
tp: TypeHint, *, globalns: Namespace = None, localns: Namespace = None
) -> Tuple[TypeHint, ...]:
if globalns is not None or localns is not None:
return typing.get_args(typing._eval_type(tp, globalns, localns))
return typing.get_args(tp)
# Adapted version of typing._type_repr
def type_repr(tp) -> str:
"""Return the repr() of objects, special casing types and tuples"""
from pheres._typing import JSONArray, JSONObject, JSONValue
if isinstance(tp, tuple):
return ", ".join(map(type_repr, tp))
if isinstance(tp, type):
if tp.__module__ == "builtins":
return tp.__qualname__
return f"{tp.__module__}.{tp.__qualname__}"
if tp is Ellipsis:
return "..."
if isinstance(tp, types.FunctionType):
return tp.__name__
if tp is JSONValue:
return "JSONValue"
if tp is JSONArray:
return "JSONArray"
if tp is JSONObject:
return "JSONObject"
return repr(tp)
def get_class_namespaces(cls: type) -> tuple[Namespace, Namespace]:
"""
Return the module a class is defined in and its internal dictionary
Returns:
globals, locals
"""
return inspect.getmodule(cls).__dict__, cls.__dict__ | {cls.__name__: cls}
def get_updated_class(cls: AnyClass) -> AnyClass:
module = inspect.getmodule(cls)
if module is not None:
return getattr(module, cls.__name__)
return cls
| """
Mixin class to make a class non-heritable and non-instanciable
"""
__slots__ = ("__weakref__",)
def __init__(self):
raise TypeError("Cannot instanciate virtual class")
def __init_subclass__(cls, *args, **kwargs):
if Virtual not in cls.__bases__:
raise TypeError("Cannot subclass virtual class")
super().__init_subclass__(*args, **kwargs) | identifier_body |
_utils.py | """
Various internal utilities for Pheres
"""
from __future__ import annotations
import functools
import inspect
import json
import re
import types
import typing
from contextlib import contextmanager
from types import FrameType
from typing import (
Annotated,
Any,
Callable,
Generic,
Iterable,
List,
Literal,
Optional,
Tuple,
Type,
TypeVar,
Union,
overload,
)
# Type Aliases
AnyClass = TypeVar("AnyClass", bound=type)
TypeHint = Union[ # pylint: disable=unsubscriptable-object
Type[type],
Type[Any],
Type[TypeVar],
# Type[Generic],
Type[Annotated],
Type[Tuple],
Type[Callable],
]
Namespace = dict[str, Any]
TypeT = TypeVar("TypeT", *typing.get_args(TypeHint))
U = TypeVar("U")
V = TypeVar("V")
class Virtual:
"""
Mixin class to make a class non-heritable and non-instanciable
"""
__slots__ = ("__weakref__",)
def __init__(self):
raise TypeError("Cannot instanciate virtual class")
def __init_subclass__(cls, *args, **kwargs):
if Virtual not in cls.__bases__:
raise TypeError("Cannot subclass virtual class")
super().__init_subclass__(*args, **kwargs)
class Subscriptable(Generic[U, V]):
"""
Decorator to make a subscriptable instance from a __getitem__ function
Usage:
@Subscriptable
def my_subscriptable(key):
return key
assert my_subscriptable[8] == 8
"""
__slots__ = ("_func",)
def __init__(self, func: Callable[[U], V]) -> None:
self._func = func
# self.__doc__ = func.__doc__
def __call__(self):
raise SyntaxError("Use brackets '[]' instead")
def __getitem__(self, arg: U) -> V:
return self._func(arg)
def docstring(
docstring: str = None, *, pre: str = None, post: str = None
) -> Callable[[U], U]:
"""
Decorator to modify the docstring of an object.
For all provided strings, unused empty lines are removed, and the indentation
of the first non-empty line is removed from all lines if possible. This allows
better indentation when used as a decorator.
Unused empty lines means initial enpty lines for ``pre``, and final empty lines
for ``post``.
Arguments:
docstring: replaces the docstring of the object
pre: adds the string at the start of the object original docstring
post: adds the strings at the end of the object original docstring
"""
def edit_docstring(obj: U) -> U:
obj.__doc__ = "".join(
(
clean_docstring(pre or "", unused="pre"),
clean_docstring(docstring or (obj.__doc__ or "")),
clean_docstring(post or "", unused="post"),
)
)
return obj
return edit_docstring
# from https://stackoverflow.com/questions/5189699/how-to-make-a-class-property
class | :
"""
Descriptor for class properties
"""
__slots__ = ("fget", "fset")
def __init__(
self,
fget: Union[classmethod, staticmethod],
fset: Union[classmethod, staticmethod] = None,
):
self.fget = fget
self.fset = fset
def __get__(self, obj: U, cls: Type[U] = None) -> V:
if cls is None:
cls = type(obj)
return self.fget.__get__(obj, cls)()
def __set__(self, obj: U, value: V):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
@property
def __isabstractmethod__(self):
return any(
getattr(f, "__isabstractmethod__", False) for f in (self.fget, self.fset)
)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(
func: Union[Callable, classmethod, staticmethod]
) -> ClassPropertyDescriptor:
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@overload
def autoformat(
cls: None, /, params: Union[str, Iterable[str]] = ("message", "msg")
) -> Callable[[Type[U]], Type[U]]:
...
@overload
def autoformat(
cls: Type[U], /, params: Union[str, Iterable[str]] = ("message", "msg")
) -> Type[U]:
...
def autoformat(
cls: Type[U] = None,
/,
params: Union[str, Iterable[str]] = ( # pylint: disable=unsubscriptable-object
"message",
"msg",
),
):
"""
Class decorator to autoformat string arguments in the __init__ method
Modify the class __init__ method in place by wrapping it. The wrapped class
will call the format() method of arguments specified in `params` that exist
in the original signature, passing all other arguments are dictionary to
str.format()
Arguments:
params -- names of the arguments to autoformats
Usage:
@autoformat
class MyException(Exception):
def __init__(self, elem, msg="{elem} is invalid"):
super().__init__(msg)
self.msg = msg
self.elem = elem
assert MyException(8).msg == "8 is invalid"
"""
if isinstance(params, str):
params = (params,)
if cls is None:
return functools.partial(autoformat, params=params)
orig_init = cls.__init__
signature = inspect.signature(orig_init)
params = signature.parameters.keys() & set(params)
@functools.wraps(orig_init)
def init(*args, **kwargs):
bounds = signature.bind(*args, **kwargs)
bounds.apply_defaults()
pre_formatted = {
name: bounds.arguments.pop(name)
for name in params
if name in bounds.arguments
}
formatted = {
name: string.format(**bounds.arguments)
for name, string in pre_formatted.items()
}
for name, arg in formatted.items():
bounds.arguments[name] = arg
return orig_init(*bounds.args, **bounds.kwargs)
# init.__signature__ = signature
setattr(cls, "__init__", init)
return cls
class Variable(str):
def __repr__(self) -> str:
return self
def __str__(self) -> str:
return self
def _sig_without(sig: inspect.Signature, param: Union[int, str]) -> inspect.Signature:
"""Removes a parameter from a Signature object
If param is an int, remove the parameter at that position, else
remove any paramater with that name
"""
if isinstance(param, int):
params = list(sig.parameters.values())
params.pop(param)
else:
params = [p for name, p in sig.parameters.items() if name != param]
return sig.replace(parameters=params)
def _sig_merge(lsig: inspect.Signature, rsig: inspect.Signature) -> inspect.Signature:
"""Merges two signature object, dropping the return annotations"""
return inspect.Signature(
sorted(
list(lsig.parameters.values()) + list(rsig.parameters.values()),
key=lambda param: param.kind,
)
)
def _sig_to_def(sig: inspect.Signature) -> str:
return str(sig).split("->", 1)[0].strip()[1:-1]
def _sig_to_call(sig: inspect.Signature) -> str:
l = []
for p in sig.parameters.values():
if p.kind is inspect.Parameter.KEYWORD_ONLY:
l.append(f"{p.name}={p.name}")
else:
l.append(p.name)
return ", ".join(l)
def post_init(cls: Type[U]) -> Type[U]:
"""
Class decorator to automatically support __post_init__() on classes
This is useful for @attr.s decorated classes, because __attr_post_init__() doesn't
support additional arguments.
This decorators wraps the class __init__ in a new function that accept merged arguments,
and dispatch them to __init__ and then __post_init__()
"""
if not isinstance(cls, type):
raise TypeError("Can only decorate classes")
if not hasattr(cls, "__post_init__"):
raise TypeError("The class must have a __post_init__() method")
# Ignore the first argument which is the "self" argument
sig = init_sig = _sig_without(inspect.signature(cls.__init__), 0)
previous = [(cls, "__init__", sig)]
for parent in reversed(cls.__mro__):
if hasattr(parent, "__post_init__"):
post_sig = _sig_without(
inspect.signature(getattr(parent, "__post_init__")), 0
)
try:
sig = _sig_merge(sig, post_sig)
except Exception as err:
# find the incompatibility
for parent, method, psig in previous:
try:
_sig_merge(psig, post_sig)
except Exception:
break
else:
raise TypeError(
"__post_init__ signature is incompatible with the class"
) from err
raise TypeError(
f"__post_init__() is incompatible with {parent.__qualname__}{method}()"
) from err
# No exception
previous.append((parent, "__post_init__", post_sig))
# handles type annotations and defaults
# inspired by the dataclasses modules
params = list(sig.parameters.values())
localns = (
{
f"__type_{p.name}": p.annotation
for p in params
if p.annotation is not inspect.Parameter.empty
}
| {
f"__default_{p.name}": p.default
for p in params
if p.default is not inspect.Parameter.empty
}
| cls.__dict__
)
for i, p in enumerate(params):
if p.default is not inspect.Parameter.empty:
p = p.replace(default=Variable(f"__default_{p.name}"))
if p.annotation is not inspect.Parameter.empty:
p = p.replace(annotation=f"__type_{p.name}")
params[i] = p
new_sig = inspect.Signature(params)
# Build the new __init__ source code
self_ = "self" if "self" not in sig.parameters else "__post_init_self"
init_lines = [
f"def __init__({self_}, {_sig_to_def(new_sig)}) -> None:",
f"__original_init({self_}, {_sig_to_call(init_sig)})",
]
for parent, method, psig in previous[1:]:
if hasattr(parent, "__post_init__"):
if parent is not cls:
init_lines.append(
f"super({parent.__qualname__}, {self_}).{method}({_sig_to_call(psig)})"
)
else:
init_lines.append(f"{self_}.{method}({_sig_to_call(psig)})")
init_src = "\n ".join(init_lines)
# Build the factory function source code
local_vars = ", ".join(localns.keys())
factory_src = (
f"def __make_init__(__original_init, {local_vars}):\n"
f" {init_src}\n"
" return __init__"
)
# Create new __init__ with the factory
globalns = inspect.getmodule(cls).__dict__
ns: dict[str, Any] = {}
exec(factory_src, globalns, ns)
init = ns["__make_init__"](cls.__init__, **localns)
self_param = inspect.Parameter(self_, inspect.Parameter.POSITIONAL_ONLY)
init.__signature__ = inspect.Signature(
parameters=[self_param] + list(sig.parameters.values()), return_annotation=None
)
setattr(cls, "__init__", init)
return cls
@contextmanager
def on_error(func, *args, yield_=None, **kwargs):
"""
Context manager that calls a function if the managed code doesn't raise
"""
try:
yield yield_
except Exception:
func(*args, **kwargs)
raise
@contextmanager
def on_success(func, *args, yield_=None, **kwargs):
"""
Context manager that calls a function if the managed code raises an Exception
"""
try:
yield yield_
except Exception:
raise
else:
func(*args, **kwargs)
def clean_docstring(doc: str, unused: Literal["pre", "post"] = None) -> str:
"""
Removes initial empty lines and shared indentation
Arguments:
doc: docstring to clean up
unused: whether to remove statring or endind empty lines
Returns:
The cleaned docstring
"""
doc = doc.split("\n")
if unused == "pre":
try:
index = next(i for i, l in enumerate(doc) if l.strip())
doc = doc[index:]
except StopIteration:
doc = []
elif unused == "post":
try:
index = next(i for i, l in enumerate(reversed(doc)) if l.strip())
doc = doc[: len(doc) - index]
except StopIteration:
doc = []
if doc:
first_line = doc[0]
index = len(first_line) - len(first_line.lstrip())
indent = first_line[:index]
if all(l.startswith(indent) for l in doc if l.strip()):
doc = [(l[index:] if l.strip() else l) for l in doc]
return "\n".join(doc)
def split(func, iterable):
"""split an iterable based on the truth value of the function for element
Arguments
func -- a callable to apply to each element in the iterable
iterable -- an iterable of element to split
Returns
falsy, truthy - two tuple, the first with element e of the itrable where
func(e) return false, the second with element of the iterable that are True
"""
falsy, truthy = [], []
for e in iterable:
if func(e):
truthy.append(e)
else:
falsy.append(e)
return tuple(falsy), tuple(truthy)
def sync_filter(func, *iterables):
"""
Filter multiple iterable at once, selecting values at index i
such that func(iterables[0][i], iterables[1][i], ...) is True
"""
return tuple(zip(*tuple(i for i in zip(*iterables) if func(*i)))) or ((),) * len(
iterables
)
def get_outer_frame() -> Optional[FrameType]:
frame = inspect.currentframe()
if frame is not None:
frame = frame.f_back
if frame is not None:
return frame.f_back
def get_outer_namespaces() -> Tuple[Namespace, Namespace]:
"""
Get the globals and locals from the context that called the function
calling this utility
Returns:
globals, locals
"""
frame = inspect.currentframe()
if frame:
frame = frame.f_back
if frame:
frame = frame.f_back
if frame:
return frame.f_globals or {}, frame.f_locals or {}
return {}, {}
def get_args(
tp: TypeHint, *, globalns: Namespace = None, localns: Namespace = None
) -> Tuple[TypeHint, ...]:
if globalns is not None or localns is not None:
return typing.get_args(typing._eval_type(tp, globalns, localns))
return typing.get_args(tp)
# Adapted version of typing._type_repr
def type_repr(tp) -> str:
"""Return the repr() of objects, special casing types and tuples"""
from pheres._typing import JSONArray, JSONObject, JSONValue
if isinstance(tp, tuple):
return ", ".join(map(type_repr, tp))
if isinstance(tp, type):
if tp.__module__ == "builtins":
return tp.__qualname__
return f"{tp.__module__}.{tp.__qualname__}"
if tp is Ellipsis:
return "..."
if isinstance(tp, types.FunctionType):
return tp.__name__
if tp is JSONValue:
return "JSONValue"
if tp is JSONArray:
return "JSONArray"
if tp is JSONObject:
return "JSONObject"
return repr(tp)
def get_class_namespaces(cls: type) -> tuple[Namespace, Namespace]:
"""
Return the module a class is defined in and its internal dictionary
Returns:
globals, locals
"""
return inspect.getmodule(cls).__dict__, cls.__dict__ | {cls.__name__: cls}
def get_updated_class(cls: AnyClass) -> AnyClass:
module = inspect.getmodule(cls)
if module is not None:
return getattr(module, cls.__name__)
return cls
| ClassPropertyDescriptor | identifier_name |
_utils.py | """
Various internal utilities for Pheres
"""
from __future__ import annotations
import functools
import inspect
import json
import re
import types
import typing
from contextlib import contextmanager
from types import FrameType
from typing import (
Annotated,
Any,
Callable,
Generic,
Iterable,
List,
Literal,
Optional,
Tuple,
Type,
TypeVar,
Union,
overload,
)
# Type Aliases
AnyClass = TypeVar("AnyClass", bound=type)
TypeHint = Union[ # pylint: disable=unsubscriptable-object
Type[type],
Type[Any],
Type[TypeVar],
# Type[Generic],
Type[Annotated],
Type[Tuple],
Type[Callable],
]
Namespace = dict[str, Any]
TypeT = TypeVar("TypeT", *typing.get_args(TypeHint))
U = TypeVar("U")
V = TypeVar("V")
class Virtual:
"""
Mixin class to make a class non-heritable and non-instanciable
"""
__slots__ = ("__weakref__",)
def __init__(self):
raise TypeError("Cannot instanciate virtual class")
def __init_subclass__(cls, *args, **kwargs):
if Virtual not in cls.__bases__:
raise TypeError("Cannot subclass virtual class")
super().__init_subclass__(*args, **kwargs)
class Subscriptable(Generic[U, V]):
"""
Decorator to make a subscriptable instance from a __getitem__ function
Usage:
@Subscriptable
def my_subscriptable(key):
return key
assert my_subscriptable[8] == 8
"""
__slots__ = ("_func",)
def __init__(self, func: Callable[[U], V]) -> None:
self._func = func
# self.__doc__ = func.__doc__
def __call__(self):
raise SyntaxError("Use brackets '[]' instead")
def __getitem__(self, arg: U) -> V:
return self._func(arg)
def docstring(
docstring: str = None, *, pre: str = None, post: str = None
) -> Callable[[U], U]:
"""
Decorator to modify the docstring of an object.
For all provided strings, unused empty lines are removed, and the indentation
of the first non-empty line is removed from all lines if possible. This allows
better indentation when used as a decorator.
Unused empty lines means initial enpty lines for ``pre``, and final empty lines
for ``post``.
Arguments:
docstring: replaces the docstring of the object
pre: adds the string at the start of the object original docstring
post: adds the strings at the end of the object original docstring
"""
def edit_docstring(obj: U) -> U:
obj.__doc__ = "".join(
(
clean_docstring(pre or "", unused="pre"),
clean_docstring(docstring or (obj.__doc__ or "")),
clean_docstring(post or "", unused="post"),
)
)
return obj
return edit_docstring
# from https://stackoverflow.com/questions/5189699/how-to-make-a-class-property
class ClassPropertyDescriptor:
"""
Descriptor for class properties
"""
__slots__ = ("fget", "fset")
def __init__(
self,
fget: Union[classmethod, staticmethod],
fset: Union[classmethod, staticmethod] = None,
):
self.fget = fget
self.fset = fset
def __get__(self, obj: U, cls: Type[U] = None) -> V:
if cls is None:
cls = type(obj)
return self.fget.__get__(obj, cls)()
def __set__(self, obj: U, value: V):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
@property
def __isabstractmethod__(self):
return any(
getattr(f, "__isabstractmethod__", False) for f in (self.fget, self.fset)
)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(
func: Union[Callable, classmethod, staticmethod]
) -> ClassPropertyDescriptor:
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@overload
def autoformat(
cls: None, /, params: Union[str, Iterable[str]] = ("message", "msg")
) -> Callable[[Type[U]], Type[U]]:
...
@overload
def autoformat(
cls: Type[U], /, params: Union[str, Iterable[str]] = ("message", "msg")
) -> Type[U]:
...
def autoformat(
cls: Type[U] = None,
/,
params: Union[str, Iterable[str]] = ( # pylint: disable=unsubscriptable-object
"message",
"msg",
),
):
"""
Class decorator to autoformat string arguments in the __init__ method
Modify the class __init__ method in place by wrapping it. The wrapped class
will call the format() method of arguments specified in `params` that exist
in the original signature, passing all other arguments are dictionary to
str.format()
Arguments:
params -- names of the arguments to autoformats
Usage:
@autoformat
class MyException(Exception):
def __init__(self, elem, msg="{elem} is invalid"):
super().__init__(msg)
self.msg = msg
self.elem = elem
assert MyException(8).msg == "8 is invalid"
"""
if isinstance(params, str):
params = (params,)
if cls is None:
return functools.partial(autoformat, params=params)
orig_init = cls.__init__
signature = inspect.signature(orig_init)
params = signature.parameters.keys() & set(params)
@functools.wraps(orig_init)
def init(*args, **kwargs):
bounds = signature.bind(*args, **kwargs)
bounds.apply_defaults()
pre_formatted = {
name: bounds.arguments.pop(name)
for name in params
if name in bounds.arguments
}
formatted = {
name: string.format(**bounds.arguments)
for name, string in pre_formatted.items()
}
for name, arg in formatted.items():
bounds.arguments[name] = arg
return orig_init(*bounds.args, **bounds.kwargs)
# init.__signature__ = signature
setattr(cls, "__init__", init)
return cls
class Variable(str):
def __repr__(self) -> str:
return self
def __str__(self) -> str:
return self
def _sig_without(sig: inspect.Signature, param: Union[int, str]) -> inspect.Signature:
"""Removes a parameter from a Signature object
If param is an int, remove the parameter at that position, else
remove any paramater with that name
"""
if isinstance(param, int):
params = list(sig.parameters.values())
params.pop(param)
else:
params = [p for name, p in sig.parameters.items() if name != param]
return sig.replace(parameters=params)
def _sig_merge(lsig: inspect.Signature, rsig: inspect.Signature) -> inspect.Signature:
"""Merges two signature object, dropping the return annotations"""
return inspect.Signature(
sorted(
list(lsig.parameters.values()) + list(rsig.parameters.values()),
key=lambda param: param.kind,
) | )
def _sig_to_def(sig: inspect.Signature) -> str:
return str(sig).split("->", 1)[0].strip()[1:-1]
def _sig_to_call(sig: inspect.Signature) -> str:
l = []
for p in sig.parameters.values():
if p.kind is inspect.Parameter.KEYWORD_ONLY:
l.append(f"{p.name}={p.name}")
else:
l.append(p.name)
return ", ".join(l)
def post_init(cls: Type[U]) -> Type[U]:
"""
Class decorator to automatically support __post_init__() on classes
This is useful for @attr.s decorated classes, because __attr_post_init__() doesn't
support additional arguments.
This decorators wraps the class __init__ in a new function that accept merged arguments,
and dispatch them to __init__ and then __post_init__()
"""
if not isinstance(cls, type):
raise TypeError("Can only decorate classes")
if not hasattr(cls, "__post_init__"):
raise TypeError("The class must have a __post_init__() method")
# Ignore the first argument which is the "self" argument
sig = init_sig = _sig_without(inspect.signature(cls.__init__), 0)
previous = [(cls, "__init__", sig)]
for parent in reversed(cls.__mro__):
if hasattr(parent, "__post_init__"):
post_sig = _sig_without(
inspect.signature(getattr(parent, "__post_init__")), 0
)
try:
sig = _sig_merge(sig, post_sig)
except Exception as err:
# find the incompatibility
for parent, method, psig in previous:
try:
_sig_merge(psig, post_sig)
except Exception:
break
else:
raise TypeError(
"__post_init__ signature is incompatible with the class"
) from err
raise TypeError(
f"__post_init__() is incompatible with {parent.__qualname__}{method}()"
) from err
# No exception
previous.append((parent, "__post_init__", post_sig))
# handles type annotations and defaults
# inspired by the dataclasses modules
params = list(sig.parameters.values())
localns = (
{
f"__type_{p.name}": p.annotation
for p in params
if p.annotation is not inspect.Parameter.empty
}
| {
f"__default_{p.name}": p.default
for p in params
if p.default is not inspect.Parameter.empty
}
| cls.__dict__
)
for i, p in enumerate(params):
if p.default is not inspect.Parameter.empty:
p = p.replace(default=Variable(f"__default_{p.name}"))
if p.annotation is not inspect.Parameter.empty:
p = p.replace(annotation=f"__type_{p.name}")
params[i] = p
new_sig = inspect.Signature(params)
# Build the new __init__ source code
self_ = "self" if "self" not in sig.parameters else "__post_init_self"
init_lines = [
f"def __init__({self_}, {_sig_to_def(new_sig)}) -> None:",
f"__original_init({self_}, {_sig_to_call(init_sig)})",
]
for parent, method, psig in previous[1:]:
if hasattr(parent, "__post_init__"):
if parent is not cls:
init_lines.append(
f"super({parent.__qualname__}, {self_}).{method}({_sig_to_call(psig)})"
)
else:
init_lines.append(f"{self_}.{method}({_sig_to_call(psig)})")
init_src = "\n ".join(init_lines)
# Build the factory function source code
local_vars = ", ".join(localns.keys())
factory_src = (
f"def __make_init__(__original_init, {local_vars}):\n"
f" {init_src}\n"
" return __init__"
)
# Create new __init__ with the factory
globalns = inspect.getmodule(cls).__dict__
ns: dict[str, Any] = {}
exec(factory_src, globalns, ns)
init = ns["__make_init__"](cls.__init__, **localns)
self_param = inspect.Parameter(self_, inspect.Parameter.POSITIONAL_ONLY)
init.__signature__ = inspect.Signature(
parameters=[self_param] + list(sig.parameters.values()), return_annotation=None
)
setattr(cls, "__init__", init)
return cls
@contextmanager
def on_error(func, *args, yield_=None, **kwargs):
"""
Context manager that calls a function if the managed code doesn't raise
"""
try:
yield yield_
except Exception:
func(*args, **kwargs)
raise
@contextmanager
def on_success(func, *args, yield_=None, **kwargs):
"""
Context manager that calls a function if the managed code raises an Exception
"""
try:
yield yield_
except Exception:
raise
else:
func(*args, **kwargs)
def clean_docstring(doc: str, unused: Literal["pre", "post"] = None) -> str:
"""
Removes initial empty lines and shared indentation
Arguments:
doc: docstring to clean up
unused: whether to remove statring or endind empty lines
Returns:
The cleaned docstring
"""
doc = doc.split("\n")
if unused == "pre":
try:
index = next(i for i, l in enumerate(doc) if l.strip())
doc = doc[index:]
except StopIteration:
doc = []
elif unused == "post":
try:
index = next(i for i, l in enumerate(reversed(doc)) if l.strip())
doc = doc[: len(doc) - index]
except StopIteration:
doc = []
if doc:
first_line = doc[0]
index = len(first_line) - len(first_line.lstrip())
indent = first_line[:index]
if all(l.startswith(indent) for l in doc if l.strip()):
doc = [(l[index:] if l.strip() else l) for l in doc]
return "\n".join(doc)
def split(func, iterable):
"""split an iterable based on the truth value of the function for element
Arguments
func -- a callable to apply to each element in the iterable
iterable -- an iterable of element to split
Returns
falsy, truthy - two tuple, the first with element e of the itrable where
func(e) return false, the second with element of the iterable that are True
"""
falsy, truthy = [], []
for e in iterable:
if func(e):
truthy.append(e)
else:
falsy.append(e)
return tuple(falsy), tuple(truthy)
def sync_filter(func, *iterables):
"""
Filter multiple iterable at once, selecting values at index i
such that func(iterables[0][i], iterables[1][i], ...) is True
"""
return tuple(zip(*tuple(i for i in zip(*iterables) if func(*i)))) or ((),) * len(
iterables
)
def get_outer_frame() -> Optional[FrameType]:
frame = inspect.currentframe()
if frame is not None:
frame = frame.f_back
if frame is not None:
return frame.f_back
def get_outer_namespaces() -> Tuple[Namespace, Namespace]:
"""
Get the globals and locals from the context that called the function
calling this utility
Returns:
globals, locals
"""
frame = inspect.currentframe()
if frame:
frame = frame.f_back
if frame:
frame = frame.f_back
if frame:
return frame.f_globals or {}, frame.f_locals or {}
return {}, {}
def get_args(
tp: TypeHint, *, globalns: Namespace = None, localns: Namespace = None
) -> Tuple[TypeHint, ...]:
if globalns is not None or localns is not None:
return typing.get_args(typing._eval_type(tp, globalns, localns))
return typing.get_args(tp)
# Adapted version of typing._type_repr
def type_repr(tp) -> str:
"""Return the repr() of objects, special casing types and tuples"""
from pheres._typing import JSONArray, JSONObject, JSONValue
if isinstance(tp, tuple):
return ", ".join(map(type_repr, tp))
if isinstance(tp, type):
if tp.__module__ == "builtins":
return tp.__qualname__
return f"{tp.__module__}.{tp.__qualname__}"
if tp is Ellipsis:
return "..."
if isinstance(tp, types.FunctionType):
return tp.__name__
if tp is JSONValue:
return "JSONValue"
if tp is JSONArray:
return "JSONArray"
if tp is JSONObject:
return "JSONObject"
return repr(tp)
def get_class_namespaces(cls: type) -> tuple[Namespace, Namespace]:
"""
Return the module a class is defined in and its internal dictionary
Returns:
globals, locals
"""
return inspect.getmodule(cls).__dict__, cls.__dict__ | {cls.__name__: cls}
def get_updated_class(cls: AnyClass) -> AnyClass:
module = inspect.getmodule(cls)
if module is not None:
return getattr(module, cls.__name__)
return cls | random_line_split | |
_utils.py | """
Various internal utilities for Pheres
"""
from __future__ import annotations
import functools
import inspect
import json
import re
import types
import typing
from contextlib import contextmanager
from types import FrameType
from typing import (
Annotated,
Any,
Callable,
Generic,
Iterable,
List,
Literal,
Optional,
Tuple,
Type,
TypeVar,
Union,
overload,
)
# Type Aliases
AnyClass = TypeVar("AnyClass", bound=type)
TypeHint = Union[ # pylint: disable=unsubscriptable-object
Type[type],
Type[Any],
Type[TypeVar],
# Type[Generic],
Type[Annotated],
Type[Tuple],
Type[Callable],
]
Namespace = dict[str, Any]
TypeT = TypeVar("TypeT", *typing.get_args(TypeHint))
U = TypeVar("U")
V = TypeVar("V")
class Virtual:
"""
Mixin class to make a class non-heritable and non-instanciable
"""
__slots__ = ("__weakref__",)
def __init__(self):
raise TypeError("Cannot instanciate virtual class")
def __init_subclass__(cls, *args, **kwargs):
if Virtual not in cls.__bases__:
raise TypeError("Cannot subclass virtual class")
super().__init_subclass__(*args, **kwargs)
class Subscriptable(Generic[U, V]):
"""
Decorator to make a subscriptable instance from a __getitem__ function
Usage:
@Subscriptable
def my_subscriptable(key):
return key
assert my_subscriptable[8] == 8
"""
__slots__ = ("_func",)
def __init__(self, func: Callable[[U], V]) -> None:
self._func = func
# self.__doc__ = func.__doc__
def __call__(self):
raise SyntaxError("Use brackets '[]' instead")
def __getitem__(self, arg: U) -> V:
return self._func(arg)
def docstring(
docstring: str = None, *, pre: str = None, post: str = None
) -> Callable[[U], U]:
"""
Decorator to modify the docstring of an object.
For all provided strings, unused empty lines are removed, and the indentation
of the first non-empty line is removed from all lines if possible. This allows
better indentation when used as a decorator.
Unused empty lines means initial enpty lines for ``pre``, and final empty lines
for ``post``.
Arguments:
docstring: replaces the docstring of the object
pre: adds the string at the start of the object original docstring
post: adds the strings at the end of the object original docstring
"""
def edit_docstring(obj: U) -> U:
obj.__doc__ = "".join(
(
clean_docstring(pre or "", unused="pre"),
clean_docstring(docstring or (obj.__doc__ or "")),
clean_docstring(post or "", unused="post"),
)
)
return obj
return edit_docstring
# from https://stackoverflow.com/questions/5189699/how-to-make-a-class-property
class ClassPropertyDescriptor:
"""
Descriptor for class properties
"""
__slots__ = ("fget", "fset")
def __init__(
self,
fget: Union[classmethod, staticmethod],
fset: Union[classmethod, staticmethod] = None,
):
self.fget = fget
self.fset = fset
def __get__(self, obj: U, cls: Type[U] = None) -> V:
if cls is None:
cls = type(obj)
return self.fget.__get__(obj, cls)()
def __set__(self, obj: U, value: V):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
@property
def __isabstractmethod__(self):
return any(
getattr(f, "__isabstractmethod__", False) for f in (self.fget, self.fset)
)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(
func: Union[Callable, classmethod, staticmethod]
) -> ClassPropertyDescriptor:
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@overload
def autoformat(
cls: None, /, params: Union[str, Iterable[str]] = ("message", "msg")
) -> Callable[[Type[U]], Type[U]]:
...
@overload
def autoformat(
cls: Type[U], /, params: Union[str, Iterable[str]] = ("message", "msg")
) -> Type[U]:
...
def autoformat(
cls: Type[U] = None,
/,
params: Union[str, Iterable[str]] = ( # pylint: disable=unsubscriptable-object
"message",
"msg",
),
):
"""
Class decorator to autoformat string arguments in the __init__ method
Modify the class __init__ method in place by wrapping it. The wrapped class
will call the format() method of arguments specified in `params` that exist
in the original signature, passing all other arguments are dictionary to
str.format()
Arguments:
params -- names of the arguments to autoformats
Usage:
@autoformat
class MyException(Exception):
def __init__(self, elem, msg="{elem} is invalid"):
super().__init__(msg)
self.msg = msg
self.elem = elem
assert MyException(8).msg == "8 is invalid"
"""
if isinstance(params, str):
params = (params,)
if cls is None:
return functools.partial(autoformat, params=params)
orig_init = cls.__init__
signature = inspect.signature(orig_init)
params = signature.parameters.keys() & set(params)
@functools.wraps(orig_init)
def init(*args, **kwargs):
bounds = signature.bind(*args, **kwargs)
bounds.apply_defaults()
pre_formatted = {
name: bounds.arguments.pop(name)
for name in params
if name in bounds.arguments
}
formatted = {
name: string.format(**bounds.arguments)
for name, string in pre_formatted.items()
}
for name, arg in formatted.items():
bounds.arguments[name] = arg
return orig_init(*bounds.args, **bounds.kwargs)
# init.__signature__ = signature
setattr(cls, "__init__", init)
return cls
class Variable(str):
def __repr__(self) -> str:
return self
def __str__(self) -> str:
return self
def _sig_without(sig: inspect.Signature, param: Union[int, str]) -> inspect.Signature:
"""Removes a parameter from a Signature object
If param is an int, remove the parameter at that position, else
remove any paramater with that name
"""
if isinstance(param, int):
params = list(sig.parameters.values())
params.pop(param)
else:
params = [p for name, p in sig.parameters.items() if name != param]
return sig.replace(parameters=params)
def _sig_merge(lsig: inspect.Signature, rsig: inspect.Signature) -> inspect.Signature:
"""Merges two signature object, dropping the return annotations"""
return inspect.Signature(
sorted(
list(lsig.parameters.values()) + list(rsig.parameters.values()),
key=lambda param: param.kind,
)
)
def _sig_to_def(sig: inspect.Signature) -> str:
return str(sig).split("->", 1)[0].strip()[1:-1]
def _sig_to_call(sig: inspect.Signature) -> str:
l = []
for p in sig.parameters.values():
if p.kind is inspect.Parameter.KEYWORD_ONLY:
l.append(f"{p.name}={p.name}")
else:
l.append(p.name)
return ", ".join(l)
def post_init(cls: Type[U]) -> Type[U]:
"""
Class decorator to automatically support __post_init__() on classes
This is useful for @attr.s decorated classes, because __attr_post_init__() doesn't
support additional arguments.
This decorators wraps the class __init__ in a new function that accept merged arguments,
and dispatch them to __init__ and then __post_init__()
"""
if not isinstance(cls, type):
raise TypeError("Can only decorate classes")
if not hasattr(cls, "__post_init__"):
raise TypeError("The class must have a __post_init__() method")
# Ignore the first argument which is the "self" argument
sig = init_sig = _sig_without(inspect.signature(cls.__init__), 0)
previous = [(cls, "__init__", sig)]
for parent in reversed(cls.__mro__):
if hasattr(parent, "__post_init__"):
post_sig = _sig_without(
inspect.signature(getattr(parent, "__post_init__")), 0
)
try:
sig = _sig_merge(sig, post_sig)
except Exception as err:
# find the incompatibility
for parent, method, psig in previous:
try:
_sig_merge(psig, post_sig)
except Exception:
break
else:
raise TypeError(
"__post_init__ signature is incompatible with the class"
) from err
raise TypeError(
f"__post_init__() is incompatible with {parent.__qualname__}{method}()"
) from err
# No exception
previous.append((parent, "__post_init__", post_sig))
# handles type annotations and defaults
# inspired by the dataclasses modules
params = list(sig.parameters.values())
localns = (
{
f"__type_{p.name}": p.annotation
for p in params
if p.annotation is not inspect.Parameter.empty
}
| {
f"__default_{p.name}": p.default
for p in params
if p.default is not inspect.Parameter.empty
}
| cls.__dict__
)
for i, p in enumerate(params):
if p.default is not inspect.Parameter.empty:
p = p.replace(default=Variable(f"__default_{p.name}"))
if p.annotation is not inspect.Parameter.empty:
p = p.replace(annotation=f"__type_{p.name}")
params[i] = p
new_sig = inspect.Signature(params)
# Build the new __init__ source code
self_ = "self" if "self" not in sig.parameters else "__post_init_self"
init_lines = [
f"def __init__({self_}, {_sig_to_def(new_sig)}) -> None:",
f"__original_init({self_}, {_sig_to_call(init_sig)})",
]
for parent, method, psig in previous[1:]:
if hasattr(parent, "__post_init__"):
if parent is not cls:
init_lines.append(
f"super({parent.__qualname__}, {self_}).{method}({_sig_to_call(psig)})"
)
else:
init_lines.append(f"{self_}.{method}({_sig_to_call(psig)})")
init_src = "\n ".join(init_lines)
# Build the factory function source code
local_vars = ", ".join(localns.keys())
factory_src = (
f"def __make_init__(__original_init, {local_vars}):\n"
f" {init_src}\n"
" return __init__"
)
# Create new __init__ with the factory
globalns = inspect.getmodule(cls).__dict__
ns: dict[str, Any] = {}
exec(factory_src, globalns, ns)
init = ns["__make_init__"](cls.__init__, **localns)
self_param = inspect.Parameter(self_, inspect.Parameter.POSITIONAL_ONLY)
init.__signature__ = inspect.Signature(
parameters=[self_param] + list(sig.parameters.values()), return_annotation=None
)
setattr(cls, "__init__", init)
return cls
@contextmanager
def on_error(func, *args, yield_=None, **kwargs):
"""
Context manager that calls a function if the managed code doesn't raise
"""
try:
yield yield_
except Exception:
func(*args, **kwargs)
raise
@contextmanager
def on_success(func, *args, yield_=None, **kwargs):
"""
Context manager that calls a function if the managed code raises an Exception
"""
try:
yield yield_
except Exception:
raise
else:
func(*args, **kwargs)
def clean_docstring(doc: str, unused: Literal["pre", "post"] = None) -> str:
"""
Removes initial empty lines and shared indentation
Arguments:
doc: docstring to clean up
unused: whether to remove statring or endind empty lines
Returns:
The cleaned docstring
"""
doc = doc.split("\n")
if unused == "pre":
|
elif unused == "post":
try:
index = next(i for i, l in enumerate(reversed(doc)) if l.strip())
doc = doc[: len(doc) - index]
except StopIteration:
doc = []
if doc:
first_line = doc[0]
index = len(first_line) - len(first_line.lstrip())
indent = first_line[:index]
if all(l.startswith(indent) for l in doc if l.strip()):
doc = [(l[index:] if l.strip() else l) for l in doc]
return "\n".join(doc)
def split(func, iterable):
"""split an iterable based on the truth value of the function for element
Arguments
func -- a callable to apply to each element in the iterable
iterable -- an iterable of element to split
Returns
falsy, truthy - two tuple, the first with element e of the itrable where
func(e) return false, the second with element of the iterable that are True
"""
falsy, truthy = [], []
for e in iterable:
if func(e):
truthy.append(e)
else:
falsy.append(e)
return tuple(falsy), tuple(truthy)
def sync_filter(func, *iterables):
"""
Filter multiple iterable at once, selecting values at index i
such that func(iterables[0][i], iterables[1][i], ...) is True
"""
return tuple(zip(*tuple(i for i in zip(*iterables) if func(*i)))) or ((),) * len(
iterables
)
def get_outer_frame() -> Optional[FrameType]:
frame = inspect.currentframe()
if frame is not None:
frame = frame.f_back
if frame is not None:
return frame.f_back
def get_outer_namespaces() -> Tuple[Namespace, Namespace]:
"""
Get the globals and locals from the context that called the function
calling this utility
Returns:
globals, locals
"""
frame = inspect.currentframe()
if frame:
frame = frame.f_back
if frame:
frame = frame.f_back
if frame:
return frame.f_globals or {}, frame.f_locals or {}
return {}, {}
def get_args(
tp: TypeHint, *, globalns: Namespace = None, localns: Namespace = None
) -> Tuple[TypeHint, ...]:
if globalns is not None or localns is not None:
return typing.get_args(typing._eval_type(tp, globalns, localns))
return typing.get_args(tp)
# Adapted version of typing._type_repr
def type_repr(tp) -> str:
"""Return the repr() of objects, special casing types and tuples"""
from pheres._typing import JSONArray, JSONObject, JSONValue
if isinstance(tp, tuple):
return ", ".join(map(type_repr, tp))
if isinstance(tp, type):
if tp.__module__ == "builtins":
return tp.__qualname__
return f"{tp.__module__}.{tp.__qualname__}"
if tp is Ellipsis:
return "..."
if isinstance(tp, types.FunctionType):
return tp.__name__
if tp is JSONValue:
return "JSONValue"
if tp is JSONArray:
return "JSONArray"
if tp is JSONObject:
return "JSONObject"
return repr(tp)
def get_class_namespaces(cls: type) -> tuple[Namespace, Namespace]:
"""
Return the module a class is defined in and its internal dictionary
Returns:
globals, locals
"""
return inspect.getmodule(cls).__dict__, cls.__dict__ | {cls.__name__: cls}
def get_updated_class(cls: AnyClass) -> AnyClass:
module = inspect.getmodule(cls)
if module is not None:
return getattr(module, cls.__name__)
return cls
| try:
index = next(i for i, l in enumerate(doc) if l.strip())
doc = doc[index:]
except StopIteration:
doc = [] | conditional_block |
monotone_stack.go | package copypasta
/* 单调栈 Monotone Stack
【图解单调栈】两种方法,两张图秒懂
https://leetcode.cn/problems/next-greater-node-in-linked-list/solution/tu-jie-dan-diao-zhan-liang-chong-fang-fa-v9ab/
举例:返回每个元素两侧严格大于它的元素位置(不存在则为 -1 或 n)
如何理解:把数组想象成一列山峰,站在 a[i] 的山顶仰望两侧的山峰,是看不到高山背后的矮山的,只能看到一座座更高的山峰
这就启发我们引入一个底大顶小的单调栈,入栈时不断比较栈顶元素直到找到一个比当前元素大的
技巧:事先压入一个边界元素到栈底,这样保证循环时栈一定不会为空,从而简化逻辑
一些转换:
若区间 [l,r] 的最大值等于 a[r],则 l 必须 > left[r]
若区间 [l,r] 的最大值等于 a[l],则 r 必须 < right[l]
这一结论可以用于思考一些双变量的题目
https://oi-wiki.org/ds/monotonous-stack/
https://cp-algorithms.com/data_structures/stack_queue_modification.html
#### 单调栈
- [496. 下一个更大元素 I](https://leetcode.cn/problems/next-greater-element-i/)(单调栈模板题)
- [503. 下一个更大元素 II](https://leetcode.cn/problems/next-greater-element-ii/)
- [2454. 下一个更大元素 IV](https://leetcode.cn/problems/next-greater-element-iv/)
- [456. 132 模式](https://leetcode.cn/problems/132-pattern/)
- [739. 每日温度](https://leetcode.cn/problems/daily-temperatures/)
- [901. 股票价格跨度](https://leetcode.cn/problems/online-stock-span/)
- [1019. 链表中的下一个更大节点](https://leetcode.cn/problems/next-greater-node-in-linked-list/)
- [1124. 表现良好的最长时间段](https://leetcode.cn/problems/longest-well-performing-interval/)
- [1475. 商品折扣后的最终价格](https://leetcode.cn/problems/final-prices-with-a-special-discount-in-a-shop/)
- [2289. 使数组按非递减顺序排列](https://leetcode.cn/problems/steps-to-make-array-non-decreasing/)
#### 矩形系列
- [84. 柱状图中最大的矩形](https://leetcode.cn/problems/largest-rectangle-in-histogram/)
- [85. 最大矩形](https://leetcode.cn/problems/maximal-rectangle/)
- [1504. 统计全 1 子矩形](https://leetcode.cn/problems/count-submatrices-with-all-ones/)
#### 字典序最小
- [316. 去除重复字母](https://leetcode.cn/problems/remove-duplicate-letters/)
- [316 扩展:重复个数不超过 limit](https://leetcode.cn/contest/tianchi2022/problems/ev2bru/)
- [402. 移掉 K 位数字](https://leetcode.cn/problems/remove-k-digits/)
- [321. 拼接最大数](https://leetcode.cn/problems/create-maximum-number/)
#### 贡献法
- [907. 子数组的最小值之和](https://leetcode.cn/problems/sum-of-subarray-minimums/)
- [1856. 子数组最小乘积的最大值](https://leetcode.cn/problems/maximum-subarray-min-product/)
- [2104. 子数组范围和](https://leetcode.cn/problems/sum-of-subarray-ranges/)
- [2281. 巫师的总力量和](https://leetcode.cn/problems/sum-of-total-strength-of-wizards/)
- [2818. 操作使得分最大](https://leetcode.cn/problems/apply-operations-to-maximize-score/)
模板题
https://www.luogu.com.cn/problem/P5788
https://www.luogu.com.cn/problem/P2866 http://poj.org/problem?id=3250
NEERC05,UVa 1619 https://onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&category=825&page=show_problem&problem=4494
转换 https://codeforces.com/problemset/problem/280/B
转换 LC2289 https://leetcode.cn/problems/steps-to-make-array-non-decreasing/
max >= sum https://codeforces.com/problemset/problem/1691/D
LC1124 https://leetcode.cn/problems/longest-well-performing-interval/
你从单调栈学到了什么思想?LC1944 https://leetcode.cn/problems/number-of-visible-people-in-a-queue/
下下个最大元素 LC2454 https://leetcode.cn/problems/next-greater-element-iv/
- 应用 https://atcoder.jp/contests/abc140/tasks/abc140_e
max(最小值*子数组和) LC1856 https://leetcode.cn/problems/maximum-subarray-min-product/
字典序最小
LC316 https://leetcode.cn/problems/remove-duplicate-letters/
- 扩展:重复个数不超过 limit https://leetcode.cn/contest/tianchi2022/problems/ev2bru/
LC402 https://leetcode.cn/problems/remove-k-digits/
LC321 https://leetcode.cn/problems/create-maximum-number/
计算贡献(所有子数组的……的和)
最小值 LC907 https://leetcode.cn/problems/sum-of-subarray-minimums/
最大值-最小值 LC2104 https://leetcode.cn/problems/sum-of-subarray-ranges/
最小值*和 LC2281 https://leetcode.cn/problems/sum-of-total-strength-of-wizards/
第二大 https://atcoder.jp/contests/abc140/tasks/abc140_e
与 DP 结合
https://codeforces.com/problemset/problem/5/E
https://codeforces.com/problemset/problem/1313/C2
https://codeforces.com/problemset/problem/1407/D
结合线段树,或者巧妙地在单调栈中去维护最值 https://codeforces.com/problemset/problem/1483/C
按照最大值分类讨论 LC1335 https://leetcode.cn/problems/minimum-difficulty-of-a-job-schedule/
LC2355 https://leetcode.cn/problems/maximum-number-of-books-you-can-take/
其他
LC42 接雨水 https://leetcode-cn.com/problems/trapping-rain-water/
评注:接雨水有三种不同的解法(DP、单调栈和双指针),其中双指针是 DP 的空间优化写法,讲解见 https://www.bilibili.com/video/BV1Qg411q7ia/
本质上是两种计算策略:计算每个下标处的接水量(纵向累加),计算一段高度对应的接水宽度(横向累加)
LC84 柱状图中最大的矩形 https://leetcode-cn.com/problems/largest-rectangle-in-histogram/ http://poj.org/problem?id=2559 http://poj.org/problem?id=2082
LC85 最大全 1 矩形(实现见下面的 maximalRectangleArea)https://leetcode-cn.com/problems/maximal-rectangle/ 原题为 http://poj.org/problem?id=3494
LC1504 全 1 矩形个数(实现见下面的 numSubmat)https://leetcode-cn.com/problems/count-submatrices-with-all-ones/
LC768 https://leetcode.cn/problems/max-chunks-to-make-sorted-ii/
LC2735 https://leetcode.cn/problems/collecting-chocolates/solutions/2305119/xian-xing-zuo-fa-by-heltion-ypdx/
LC2736 https://leetcode.cn/problems/maximum-sum-queries/
后缀数组+不同矩形对应方案数之和 https://codeforces.com/edu/course/2/lesson/2/5/practice/contest/269656/problem/D
与 bitOpTrickCnt 结合(见 bits.go)https://codeforces.com/problemset/problem/875/D
已知部分 right 还原全部 right;已知 right 还原 a https://codeforces.com/problemset/problem/1158/C
*/
func monotoneStack(a []int) ([]int, []int) {
const mod int = 1e9 + 7
// 考察局部最小
// 如果有相同元素,需要把某一侧循环内的符号改成小于等于
// 求左侧严格小于 a[i] 的最近位置 left[i],这样 a[i] 就是区间 [left[i]+1,i] 内最小的元素(之一)
// 如果改成求左侧小于等于,那么 a[i] 就是区间 [left[i]+1,i] 内独一无二的最小元素
// 不存在时 left[i] = -1
// 虽然写了个二重循环,但站在每个元素的视角看,这个元素在二重循环中最多入栈出栈各一次,因此整个二重循环的时间复杂度为 O(n)
n := len(a)
left := make([]int, n)
st := []int{-1} // 栈底哨兵,在栈为空时可以直接把 left[i] 赋值为 -1
for i, v := range a {
// 求左侧 < v : >=
// 求左侧 <= v : >
// 求左侧 > v : <=
// 求左侧 >= v : <
for len(st) > 1 && a[st[len(st)-1]] >= v { // 这里的符号和要求的是反过来的
st = st[:len(st)-1]
}
// 不断弹出 >= v 的,那么循环结束后栈顶就是 < v 的
left[i] = st[len(st)-1]
st = append(st, i)
}
// 求右侧严格小于 a[i] 的最近位置 right[i],这样 a[i] 就是区间 [i,right[i]-1] 内最小的元素(之一)
// 如果改成求右侧小于等于,那么 a[i] 就是区间 [i,right[i]-1] 内独一无二的最小元素
// 不存在时 right[i] = n
right := make([]int, n)
st = []int{n}
for i := n - 1; i >= 0; i-- {
v := a[i]
for len(st) > 1 && a[st[len(st)-1]] >= v { // 同上
st = st[:len(st)-1]
}
right[i] = st[len(st)-1]
st = append(st, i)
}
sum := make([]int, n+1) // int64
for i, v := range a {
sum[i+1] = (sum[i] + v) % mod
}
// EXTRA:计算贡献(注意取模时避免出现负数)
for i, v := range a {
_ = v
//l, r := left[i]+1, right[i] // [l,r) 左闭右开
tot := (i - left[i]) * (right[i] - i)
_ = tot
//tot := (sum[r] + mod - sum[l]) % mod
}
{
// TIPS: 如果有一侧定义成小于等于,还可以一次遍历求出 left 和 right
left := make([]int, n)
right := make([]int, n)
| ans {
ans = area
}
}
}
return
}
// 全 1 矩形个数
// LC1504 https://leetcode-cn.com/problems/count-submatrices-with-all-ones/
// 参考 https://leetcode.com/problems/count-submatrices-with-all-ones/discuss/720265/Java-Detailed-Explanation-From-O(MNM)-to-O(MN)-by-using-Stack
func numSubmat(mat [][]int) (ans int) {
m := len(mat[0])
heights := make([]int, m)
for _, row := range mat {
sum := make([]int, m)
type pair struct{ h, j int }
stack := []pair{{-1, -1}}
for j, v := range row {
if v == 0 {
heights[j] = 0
} else {
heights[j]++
}
h := heights[j]
for {
if top := stack[len(stack)-1]; top.h < h {
if pre := top.j; pre < 0 {
sum[j] = (j + 1) * h
} else {
sum[j] = sum[pre] + (j-pre)*h
}
ans += sum[j]
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
}
return
}
// 字典序最小的无重复字符的子序列,包含原串所有字符
// LC316 https://leetcode.cn/problems/remove-duplicate-letters/
// https://atcoder.jp/contests/abc299/tasks/abc299_g
// EXTRA: 重复个数不超过 limit https://leetcode.cn/contest/tianchi2022/problems/ev2bru/
func removeDuplicateLetters(s string) string {
left := ['z' + 1]int{}
for _, c := range s {
left[c]++
}
st := []rune{}
inSt := ['z' + 1]bool{}
for _, c := range s {
left[c]--
if inSt[c] {
continue
}
for len(st) > 0 && c < st[len(st)-1] && left[st[len(st)-1]] > 0 {
top := st[len(st)-1]
st = st[:len(st)-1]
inSt[top] = false // top > c,且 top 后面还有,那么可以重新加进来
}
st = append(st, c)
inSt[c] = true
}
return string(st)
}
// 求 a 的最长的子数组,其元素和大于 lowerSum
// 返回任意一个符合要求的子数组的左右端点(闭区间)
// 如果不存在,返回 [-1,-1]
// 讲解:https://leetcode.cn/problems/longest-well-performing-interval/solution/liang-chong-zuo-fa-liang-zhang-tu-miao-d-hysl/
// LC962 https://leetcode.cn/problems/maximum-width-ramp/
// LC1124 https://leetcode.cn/problems/longest-well-performing-interval/
// 有点相关 http://codeforces.com/problemset/problem/1788/E
func longestSubarrayWithLowerSum(a []int, lowerSum int) (int, int) {
n := len(a)
sum := make([]int, n+1)
st := []int{0}
for j, v := range a {
j++
sum[j] = sum[j-1] + v
if sum[j] < sum[st[len(st)-1]] {
st = append(st, j)
}
}
l, r := -1, 0
for i := n; i > 0; i-- {
for len(st) > 0 && sum[i]-sum[st[len(st)-1]] > lowerSum {
j := st[len(st)-1]
st = st[:len(st)-1]
if l < 0 || i-j < r-l {
l, r = j, i
}
}
}
r-- // 闭区间
return l, r
}
| for i := range right {
right[i] = n
}
st := []int{-1}
for i, v := range a {
for len(st) > 1 && a[st[len(st)-1]] >= v { // 这里是 right 小于等于
right[st[len(st)-1]] = i
st = st[:len(st)-1]
}
left[i] = st[len(st)-1]
st = append(st, i)
}
}
// EXTRA: 求所有长为 i 的子区间的最小值的最大值
// https://codeforces.com/problemset/problem/547/B LC1950 https://leetcode-cn.com/problems/maximum-of-minimum-values-in-all-subarrays/
{
ans := make([]int, n+1)
for i := range ans {
ans[i] = -2e9
}
for i, v := range a {
sz := right[i] - left[i] - 1
if v > ans[sz] {
ans[sz] = v
}
}
for i := n - 1; i > 0; i-- {
if ans[i+1] > ans[i] {
ans[i] = ans[i+1]
}
}
// ans[1:]
}
return left, right
}
// 注:若输入的是一个 1~n 的排列,求两侧大于/小于位置有更简单的写法
// 用双向链表思考(代码实现时用的数组):
// - 把 perm 转换成双向链表,按元素值**从小到大**遍历 perm[i],那么 perm[i] 左右两侧的就是大于 perm[i] 的元素
// - 算完 perm[i] 后把 perm[i] 从链表中删掉
// 为避免判断下标越界,传入的 perm 虽然下标是从 0 开始的,但视作从 1 开始(不存在时表示为 0 或 n+1)
// https://codeforces.com/contest/1156/problem/E
// https://atcoder.jp/contests/abc140/tasks/abc140_e
func permLR(perm []int) ([]int, []int) {
n := len(perm)
pos := make([]int, n+1)
left := make([]int, n+2)
right := make([]int, n+1)
for i := 1; i <= n; i++ {
pos[perm[i-1]] = i
left[i], right[i] = i-1, i+1
}
right[0] = 1
left[n+1] = n // 哨兵(本题不需要这两行,但是某些题目需要,比如 https://codeforces.com/problemset/problem/1154/E)
del := func(i int) {
l, r := left[i], right[i]
right[l] = r
left[r] = l
}
// 正序遍历求出的是两侧大于位置
// 倒序遍历求出的是两侧小于位置
for v := 1; v <= n; v++ {
i := pos[v]
l, r := left[i], right[i]
// do ...
_, _ = l, r
del(i) // 从链表中删除 v
}
return left, right
}
// 最大全 1 矩形
// LC85 https://leetcode-cn.com/problems/maximal-rectangle/
func maximalRectangleArea(mat [][]int) (ans int) {
const target = 1
n, m := len(mat), len(mat[0])
heights := make([][]int, n) // heights[i][j] 表示从 (i,j) 往上看的高度(连续 1 的长度),mat[i][j] = 0 时为 0
for i, row := range mat {
heights[i] = make([]int, m)
for j, v := range row {
if v == target {
if i == 0 {
heights[i][j] = 1
} else {
heights[i][j] = heights[i-1][j] + 1
}
}
}
}
// 然后枚举每一行,就变成 LC84 这题了
type pair struct{ h, i int }
for _, hs := range heights {
left := make([]int, m)
stack := []pair{{-1, -1}}
for j, h := range hs {
for {
if top := stack[len(stack)-1]; top.h < h {
left[j] = top.i
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
right := make([]int, m)
stack = []pair{{-1, m}}
for j := m - 1; j >= 0; j-- {
h := hs[j]
for {
if top := stack[len(stack)-1]; top.h < h {
right[j] = top.i
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
for j, h := range hs {
if area := (right[j] - left[j] - 1) * h; area > | identifier_body |
monotone_stack.go | package copypasta
/* 单调栈 Monotone Stack
【图解单调栈】两种方法,两张图秒懂
https://leetcode.cn/problems/next-greater-node-in-linked-list/solution/tu-jie-dan-diao-zhan-liang-chong-fang-fa-v9ab/
举例:返回每个元素两侧严格大于它的元素位置(不存在则为 -1 或 n)
如何理解:把数组想象成一列山峰,站在 a[i] 的山顶仰望两侧的山峰,是看不到高山背后的矮山的,只能看到一座座更高的山峰
这就启发我们引入一个底大顶小的单调栈,入栈时不断比较栈顶元素直到找到一个比当前元素大的
技巧:事先压入一个边界元素到栈底,这样保证循环时栈一定不会为空,从而简化逻辑
一些转换:
若区间 [l,r] 的最大值等于 a[r],则 l 必须 > left[r]
若区间 [l,r] 的最大值等于 a[l],则 r 必须 < right[l]
这一结论可以用于思考一些双变量的题目
https://oi-wiki.org/ds/monotonous-stack/
https://cp-algorithms.com/data_structures/stack_queue_modification.html
#### 单调栈
- [496. 下一个更大元素 I](https://leetcode.cn/problems/next-greater-element-i/)(单调栈模板题)
- [503. 下一个更大元素 II](https://leetcode.cn/problems/next-greater-element-ii/)
- [2454. 下一个更大元素 IV](https://leetcode.cn/problems/next-greater-element-iv/)
- [456. 132 模式](https://leetcode.cn/problems/132-pattern/)
- [739. 每日温度](https://leetcode.cn/problems/daily-temperatures/)
- [901. 股票价格跨度](https://leetcode.cn/problems/online-stock-span/)
- [1019. 链表中的下一个更大节点](https://leetcode.cn/problems/next-greater-node-in-linked-list/)
- [1124. 表现良好的最长时间段](https://leetcode.cn/problems/longest-well-performing-interval/)
- [1475. 商品折扣后的最终价格](https://leetcode.cn/problems/final-prices-with-a-special-discount-in-a-shop/)
- [2289. 使数组按非递减顺序排列](https://leetcode.cn/problems/steps-to-make-array-non-decreasing/)
#### 矩形系列
- [84. 柱状图中最大的矩形](https://leetcode.cn/problems/largest-rectangle-in-histogram/)
- [85. 最大矩形](https://leetcode.cn/problems/maximal-rectangle/)
- [1504. 统计全 1 子矩形](https://leetcode.cn/problems/count-submatrices-with-all-ones/)
#### 字典序最小
- [316. 去除重复字母](https://leetcode.cn/problems/remove-duplicate-letters/)
- [316 扩展:重复个数不超过 limit](https://leetcode.cn/contest/tianchi2022/problems/ev2bru/)
- [402. 移掉 K 位数字](https://leetcode.cn/problems/remove-k-digits/)
- [321. 拼接最大数](https://leetcode.cn/problems/create-maximum-number/)
#### 贡献法
- [907. 子数组的最小值之和](https://leetcode.cn/problems/sum-of-subarray-minimums/)
- [1856. 子数组最小乘积的最大值](https://leetcode.cn/problems/maximum-subarray-min-product/)
- [2104. 子数组范围和](https://leetcode.cn/problems/sum-of-subarray-ranges/)
- [2281. 巫师的总力量和](https://leetcode.cn/problems/sum-of-total-strength-of-wizards/)
- [2818. 操作使得分最大](https://leetcode.cn/problems/apply-operations-to-maximize-score/)
模板题
https://www.luogu.com.cn/problem/P5788
https://www.luogu.com.cn/problem/P2866 http://poj.org/problem?id=3250
NEERC05,UVa 1619 https://onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&category=825&page=show_problem&problem=4494
转换 https://codeforces.com/problemset/problem/280/B
转换 LC2289 https://leetcode.cn/problems/steps-to-make-array-non-decreasing/
max >= sum https://codeforces.com/problemset/problem/1691/D
LC1124 https://leetcode.cn/problems/longest-well-performing-interval/
你从单调栈学到了什么思想?LC1944 https://leetcode.cn/problems/number-of-visible-people-in-a-queue/
下下个最大元素 LC2454 https://leetcode.cn/problems/next-greater-element-iv/
- 应用 https://atcoder.jp/contests/abc140/tasks/abc140_e
max(最小值*子数组和) LC1856 https://leetcode.cn/problems/maximum-subarray-min-product/
字典序最小
LC316 https://leetcode.cn/problems/remove-duplicate-letters/
- 扩展:重复个数不超过 limit https://leetcode.cn/contest/tianchi2022/problems/ev2bru/
LC402 https://leetcode.cn/problems/remove-k-digits/
LC321 https://leetcode.cn/problems/create-maximum-number/
计算贡献(所有子数组的……的和)
最小值 LC907 https://leetcode.cn/problems/sum-of-subarray-minimums/
最大值-最小值 LC2104 https://leetcode.cn/problems/sum-of-subarray-ranges/
最小值*和 LC2281 https://leetcode.cn/problems/sum-of-total-strength-of-wizards/
第二大 https://atcoder.jp/contests/abc140/tasks/abc140_e
与 DP 结合
https://codeforces.com/problemset/problem/5/E
https://codeforces.com/problemset/problem/1313/C2
https://codeforces.com/problemset/problem/1407/D
结合线段树,或者巧妙地在单调栈中去维护最值 https://codeforces.com/problemset/problem/1483/C
按照最大值分类讨论 LC1335 https://leetcode.cn/problems/minimum-difficulty-of-a-job-schedule/
LC2355 https://leetcode.cn/problems/maximum-number-of-books-you-can-take/
其他
LC42 接雨水 https://leetcode-cn.com/problems/trapping-rain-water/
评注:接雨水有三种不同的解法(DP、单调栈和双指针),其中双指针是 DP 的空间优化写法,讲解见 https://www.bilibili.com/video/BV1Qg411q7ia/
本质上是两种计算策略:计算每个下标处的接水量(纵向累加),计算一段高度对应的接水宽度(横向累加)
LC84 柱状图中最大的矩形 https://leetcode-cn.com/problems/largest-rectangle-in-histogram/ http://poj.org/problem?id=2559 http://poj.org/problem?id=2082
LC85 最大全 1 矩形(实现见下面的 maximalRectangleArea)https://leetcode-cn.com/problems/maximal-rectangle/ 原题为 http://poj.org/problem?id=3494
LC1504 全 1 矩形个数(实现见下面的 numSubmat)https://leetcode-cn.com/problems/count-submatrices-with-all-ones/
LC768 https://leetcode.cn/problems/max-chunks-to-make-sorted-ii/
LC2735 https://leetcode.cn/problems/collecting-chocolates/solutions/2305119/xian-xing-zuo-fa-by-heltion-ypdx/
LC2736 https://leetcode.cn/problems/maximum-sum-queries/
后缀数组+不同矩形对应方案数之和 https://codeforces.com/edu/course/2/lesson/2/5/practice/contest/269656/problem/D
与 bitOpTrickCnt 结合(见 bits.go)https://codeforces.com/problemset/problem/875/D
已知部分 right 还原全部 right;已知 right 还原 a https://codeforces.com/problemset/problem/1158/C
*/
func monotoneStack(a []int) ([]int, []int) {
const mod int = 1e9 + 7
// 考察局部最小
// 如果有相同元素,需要把某一侧循环内的符号改成小于等于
// 求左侧严格小于 a[i] 的最近位置 left[i],这样 a[i] 就是区间 [left[i]+1,i] 内最小的元素(之一)
// 如果改成求左侧小于等于,那么 a[i] 就是区间 [left[i]+1,i] 内独一无二的最小元素
// 不存在时 left[i] = -1
// 虽然写了个二重循环,但站在每个元素的视角看,这个元素在二重循环中最多入栈出栈各一次,因此整个二重循环的时间复杂度为 O(n)
n := len(a)
left := make([]int, n)
st := []int{-1} // 栈底哨兵,在栈为空时可以直接把 left[i] 赋值为 -1
for i, v := range a {
// 求左侧 < v : >=
// 求左侧 <= v : >
// 求左侧 > v : <=
// 求左侧 >= v : <
for len(st) > 1 && a[st[len(st)-1]] >= v { // 这里的符号和要求的是反过来的
st = st[:len(st)-1]
}
// 不断弹出 >= v 的,那么循环结束后栈顶就是 < v 的
left[i] = st[len(st)-1]
st = append(st, i)
}
// 求右侧严格小于 a[i] 的最近位置 right[i],这样 a[i] 就是区间 [i,right[i]-1] 内最小的元素(之一)
// 如果改成求右侧小于等于,那么 a[i] 就是区间 [i,right[i]-1] 内独一无二的最小元素
// 不存在时 right[i] = n
right := make([]int, n)
st = []int{n}
for i := n - 1; i >= 0; i-- {
v := a[i]
for len(st) > 1 && a[st[len(st)-1]] >= v { // 同上
st = st[:len(st)-1]
}
right[i] = st[len(st)-1]
st = append(st, i)
}
sum := make([]int, n+1) // int64
for i, v := range a {
sum[i+1] = (sum[i] + v) % mod
}
// EXTRA:计算贡献(注意取模时避免出现负数)
for i, v := range a {
_ = v
//l, r := left[i]+1, right[i] // [l,r) 左闭右开 | }
{
// TIPS: 如果有一侧定义成小于等于,还可以一次遍历求出 left 和 right
left := make([]int, n)
right := make([]int, n)
for i := range right {
right[i] = n
}
st := []int{-1}
for i, v := range a {
for len(st) > 1 && a[st[len(st)-1]] >= v { // 这里是 right 小于等于
right[st[len(st)-1]] = i
st = st[:len(st)-1]
}
left[i] = st[len(st)-1]
st = append(st, i)
}
}
// EXTRA: 求所有长为 i 的子区间的最小值的最大值
// https://codeforces.com/problemset/problem/547/B LC1950 https://leetcode-cn.com/problems/maximum-of-minimum-values-in-all-subarrays/
{
ans := make([]int, n+1)
for i := range ans {
ans[i] = -2e9
}
for i, v := range a {
sz := right[i] - left[i] - 1
if v > ans[sz] {
ans[sz] = v
}
}
for i := n - 1; i > 0; i-- {
if ans[i+1] > ans[i] {
ans[i] = ans[i+1]
}
}
// ans[1:]
}
return left, right
}
// 注:若输入的是一个 1~n 的排列,求两侧大于/小于位置有更简单的写法
// 用双向链表思考(代码实现时用的数组):
// - 把 perm 转换成双向链表,按元素值**从小到大**遍历 perm[i],那么 perm[i] 左右两侧的就是大于 perm[i] 的元素
// - 算完 perm[i] 后把 perm[i] 从链表中删掉
// 为避免判断下标越界,传入的 perm 虽然下标是从 0 开始的,但视作从 1 开始(不存在时表示为 0 或 n+1)
// https://codeforces.com/contest/1156/problem/E
// https://atcoder.jp/contests/abc140/tasks/abc140_e
func permLR(perm []int) ([]int, []int) {
n := len(perm)
pos := make([]int, n+1)
left := make([]int, n+2)
right := make([]int, n+1)
for i := 1; i <= n; i++ {
pos[perm[i-1]] = i
left[i], right[i] = i-1, i+1
}
right[0] = 1
left[n+1] = n // 哨兵(本题不需要这两行,但是某些题目需要,比如 https://codeforces.com/problemset/problem/1154/E)
del := func(i int) {
l, r := left[i], right[i]
right[l] = r
left[r] = l
}
// 正序遍历求出的是两侧大于位置
// 倒序遍历求出的是两侧小于位置
for v := 1; v <= n; v++ {
i := pos[v]
l, r := left[i], right[i]
// do ...
_, _ = l, r
del(i) // 从链表中删除 v
}
return left, right
}
// 最大全 1 矩形
// LC85 https://leetcode-cn.com/problems/maximal-rectangle/
func maximalRectangleArea(mat [][]int) (ans int) {
const target = 1
n, m := len(mat), len(mat[0])
heights := make([][]int, n) // heights[i][j] 表示从 (i,j) 往上看的高度(连续 1 的长度),mat[i][j] = 0 时为 0
for i, row := range mat {
heights[i] = make([]int, m)
for j, v := range row {
if v == target {
if i == 0 {
heights[i][j] = 1
} else {
heights[i][j] = heights[i-1][j] + 1
}
}
}
}
// 然后枚举每一行,就变成 LC84 这题了
type pair struct{ h, i int }
for _, hs := range heights {
left := make([]int, m)
stack := []pair{{-1, -1}}
for j, h := range hs {
for {
if top := stack[len(stack)-1]; top.h < h {
left[j] = top.i
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
right := make([]int, m)
stack = []pair{{-1, m}}
for j := m - 1; j >= 0; j-- {
h := hs[j]
for {
if top := stack[len(stack)-1]; top.h < h {
right[j] = top.i
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
for j, h := range hs {
if area := (right[j] - left[j] - 1) * h; area > ans {
ans = area
}
}
}
return
}
// 全 1 矩形个数
// LC1504 https://leetcode-cn.com/problems/count-submatrices-with-all-ones/
// 参考 https://leetcode.com/problems/count-submatrices-with-all-ones/discuss/720265/Java-Detailed-Explanation-From-O(MNM)-to-O(MN)-by-using-Stack
func numSubmat(mat [][]int) (ans int) {
m := len(mat[0])
heights := make([]int, m)
for _, row := range mat {
sum := make([]int, m)
type pair struct{ h, j int }
stack := []pair{{-1, -1}}
for j, v := range row {
if v == 0 {
heights[j] = 0
} else {
heights[j]++
}
h := heights[j]
for {
if top := stack[len(stack)-1]; top.h < h {
if pre := top.j; pre < 0 {
sum[j] = (j + 1) * h
} else {
sum[j] = sum[pre] + (j-pre)*h
}
ans += sum[j]
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
}
return
}
// 字典序最小的无重复字符的子序列,包含原串所有字符
// LC316 https://leetcode.cn/problems/remove-duplicate-letters/
// https://atcoder.jp/contests/abc299/tasks/abc299_g
// EXTRA: 重复个数不超过 limit https://leetcode.cn/contest/tianchi2022/problems/ev2bru/
func removeDuplicateLetters(s string) string {
left := ['z' + 1]int{}
for _, c := range s {
left[c]++
}
st := []rune{}
inSt := ['z' + 1]bool{}
for _, c := range s {
left[c]--
if inSt[c] {
continue
}
for len(st) > 0 && c < st[len(st)-1] && left[st[len(st)-1]] > 0 {
top := st[len(st)-1]
st = st[:len(st)-1]
inSt[top] = false // top > c,且 top 后面还有,那么可以重新加进来
}
st = append(st, c)
inSt[c] = true
}
return string(st)
}
// 求 a 的最长的子数组,其元素和大于 lowerSum
// 返回任意一个符合要求的子数组的左右端点(闭区间)
// 如果不存在,返回 [-1,-1]
// 讲解:https://leetcode.cn/problems/longest-well-performing-interval/solution/liang-chong-zuo-fa-liang-zhang-tu-miao-d-hysl/
// LC962 https://leetcode.cn/problems/maximum-width-ramp/
// LC1124 https://leetcode.cn/problems/longest-well-performing-interval/
// 有点相关 http://codeforces.com/problemset/problem/1788/E
func longestSubarrayWithLowerSum(a []int, lowerSum int) (int, int) {
n := len(a)
sum := make([]int, n+1)
st := []int{0}
for j, v := range a {
j++
sum[j] = sum[j-1] + v
if sum[j] < sum[st[len(st)-1]] {
st = append(st, j)
}
}
l, r := -1, 0
for i := n; i > 0; i-- {
for len(st) > 0 && sum[i]-sum[st[len(st)-1]] > lowerSum {
j := st[len(st)-1]
st = st[:len(st)-1]
if l < 0 || i-j < r-l {
l, r = j, i
}
}
}
r-- // 闭区间
return l, r
} | tot := (i - left[i]) * (right[i] - i)
_ = tot
//tot := (sum[r] + mod - sum[l]) % mod | random_line_split |
monotone_stack.go | package copypasta
/* 单调栈 Monotone Stack
【图解单调栈】两种方法,两张图秒懂
https://leetcode.cn/problems/next-greater-node-in-linked-list/solution/tu-jie-dan-diao-zhan-liang-chong-fang-fa-v9ab/
举例:返回每个元素两侧严格大于它的元素位置(不存在则为 -1 或 n)
如何理解:把数组想象成一列山峰,站在 a[i] 的山顶仰望两侧的山峰,是看不到高山背后的矮山的,只能看到一座座更高的山峰
这就启发我们引入一个底大顶小的单调栈,入栈时不断比较栈顶元素直到找到一个比当前元素大的
技巧:事先压入一个边界元素到栈底,这样保证循环时栈一定不会为空,从而简化逻辑
一些转换:
若区间 [l,r] 的最大值等于 a[r],则 l 必须 > left[r]
若区间 [l,r] 的最大值等于 a[l],则 r 必须 < right[l]
这一结论可以用于思考一些双变量的题目
https://oi-wiki.org/ds/monotonous-stack/
https://cp-algorithms.com/data_structures/stack_queue_modification.html
#### 单调栈
- [496. 下一个更大元素 I](https://leetcode.cn/problems/next-greater-element-i/)(单调栈模板题)
- [503. 下一个更大元素 II](https://leetcode.cn/problems/next-greater-element-ii/)
- [2454. 下一个更大元素 IV](https://leetcode.cn/problems/next-greater-element-iv/)
- [456. 132 模式](https://leetcode.cn/problems/132-pattern/)
- [739. 每日温度](https://leetcode.cn/problems/daily-temperatures/)
- [901. 股票价格跨度](https://leetcode.cn/problems/online-stock-span/)
- [1019. 链表中的下一个更大节点](https://leetcode.cn/problems/next-greater-node-in-linked-list/)
- [1124. 表现良好的最长时间段](https://leetcode.cn/problems/longest-well-performing-interval/)
- [1475. 商品折扣后的最终价格](https://leetcode.cn/problems/final-prices-with-a-special-discount-in-a-shop/)
- [2289. 使数组按非递减顺序排列](https://leetcode.cn/problems/steps-to-make-array-non-decreasing/)
#### 矩形系列
- [84. 柱状图中最大的矩形](https://leetcode.cn/problems/largest-rectangle-in-histogram/)
- [85. 最大矩形](https://leetcode.cn/problems/maximal-rectangle/)
- [1504. 统计全 1 子矩形](https://leetcode.cn/problems/count-submatrices-with-all-ones/)
#### 字典序最小
- [316. 去除重复字母](https://leetcode.cn/problems/remove-duplicate-letters/)
- [316 扩展:重复个数不超过 limit](https://leetcode.cn/contest/tianchi2022/problems/ev2bru/)
- [402. 移掉 K 位数字](https://leetcode.cn/problems/remove-k-digits/)
- [321. 拼接最大数](https://leetcode.cn/problems/create-maximum-number/)
#### 贡献法
- [907. 子数组的最小值之和](https://leetcode.cn/problems/sum-of-subarray-minimums/)
- [1856. 子数组最小乘积的最大值](https://leetcode.cn/problems/maximum-subarray-min-product/)
- [2104. 子数组范围和](https://leetcode.cn/problems/sum-of-subarray-ranges/)
- [2281. 巫师的总力量和](https://leetcode.cn/problems/sum-of-total-strength-of-wizards/)
- [2818. 操作使得分最大](https://leetcode.cn/problems/apply-operations-to-maximize-score/)
模板题
https://www.luogu.com.cn/problem/P5788
https://www.luogu.com.cn/problem/P2866 http://poj.org/problem?id=3250
NEERC05,UVa 1619 https://onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&category=825&page=show_problem&problem=4494
转换 https://codeforces.com/problemset/problem/280/B
转换 LC2289 https://leetcode.cn/problems/steps-to-make-array-non-decreasing/
max >= sum https://codeforces.com/problemset/problem/1691/D
LC1124 https://leetcode.cn/problems/longest-well-performing-interval/
你从单调栈学到了什么思想?LC1944 https://leetcode.cn/problems/number-of-visible-people-in-a-queue/
下下个最大元素 LC2454 https://leetcode.cn/problems/next-greater-element-iv/
- 应用 https://atcoder.jp/contests/abc140/tasks/abc140_e
max(最小值*子数组和) LC1856 https://leetcode.cn/problems/maximum-subarray-min-product/
字典序最小
LC316 https://leetcode.cn/problems/remove-duplicate-letters/
- 扩展:重复个数不超过 limit https://leetcode.cn/contest/tianchi2022/problems/ev2bru/
LC402 https://leetcode.cn/problems/remove-k-digits/
LC321 https://leetcode.cn/problems/create-maximum-number/
计算贡献(所有子数组的……的和)
最小值 LC907 https://leetcode.cn/problems/sum-of-subarray-minimums/
最大值-最小值 LC2104 https://leetcode.cn/problems/sum-of-subarray-ranges/
最小值*和 LC2281 https://leetcode.cn/problems/sum-of-total-strength-of-wizards/
第二大 https://atcoder.jp/contests/abc140/tasks/abc140_e
与 DP 结合
https://codeforces.com/problemset/problem/5/E
https://codeforces.com/problemset/problem/1313/C2
https://codeforces.com/problemset/problem/1407/D
结合线段树,或者巧妙地在单调栈中去维护最值 https://codeforces.com/problemset/problem/1483/C
按照最大值分类讨论 LC1335 https://leetcode.cn/problems/minimum-difficulty-of-a-job-schedule/
LC2355 https://leetcode.cn/problems/maximum-number-of-books-you-can-take/
其他
LC42 接雨水 https://leetcode-cn.com/problems/trapping-rain-water/
评注:接雨水有三种不同的解法(DP、单调栈和双指针),其中双指针是 DP 的空间优化写法,讲解见 https://www.bilibili.com/video/BV1Qg411q7ia/
本质上是两种计算策略:计算每个下标处的接水量(纵向累加),计算一段高度对应的接水宽度(横向累加)
LC84 柱状图中最大的矩形 https://leetcode-cn.com/problems/largest-rectangle-in-histogram/ http://poj.org/problem?id=2559 http://poj.org/problem?id=2082
LC85 最大全 1 矩形(实现见下面的 maximalRectangleArea)https://leetcode-cn.com/problems/maximal-rectangle/ 原题为 http://poj.org/problem?id=3494
LC1504 全 1 矩形个数(实现见下面的 numSubmat)https://leetcode-cn.com/problems/count-submatrices-with-all-ones/
LC768 https://leetcode.cn/problems/max-chunks-to-make-sorted-ii/
LC2735 https://leetcode.cn/problems/collecting-chocolates/solutions/2305119/xian-xing-zuo-fa-by-heltion-ypdx/
LC2736 https://leetcode.cn/problems/maximum-sum-queries/
后缀数组+不同矩形对应方案数之和 https://codeforces.com/edu/course/2/lesson/2/5/practice/contest/269656/problem/D
与 bitOpTrickCnt 结合(见 bits.go)https://codeforces.com/problemset/problem/875/D
已知部分 right 还原全部 right;已知 right 还原 a https://codeforces.com/problemset/problem/1158/C
*/
func monotoneStack(a []int) ([]int, []int) {
const mod int = 1e9 + 7
// 考察局部最小
// 如果有相同元素,需要把某一侧循环内的符号改成小于等于
// 求左侧严格小于 a[i] 的最近位置 left[i],这样 a[i] 就是区间 [left[i]+1,i] 内最小的元素(之一)
// 如果改成求左侧小于等于,那么 a[i] 就是区间 [left[i]+1,i] 内独一无二的最小元素
// 不存在时 left[i] = -1
// 虽然写了个二重循环,但站在每个元素的视角看,这个元素在二重循环中最多入栈出栈各一次,因此整个二重循环的时间复杂度为 O(n)
n := len(a)
left := make([]int, n)
st := []int{-1} // 栈底哨兵,在栈为空时可以直接把 left[i] 赋值为 -1
for i, v := range a {
// 求左侧 < v : >=
// 求左侧 <= v : >
// 求左侧 > v : <=
// 求左侧 >= v : <
for len(st) > 1 && a[st[len(st)-1]] >= v { // 这里的符号和要求的是反过来的
st = st[:len(st)-1]
}
// 不断弹出 >= v 的,那么循环结束后栈顶就是 < v 的
left[i] = st[len(st)-1]
st = append(st, i)
}
// 求右侧严格小于 a[i] 的最近位置 right[i],这样 a[i] 就是区间 [i,right[i]-1] 内最小的元素(之一)
// 如果改成求右侧小于等于,那么 a[i] 就是区间 [i,right[i]-1] 内独一无二的最小元素
// 不存在时 right[i] = n
right := make([]int, n)
st = []int{n}
for i := n - 1; i >= 0; i-- {
v := a[i]
for len(st) > 1 && a[st[len(st)-1]] >= v { // 同上
st = st[:len(st)-1]
}
right[i] = st[len(st)-1]
st = append(st, i)
}
sum := make([]int, n+1) // int64
for i, v := range a {
sum[i+1] = (sum[i] + v) % mod
}
// EXTRA:计算贡献(注意取模时避免出现负数)
for i, v := range a {
_ = v
//l, r := left[i]+1, right[i] // [l,r) 左闭右开
tot := (i - left[i]) * (right[i] - i)
_ = tot
//tot := (sum[r] + mod - sum[l]) % mod
}
{
// TIPS: 如果有一侧定义成小于等于,还可以一次遍历求出 left 和 right
left := make([]int, n)
right := make([]int, n)
for i := range right {
right[i] = n
}
st := []int{-1}
for i, v := range a {
for len(st) > 1 && a[st[len(st)-1]] >= v { // 这里是 right 小于等于
right[st[len(st)-1]] = i
st = st[:len(st)-1]
}
left[i] = st[len(st)-1]
st = append(st, i)
}
}
// EXTRA: 求所有长为 i 的子区间的最小值的最大值
// https://codeforces.com/problemset/problem/547/B LC1950 https://leetcode-cn.com/problems/maximum-of-minimum-values-in-all-subarrays/
{
ans := make([]int, n+1)
for i := range ans {
ans[i] = -2e9
}
for i, v := range a {
sz := right[i] - left[i] - 1
if v > ans[sz] {
ans[sz] = v
}
}
for i := n - 1; i > 0; i-- {
if ans[i+1] > ans[i] {
ans[i] = ans[i+1]
}
}
// ans[1:]
}
return left, right
}
// 注:若输入的是一个 1~n 的排列,求两侧大于/小于位置有更简单的写法
// 用双向链表思考(代码实现时用的数组):
// - 把 perm 转换成双向链表,按元素值**从小到大**遍历 perm[i],那么 perm[i] 左右两侧的就是大于 perm[i] 的元素
// - 算完 perm[i] 后把 perm[i] 从链表中删掉
// 为避免判断下标越界,传入的 perm 虽然下标是从 0 开始的,但视作从 1 开始(不存在时表示为 0 或 n+1)
// https://codeforces.com/contest/1156/problem/E
// https://atcoder.jp/contests/abc140/tasks/abc140_e
func permLR(perm []int) ([]int, []int) {
n := len(perm)
pos := make([]int, n+1)
left := make([]int, n+2)
right := make([]int, n+1)
for i := 1; i <= n; i++ {
pos[perm[i-1]] = i
left[i], right[i] = i-1, i+1
}
right[0] = 1
left[n+1] = n // 哨兵(本题不需要这两行,但是某些题目需要,比如 https://codeforces.com/problemset/problem/1154/E)
del := func(i int) {
l, r := left[i], right[i]
right[l] = r
left[r] = l
}
// 正序遍历求出的是两侧大于位置
// 倒序遍历求出的是两侧小于位置
for v := 1; v <= n; v++ {
i := pos[v]
l, r := left[i], right[i]
// do ...
_, _ = l, r
del(i) // 从链表中删除 v
}
return left, right
}
// 最大全 1 矩形
// LC85 https://leetcode-cn.com/problems/maximal-rectangle/
func maximalRectangleArea(mat [][]int) (ans int) {
const target = 1
n, m := len(mat), len(mat[0])
heights := make([][]int, n) // heights[i][j] 表示从 (i,j) 往上看的高度(连续 1 的长度),mat[i][j] = 0 时为 0
for i, row := range mat {
heights[i] = make([]int, m)
for j, v := range row {
if v == target {
if i == 0 {
heights[i][j] = 1
} else {
heights[i][j] = heights[i-1][j] + 1
}
}
}
}
// 然后枚举每一行,就变成 LC84 这题了
type pair struct{ h, i int }
for _, hs := range heights {
left := make([]int, m)
stack := []pair{{-1, -1}}
for j, h := range hs {
for {
if top := stack[len(stack)-1]; top.h < h {
left[j] = top.i
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
right := make([]int, m)
stack = []pair{{-1, m}}
for j := m - 1; j >= 0; j-- {
h := hs[j]
for {
if top := stack[len(stack)-1]; top.h < h {
right[j] = top.i
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
for j, h := range hs {
if area := (right[j] - left[j] - 1) * h; area > ans {
ans = area
}
}
}
return
}
// 全 1 矩形个数
// LC1504 https://leetcode-cn.com/problems/count-submatrices-with-all-ones/
// 参考 https://leetcode.com/problems/count-submatrices-with-all-ones/discuss/720265/Java-Detailed-Explanation-From-O(MNM)-to-O(MN)-by-using-Stack
func numSubmat(mat [][]int) (ans int) {
m := len(mat[0])
heights := make([]int, m)
for _, row := range mat {
sum := make([]int, m)
type pair struct{ h, j int }
stack := []pair{{-1, -1}}
for j, v := range row {
if v == 0 {
heights[j] = 0
} else {
heights[j]++
}
h := heights[j]
for {
if top := stack[len(stack)-1]; top.h < h {
if pre := top.j; pre < 0 {
sum[j] = (j + 1) * h
} else {
sum[j] = sum[pre] + (j-pre)*h
}
ans += sum[j]
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
}
return
}
// 字典序最小的无重复字符的子序列,包含原串所有字符
// LC316 https://leetcode.cn/problems/remove-duplicate-letters/
// https://atcoder.jp/contests/abc299/tasks/abc299_g
// EXTRA: 重复个数不超过 limit https://leetcode.cn/contest/tianchi2022/problems/ev2bru/
func removeDuplicateLetters(s string) string {
left := ['z' + 1]int{}
for _, c := range s {
left[c]++
}
st := []rune{}
inSt := ['z' + 1]bool{}
for _, c := range s {
left[c]--
if inSt[c] {
continue
}
for len(st) > 0 && c < st[len(st)-1] && left[st[len(st)-1]] > 0 {
top := st[len(st)-1]
st = st[:len(st)-1]
inSt[top] = false // top > c,且 top 后面还有,那么可以重新加进来
}
st = append(st, c)
inSt[c] = true
}
return string(st)
}
// 求 a 的最长的子数组,其元素和大于 lowerSum
// 返回任意一个符合要求的子数组的左右端点(闭区间)
// 如果不存在,返回 [-1,-1]
// 讲解:https://leetcode.cn/problems/longest-well-performing-interval/solution/liang-chong-zuo-fa-liang-zhang-tu-miao-d-hysl/
// LC962 https://leetcode.cn/problems/maximum-width-ramp/
// LC1124 https://leetcode.cn/problems/longest-well-performing-interval/
// 有点相关 http://codeforces.com/problemset/problem/1788/E
func longestSubarrayWithLowerSum(a []int, lowerSum int) (int, int) {
n := len(a)
sum := make([]int, n+1)
st := []int{0}
for j, v := range a {
j++
sum[j] = sum[j-1] + v
if sum[j] < sum[st[len(st)-1]] {
st = append(st, j)
}
}
l, r := -1, 0
for i := n; i > 0; i-- {
for len(st) > 0 && sum[i]-sum[st[len(st)-1]] > lowerSum {
j := st[len(st)-1]
st = st[:len(st)-1]
if l < 0 || i-j < r-l {
l, r = j, i
}
}
}
r-- // 闭区间
return l, r
}
| identifier_name | ||
monotone_stack.go | package copypasta
/* 单调栈 Monotone Stack
【图解单调栈】两种方法,两张图秒懂
https://leetcode.cn/problems/next-greater-node-in-linked-list/solution/tu-jie-dan-diao-zhan-liang-chong-fang-fa-v9ab/
举例:返回每个元素两侧严格大于它的元素位置(不存在则为 -1 或 n)
如何理解:把数组想象成一列山峰,站在 a[i] 的山顶仰望两侧的山峰,是看不到高山背后的矮山的,只能看到一座座更高的山峰
这就启发我们引入一个底大顶小的单调栈,入栈时不断比较栈顶元素直到找到一个比当前元素大的
技巧:事先压入一个边界元素到栈底,这样保证循环时栈一定不会为空,从而简化逻辑
一些转换:
若区间 [l,r] 的最大值等于 a[r],则 l 必须 > left[r]
若区间 [l,r] 的最大值等于 a[l],则 r 必须 < right[l]
这一结论可以用于思考一些双变量的题目
https://oi-wiki.org/ds/monotonous-stack/
https://cp-algorithms.com/data_structures/stack_queue_modification.html
#### 单调栈
- [496. 下一个更大元素 I](https://leetcode.cn/problems/next-greater-element-i/)(单调栈模板题)
- [503. 下一个更大元素 II](https://leetcode.cn/problems/next-greater-element-ii/)
- [2454. 下一个更大元素 IV](https://leetcode.cn/problems/next-greater-element-iv/)
- [456. 132 模式](https://leetcode.cn/problems/132-pattern/)
- [739. 每日温度](https://leetcode.cn/problems/daily-temperatures/)
- [901. 股票价格跨度](https://leetcode.cn/problems/online-stock-span/)
- [1019. 链表中的下一个更大节点](https://leetcode.cn/problems/next-greater-node-in-linked-list/)
- [1124. 表现良好的最长时间段](https://leetcode.cn/problems/longest-well-performing-interval/)
- [1475. 商品折扣后的最终价格](https://leetcode.cn/problems/final-prices-with-a-special-discount-in-a-shop/)
- [2289. 使数组按非递减顺序排列](https://leetcode.cn/problems/steps-to-make-array-non-decreasing/)
#### 矩形系列
- [84. 柱状图中最大的矩形](https://leetcode.cn/problems/largest-rectangle-in-histogram/)
- [85. 最大矩形](https://leetcode.cn/problems/maximal-rectangle/)
- [1504. 统计全 1 子矩形](https://leetcode.cn/problems/count-submatrices-with-all-ones/)
#### 字典序最小
- [316. 去除重复字母](https://leetcode.cn/problems/remove-duplicate-letters/)
- [316 扩展:重复个数不超过 limit](https://leetcode.cn/contest/tianchi2022/problems/ev2bru/)
- [402. 移掉 K 位数字](https://leetcode.cn/problems/remove-k-digits/)
- [321. 拼接最大数](https://leetcode.cn/problems/create-maximum-number/)
#### 贡献法
- [907. 子数组的最小值之和](https://leetcode.cn/problems/sum-of-subarray-minimums/)
- [1856. 子数组最小乘积的最大值](https://leetcode.cn/problems/maximum-subarray-min-product/)
- [2104. 子数组范围和](https://leetcode.cn/problems/sum-of-subarray-ranges/)
- [2281. 巫师的总力量和](https://leetcode.cn/problems/sum-of-total-strength-of-wizards/)
- [2818. 操作使得分最大](https://leetcode.cn/problems/apply-operations-to-maximize-score/)
模板题
https://www.luogu.com.cn/problem/P5788
https://www.luogu.com.cn/problem/P2866 http://poj.org/problem?id=3250
NEERC05,UVa 1619 https://onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&category=825&page=show_problem&problem=4494
转换 https://codeforces.com/problemset/problem/280/B
转换 LC2289 https://leetcode.cn/problems/steps-to-make-array-non-decreasing/
max >= sum https://codeforces.com/problemset/problem/1691/D
LC1124 https://leetcode.cn/problems/longest-well-performing-interval/
你从单调栈学到了什么思想?LC1944 https://leetcode.cn/problems/number-of-visible-people-in-a-queue/
下下个最大元素 LC2454 https://leetcode.cn/problems/next-greater-element-iv/
- 应用 https://atcoder.jp/contests/abc140/tasks/abc140_e
max(最小值*子数组和) LC1856 https://leetcode.cn/problems/maximum-subarray-min-product/
字典序最小
LC316 https://leetcode.cn/problems/remove-duplicate-letters/
- 扩展:重复个数不超过 limit https://leetcode.cn/contest/tianchi2022/problems/ev2bru/
LC402 https://leetcode.cn/problems/remove-k-digits/
LC321 https://leetcode.cn/problems/create-maximum-number/
计算贡献(所有子数组的……的和)
最小值 LC907 https://leetcode.cn/problems/sum-of-subarray-minimums/
最大值-最小值 LC2104 https://leetcode.cn/problems/sum-of-subarray-ranges/
最小值*和 LC2281 https://leetcode.cn/problems/sum-of-total-strength-of-wizards/
第二大 https://atcoder.jp/contests/abc140/tasks/abc140_e
与 DP 结合
https://codeforces.com/problemset/problem/5/E
https://codeforces.com/problemset/problem/1313/C2
https://codeforces.com/problemset/problem/1407/D
结合线段树,或者巧妙地在单调栈中去维护最值 https://codeforces.com/problemset/problem/1483/C
按照最大值分类讨论 LC1335 https://leetcode.cn/problems/minimum-difficulty-of-a-job-schedule/
LC2355 https://leetcode.cn/problems/maximum-number-of-books-you-can-take/
其他
LC42 接雨水 https://leetcode-cn.com/problems/trapping-rain-water/
评注:接雨水有三种不同的解法(DP、单调栈和双指针),其中双指针是 DP 的空间优化写法,讲解见 https://www.bilibili.com/video/BV1Qg411q7ia/
本质上是两种计算策略:计算每个下标处的接水量(纵向累加),计算一段高度对应的接水宽度(横向累加)
LC84 柱状图中最大的矩形 https://leetcode-cn.com/problems/largest-rectangle-in-histogram/ http://poj.org/problem?id=2559 http://poj.org/problem?id=2082
LC85 最大全 1 矩形(实现见下面的 maximalRectangleArea)https://leetcode-cn.com/problems/maximal-rectangle/ 原题为 http://poj.org/problem?id=3494
LC1504 全 1 矩形个数(实现见下面的 numSubmat)https://leetcode-cn.com/problems/count-submatrices-with-all-ones/
LC768 https://leetcode.cn/problems/max-chunks-to-make-sorted-ii/
LC2735 https://leetcode.cn/problems/collecting-chocolates/solutions/2305119/xian-xing-zuo-fa-by-heltion-ypdx/
LC2736 https://leetcode.cn/problems/maximum-sum-queries/
后缀数组+不同矩形对应方案数之和 https://codeforces.com/edu/course/2/lesson/2/5/practice/contest/269656/problem/D
与 bitOpTrickCnt 结合(见 bits.go)https://codeforces.com/problemset/problem/875/D
已知部分 right 还原全部 right;已知 right 还原 a https://codeforces.com/problemset/problem/1158/C
*/
func monotoneStack(a []int) ([]int, []int) {
const mod int = 1e9 + 7
// 考察局部最小
// 如果有相同元素,需要把某一侧循环内的符号改成小于等于
// 求左侧严格小于 a[i] 的最近位置 left[i],这样 a[i] 就是区间 [left[i]+1,i] 内最小的元素(之一)
// 如果改成求左侧小于等于,那么 a[i] 就是区间 [left[i]+1,i] 内独一无二的最小元素
// 不存在时 left[i] = -1
// 虽然写了个二重循环,但站在每个元素的视角看,这个元素在二重循环中最多入栈出栈各一次,因此整个二重循环的时间复杂度为 O(n)
n := len(a)
left := make([]int, n)
st := []int{-1} // 栈底哨兵,在栈为空时可以直接把 left[i] 赋值为 -1
for i, v := range a {
// 求左侧 < v : >=
// 求左侧 <= v : >
// 求左侧 > v : <=
// 求左侧 >= v : <
for len(st) > 1 && a[st[len(st)-1]] >= v { // 这里的符号和要求的是反过来的
st = st[:len(st)-1]
}
// 不断弹出 >= v 的,那么循环结束后栈顶就是 < v 的
left[i] = st[len(st)-1]
st = append(st, i)
}
// 求右侧严格小于 a[i] 的最近位置 right[i],这样 a[i] 就是区间 [i,right[i]-1] 内最小的元素(之一)
// 如果改成求右侧小于等于,那么 a[i] 就是区间 [i,right[i]-1] 内独一无二的最小元素
// 不存在时 right[i] = n
right := make([]int, n)
st = []int{n}
for i := n - 1; i >= 0; i-- {
v := a[i]
for len(st) > 1 && a[st[len(st)-1]] >= v { // 同上
st = st[:len(st)-1]
}
right[i] = st[len(st)-1]
st = append(st, i)
}
sum := make([]int, n+1) // int64
for i, v := range a {
sum[i+1] = (sum[i] + v) % mod
}
// EXTRA:计算贡献(注意取模时避免出现负数)
for i, v := range a {
_ = v
//l, r := left[i]+1, right[i] // [l,r) 左闭右开
tot := (i - left[i]) * (right[i] - i)
_ = tot
//tot := (sum[r] + mod - sum[l]) % mod
}
{
// TIPS: 如果有一侧定义成小于等于,还可以一次遍历求出 left 和 right
left := make([]int, n)
right := make([]int, n)
for i := range right {
right[i] = n
}
st := []int{-1}
for i, v := range a {
for len(st) > 1 && a[st[len(st)-1]] >= v { // 这里是 right 小于等于
right[st[len(st)-1]] = i
st = st[:len(st)-1]
}
left[i] = st[len(st)-1]
st = append(st, i)
}
}
// EXTRA: 求所有长为 i 的子区间的最小值的最大值
// https://codeforces.com/problemset/problem/547/B LC1950 https://leetcode-cn.com/problems/maximum-of-minimum-values-in-all-subarrays/
{
ans := make([]int, n+1)
for i := range ans {
ans[i] = -2e9
}
for i, v := range a {
sz := right[i] - left[i] - 1
if v > ans[sz] {
ans[sz] = v
}
}
for i := n - 1; i > 0; i-- {
if ans[i+1] > ans[i] {
ans[i] = ans[i+1]
}
}
// ans[1:]
}
return left, right
}
// 注:若输入的是一个 1~n 的排列,求两侧大于/小于位置有更简单的写法
// 用双向链表思考(代码实现时用的数组):
// - 把 perm 转换成双向链表,按元素值**从小到大**遍历 perm[i],那么 perm[i] 左右两侧的就是大于 perm[i] 的元素
// - 算完 perm[i] 后把 perm[i] 从链表中删掉
// 为避免判断下标越界,传入的 perm 虽然下标是从 0 开始的,但视作从 1 开始(不存在时表示为 0 或 n+1)
// https://codeforces.com/contest/1156/problem/E
// https://atcoder.jp/contests/abc140/tasks/abc140_e
func permLR(perm []int) ([]int, []int) {
n := len(perm)
pos := make([]int, n+1)
left := make([]int, n+2)
right := make([]int, n+1)
for i := 1; i <= n; i++ {
pos[perm[i-1]] = i
left[i], right[i] = i-1, i+1
}
right[0] = 1
left[n+1] = n // 哨兵(本题不需要这两行,但是某些题目需要,比如 https://codeforces.com/problemset/problem/1154/E)
del := func(i int) {
l, r := left[i], right[i]
right[l] = r
left[r] = l
}
// 正序遍历求出的是两侧大于位置
// 倒序遍历求出的是两侧小于位置
for v := 1; v <= n; v++ {
i := pos[v]
l, r := left[i], right[i]
// do ...
_, _ = l, r
del(i) // 从链表中删除 v
}
return left, right
}
// 最大全 1 矩形
// LC85 https://leetcode-cn.com/problems/maximal-rectangle/
func maximalRectangleArea(mat [][]int) (ans int) {
const target = 1
n, m := len(mat), len(mat[0])
heights := make([][]int, n) // heights[i][j] 表示从 (i,j) 往上看的高度(连续 1 的长度),mat[i][j] = 0 时为 0
for i, row := range mat {
heights[i] = make([]int, m)
for j, v := range row {
if v == target {
if i == 0 {
heights[i][j] = 1
} else {
heights[i][j] = heights[i-1][j] + 1
}
}
}
}
// 然后枚举每一行,就变成 LC84 这题了
type pair struct{ h, i int }
for _, hs := range heights {
left := make([]int, m)
stack := []pair{{-1, -1}}
for j, h := range hs {
for {
if top := stack[len(stack)-1]; top.h < h {
left[j] = top.i
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
right := make([]int, m)
stack = []pair{{-1, m}}
for j := m - 1; j >= 0; j-- {
h := hs[j]
for {
if top := stack[len(stack)-1]; top.h < h {
right[j] = top.i
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
for j, h := range hs {
if area := (right[j] - left[j] - 1) * h; area > ans {
ans = area
}
}
}
return
}
// 全 1 矩形个数
// LC1504 https://leetcode-cn.com/problems/count-submatrices-with-all-ones/
// 参考 https://leetcode.com/problems/count-submatrices-with-all-ones/discuss/720265/Java-Detailed-Explanation-From-O(MNM)-to-O(MN)-by-using-Stack
func numSubmat(mat [][]int) (ans int) {
m := len(mat[0])
heights := make([]int, m)
for _, row := range mat {
sum := make([]int, m)
type pair struct{ h, j int }
stack := []pair{{-1, -1}}
for j, v := range row {
if v == 0 {
heights[j] = 0
} else {
heights[j]++
}
h := heights[j]
for {
if top := stack[len(stack)-1]; top.h < h {
if pre := top.j; pre < 0 {
sum[j] = (j + 1) * h
} else {
sum[j] = sum[pre] + (j-pre)*h
}
ans += sum[j]
break
}
stack = stack[:len(stack)-1]
}
stack = append(stack, pair{h, j})
}
}
return
}
// 字典序最小的无重复字符的子序列,包含原串所有字符
// LC316 https://leetcode.cn/problems/remove-duplicate-letters/
// https://atcoder.jp/contests/abc299/tasks/abc299_g
// EXTRA: 重复个数不超过 limit https://leetcode.cn/contest/tianchi2022/problems/ev2bru/
func removeDuplicateLetters(s string) string {
left := ['z' + 1]int{}
for _, c := range s {
left[c]++
}
st := []rune{}
inSt := ['z' + 1]bool{}
for _, c := range s {
left[c]--
if inSt[c] {
continue
}
for len(st) > 0 && c < st[len(st)-1] && left[st[len(st)-1]] > 0 {
top := st[len(st)-1]
st = st[:len(st)-1]
inSt[top] = false // top > c,且 top 后面还有,那么可以重新加进来
}
st = append(st, c)
inSt[c] = true
}
return string(st)
}
// 求 a 的最长的子数组,其元素和大于 lowerSum
// 返回任意一个符合要求的子数组的左右端点(闭区间)
// 如果不存在,返回 [-1,-1]
// 讲解:https://leetcode.cn/problems/longest-well-performing-interval/solution/liang-chong-zuo-fa-liang-zhang-tu-miao-d-hysl/
// LC962 https://leetcode.cn/problems/maximum-width-ramp/
// LC1124 https://leetcode.cn/problems/longest-well-performing-interval/
// 有点相关 http://codeforces.com/problemset/problem/1788/E
func longestSubarrayWithLowerSum(a []int, lowerSum int) (int | }
}
l, r := -1, 0
for i := n; i > 0; i-- {
for len(st) > 0 && sum[i]-sum[st[len(st)-1]] > lowerSum {
j := st[len(st)-1]
st = st[:len(st)-1]
if l < 0 || i-j < r-l {
l, r = j, i
}
}
}
r-- // 闭区间
return l, r
}
| , int) {
n := len(a)
sum := make([]int, n+1)
st := []int{0}
for j, v := range a {
j++
sum[j] = sum[j-1] + v
if sum[j] < sum[st[len(st)-1]] {
st = append(st, j)
| conditional_block |
constants.rs | use std::os::raw::{c_int, c_uint};
// Standard return values from Symisc public interfaces
const SXRET_OK: c_int = 0; /* Not an error */
const SXERR_MEM: c_int = -1; /* Out of memory */
const SXERR_IO: c_int = -2; /* IO error */
const SXERR_EMPTY: c_int = -3; /* Empty field */
const SXERR_LOCKED: c_int = -4; /* Locked operation */
const SXERR_ORANGE: c_int = -5; /* Out of range value */
const SXERR_NOTFOUND: c_int = -6; /* Item not found */
const SXERR_LIMIT: c_int = -7; /* Limit reached */
const SXERR_MORE: c_int = -8; /* Need more input */
const SXERR_INVALID: c_int = -9; /* Invalid parameter */
const SXERR_ABORT: c_int = -10; /* User callback request an operation abort */
const SXERR_EXISTS: c_int = -11; /* Item exists */
const SXERR_SYNTAX: c_int = -12; /* Syntax error */
const SXERR_UNKNOWN: c_int = -13; /* Unknown error */
const SXERR_BUSY: c_int = -14; /* Busy operation */
const SXERR_OVERFLOW: c_int = -15; /* Stack or buffer overflow */
const SXERR_WILLBLOCK: c_int = -16; /* Operation will block */
const SXERR_NOTIMPLEMENTED: c_int = -17; /* Operation not implemented */
const SXERR_EOF: c_int = -18; /* End of input */
const SXERR_PERM: c_int = -19; /* Permission error */
const SXERR_NOOP: c_int = -20; /* No-op */
const SXERR_FORMAT: c_int = -21; /* Invalid format */
const SXERR_NEXT: c_int = -22; /* Not an error */
const SXERR_OS: c_int = -23; /* System call return an error */
const SXERR_CORRUPT: c_int = -24; /* Corrupted pointer */
const SXERR_CONTINUE: c_int = -25; /* Not an error: Operation in progress */
const SXERR_NOMATCH: c_int = -26; /* No match */
const SXERR_RESET: c_int = -27; /* Operation reset */
const SXERR_DONE: c_int = -28; /* Not an error */
const SXERR_SHORT: c_int = -29; /* Buffer too short */
const SXERR_PATH: c_int = -30; /* Path error */
const SXERR_TIMEOUT: c_int = -31; /* Timeout */
const SXERR_BIG: c_int = -32; /* Too big for processing */
const SXERR_RETRY: c_int = -33; /* Retry your call */
const SXERR_IGNORE: c_int = -63; /* Ignore */
// Standard UnQLite return values
/// Successful result
pub const UNQLITE_OK: c_int = SXRET_OK;
// Beginning of error codes
/// Out of memory
pub const UNQLITE_NOMEM: c_int = SXERR_MEM;
/// Another thread have released this instance
pub const UNQLITE_ABORT: c_int = SXERR_ABORT;
/// IO error
pub const UNQLITE_IOERR: c_int = SXERR_IO;
/// Corrupt pointer
pub const UNQLITE_CORRUPT: c_int = SXERR_CORRUPT;
/// Forbidden Operation
pub const UNQLITE_LOCKED: c_int = SXERR_LOCKED;
/// The database file is locked
pub const UNQLITE_BUSY: c_int = SXERR_BUSY;
/// Operation done
pub const UNQLITE_DONE: c_int = SXERR_DONE;
/// Permission error
pub const UNQLITE_PERM: c_int = SXERR_PERM;
/// Method not implemented by the underlying Key/Value storage engine
pub const UNQLITE_NOTIMPLEMENTED: c_int = SXERR_NOTIMPLEMENTED;
/// No such record
pub const UNQLITE_NOTFOUND: c_int = SXERR_NOTFOUND;
/// No such method
pub const UNQLITE_NOOP: c_int = SXERR_NOOP;
/// Invalid parameter
pub const UNQLITE_INVALID: c_int = SXERR_INVALID;
/// End Of Input
pub const UNQLITE_EOF: c_int = SXERR_EOF;
/// Unknown configuration option
pub const UNQLITE_UNKNOWN: c_int = SXERR_UNKNOWN;
/// Database limit reached
pub const UNQLITE_LIMIT: c_int = SXERR_LIMIT;
/// Record exists
pub const UNQLITE_EXISTS: c_int = SXERR_EXISTS;
/// Empty record
pub const UNQLITE_EMPTY: c_int = SXERR_EMPTY;
/// Compilation error
pub const UNQLITE_COMPILE_ERR: c_int = -70;
/// Virtual machine error
pub const UNQLITE_VM_ERR: c_int = -71;
/// Full database unlikely
pub const UNQLITE_FULL: c_int = -73;
/// Unable to open the database file
pub const UNQLITE_CANTOPEN: c_int = -74;
/// Read only Key/Value storage engine
pub const UNQLITE_READ_ONLY: c_int = -75;
/// Locking protocol error
pub const UNQLITE_LOCKERR: c_int = -76;
// end-of-error-codes
pub const UNQLITE_CONFIG_JX9_ERR_LOG: c_int = 1;
pub const UNQLITE_CONFIG_MAX_PAGE_CACHE: c_int = 2;
pub const UNQLITE_CONFIG_ERR_LOG: c_int = 3;
pub const UNQLITE_CONFIG_KV_ENGINE: c_int = 4;
pub const UNQLITE_CONFIG_DISABLE_AUTO_COMMIT: c_int = 5;
pub const UNQLITE_CONFIG_GET_KV_NAME: c_int = 6;
// UnQLite/Jx9 Virtual Machine Configuration Commands.
//
// The following set of constants are the available configuration verbs that can
// be used by the host-application to configure the Jx9 (Via UnQLite) Virtual machine.
// These constants must be passed as the second argument to the [unqlite_vm_config()]
// interface.
// Each options require a variable number of arguments.
// The [unqlite_vm_config()] interface will return UNQLITE_OK on success, any other return
// value indicates failure.
// There are many options but the most importants are: UNQLITE_VM_CONFIG_OUTPUT which install
// a VM output consumer callback, UNQLITE_VM_CONFIG_HTTP_REQUEST which parse and register
// a HTTP request and UNQLITE_VM_CONFIG_ARGV_ENTRY which populate the $argv array.
// For a full discussion on the configuration verbs and their expected parameters, please
// refer to this page:
// http://unqlite.org/c_api/unqlite_vm_config.html
//
/// TWO ARGUMENTS: int (*xConsumer)(const void *, unsigned int, void *), void *
pub const UNQLITE_VM_CONFIG_OUTPUT: c_int = 1;
/// ONE ARGUMENT: const char *zIncludePath
pub const UNQLITE_VM_CONFIG_IMPORT_PATH: c_int = 2;
/// NO ARGUMENTS: Report all run-time errors in the VM output
pub const UNQLITE_VM_CONFIG_ERR_REPORT: c_int = 3;
/// ONE ARGUMENT: int nMaxDepth
pub const UNQLITE_VM_CONFIG_RECURSION_DEPTH: c_int = 4;
/// ONE ARGUMENT: unsigned int *pLength
pub const UNQLITE_VM_OUTPUT_LENGTH: c_int = 5;
/// TWO ARGUMENTS: const char *zName, unqlite_value *pValue
pub const UNQLITE_VM_CONFIG_CREATE_VAR: c_int = 6;
/// TWO ARGUMENTS: const char *zRawRequest, int nRequestLength
pub const UNQLITE_VM_CONFIG_HTTP_REQUEST: c_int = 7;
/// THREE ARGUMENTS: const char *zKey, const char *zValue, int nLen
pub const UNQLITE_VM_CONFIG_SERVER_ATTR: c_int = 8;
/// THREE ARGUMENTS: const char *zKey, const char *zValue, int nLen
pub const UNQLITE_VM_CONFIG_ENV_ATTR: c_int = 9;
/// ONE ARGUMENT: unqlite_value **ppValue
pub const UNQLITE_VM_CONFIG_EXEC_VALUE: c_int = 10;
/// ONE ARGUMENT: const unqlite_io_stream *pStream
pub const UNQLITE_VM_CONFIG_IO_STREAM: c_int = 11;
/// ONE ARGUMENT: const char *zValue
pub const UNQLITE_VM_CONFIG_ARGV_ENTRY: c_int = 12;
/// TWO ARGUMENTS: const void **ppOut, unsigned int *pOutputLen
pub const UNQLITE_VM_CONFIG_EXTRACT_OUTPUT: c_int = 13;
// Storage engine configuration commands.
//
// The following set of constants are the available configuration verbs that can
// be used by the host-application to configure the underlying storage engine
// (i.e Hash, B+tree, R+tree).
//
// These constants must be passed as the first argument to [unqlite_kv_config()].
// Each options require a variable number of arguments.
// The [unqlite_kv_config()] interface will return UNQLITE_OK on success, any other return
// value indicates failure.
// For a full discussion on the configuration verbs and their expected parameters, please
// refer to this page:
// http://unqlite.org/c_api/unqlite_kv_config.html
//
/// ONE ARGUMENT: unsigned int (*xHash)(const void *,unsigned int)
pub const UNQLITE_KV_CONFIG_HASH_FUNC: c_int = 1;
/// ONE ARGUMENT: int (*xCmp)(const void *,const void *,unsigned int)
pub const UNQLITE_KV_CONFIG_CMP_FUNC: c_int = 2;
// Global Library Configuration Commands.
//
// The following set of constants are the available configuration verbs that can
// be used by the host-application to configure the whole library.
// These constants must be passed as the first argument to [unqlite_lib_config()].
//
// Each options require a variable number of arguments.
// The [unqlite_lib_config()] interface will return UNQLITE_OK on success, any other return
// value indicates failure.
// Notes:
// The default configuration is recommended for most applications and so the call to
// [unqlite_lib_config()] is usually not necessary. It is provided to support rare
// applications with unusual needs.
// The [unqlite_lib_config()] interface is not threadsafe. The application must insure that
// no other [unqlite_*()] interfaces are invoked by other threads while [unqlite_lib_config()]
// is running. Furthermore, [unqlite_lib_config()] may only be invoked prior to library
// initialization using [unqlite_lib_init()] or [unqlite_init()] or after shutdown
// by [unqlite_lib_shutdown()]. If [unqlite_lib_config()] is called after [unqlite_lib_init()]
// or [unqlite_init()] and before [unqlite_lib_shutdown()] then it will return UNQLITE_LOCKED.
// For a full discussion on the configuration verbs and their expected parameters, please
// refer to this page:
// http://unqlite.org/c_api/unqlite_lib.html
//
/// ONE ARGUMENT: const SyMemMethods *pMemMethods
pub const UNQLITE_LIB_CONFIG_USER_MALLOC: c_int = 1;
/// TWO ARGUMENTS: int (*xMemError)(void *), void *pUserData
pub const UNQLITE_LIB_CONFIG_MEM_ERR_CALLBACK: c_int = 2;
/// ONE ARGUMENT: const SyMutexMethods *pMutexMethods
pub const UNQLITE_LIB_CONFIG_USER_MUTEX: c_int = 3;
/// NO ARGUMENTS
pub const UNQLITE_LIB_CONFIG_THREAD_LEVEL_SINGLE: c_int = 4;
/// NO ARGUMENTS
pub const UNQLITE_LIB_CONFIG_THREAD_LEVEL_MULTI: c_int = 5;
/// ONE ARGUMENT: const unqlite_vfs *pVfs
pub const UNQLITE_LIB_CONFIG_VFS: c_int = 6;
/// ONE ARGUMENT: unqlite_kv_methods *pStorage
pub const UNQLITE_LIB_CONFIG_STORAGE_ENGINE: c_int = 7;
/// ONE ARGUMENT: int iPageSize
pub const UNQLITE_LIB_CONFIG_PAGE_SIZE: c_int = 8;
// These bit values are intended for use in the 3rd parameter to the [unqlite_open()] interface
// and in the 4th parameter to the xOpen method of the [unqlite_vfs] object.
//
/// Read only mode. Ok for [unqlite_open]
pub const UNQLITE_OPEN_READONLY: c_uint = 0x00000001;
/// Ok for [unqlite_open]
pub const UNQLITE_OPEN_READWRITE: c_uint = 0x00000002;
/// Ok for [unqlite_open]
pub const UNQLITE_OPEN_CREATE: c_uint = 0x00000004;
/// VFS only
pub const UNQLITE_OPEN_EXCLUSIVE: c_uint = 0x00000008;
/// VFS only
pub const UNQLITE_OPEN_TEMP_DB: c_uint = 0x00000010;
/// Ok for [unqlite_open]
pub const UNQLITE_OPEN_NOMUTEX: c_uint = 0x00000020;
/// Omit journaling for this database. Ok for [unqlite_open]
pub const UNQLITE_OPEN_OMIT_JOURNALING: c_uint = 0x00000040;
/// An in memory database. Ok for [unqlite_open]
pub const UNQLITE_OPEN_IN_MEMORY: c_uint = 0x00000080;
/// Obtain a memory view of the whole file. Ok for [unqlite_open]
pub const UNQLITE_OPEN_MMAP: c_uint = 0x00000100;
// Synchronization Type Flags
//
// When UnQLite invokes the xSync() method of an [unqlite_io_methods] object it uses
// a combination of these integer values as the second argument.
//
// When the UNQLITE_SYNC_DATAONLY flag is used, it means that the sync operation only
// needs to flush data to mass storage.: c_int = Inode information need not be flushed.
// If the lower four bits of the flag equal UNQLITE_SYNC_NORMAL, that means to use normal
// fsync() semantics. If the lower four bits equal UNQLITE_SYNC_FULL, that means to use
// Mac OS X style fullsync instead of fsync().
// | pub const UNQLITE_SYNC_NORMAL: c_int = 0x00002;
pub const UNQLITE_SYNC_FULL: c_int = 0x00003;
pub const UNQLITE_SYNC_DATAONLY: c_int = 0x00010;
// File Locking Levels
//
// UnQLite uses one of these integer values as the second
// argument to calls it makes to the xLock() and xUnlock() methods
// of an [unqlite_io_methods] object.
//
pub const UNQLITE_LOCK_NONE: c_int = 0;
pub const UNQLITE_LOCK_SHARED: c_int = 1;
pub const UNQLITE_LOCK_RESERVED: c_int = 2;
pub const UNQLITE_LOCK_PENDING: c_int = 3;
pub const UNQLITE_LOCK_EXCLUSIVE: c_int = 4;
// Flags for the xAccess VFS method
//
// These integer constants can be used as the third parameter to
// the xAccess method of an [unqlite_vfs] object. They determine
// what kind of permissions the xAccess method is looking for.
// With UNQLITE_ACCESS_EXISTS, the xAccess method
// simply checks whether the file exists.
// With UNQLITE_ACCESS_READWRITE, the xAccess method
// checks whether the named directory is both readable and writable
// (in other words, if files can be added, removed, and renamed within
// the directory).
// The UNQLITE_ACCESS_READWRITE constant is currently used only by the
// [temp_store_directory pragma], though this could change in a future
// release of UnQLite.
// With UNQLITE_ACCESS_READ, the xAccess method
// checks whether the file is readable. The UNQLITE_ACCESS_READ constant is
// currently unused, though it might be used in a future release of
// UnQLite.
//
pub const UNQLITE_ACCESS_EXISTS: c_int = 0;
pub const UNQLITE_ACCESS_READWRITE: c_int = 1;
pub const UNQLITE_ACCESS_READ: c_int = 2;
// Possible seek positions.
//
pub const UNQLITE_CURSOR_MATCH_EXACT: c_int = 1;
pub const UNQLITE_CURSOR_MATCH_LE: c_int = 2;
pub const UNQLITE_CURSOR_MATCH_GE: c_int = 3;
// UnQLite journal file suffix.
//
// #ifndef UNQLITE_JOURNAL_FILE_SUFFIX
pub const UNQLITE_JOURNAL_FILE_SUFFIX: &'static str = "_unqlite_journal";
// #endif
//
// Call Context - Error Message Serverity Level.
//
// The following constans are the allowed severity level that can
// passed as the second argument to the [unqlite_context_throw_error()] or
// [unqlite_context_throw_error_format()] interfaces.
// Refer to the official documentation for additional information.
//
/// Call context error such as unexpected number of arguments, invalid types and so on.
pub const UNQLITE_CTX_ERR: c_int = 1;
/// Call context Warning
pub const UNQLITE_CTX_WARNING: c_int = 2;
/// Call context Notice
pub const UNQLITE_CTX_NOTICE: c_int = 3; | random_line_split | |
segment.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: segment.proto
/*
Package segment is a generated protocol buffer package.
It is generated from these files:
segment.proto
It has these top-level messages:
Segment
*/
package segment
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
|
// road classes are based on OpenStreetMap usage of the "highway" tag.
// each value of the enumeration corresponds to one value of the tag,
// except for ClassServiceOther, which is used for service and other roads.
type Segment_RoadClass int32
const (
Segment_ClassMotorway Segment_RoadClass = 0
Segment_ClassTrunk Segment_RoadClass = 1
Segment_ClassPrimary Segment_RoadClass = 2
Segment_ClassSecondary Segment_RoadClass = 3
Segment_ClassTertiary Segment_RoadClass = 4
Segment_ClassUnclassified Segment_RoadClass = 5
Segment_ClassResidential Segment_RoadClass = 6
Segment_ClassServiceOther Segment_RoadClass = 7
)
var Segment_RoadClass_name = map[int32]string{
0: "ClassMotorway",
1: "ClassTrunk",
2: "ClassPrimary",
3: "ClassSecondary",
4: "ClassTertiary",
5: "ClassUnclassified",
6: "ClassResidential",
7: "ClassServiceOther",
}
var Segment_RoadClass_value = map[string]int32{
"ClassMotorway": 0,
"ClassTrunk": 1,
"ClassPrimary": 2,
"ClassSecondary": 3,
"ClassTertiary": 4,
"ClassUnclassified": 5,
"ClassResidential": 6,
"ClassServiceOther": 7,
}
func (x Segment_RoadClass) Enum() *Segment_RoadClass {
p := new(Segment_RoadClass)
*p = x
return p
}
func (x Segment_RoadClass) String() string {
return proto.EnumName(Segment_RoadClass_name, int32(x))
}
func (x *Segment_RoadClass) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Segment_RoadClass_value, data, "Segment_RoadClass")
if err != nil {
return err
}
*x = Segment_RoadClass(value)
return nil
}
func (Segment_RoadClass) EnumDescriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 0} }
// form of way describes the physical attributes of the road.
type Segment_FormOfWay int32
const (
// use FowUndefined if you do not know what physical attributes the road
// has.
Segment_FowUndefined Segment_FormOfWay = 0
// use FowMotorway for motorways.
Segment_FowMotorway Segment_FormOfWay = 1
// use FowMultipleCarriageway for multiple carriageway roads. that is, when
// there are separate OSM ways for each direction of travel.
Segment_FowMultipleCarriageway Segment_FormOfWay = 2
// use FowSingleCarriageway for other roads.
Segment_FowSingleCarriageway Segment_FormOfWay = 3
// use FowRoundabout for roundabouts
Segment_FowRoundabout Segment_FormOfWay = 4
// use FowTrafficSquare for roads which enclose an area, but which are not
// roundabouts
Segment_FowTrafficSquare Segment_FormOfWay = 5
// use FowSlipRoad for slip roads, ramps and other links.
Segment_FowSlipRoad Segment_FormOfWay = 6
// use FowOther for roads which do not match any of the above definitions,
// but for which the form of way is known.
Segment_FowOther Segment_FormOfWay = 7
)
var Segment_FormOfWay_name = map[int32]string{
0: "FowUndefined",
1: "FowMotorway",
2: "FowMultipleCarriageway",
3: "FowSingleCarriageway",
4: "FowRoundabout",
5: "FowTrafficSquare",
6: "FowSlipRoad",
7: "FowOther",
}
var Segment_FormOfWay_value = map[string]int32{
"FowUndefined": 0,
"FowMotorway": 1,
"FowMultipleCarriageway": 2,
"FowSingleCarriageway": 3,
"FowRoundabout": 4,
"FowTrafficSquare": 5,
"FowSlipRoad": 6,
"FowOther": 7,
}
func (x Segment_FormOfWay) Enum() *Segment_FormOfWay {
p := new(Segment_FormOfWay)
*p = x
return p
}
func (x Segment_FormOfWay) String() string {
return proto.EnumName(Segment_FormOfWay_name, int32(x))
}
func (x *Segment_FormOfWay) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Segment_FormOfWay_value, data, "Segment_FormOfWay")
if err != nil {
return err
}
*x = Segment_FormOfWay(value)
return nil
}
func (Segment_FormOfWay) EnumDescriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 1} }
type Segment struct {
// a segment is a list of at least two LocationReferences.
//
// all but the last LocationReference must contain a full set of data for
// each field, but the final one should consist of only a reference
// coordinate. any other information on the final LocationReference may be
// ignored.
//
// a segment with only a single LocationReference is invalid and may be
// ignored.
Lrps []*Segment_LocationReference `protobuf:"bytes,1,rep,name=lrps" json:"lrps,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment) Reset() { *m = Segment{} }
func (m *Segment) String() string { return proto.CompactTextString(m) }
func (*Segment) ProtoMessage() {}
func (*Segment) Descriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0} }
func (m *Segment) GetLrps() []*Segment_LocationReference {
if m != nil {
return m.Lrps
}
return nil
}
type Segment_LatLng struct {
// lat & lng in EPSG:4326 multiplied by 10^7 and rounded to the nearest
// integer. this gives a precision of about 1.1cm (7/16ths of an inch)
// worst case at the equator.
Lat *int32 `protobuf:"fixed32,1,opt,name=lat" json:"lat,omitempty"`
Lng *int32 `protobuf:"fixed32,2,opt,name=lng" json:"lng,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment_LatLng) Reset() { *m = Segment_LatLng{} }
func (m *Segment_LatLng) String() string { return proto.CompactTextString(m) }
func (*Segment_LatLng) ProtoMessage() {}
func (*Segment_LatLng) Descriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 0} }
func (m *Segment_LatLng) GetLat() int32 {
if m != nil && m.Lat != nil {
return *m.Lat
}
return 0
}
func (m *Segment_LatLng) GetLng() int32 {
if m != nil && m.Lng != nil {
return *m.Lng
}
return 0
}
// a segment consists of multiple LocationReferences, each of which describes
// the road at a particular reference coordinate, or properties of the road
// between the current LocationReference and the next.
//
// the first and last LocationReference reference coordinates will usually be at
// "true" intersections, which are intersections where there are multiple
// paths through the intersection. This excludes "false" intersections where
// two roads cross, but there are not multiple paths, such as overpasses,
// bridges, changes of road name or properties.
//
// ___
// | / \ _4---5--
// -1-----2 \ __/ |
// | 3__/ |
//
// in the example above, the Segment consists of 5 LocationReferences,
// numbered 1-5. locations 1 & 5 are at true intersections with other roads
// and 2, 3 & 4 are intermediate LocationReferences inserted due to the length
// of the road.
//
// Occasionally, a LocationReference can be inserted along a road (i.e., not at
// a true intersection) to break long road segments into multiple OSMLR
// segments.
//
type Segment_LocationReference struct {
// the reference coordinate.
Coord *Segment_LatLng `protobuf:"bytes,1,opt,name=coord" json:"coord,omitempty"`
// bearing in degrees clockwise from true north between 0 and 359 - will
// generally fit in a couple of bytes varint.
//
// the bearing should be calculated toward a point 20m along the road from
// the reference coordinate towards the next LocationReference. if this is
// the final LocationReference, then omit the bearing.
//
// each LocationReference, of which there may be several in this Segment,
// except for the last must have a bearing calculated from the reference
// coordinate of this LocationReference.
Bear *uint32 `protobuf:"varint,2,opt,name=bear" json:"bear,omitempty"`
// road class at the reference coordinate.
StartFrc *Segment_RoadClass `protobuf:"varint,3,opt,name=start_frc,json=startFrc,enum=opentraffic.osmlr.Segment_RoadClass" json:"start_frc,omitempty"`
// form of way at the reference coordinate.
StartFow *Segment_FormOfWay `protobuf:"varint,4,opt,name=start_fow,json=startFow,enum=opentraffic.osmlr.Segment_FormOfWay" json:"start_fow,omitempty"`
// lowest road class (most important road) between the start coordinate
// and the next LocationReference.
LeastFrc *Segment_RoadClass `protobuf:"varint,5,opt,name=least_frc,json=leastFrc,enum=opentraffic.osmlr.Segment_RoadClass" json:"least_frc,omitempty"`
// length in meters, rounded to the nearest meter. the maximum allowed
// length is 15km, but most segments will be much shorter, so a varint
// representation makes sense.
//
// if the length between successive LocationReferences is more than 15km
// then you MUST insert an intermediate LocationReference.
Length *uint32 `protobuf:"varint,6,opt,name=length" json:"length,omitempty"`
// Is this LRP at a node/intersection (true) or along a road (false)?
// This hint can be useful wen associating OSMLR to routing graphs
AtNode *bool `protobuf:"varint,7,opt,name=at_node,json=atNode" json:"at_node,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment_LocationReference) Reset() { *m = Segment_LocationReference{} }
func (m *Segment_LocationReference) String() string { return proto.CompactTextString(m) }
func (*Segment_LocationReference) ProtoMessage() {}
func (*Segment_LocationReference) Descriptor() ([]byte, []int) {
return fileDescriptorSegment, []int{0, 1}
}
func (m *Segment_LocationReference) GetCoord() *Segment_LatLng {
if m != nil {
return m.Coord
}
return nil
}
func (m *Segment_LocationReference) GetBear() uint32 {
if m != nil && m.Bear != nil {
return *m.Bear
}
return 0
}
func (m *Segment_LocationReference) GetStartFrc() Segment_RoadClass {
if m != nil && m.StartFrc != nil {
return *m.StartFrc
}
return Segment_ClassMotorway
}
func (m *Segment_LocationReference) GetStartFow() Segment_FormOfWay {
if m != nil && m.StartFow != nil {
return *m.StartFow
}
return Segment_FowUndefined
}
func (m *Segment_LocationReference) GetLeastFrc() Segment_RoadClass {
if m != nil && m.LeastFrc != nil {
return *m.LeastFrc
}
return Segment_ClassMotorway
}
func (m *Segment_LocationReference) GetLength() uint32 {
if m != nil && m.Length != nil {
return *m.Length
}
return 0
}
func (m *Segment_LocationReference) GetAtNode() bool {
if m != nil && m.AtNode != nil {
return *m.AtNode
}
return false
}
func init() {
proto.RegisterType((*Segment)(nil), "opentraffic.osmlr.Segment")
proto.RegisterType((*Segment_LatLng)(nil), "opentraffic.osmlr.Segment.LatLng")
proto.RegisterType((*Segment_LocationReference)(nil), "opentraffic.osmlr.Segment.LocationReference")
proto.RegisterEnum("opentraffic.osmlr.Segment_RoadClass", Segment_RoadClass_name, Segment_RoadClass_value)
proto.RegisterEnum("opentraffic.osmlr.Segment_FormOfWay", Segment_FormOfWay_name, Segment_FormOfWay_value)
}
func init() { proto.RegisterFile("segment.proto", fileDescriptorSegment) }
var fileDescriptorSegment = []byte{
// 529 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0xcb, 0x6e, 0xdb, 0x3c,
0x10, 0x85, 0x23, 0x5f, 0xe4, 0x64, 0x72, 0xa3, 0x89, 0xfc, 0xf9, 0x85, 0xac, 0xdc, 0xa0, 0x0b,
0x2f, 0x12, 0x1b, 0xc8, 0xa6, 0xdb, 0xb6, 0x01, 0xb4, 0x4a, 0x9b, 0x42, 0x4a, 0x50, 0xa0, 0x9b,
0x80, 0x16, 0x47, 0x32, 0x51, 0x9a, 0xa3, 0x52, 0x54, 0x04, 0x3f, 0x53, 0xbb, 0xeb, 0xdb, 0xf4,
0x69, 0x0a, 0xd1, 0xaa, 0xd1, 0x22, 0x40, 0x8a, 0xae, 0x34, 0x3c, 0x3c, 0x73, 0xe6, 0x23, 0x05,
0xc2, 0x61, 0x85, 0xc5, 0x0a, 0x8d, 0x9b, 0x95, 0x96, 0x1c, 0xf1, 0x31, 0x95, 0x68, 0x9c, 0x15,
0x79, 0xae, 0xb2, 0x19, 0x55, 0x2b, 0x6d, 0xcf, 0xbf, 0x87, 0x30, 0x4a, 0x37, 0x26, 0xfe, 0x1a,
0x06, 0xda, 0x96, 0x55, 0x14, 0x4c, 0xfa, 0xd3, 0xfd, 0xab, 0x8b, 0xd9, 0x13, 0xf7, 0xac, 0x73,
0xce, 0x6e, 0x28, 0x13, 0x4e, 0x91, 0x49, 0x30, 0x47, 0x8b, 0x26, 0xc3, 0xc4, 0x77, 0x9e, 0x5d,
0x40, 0x78, 0x23, 0xdc, 0x8d, 0x29, 0x38, 0x83, 0xbe, 0x16, 0x2e, 0x0a, 0x26, 0xc1, 0xf4, 0x38,
0x69, 0x4b, 0xaf, 0x98, 0x22, 0xea, 0x75, 0x8a, 0x29, 0xce, 0x7e, 0xf4, 0x60, 0xfc, 0x24, 0x89,
0xbf, 0x82, 0x61, 0x46, 0x64, 0xa5, 0xef, 0xdd, 0xbf, 0x7a, 0xf1, 0x1c, 0x86, 0x9f, 0x95, 0x6c,
0xfc, 0x9c, 0xc3, 0x60, 0x81, 0xc2, 0xfa, 0x09, 0x87, 0x89, 0xaf, 0xf9, 0x1b, 0xd8, 0xab, 0x9c,
0xb0, 0xee, 0x21, 0xb7, 0x59, 0xd4, 0x9f, 0x04, 0xd3, 0xa3, 0xab, 0x97, 0xcf, 0x04, 0x26, 0x24,
0xe4, 0xb5, 0x16, 0x55, 0x95, 0xec, 0xfa, 0xb6, 0xd8, 0x66, 0xbf, 0x45, 0x50, 0x13, 0x0d, 0xfe,
0x1a, 0x11, 0x93, 0x5d, 0xdd, 0xe6, 0x1f, 0xc5, 0xfa, 0x57, 0x04, 0x35, 0x6d, 0x84, 0x46, 0x51,
0x6d, 0x28, 0x86, 0xff, 0x42, 0xe1, 0xdb, 0x5a, 0x8a, 0x53, 0x08, 0x35, 0x9a, 0xc2, 0x2d, 0xa3,
0xd0, 0x1f, 0xaf, 0x5b, 0xf1, 0xff, 0x61, 0x24, 0xdc, 0x83, 0x21, 0x89, 0xd1, 0x68, 0x12, 0x4c,
0x77, 0x93, 0x50, 0xb8, 0xf7, 0x24, 0xf1, 0xfc, 0x6b, 0x00, 0x7b, 0xdb, 0x20, 0x3e, 0x86, 0x43,
0x5f, 0xbc, 0x23, 0x47, 0xb6, 0x11, 0x6b, 0xb6, 0xc3, 0x8f, 0x00, 0xbc, 0x74, 0x67, 0x6b, 0xf3,
0x99, 0x05, 0x9c, 0xc1, 0x81, 0x5f, 0x7f, 0xb0, 0x6a, 0x25, 0xec, 0x9a, 0xf5, 0x38, 0x87, 0x23,
0xaf, 0xa4, 0x98, 0x91, 0x91, 0xad, 0xd6, 0xdf, 0x06, 0xdd, 0xa1, 0x75, 0xaa, 0x95, 0x06, 0xfc,
0x3f, 0x18, 0x7b, 0xe9, 0xde, 0x64, 0xed, 0x47, 0xe5, 0x0a, 0x25, 0x1b, 0xf2, 0x13, 0x60, 0x9b,
0x43, 0x60, 0xa5, 0x24, 0x1a, 0xa7, 0x84, 0x66, 0xe1, 0xd6, 0x9c, 0xa2, 0x7d, 0x54, 0x19, 0xde,
0xba, 0x25, 0x5a, 0x36, 0x3a, 0xff, 0x16, 0xc0, 0xde, 0xf6, 0xe6, 0x5a, 0x94, 0x98, 0x9a, 0x7b,
0x23, 0x31, 0x57, 0x06, 0x25, 0xdb, 0xe1, 0xc7, 0xb0, 0x1f, 0x53, 0xb3, 0xa5, 0x0f, 0xf8, 0x19,
0x9c, 0xb6, 0x42, 0xad, 0x9d, 0x2a, 0x35, 0x5e, 0x0b, 0x6b, 0x95, 0x28, 0xb0, 0xdd, 0xeb, 0xf1,
0x08, 0x4e, 0x62, 0x6a, 0x52, 0x65, 0x8a, 0x3f, 0x77, 0x3c, 0x7d, 0x4c, 0x4d, 0x42, 0xb5, 0x91,
0x62, 0x41, 0xb5, 0x63, 0x83, 0x16, 0x33, 0xa6, 0xe6, 0x6e, 0xf3, 0x23, 0xd2, 0x2f, 0xb5, 0xb0,
0xc8, 0x86, 0xdd, 0xbc, 0x54, 0xab, 0xb2, 0xbd, 0x43, 0x16, 0xf2, 0x03, 0xd8, 0x8d, 0xa9, 0xe9,
0x70, 0xdf, 0xce, 0x3f, 0x5d, 0x16, 0xca, 0x2d, 0xeb, 0xc5, 0x2c, 0xa3, 0xd5, 0xbc, 0x36, 0xca,
0xa1, 0xbc, 0x94, 0x56, 0x3d, 0xa2, 0xad, 0xe6, 0x8f, 0x42, 0x2f, 0x85, 0xd6, 0x62, 0x5e, 0x2e,
0xe6, 0xdd, 0xfb, 0xfb, 0x19, 0x00, 0x00, 0xff, 0xff, 0xc2, 0x6f, 0x68, 0x18, 0x89, 0x03, 0x00,
0x00,
} | // This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package | random_line_split |
segment.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: segment.proto
/*
Package segment is a generated protocol buffer package.
It is generated from these files:
segment.proto
It has these top-level messages:
Segment
*/
package segment
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// road classes are based on OpenStreetMap usage of the "highway" tag.
// each value of the enumeration corresponds to one value of the tag,
// except for ClassServiceOther, which is used for service and other roads.
type Segment_RoadClass int32
const (
Segment_ClassMotorway Segment_RoadClass = 0
Segment_ClassTrunk Segment_RoadClass = 1
Segment_ClassPrimary Segment_RoadClass = 2
Segment_ClassSecondary Segment_RoadClass = 3
Segment_ClassTertiary Segment_RoadClass = 4
Segment_ClassUnclassified Segment_RoadClass = 5
Segment_ClassResidential Segment_RoadClass = 6
Segment_ClassServiceOther Segment_RoadClass = 7
)
var Segment_RoadClass_name = map[int32]string{
0: "ClassMotorway",
1: "ClassTrunk",
2: "ClassPrimary",
3: "ClassSecondary",
4: "ClassTertiary",
5: "ClassUnclassified",
6: "ClassResidential",
7: "ClassServiceOther",
}
var Segment_RoadClass_value = map[string]int32{
"ClassMotorway": 0,
"ClassTrunk": 1,
"ClassPrimary": 2,
"ClassSecondary": 3,
"ClassTertiary": 4,
"ClassUnclassified": 5,
"ClassResidential": 6,
"ClassServiceOther": 7,
}
func (x Segment_RoadClass) Enum() *Segment_RoadClass {
p := new(Segment_RoadClass)
*p = x
return p
}
func (x Segment_RoadClass) String() string {
return proto.EnumName(Segment_RoadClass_name, int32(x))
}
func (x *Segment_RoadClass) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Segment_RoadClass_value, data, "Segment_RoadClass")
if err != nil {
return err
}
*x = Segment_RoadClass(value)
return nil
}
func (Segment_RoadClass) EnumDescriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 0} }
// form of way describes the physical attributes of the road.
type Segment_FormOfWay int32
const (
// use FowUndefined if you do not know what physical attributes the road
// has.
Segment_FowUndefined Segment_FormOfWay = 0
// use FowMotorway for motorways.
Segment_FowMotorway Segment_FormOfWay = 1
// use FowMultipleCarriageway for multiple carriageway roads. that is, when
// there are separate OSM ways for each direction of travel.
Segment_FowMultipleCarriageway Segment_FormOfWay = 2
// use FowSingleCarriageway for other roads.
Segment_FowSingleCarriageway Segment_FormOfWay = 3
// use FowRoundabout for roundabouts
Segment_FowRoundabout Segment_FormOfWay = 4
// use FowTrafficSquare for roads which enclose an area, but which are not
// roundabouts
Segment_FowTrafficSquare Segment_FormOfWay = 5
// use FowSlipRoad for slip roads, ramps and other links.
Segment_FowSlipRoad Segment_FormOfWay = 6
// use FowOther for roads which do not match any of the above definitions,
// but for which the form of way is known.
Segment_FowOther Segment_FormOfWay = 7
)
var Segment_FormOfWay_name = map[int32]string{
0: "FowUndefined",
1: "FowMotorway",
2: "FowMultipleCarriageway",
3: "FowSingleCarriageway",
4: "FowRoundabout",
5: "FowTrafficSquare",
6: "FowSlipRoad",
7: "FowOther",
}
var Segment_FormOfWay_value = map[string]int32{
"FowUndefined": 0,
"FowMotorway": 1,
"FowMultipleCarriageway": 2,
"FowSingleCarriageway": 3,
"FowRoundabout": 4,
"FowTrafficSquare": 5,
"FowSlipRoad": 6,
"FowOther": 7,
}
func (x Segment_FormOfWay) Enum() *Segment_FormOfWay {
p := new(Segment_FormOfWay)
*p = x
return p
}
func (x Segment_FormOfWay) String() string {
return proto.EnumName(Segment_FormOfWay_name, int32(x))
}
func (x *Segment_FormOfWay) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Segment_FormOfWay_value, data, "Segment_FormOfWay")
if err != nil {
return err
}
*x = Segment_FormOfWay(value)
return nil
}
func (Segment_FormOfWay) EnumDescriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 1} }
type Segment struct {
// a segment is a list of at least two LocationReferences.
//
// all but the last LocationReference must contain a full set of data for
// each field, but the final one should consist of only a reference
// coordinate. any other information on the final LocationReference may be
// ignored.
//
// a segment with only a single LocationReference is invalid and may be
// ignored.
Lrps []*Segment_LocationReference `protobuf:"bytes,1,rep,name=lrps" json:"lrps,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment) Reset() { *m = Segment{} }
func (m *Segment) String() string { return proto.CompactTextString(m) }
func (*Segment) ProtoMessage() |
func (*Segment) Descriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0} }
func (m *Segment) GetLrps() []*Segment_LocationReference {
if m != nil {
return m.Lrps
}
return nil
}
type Segment_LatLng struct {
// lat & lng in EPSG:4326 multiplied by 10^7 and rounded to the nearest
// integer. this gives a precision of about 1.1cm (7/16ths of an inch)
// worst case at the equator.
Lat *int32 `protobuf:"fixed32,1,opt,name=lat" json:"lat,omitempty"`
Lng *int32 `protobuf:"fixed32,2,opt,name=lng" json:"lng,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment_LatLng) Reset() { *m = Segment_LatLng{} }
func (m *Segment_LatLng) String() string { return proto.CompactTextString(m) }
func (*Segment_LatLng) ProtoMessage() {}
func (*Segment_LatLng) Descriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 0} }
func (m *Segment_LatLng) GetLat() int32 {
if m != nil && m.Lat != nil {
return *m.Lat
}
return 0
}
func (m *Segment_LatLng) GetLng() int32 {
if m != nil && m.Lng != nil {
return *m.Lng
}
return 0
}
// a segment consists of multiple LocationReferences, each of which describes
// the road at a particular reference coordinate, or properties of the road
// between the current LocationReference and the next.
//
// the first and last LocationReference reference coordinates will usually be at
// "true" intersections, which are intersections where there are multiple
// paths through the intersection. This excludes "false" intersections where
// two roads cross, but there are not multiple paths, such as overpasses,
// bridges, changes of road name or properties.
//
// ___
// | / \ _4---5--
// -1-----2 \ __/ |
// | 3__/ |
//
// in the example above, the Segment consists of 5 LocationReferences,
// numbered 1-5. locations 1 & 5 are at true intersections with other roads
// and 2, 3 & 4 are intermediate LocationReferences inserted due to the length
// of the road.
//
// Occasionally, a LocationReference can be inserted along a road (i.e., not at
// a true intersection) to break long road segments into multiple OSMLR
// segments.
//
type Segment_LocationReference struct {
// the reference coordinate.
Coord *Segment_LatLng `protobuf:"bytes,1,opt,name=coord" json:"coord,omitempty"`
// bearing in degrees clockwise from true north between 0 and 359 - will
// generally fit in a couple of bytes varint.
//
// the bearing should be calculated toward a point 20m along the road from
// the reference coordinate towards the next LocationReference. if this is
// the final LocationReference, then omit the bearing.
//
// each LocationReference, of which there may be several in this Segment,
// except for the last must have a bearing calculated from the reference
// coordinate of this LocationReference.
Bear *uint32 `protobuf:"varint,2,opt,name=bear" json:"bear,omitempty"`
// road class at the reference coordinate.
StartFrc *Segment_RoadClass `protobuf:"varint,3,opt,name=start_frc,json=startFrc,enum=opentraffic.osmlr.Segment_RoadClass" json:"start_frc,omitempty"`
// form of way at the reference coordinate.
StartFow *Segment_FormOfWay `protobuf:"varint,4,opt,name=start_fow,json=startFow,enum=opentraffic.osmlr.Segment_FormOfWay" json:"start_fow,omitempty"`
// lowest road class (most important road) between the start coordinate
// and the next LocationReference.
LeastFrc *Segment_RoadClass `protobuf:"varint,5,opt,name=least_frc,json=leastFrc,enum=opentraffic.osmlr.Segment_RoadClass" json:"least_frc,omitempty"`
// length in meters, rounded to the nearest meter. the maximum allowed
// length is 15km, but most segments will be much shorter, so a varint
// representation makes sense.
//
// if the length between successive LocationReferences is more than 15km
// then you MUST insert an intermediate LocationReference.
Length *uint32 `protobuf:"varint,6,opt,name=length" json:"length,omitempty"`
// Is this LRP at a node/intersection (true) or along a road (false)?
// This hint can be useful wen associating OSMLR to routing graphs
AtNode *bool `protobuf:"varint,7,opt,name=at_node,json=atNode" json:"at_node,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment_LocationReference) Reset() { *m = Segment_LocationReference{} }
func (m *Segment_LocationReference) String() string { return proto.CompactTextString(m) }
func (*Segment_LocationReference) ProtoMessage() {}
func (*Segment_LocationReference) Descriptor() ([]byte, []int) {
return fileDescriptorSegment, []int{0, 1}
}
func (m *Segment_LocationReference) GetCoord() *Segment_LatLng {
if m != nil {
return m.Coord
}
return nil
}
func (m *Segment_LocationReference) GetBear() uint32 {
if m != nil && m.Bear != nil {
return *m.Bear
}
return 0
}
func (m *Segment_LocationReference) GetStartFrc() Segment_RoadClass {
if m != nil && m.StartFrc != nil {
return *m.StartFrc
}
return Segment_ClassMotorway
}
func (m *Segment_LocationReference) GetStartFow() Segment_FormOfWay {
if m != nil && m.StartFow != nil {
return *m.StartFow
}
return Segment_FowUndefined
}
func (m *Segment_LocationReference) GetLeastFrc() Segment_RoadClass {
if m != nil && m.LeastFrc != nil {
return *m.LeastFrc
}
return Segment_ClassMotorway
}
func (m *Segment_LocationReference) GetLength() uint32 {
if m != nil && m.Length != nil {
return *m.Length
}
return 0
}
func (m *Segment_LocationReference) GetAtNode() bool {
if m != nil && m.AtNode != nil {
return *m.AtNode
}
return false
}
func init() {
proto.RegisterType((*Segment)(nil), "opentraffic.osmlr.Segment")
proto.RegisterType((*Segment_LatLng)(nil), "opentraffic.osmlr.Segment.LatLng")
proto.RegisterType((*Segment_LocationReference)(nil), "opentraffic.osmlr.Segment.LocationReference")
proto.RegisterEnum("opentraffic.osmlr.Segment_RoadClass", Segment_RoadClass_name, Segment_RoadClass_value)
proto.RegisterEnum("opentraffic.osmlr.Segment_FormOfWay", Segment_FormOfWay_name, Segment_FormOfWay_value)
}
func init() { proto.RegisterFile("segment.proto", fileDescriptorSegment) }
var fileDescriptorSegment = []byte{
// 529 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0xcb, 0x6e, 0xdb, 0x3c,
0x10, 0x85, 0x23, 0x5f, 0xe4, 0x64, 0x72, 0xa3, 0x89, 0xfc, 0xf9, 0x85, 0xac, 0xdc, 0xa0, 0x0b,
0x2f, 0x12, 0x1b, 0xc8, 0xa6, 0xdb, 0xb6, 0x01, 0xb4, 0x4a, 0x9b, 0x42, 0x4a, 0x50, 0xa0, 0x9b,
0x80, 0x16, 0x47, 0x32, 0x51, 0x9a, 0xa3, 0x52, 0x54, 0x04, 0x3f, 0x53, 0xbb, 0xeb, 0xdb, 0xf4,
0x69, 0x0a, 0xd1, 0xaa, 0xd1, 0x22, 0x40, 0x8a, 0xae, 0x34, 0x3c, 0x3c, 0x73, 0xe6, 0x23, 0x05,
0xc2, 0x61, 0x85, 0xc5, 0x0a, 0x8d, 0x9b, 0x95, 0x96, 0x1c, 0xf1, 0x31, 0x95, 0x68, 0x9c, 0x15,
0x79, 0xae, 0xb2, 0x19, 0x55, 0x2b, 0x6d, 0xcf, 0xbf, 0x87, 0x30, 0x4a, 0x37, 0x26, 0xfe, 0x1a,
0x06, 0xda, 0x96, 0x55, 0x14, 0x4c, 0xfa, 0xd3, 0xfd, 0xab, 0x8b, 0xd9, 0x13, 0xf7, 0xac, 0x73,
0xce, 0x6e, 0x28, 0x13, 0x4e, 0x91, 0x49, 0x30, 0x47, 0x8b, 0x26, 0xc3, 0xc4, 0x77, 0x9e, 0x5d,
0x40, 0x78, 0x23, 0xdc, 0x8d, 0x29, 0x38, 0x83, 0xbe, 0x16, 0x2e, 0x0a, 0x26, 0xc1, 0xf4, 0x38,
0x69, 0x4b, 0xaf, 0x98, 0x22, 0xea, 0x75, 0x8a, 0x29, 0xce, 0x7e, 0xf4, 0x60, 0xfc, 0x24, 0x89,
0xbf, 0x82, 0x61, 0x46, 0x64, 0xa5, 0xef, 0xdd, 0xbf, 0x7a, 0xf1, 0x1c, 0x86, 0x9f, 0x95, 0x6c,
0xfc, 0x9c, 0xc3, 0x60, 0x81, 0xc2, 0xfa, 0x09, 0x87, 0x89, 0xaf, 0xf9, 0x1b, 0xd8, 0xab, 0x9c,
0xb0, 0xee, 0x21, 0xb7, 0x59, 0xd4, 0x9f, 0x04, 0xd3, 0xa3, 0xab, 0x97, 0xcf, 0x04, 0x26, 0x24,
0xe4, 0xb5, 0x16, 0x55, 0x95, 0xec, 0xfa, 0xb6, 0xd8, 0x66, 0xbf, 0x45, 0x50, 0x13, 0x0d, 0xfe,
0x1a, 0x11, 0x93, 0x5d, 0xdd, 0xe6, 0x1f, 0xc5, 0xfa, 0x57, 0x04, 0x35, 0x6d, 0x84, 0x46, 0x51,
0x6d, 0x28, 0x86, 0xff, 0x42, 0xe1, 0xdb, 0x5a, 0x8a, 0x53, 0x08, 0x35, 0x9a, 0xc2, 0x2d, 0xa3,
0xd0, 0x1f, 0xaf, 0x5b, 0xf1, 0xff, 0x61, 0x24, 0xdc, 0x83, 0x21, 0x89, 0xd1, 0x68, 0x12, 0x4c,
0x77, 0x93, 0x50, 0xb8, 0xf7, 0x24, 0xf1, 0xfc, 0x6b, 0x00, 0x7b, 0xdb, 0x20, 0x3e, 0x86, 0x43,
0x5f, 0xbc, 0x23, 0x47, 0xb6, 0x11, 0x6b, 0xb6, 0xc3, 0x8f, 0x00, 0xbc, 0x74, 0x67, 0x6b, 0xf3,
0x99, 0x05, 0x9c, 0xc1, 0x81, 0x5f, 0x7f, 0xb0, 0x6a, 0x25, 0xec, 0x9a, 0xf5, 0x38, 0x87, 0x23,
0xaf, 0xa4, 0x98, 0x91, 0x91, 0xad, 0xd6, 0xdf, 0x06, 0xdd, 0xa1, 0x75, 0xaa, 0x95, 0x06, 0xfc,
0x3f, 0x18, 0x7b, 0xe9, 0xde, 0x64, 0xed, 0x47, 0xe5, 0x0a, 0x25, 0x1b, 0xf2, 0x13, 0x60, 0x9b,
0x43, 0x60, 0xa5, 0x24, 0x1a, 0xa7, 0x84, 0x66, 0xe1, 0xd6, 0x9c, 0xa2, 0x7d, 0x54, 0x19, 0xde,
0xba, 0x25, 0x5a, 0x36, 0x3a, 0xff, 0x16, 0xc0, 0xde, 0xf6, 0xe6, 0x5a, 0x94, 0x98, 0x9a, 0x7b,
0x23, 0x31, 0x57, 0x06, 0x25, 0xdb, 0xe1, 0xc7, 0xb0, 0x1f, 0x53, 0xb3, 0xa5, 0x0f, 0xf8, 0x19,
0x9c, 0xb6, 0x42, 0xad, 0x9d, 0x2a, 0x35, 0x5e, 0x0b, 0x6b, 0x95, 0x28, 0xb0, 0xdd, 0xeb, 0xf1,
0x08, 0x4e, 0x62, 0x6a, 0x52, 0x65, 0x8a, 0x3f, 0x77, 0x3c, 0x7d, 0x4c, 0x4d, 0x42, 0xb5, 0x91,
0x62, 0x41, 0xb5, 0x63, 0x83, 0x16, 0x33, 0xa6, 0xe6, 0x6e, 0xf3, 0x23, 0xd2, 0x2f, 0xb5, 0xb0,
0xc8, 0x86, 0xdd, 0xbc, 0x54, 0xab, 0xb2, 0xbd, 0x43, 0x16, 0xf2, 0x03, 0xd8, 0x8d, 0xa9, 0xe9,
0x70, 0xdf, 0xce, 0x3f, 0x5d, 0x16, 0xca, 0x2d, 0xeb, 0xc5, 0x2c, 0xa3, 0xd5, 0xbc, 0x36, 0xca,
0xa1, 0xbc, 0x94, 0x56, 0x3d, 0xa2, 0xad, 0xe6, 0x8f, 0x42, 0x2f, 0x85, 0xd6, 0x62, 0x5e, 0x2e,
0xe6, 0xdd, 0xfb, 0xfb, 0x19, 0x00, 0x00, 0xff, 0xff, 0xc2, 0x6f, 0x68, 0x18, 0x89, 0x03, 0x00,
0x00,
}
| {} | identifier_body |
segment.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: segment.proto
/*
Package segment is a generated protocol buffer package.
It is generated from these files:
segment.proto
It has these top-level messages:
Segment
*/
package segment
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// road classes are based on OpenStreetMap usage of the "highway" tag.
// each value of the enumeration corresponds to one value of the tag,
// except for ClassServiceOther, which is used for service and other roads.
type Segment_RoadClass int32
const (
Segment_ClassMotorway Segment_RoadClass = 0
Segment_ClassTrunk Segment_RoadClass = 1
Segment_ClassPrimary Segment_RoadClass = 2
Segment_ClassSecondary Segment_RoadClass = 3
Segment_ClassTertiary Segment_RoadClass = 4
Segment_ClassUnclassified Segment_RoadClass = 5
Segment_ClassResidential Segment_RoadClass = 6
Segment_ClassServiceOther Segment_RoadClass = 7
)
var Segment_RoadClass_name = map[int32]string{
0: "ClassMotorway",
1: "ClassTrunk",
2: "ClassPrimary",
3: "ClassSecondary",
4: "ClassTertiary",
5: "ClassUnclassified",
6: "ClassResidential",
7: "ClassServiceOther",
}
var Segment_RoadClass_value = map[string]int32{
"ClassMotorway": 0,
"ClassTrunk": 1,
"ClassPrimary": 2,
"ClassSecondary": 3,
"ClassTertiary": 4,
"ClassUnclassified": 5,
"ClassResidential": 6,
"ClassServiceOther": 7,
}
func (x Segment_RoadClass) Enum() *Segment_RoadClass {
p := new(Segment_RoadClass)
*p = x
return p
}
func (x Segment_RoadClass) String() string {
return proto.EnumName(Segment_RoadClass_name, int32(x))
}
func (x *Segment_RoadClass) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Segment_RoadClass_value, data, "Segment_RoadClass")
if err != nil {
return err
}
*x = Segment_RoadClass(value)
return nil
}
func (Segment_RoadClass) EnumDescriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 0} }
// form of way describes the physical attributes of the road.
type Segment_FormOfWay int32
const (
// use FowUndefined if you do not know what physical attributes the road
// has.
Segment_FowUndefined Segment_FormOfWay = 0
// use FowMotorway for motorways.
Segment_FowMotorway Segment_FormOfWay = 1
// use FowMultipleCarriageway for multiple carriageway roads. that is, when
// there are separate OSM ways for each direction of travel.
Segment_FowMultipleCarriageway Segment_FormOfWay = 2
// use FowSingleCarriageway for other roads.
Segment_FowSingleCarriageway Segment_FormOfWay = 3
// use FowRoundabout for roundabouts
Segment_FowRoundabout Segment_FormOfWay = 4
// use FowTrafficSquare for roads which enclose an area, but which are not
// roundabouts
Segment_FowTrafficSquare Segment_FormOfWay = 5
// use FowSlipRoad for slip roads, ramps and other links.
Segment_FowSlipRoad Segment_FormOfWay = 6
// use FowOther for roads which do not match any of the above definitions,
// but for which the form of way is known.
Segment_FowOther Segment_FormOfWay = 7
)
var Segment_FormOfWay_name = map[int32]string{
0: "FowUndefined",
1: "FowMotorway",
2: "FowMultipleCarriageway",
3: "FowSingleCarriageway",
4: "FowRoundabout",
5: "FowTrafficSquare",
6: "FowSlipRoad",
7: "FowOther",
}
var Segment_FormOfWay_value = map[string]int32{
"FowUndefined": 0,
"FowMotorway": 1,
"FowMultipleCarriageway": 2,
"FowSingleCarriageway": 3,
"FowRoundabout": 4,
"FowTrafficSquare": 5,
"FowSlipRoad": 6,
"FowOther": 7,
}
func (x Segment_FormOfWay) Enum() *Segment_FormOfWay {
p := new(Segment_FormOfWay)
*p = x
return p
}
func (x Segment_FormOfWay) String() string {
return proto.EnumName(Segment_FormOfWay_name, int32(x))
}
func (x *Segment_FormOfWay) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Segment_FormOfWay_value, data, "Segment_FormOfWay")
if err != nil {
return err
}
*x = Segment_FormOfWay(value)
return nil
}
func (Segment_FormOfWay) EnumDescriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 1} }
type Segment struct {
// a segment is a list of at least two LocationReferences.
//
// all but the last LocationReference must contain a full set of data for
// each field, but the final one should consist of only a reference
// coordinate. any other information on the final LocationReference may be
// ignored.
//
// a segment with only a single LocationReference is invalid and may be
// ignored.
Lrps []*Segment_LocationReference `protobuf:"bytes,1,rep,name=lrps" json:"lrps,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment) Reset() { *m = Segment{} }
func (m *Segment) String() string { return proto.CompactTextString(m) }
func (*Segment) ProtoMessage() {}
func (*Segment) | () ([]byte, []int) { return fileDescriptorSegment, []int{0} }
func (m *Segment) GetLrps() []*Segment_LocationReference {
if m != nil {
return m.Lrps
}
return nil
}
type Segment_LatLng struct {
// lat & lng in EPSG:4326 multiplied by 10^7 and rounded to the nearest
// integer. this gives a precision of about 1.1cm (7/16ths of an inch)
// worst case at the equator.
Lat *int32 `protobuf:"fixed32,1,opt,name=lat" json:"lat,omitempty"`
Lng *int32 `protobuf:"fixed32,2,opt,name=lng" json:"lng,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment_LatLng) Reset() { *m = Segment_LatLng{} }
func (m *Segment_LatLng) String() string { return proto.CompactTextString(m) }
func (*Segment_LatLng) ProtoMessage() {}
func (*Segment_LatLng) Descriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 0} }
func (m *Segment_LatLng) GetLat() int32 {
if m != nil && m.Lat != nil {
return *m.Lat
}
return 0
}
func (m *Segment_LatLng) GetLng() int32 {
if m != nil && m.Lng != nil {
return *m.Lng
}
return 0
}
// a segment consists of multiple LocationReferences, each of which describes
// the road at a particular reference coordinate, or properties of the road
// between the current LocationReference and the next.
//
// the first and last LocationReference reference coordinates will usually be at
// "true" intersections, which are intersections where there are multiple
// paths through the intersection. This excludes "false" intersections where
// two roads cross, but there are not multiple paths, such as overpasses,
// bridges, changes of road name or properties.
//
// ___
// | / \ _4---5--
// -1-----2 \ __/ |
// | 3__/ |
//
// in the example above, the Segment consists of 5 LocationReferences,
// numbered 1-5. locations 1 & 5 are at true intersections with other roads
// and 2, 3 & 4 are intermediate LocationReferences inserted due to the length
// of the road.
//
// Occasionally, a LocationReference can be inserted along a road (i.e., not at
// a true intersection) to break long road segments into multiple OSMLR
// segments.
//
type Segment_LocationReference struct {
// the reference coordinate.
Coord *Segment_LatLng `protobuf:"bytes,1,opt,name=coord" json:"coord,omitempty"`
// bearing in degrees clockwise from true north between 0 and 359 - will
// generally fit in a couple of bytes varint.
//
// the bearing should be calculated toward a point 20m along the road from
// the reference coordinate towards the next LocationReference. if this is
// the final LocationReference, then omit the bearing.
//
// each LocationReference, of which there may be several in this Segment,
// except for the last must have a bearing calculated from the reference
// coordinate of this LocationReference.
Bear *uint32 `protobuf:"varint,2,opt,name=bear" json:"bear,omitempty"`
// road class at the reference coordinate.
StartFrc *Segment_RoadClass `protobuf:"varint,3,opt,name=start_frc,json=startFrc,enum=opentraffic.osmlr.Segment_RoadClass" json:"start_frc,omitempty"`
// form of way at the reference coordinate.
StartFow *Segment_FormOfWay `protobuf:"varint,4,opt,name=start_fow,json=startFow,enum=opentraffic.osmlr.Segment_FormOfWay" json:"start_fow,omitempty"`
// lowest road class (most important road) between the start coordinate
// and the next LocationReference.
LeastFrc *Segment_RoadClass `protobuf:"varint,5,opt,name=least_frc,json=leastFrc,enum=opentraffic.osmlr.Segment_RoadClass" json:"least_frc,omitempty"`
// length in meters, rounded to the nearest meter. the maximum allowed
// length is 15km, but most segments will be much shorter, so a varint
// representation makes sense.
//
// if the length between successive LocationReferences is more than 15km
// then you MUST insert an intermediate LocationReference.
Length *uint32 `protobuf:"varint,6,opt,name=length" json:"length,omitempty"`
// Is this LRP at a node/intersection (true) or along a road (false)?
// This hint can be useful wen associating OSMLR to routing graphs
AtNode *bool `protobuf:"varint,7,opt,name=at_node,json=atNode" json:"at_node,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment_LocationReference) Reset() { *m = Segment_LocationReference{} }
func (m *Segment_LocationReference) String() string { return proto.CompactTextString(m) }
func (*Segment_LocationReference) ProtoMessage() {}
func (*Segment_LocationReference) Descriptor() ([]byte, []int) {
return fileDescriptorSegment, []int{0, 1}
}
func (m *Segment_LocationReference) GetCoord() *Segment_LatLng {
if m != nil {
return m.Coord
}
return nil
}
func (m *Segment_LocationReference) GetBear() uint32 {
if m != nil && m.Bear != nil {
return *m.Bear
}
return 0
}
func (m *Segment_LocationReference) GetStartFrc() Segment_RoadClass {
if m != nil && m.StartFrc != nil {
return *m.StartFrc
}
return Segment_ClassMotorway
}
func (m *Segment_LocationReference) GetStartFow() Segment_FormOfWay {
if m != nil && m.StartFow != nil {
return *m.StartFow
}
return Segment_FowUndefined
}
func (m *Segment_LocationReference) GetLeastFrc() Segment_RoadClass {
if m != nil && m.LeastFrc != nil {
return *m.LeastFrc
}
return Segment_ClassMotorway
}
func (m *Segment_LocationReference) GetLength() uint32 {
if m != nil && m.Length != nil {
return *m.Length
}
return 0
}
func (m *Segment_LocationReference) GetAtNode() bool {
if m != nil && m.AtNode != nil {
return *m.AtNode
}
return false
}
func init() {
proto.RegisterType((*Segment)(nil), "opentraffic.osmlr.Segment")
proto.RegisterType((*Segment_LatLng)(nil), "opentraffic.osmlr.Segment.LatLng")
proto.RegisterType((*Segment_LocationReference)(nil), "opentraffic.osmlr.Segment.LocationReference")
proto.RegisterEnum("opentraffic.osmlr.Segment_RoadClass", Segment_RoadClass_name, Segment_RoadClass_value)
proto.RegisterEnum("opentraffic.osmlr.Segment_FormOfWay", Segment_FormOfWay_name, Segment_FormOfWay_value)
}
func init() { proto.RegisterFile("segment.proto", fileDescriptorSegment) }
var fileDescriptorSegment = []byte{
// 529 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0xcb, 0x6e, 0xdb, 0x3c,
0x10, 0x85, 0x23, 0x5f, 0xe4, 0x64, 0x72, 0xa3, 0x89, 0xfc, 0xf9, 0x85, 0xac, 0xdc, 0xa0, 0x0b,
0x2f, 0x12, 0x1b, 0xc8, 0xa6, 0xdb, 0xb6, 0x01, 0xb4, 0x4a, 0x9b, 0x42, 0x4a, 0x50, 0xa0, 0x9b,
0x80, 0x16, 0x47, 0x32, 0x51, 0x9a, 0xa3, 0x52, 0x54, 0x04, 0x3f, 0x53, 0xbb, 0xeb, 0xdb, 0xf4,
0x69, 0x0a, 0xd1, 0xaa, 0xd1, 0x22, 0x40, 0x8a, 0xae, 0x34, 0x3c, 0x3c, 0x73, 0xe6, 0x23, 0x05,
0xc2, 0x61, 0x85, 0xc5, 0x0a, 0x8d, 0x9b, 0x95, 0x96, 0x1c, 0xf1, 0x31, 0x95, 0x68, 0x9c, 0x15,
0x79, 0xae, 0xb2, 0x19, 0x55, 0x2b, 0x6d, 0xcf, 0xbf, 0x87, 0x30, 0x4a, 0x37, 0x26, 0xfe, 0x1a,
0x06, 0xda, 0x96, 0x55, 0x14, 0x4c, 0xfa, 0xd3, 0xfd, 0xab, 0x8b, 0xd9, 0x13, 0xf7, 0xac, 0x73,
0xce, 0x6e, 0x28, 0x13, 0x4e, 0x91, 0x49, 0x30, 0x47, 0x8b, 0x26, 0xc3, 0xc4, 0x77, 0x9e, 0x5d,
0x40, 0x78, 0x23, 0xdc, 0x8d, 0x29, 0x38, 0x83, 0xbe, 0x16, 0x2e, 0x0a, 0x26, 0xc1, 0xf4, 0x38,
0x69, 0x4b, 0xaf, 0x98, 0x22, 0xea, 0x75, 0x8a, 0x29, 0xce, 0x7e, 0xf4, 0x60, 0xfc, 0x24, 0x89,
0xbf, 0x82, 0x61, 0x46, 0x64, 0xa5, 0xef, 0xdd, 0xbf, 0x7a, 0xf1, 0x1c, 0x86, 0x9f, 0x95, 0x6c,
0xfc, 0x9c, 0xc3, 0x60, 0x81, 0xc2, 0xfa, 0x09, 0x87, 0x89, 0xaf, 0xf9, 0x1b, 0xd8, 0xab, 0x9c,
0xb0, 0xee, 0x21, 0xb7, 0x59, 0xd4, 0x9f, 0x04, 0xd3, 0xa3, 0xab, 0x97, 0xcf, 0x04, 0x26, 0x24,
0xe4, 0xb5, 0x16, 0x55, 0x95, 0xec, 0xfa, 0xb6, 0xd8, 0x66, 0xbf, 0x45, 0x50, 0x13, 0x0d, 0xfe,
0x1a, 0x11, 0x93, 0x5d, 0xdd, 0xe6, 0x1f, 0xc5, 0xfa, 0x57, 0x04, 0x35, 0x6d, 0x84, 0x46, 0x51,
0x6d, 0x28, 0x86, 0xff, 0x42, 0xe1, 0xdb, 0x5a, 0x8a, 0x53, 0x08, 0x35, 0x9a, 0xc2, 0x2d, 0xa3,
0xd0, 0x1f, 0xaf, 0x5b, 0xf1, 0xff, 0x61, 0x24, 0xdc, 0x83, 0x21, 0x89, 0xd1, 0x68, 0x12, 0x4c,
0x77, 0x93, 0x50, 0xb8, 0xf7, 0x24, 0xf1, 0xfc, 0x6b, 0x00, 0x7b, 0xdb, 0x20, 0x3e, 0x86, 0x43,
0x5f, 0xbc, 0x23, 0x47, 0xb6, 0x11, 0x6b, 0xb6, 0xc3, 0x8f, 0x00, 0xbc, 0x74, 0x67, 0x6b, 0xf3,
0x99, 0x05, 0x9c, 0xc1, 0x81, 0x5f, 0x7f, 0xb0, 0x6a, 0x25, 0xec, 0x9a, 0xf5, 0x38, 0x87, 0x23,
0xaf, 0xa4, 0x98, 0x91, 0x91, 0xad, 0xd6, 0xdf, 0x06, 0xdd, 0xa1, 0x75, 0xaa, 0x95, 0x06, 0xfc,
0x3f, 0x18, 0x7b, 0xe9, 0xde, 0x64, 0xed, 0x47, 0xe5, 0x0a, 0x25, 0x1b, 0xf2, 0x13, 0x60, 0x9b,
0x43, 0x60, 0xa5, 0x24, 0x1a, 0xa7, 0x84, 0x66, 0xe1, 0xd6, 0x9c, 0xa2, 0x7d, 0x54, 0x19, 0xde,
0xba, 0x25, 0x5a, 0x36, 0x3a, 0xff, 0x16, 0xc0, 0xde, 0xf6, 0xe6, 0x5a, 0x94, 0x98, 0x9a, 0x7b,
0x23, 0x31, 0x57, 0x06, 0x25, 0xdb, 0xe1, 0xc7, 0xb0, 0x1f, 0x53, 0xb3, 0xa5, 0x0f, 0xf8, 0x19,
0x9c, 0xb6, 0x42, 0xad, 0x9d, 0x2a, 0x35, 0x5e, 0x0b, 0x6b, 0x95, 0x28, 0xb0, 0xdd, 0xeb, 0xf1,
0x08, 0x4e, 0x62, 0x6a, 0x52, 0x65, 0x8a, 0x3f, 0x77, 0x3c, 0x7d, 0x4c, 0x4d, 0x42, 0xb5, 0x91,
0x62, 0x41, 0xb5, 0x63, 0x83, 0x16, 0x33, 0xa6, 0xe6, 0x6e, 0xf3, 0x23, 0xd2, 0x2f, 0xb5, 0xb0,
0xc8, 0x86, 0xdd, 0xbc, 0x54, 0xab, 0xb2, 0xbd, 0x43, 0x16, 0xf2, 0x03, 0xd8, 0x8d, 0xa9, 0xe9,
0x70, 0xdf, 0xce, 0x3f, 0x5d, 0x16, 0xca, 0x2d, 0xeb, 0xc5, 0x2c, 0xa3, 0xd5, 0xbc, 0x36, 0xca,
0xa1, 0xbc, 0x94, 0x56, 0x3d, 0xa2, 0xad, 0xe6, 0x8f, 0x42, 0x2f, 0x85, 0xd6, 0x62, 0x5e, 0x2e,
0xe6, 0xdd, 0xfb, 0xfb, 0x19, 0x00, 0x00, 0xff, 0xff, 0xc2, 0x6f, 0x68, 0x18, 0x89, 0x03, 0x00,
0x00,
}
| Descriptor | identifier_name |
segment.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: segment.proto
/*
Package segment is a generated protocol buffer package.
It is generated from these files:
segment.proto
It has these top-level messages:
Segment
*/
package segment
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// road classes are based on OpenStreetMap usage of the "highway" tag.
// each value of the enumeration corresponds to one value of the tag,
// except for ClassServiceOther, which is used for service and other roads.
type Segment_RoadClass int32
const (
Segment_ClassMotorway Segment_RoadClass = 0
Segment_ClassTrunk Segment_RoadClass = 1
Segment_ClassPrimary Segment_RoadClass = 2
Segment_ClassSecondary Segment_RoadClass = 3
Segment_ClassTertiary Segment_RoadClass = 4
Segment_ClassUnclassified Segment_RoadClass = 5
Segment_ClassResidential Segment_RoadClass = 6
Segment_ClassServiceOther Segment_RoadClass = 7
)
var Segment_RoadClass_name = map[int32]string{
0: "ClassMotorway",
1: "ClassTrunk",
2: "ClassPrimary",
3: "ClassSecondary",
4: "ClassTertiary",
5: "ClassUnclassified",
6: "ClassResidential",
7: "ClassServiceOther",
}
var Segment_RoadClass_value = map[string]int32{
"ClassMotorway": 0,
"ClassTrunk": 1,
"ClassPrimary": 2,
"ClassSecondary": 3,
"ClassTertiary": 4,
"ClassUnclassified": 5,
"ClassResidential": 6,
"ClassServiceOther": 7,
}
func (x Segment_RoadClass) Enum() *Segment_RoadClass {
p := new(Segment_RoadClass)
*p = x
return p
}
func (x Segment_RoadClass) String() string {
return proto.EnumName(Segment_RoadClass_name, int32(x))
}
func (x *Segment_RoadClass) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Segment_RoadClass_value, data, "Segment_RoadClass")
if err != nil {
return err
}
*x = Segment_RoadClass(value)
return nil
}
func (Segment_RoadClass) EnumDescriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 0} }
// form of way describes the physical attributes of the road.
type Segment_FormOfWay int32
const (
// use FowUndefined if you do not know what physical attributes the road
// has.
Segment_FowUndefined Segment_FormOfWay = 0
// use FowMotorway for motorways.
Segment_FowMotorway Segment_FormOfWay = 1
// use FowMultipleCarriageway for multiple carriageway roads. that is, when
// there are separate OSM ways for each direction of travel.
Segment_FowMultipleCarriageway Segment_FormOfWay = 2
// use FowSingleCarriageway for other roads.
Segment_FowSingleCarriageway Segment_FormOfWay = 3
// use FowRoundabout for roundabouts
Segment_FowRoundabout Segment_FormOfWay = 4
// use FowTrafficSquare for roads which enclose an area, but which are not
// roundabouts
Segment_FowTrafficSquare Segment_FormOfWay = 5
// use FowSlipRoad for slip roads, ramps and other links.
Segment_FowSlipRoad Segment_FormOfWay = 6
// use FowOther for roads which do not match any of the above definitions,
// but for which the form of way is known.
Segment_FowOther Segment_FormOfWay = 7
)
var Segment_FormOfWay_name = map[int32]string{
0: "FowUndefined",
1: "FowMotorway",
2: "FowMultipleCarriageway",
3: "FowSingleCarriageway",
4: "FowRoundabout",
5: "FowTrafficSquare",
6: "FowSlipRoad",
7: "FowOther",
}
var Segment_FormOfWay_value = map[string]int32{
"FowUndefined": 0,
"FowMotorway": 1,
"FowMultipleCarriageway": 2,
"FowSingleCarriageway": 3,
"FowRoundabout": 4,
"FowTrafficSquare": 5,
"FowSlipRoad": 6,
"FowOther": 7,
}
func (x Segment_FormOfWay) Enum() *Segment_FormOfWay {
p := new(Segment_FormOfWay)
*p = x
return p
}
func (x Segment_FormOfWay) String() string {
return proto.EnumName(Segment_FormOfWay_name, int32(x))
}
func (x *Segment_FormOfWay) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(Segment_FormOfWay_value, data, "Segment_FormOfWay")
if err != nil {
return err
}
*x = Segment_FormOfWay(value)
return nil
}
func (Segment_FormOfWay) EnumDescriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 1} }
type Segment struct {
// a segment is a list of at least two LocationReferences.
//
// all but the last LocationReference must contain a full set of data for
// each field, but the final one should consist of only a reference
// coordinate. any other information on the final LocationReference may be
// ignored.
//
// a segment with only a single LocationReference is invalid and may be
// ignored.
Lrps []*Segment_LocationReference `protobuf:"bytes,1,rep,name=lrps" json:"lrps,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment) Reset() { *m = Segment{} }
func (m *Segment) String() string { return proto.CompactTextString(m) }
func (*Segment) ProtoMessage() {}
func (*Segment) Descriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0} }
func (m *Segment) GetLrps() []*Segment_LocationReference {
if m != nil |
return nil
}
type Segment_LatLng struct {
// lat & lng in EPSG:4326 multiplied by 10^7 and rounded to the nearest
// integer. this gives a precision of about 1.1cm (7/16ths of an inch)
// worst case at the equator.
Lat *int32 `protobuf:"fixed32,1,opt,name=lat" json:"lat,omitempty"`
Lng *int32 `protobuf:"fixed32,2,opt,name=lng" json:"lng,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment_LatLng) Reset() { *m = Segment_LatLng{} }
func (m *Segment_LatLng) String() string { return proto.CompactTextString(m) }
func (*Segment_LatLng) ProtoMessage() {}
func (*Segment_LatLng) Descriptor() ([]byte, []int) { return fileDescriptorSegment, []int{0, 0} }
func (m *Segment_LatLng) GetLat() int32 {
if m != nil && m.Lat != nil {
return *m.Lat
}
return 0
}
func (m *Segment_LatLng) GetLng() int32 {
if m != nil && m.Lng != nil {
return *m.Lng
}
return 0
}
// a segment consists of multiple LocationReferences, each of which describes
// the road at a particular reference coordinate, or properties of the road
// between the current LocationReference and the next.
//
// the first and last LocationReference reference coordinates will usually be at
// "true" intersections, which are intersections where there are multiple
// paths through the intersection. This excludes "false" intersections where
// two roads cross, but there are not multiple paths, such as overpasses,
// bridges, changes of road name or properties.
//
// ___
// | / \ _4---5--
// -1-----2 \ __/ |
// | 3__/ |
//
// in the example above, the Segment consists of 5 LocationReferences,
// numbered 1-5. locations 1 & 5 are at true intersections with other roads
// and 2, 3 & 4 are intermediate LocationReferences inserted due to the length
// of the road.
//
// Occasionally, a LocationReference can be inserted along a road (i.e., not at
// a true intersection) to break long road segments into multiple OSMLR
// segments.
//
type Segment_LocationReference struct {
// the reference coordinate.
Coord *Segment_LatLng `protobuf:"bytes,1,opt,name=coord" json:"coord,omitempty"`
// bearing in degrees clockwise from true north between 0 and 359 - will
// generally fit in a couple of bytes varint.
//
// the bearing should be calculated toward a point 20m along the road from
// the reference coordinate towards the next LocationReference. if this is
// the final LocationReference, then omit the bearing.
//
// each LocationReference, of which there may be several in this Segment,
// except for the last must have a bearing calculated from the reference
// coordinate of this LocationReference.
Bear *uint32 `protobuf:"varint,2,opt,name=bear" json:"bear,omitempty"`
// road class at the reference coordinate.
StartFrc *Segment_RoadClass `protobuf:"varint,3,opt,name=start_frc,json=startFrc,enum=opentraffic.osmlr.Segment_RoadClass" json:"start_frc,omitempty"`
// form of way at the reference coordinate.
StartFow *Segment_FormOfWay `protobuf:"varint,4,opt,name=start_fow,json=startFow,enum=opentraffic.osmlr.Segment_FormOfWay" json:"start_fow,omitempty"`
// lowest road class (most important road) between the start coordinate
// and the next LocationReference.
LeastFrc *Segment_RoadClass `protobuf:"varint,5,opt,name=least_frc,json=leastFrc,enum=opentraffic.osmlr.Segment_RoadClass" json:"least_frc,omitempty"`
// length in meters, rounded to the nearest meter. the maximum allowed
// length is 15km, but most segments will be much shorter, so a varint
// representation makes sense.
//
// if the length between successive LocationReferences is more than 15km
// then you MUST insert an intermediate LocationReference.
Length *uint32 `protobuf:"varint,6,opt,name=length" json:"length,omitempty"`
// Is this LRP at a node/intersection (true) or along a road (false)?
// This hint can be useful wen associating OSMLR to routing graphs
AtNode *bool `protobuf:"varint,7,opt,name=at_node,json=atNode" json:"at_node,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Segment_LocationReference) Reset() { *m = Segment_LocationReference{} }
func (m *Segment_LocationReference) String() string { return proto.CompactTextString(m) }
func (*Segment_LocationReference) ProtoMessage() {}
func (*Segment_LocationReference) Descriptor() ([]byte, []int) {
return fileDescriptorSegment, []int{0, 1}
}
func (m *Segment_LocationReference) GetCoord() *Segment_LatLng {
if m != nil {
return m.Coord
}
return nil
}
func (m *Segment_LocationReference) GetBear() uint32 {
if m != nil && m.Bear != nil {
return *m.Bear
}
return 0
}
func (m *Segment_LocationReference) GetStartFrc() Segment_RoadClass {
if m != nil && m.StartFrc != nil {
return *m.StartFrc
}
return Segment_ClassMotorway
}
func (m *Segment_LocationReference) GetStartFow() Segment_FormOfWay {
if m != nil && m.StartFow != nil {
return *m.StartFow
}
return Segment_FowUndefined
}
func (m *Segment_LocationReference) GetLeastFrc() Segment_RoadClass {
if m != nil && m.LeastFrc != nil {
return *m.LeastFrc
}
return Segment_ClassMotorway
}
func (m *Segment_LocationReference) GetLength() uint32 {
if m != nil && m.Length != nil {
return *m.Length
}
return 0
}
func (m *Segment_LocationReference) GetAtNode() bool {
if m != nil && m.AtNode != nil {
return *m.AtNode
}
return false
}
func init() {
proto.RegisterType((*Segment)(nil), "opentraffic.osmlr.Segment")
proto.RegisterType((*Segment_LatLng)(nil), "opentraffic.osmlr.Segment.LatLng")
proto.RegisterType((*Segment_LocationReference)(nil), "opentraffic.osmlr.Segment.LocationReference")
proto.RegisterEnum("opentraffic.osmlr.Segment_RoadClass", Segment_RoadClass_name, Segment_RoadClass_value)
proto.RegisterEnum("opentraffic.osmlr.Segment_FormOfWay", Segment_FormOfWay_name, Segment_FormOfWay_value)
}
func init() { proto.RegisterFile("segment.proto", fileDescriptorSegment) }
var fileDescriptorSegment = []byte{
// 529 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0xcb, 0x6e, 0xdb, 0x3c,
0x10, 0x85, 0x23, 0x5f, 0xe4, 0x64, 0x72, 0xa3, 0x89, 0xfc, 0xf9, 0x85, 0xac, 0xdc, 0xa0, 0x0b,
0x2f, 0x12, 0x1b, 0xc8, 0xa6, 0xdb, 0xb6, 0x01, 0xb4, 0x4a, 0x9b, 0x42, 0x4a, 0x50, 0xa0, 0x9b,
0x80, 0x16, 0x47, 0x32, 0x51, 0x9a, 0xa3, 0x52, 0x54, 0x04, 0x3f, 0x53, 0xbb, 0xeb, 0xdb, 0xf4,
0x69, 0x0a, 0xd1, 0xaa, 0xd1, 0x22, 0x40, 0x8a, 0xae, 0x34, 0x3c, 0x3c, 0x73, 0xe6, 0x23, 0x05,
0xc2, 0x61, 0x85, 0xc5, 0x0a, 0x8d, 0x9b, 0x95, 0x96, 0x1c, 0xf1, 0x31, 0x95, 0x68, 0x9c, 0x15,
0x79, 0xae, 0xb2, 0x19, 0x55, 0x2b, 0x6d, 0xcf, 0xbf, 0x87, 0x30, 0x4a, 0x37, 0x26, 0xfe, 0x1a,
0x06, 0xda, 0x96, 0x55, 0x14, 0x4c, 0xfa, 0xd3, 0xfd, 0xab, 0x8b, 0xd9, 0x13, 0xf7, 0xac, 0x73,
0xce, 0x6e, 0x28, 0x13, 0x4e, 0x91, 0x49, 0x30, 0x47, 0x8b, 0x26, 0xc3, 0xc4, 0x77, 0x9e, 0x5d,
0x40, 0x78, 0x23, 0xdc, 0x8d, 0x29, 0x38, 0x83, 0xbe, 0x16, 0x2e, 0x0a, 0x26, 0xc1, 0xf4, 0x38,
0x69, 0x4b, 0xaf, 0x98, 0x22, 0xea, 0x75, 0x8a, 0x29, 0xce, 0x7e, 0xf4, 0x60, 0xfc, 0x24, 0x89,
0xbf, 0x82, 0x61, 0x46, 0x64, 0xa5, 0xef, 0xdd, 0xbf, 0x7a, 0xf1, 0x1c, 0x86, 0x9f, 0x95, 0x6c,
0xfc, 0x9c, 0xc3, 0x60, 0x81, 0xc2, 0xfa, 0x09, 0x87, 0x89, 0xaf, 0xf9, 0x1b, 0xd8, 0xab, 0x9c,
0xb0, 0xee, 0x21, 0xb7, 0x59, 0xd4, 0x9f, 0x04, 0xd3, 0xa3, 0xab, 0x97, 0xcf, 0x04, 0x26, 0x24,
0xe4, 0xb5, 0x16, 0x55, 0x95, 0xec, 0xfa, 0xb6, 0xd8, 0x66, 0xbf, 0x45, 0x50, 0x13, 0x0d, 0xfe,
0x1a, 0x11, 0x93, 0x5d, 0xdd, 0xe6, 0x1f, 0xc5, 0xfa, 0x57, 0x04, 0x35, 0x6d, 0x84, 0x46, 0x51,
0x6d, 0x28, 0x86, 0xff, 0x42, 0xe1, 0xdb, 0x5a, 0x8a, 0x53, 0x08, 0x35, 0x9a, 0xc2, 0x2d, 0xa3,
0xd0, 0x1f, 0xaf, 0x5b, 0xf1, 0xff, 0x61, 0x24, 0xdc, 0x83, 0x21, 0x89, 0xd1, 0x68, 0x12, 0x4c,
0x77, 0x93, 0x50, 0xb8, 0xf7, 0x24, 0xf1, 0xfc, 0x6b, 0x00, 0x7b, 0xdb, 0x20, 0x3e, 0x86, 0x43,
0x5f, 0xbc, 0x23, 0x47, 0xb6, 0x11, 0x6b, 0xb6, 0xc3, 0x8f, 0x00, 0xbc, 0x74, 0x67, 0x6b, 0xf3,
0x99, 0x05, 0x9c, 0xc1, 0x81, 0x5f, 0x7f, 0xb0, 0x6a, 0x25, 0xec, 0x9a, 0xf5, 0x38, 0x87, 0x23,
0xaf, 0xa4, 0x98, 0x91, 0x91, 0xad, 0xd6, 0xdf, 0x06, 0xdd, 0xa1, 0x75, 0xaa, 0x95, 0x06, 0xfc,
0x3f, 0x18, 0x7b, 0xe9, 0xde, 0x64, 0xed, 0x47, 0xe5, 0x0a, 0x25, 0x1b, 0xf2, 0x13, 0x60, 0x9b,
0x43, 0x60, 0xa5, 0x24, 0x1a, 0xa7, 0x84, 0x66, 0xe1, 0xd6, 0x9c, 0xa2, 0x7d, 0x54, 0x19, 0xde,
0xba, 0x25, 0x5a, 0x36, 0x3a, 0xff, 0x16, 0xc0, 0xde, 0xf6, 0xe6, 0x5a, 0x94, 0x98, 0x9a, 0x7b,
0x23, 0x31, 0x57, 0x06, 0x25, 0xdb, 0xe1, 0xc7, 0xb0, 0x1f, 0x53, 0xb3, 0xa5, 0x0f, 0xf8, 0x19,
0x9c, 0xb6, 0x42, 0xad, 0x9d, 0x2a, 0x35, 0x5e, 0x0b, 0x6b, 0x95, 0x28, 0xb0, 0xdd, 0xeb, 0xf1,
0x08, 0x4e, 0x62, 0x6a, 0x52, 0x65, 0x8a, 0x3f, 0x77, 0x3c, 0x7d, 0x4c, 0x4d, 0x42, 0xb5, 0x91,
0x62, 0x41, 0xb5, 0x63, 0x83, 0x16, 0x33, 0xa6, 0xe6, 0x6e, 0xf3, 0x23, 0xd2, 0x2f, 0xb5, 0xb0,
0xc8, 0x86, 0xdd, 0xbc, 0x54, 0xab, 0xb2, 0xbd, 0x43, 0x16, 0xf2, 0x03, 0xd8, 0x8d, 0xa9, 0xe9,
0x70, 0xdf, 0xce, 0x3f, 0x5d, 0x16, 0xca, 0x2d, 0xeb, 0xc5, 0x2c, 0xa3, 0xd5, 0xbc, 0x36, 0xca,
0xa1, 0xbc, 0x94, 0x56, 0x3d, 0xa2, 0xad, 0xe6, 0x8f, 0x42, 0x2f, 0x85, 0xd6, 0x62, 0x5e, 0x2e,
0xe6, 0xdd, 0xfb, 0xfb, 0x19, 0x00, 0x00, 0xff, 0xff, 0xc2, 0x6f, 0x68, 0x18, 0x89, 0x03, 0x00,
0x00,
}
| {
return m.Lrps
} | conditional_block |
table-form.component.ts | import {Component, OnInit, Input, Output, EventEmitter, TemplateRef, OnDestroy, ViewChild, Host} from '@angular/core';
// import { trigger, transition, animate,style} from '@angular/animations';
import {NzTabChangeEvent, NzTableComponent} from 'ng-zorro-antd';
import { UserinfoService } from '@service/userinfo-service.service';
import { GlobalService } from '@service/global-service.service';
import { urls } from '@model/url';
import { HttpUtilService } from '@service/http-util.service';
import { Utils } from '@util/utils';
import { toDemical } from '@validator/validator';
import { GridRowSource } from '../simple-page/grid-block/grid-row.directive';
import {Subscription, Observable, Subject} from 'rxjs';
@Component({
selector: 'app-table-form',
templateUrl: `./table-form.component.html`,
styleUrls: ['./table-form.component.css'],
providers: [
GridRowSource,
],
/* animations:[
trigger('gridAnimate',[
transition(':enter',[
style({opacity:0,height:0,transform:'translate(30px,0)'}),
animate('0.3s ease-in',style({opacity:1,height:'auto',transform:'translate(0,0)',background:'#fffeee'}))
]),
transition(':leave',[
animate('0.3s ease-out',style({opacity:0,height:0,transform:'translate(30px,0)'}))
])
])
]*/
})
export class TableFormComponent implements OnInit,OnDestroy {
// tempfindSet: any = { "parameter": "companyName", "parameterSend": "companyId", "name": "发票抬头", "formId": "company_pop" };
// 数据弹框传入参数配置格式
@ViewChild('nzTable') nzTableComponent: NzTableComponent;
private gloPageSub: Subscription;
private gloColSub: Subscription;
private gloSelectedSub: Subscription;
private _columns: any[];
tableHeight = '500px';
tableWidth = '100%';
allChecked = false;
indeterminate = false;
updateData: Array<any> = [];
dataHeader: Array<any> = []; // 表头
dataHeaderRefresh = false; // 判断表头是否刷新
pageIndex = 1; // 当前页码
_trSelected: boolean[] = [];
_gridOperate: any = {}; // 操作{show:false,title:'操作',width:'120px',template:''};
_calTotalFiled: any = {};
_totalPage = 0;
_pageSizeOptions: number[] = [30, 100, 500, 1000, 5000, 1000000]; // 1000000 显示全部
headerFilter: any = {}; // filter数据
filterSearch: any = {}; // filter条件
allData: any[] = [];
curFormId: string;
virtualMinBuffer: number;
virtualMaxBuffer: number;
private constData: any[] = [];
private currentChange = false;
private trSelectHand = false; // 判断是否
@Input() noGetStaticData: boolean; // 不用获取静态数据 设置True
@Input() closeBgTr = true; // tr换行背景色是否显示
@Input() selfTableHeight: string | null | undefined; // 表格高度自适应,如果不需要自适应可设置该参数 自定义table高度
@Input() extraTableHeight: number; // 正常模式页面额外的高度
@Output() currentTableHeightFun = new EventEmitter<number>(); // 如果表格高度是自适应的,抛出表格高度
@Output() inpEmit = new EventEmitter<any>();
@Input() tabArr: any[] = []; // tab头数据 不要用
@Input() tabIndex = 0; // 激活的面板,默认第一个
@Output() tabIndexChange: EventEmitter<number> = new EventEmitter<number>();
@Input() updateHeader = false; // 点击tab是否更新表头
@Output() tabResultFun = new EventEmitter<any>(); // tab头点击事件
@Input() set columns(val: any[]) { // 自定义表头 [{colCname:'中文名',colEname:'英文名',visible:'XSBJ10'|'XSBJ20'}]
this._columns = val;
if (Array.isArray(this._columns)) { this.columnsFilter(this._columns); }
}
get columns() {
return this._columns;
}
@Input() checkBoxWidth: string;
@Input() listWidth: string; // list宽度
@Input() trSelectedShow = false; // 选中是否显示样式
@Input() set trSelected(val: boolean[]) { // index以作区分
if (Array.isArray(val)) {
this._trSelected = val;
this.trSelectHand = true;
}
}
get trSelected() {
return this._trSelected;
}
@Input() formId: string; // formId 优先级高
@Input() gridId: string; // 一个页面多个列表必填,传gridId以进行区分,
@Input() isCheckBox = true; // 是否显示选择框,默认显示
@Input() disabledAllCheckBox:boolean = false;//是否禁止全选,默认允许
@Input() isMutli: boolean; // 选择框是否多选
@Input() nzShowPagination = true; // 列表是否显示分页器 ,默认显示
@Input() showCount = true; // 是否显示选择条数
@Input() pageSize = 100; // 条数 默认100
@Input() paginationRef: TemplateRef<any>;
@Input() tdTemplate:TemplateRef<any>;
@Input() set pageSizeOptions(val: number[]) { // 页码自定义
this._pageSizeOptions = val;
}
get pageSizeOptions() {
return this._pageSizeOptions;
}
@Input() colSet = true; // 列表设置是否显示
@Input() isResetFilter: boolean; // 是否对filte进行重置
// 列表数据
@Input() set dataSet(val: any[]) { // 只接收外部传进的值
if (this.isResetFilter) {
this.filterSearch = {}; // filter重置 已筛选的存储
}
this.currentChange = false;
this.allData = [];
this.constData = Array.isArray(val) ? val : []; // 数量不可变
window.setTimeout(() => {
this.allData = [...this.constData];
this.dataFilterResult();
this.headerFilterData(this.allData);
},300);
}
// 数据总数
@Input() set totalPage(val: number) {
this._totalPage = val;
} // 数据总数
get totalPage() {
return this._totalPage;
}
@Input() listLoading: boolean; // list加载
@Input() pageFun: boolean; // true页码条数自写
@Input() set gridOperate(val: any) { // 表格操作自定义
if (Utils.isObject(val)) {
this._gridOperate.show = val.show;
this._gridOperate.title = val.title || '操作';
this._gridOperate.width = val.width || '120px';
this._gridOperate.template = val.template;
}
} // 操作{show:false,title:'操作',width:'120px',template:''};
@Input() caculateEnameArr: any = [
];
@Input() set refresh(val:any){ //用于初始化表格中存在已选数据,选中条数的变化,想触发必须更改成不同值
this.refreshStatus();
}
@Input() popData: any;
@Input() popTableData: any = [];
@Input() searchParamFiled: any; // pop弹框调接口要传的参数名
@Input() searchParamFiledNot: any; // pop弹框调接口要传的参数名不必传 {eName:ttrue},格式
@Input() tableTitle: string|TemplateRef<void>; // 表格标题
@Input() tableFooter: string|TemplateRef<void>; // 表格尾部
@Input() selectedChange = false; // 全选事件订阅
@Input() searchListFiled: any; // pop弹窗取当前数据哪个字段的值
@Input() staticCode: (data: string, item?: any) => Observable<any[]>; // 组件内静态数据自定义 [{name: null, value: null}]
@Input() needStaticCode: boolean; // 当没有apiParam时需要获取静态数据时设为true
@Output() updateDataResult = new EventEmitter<any>(); // 选中数据的结果以及点击列表选择框事件
@Output() inputBlurFun = new EventEmitter<any>(); // input Blur失焦
@Output() modelChange = new EventEmitter<any>(); // input值改变事件
@Output() listClick = new EventEmitter<any>(); // list点击事件
@Output() listOver = new EventEmitter<any>(); // list鼠标移入事件
@Output() listLeave = new EventEmitter<any>(); // list鼠标移出事件
@Output() pageIndexEmit = new EventEmitter<any>(); // 页码点击事件
@Output() pageSizeEmit = new EventEmitter<any>(); // 条数点击事件
@Output() userColumnsEmit = new EventEmitter<any>(); // 表头数据返回
@Output() currentPageDataChangeEmit = new EventEmitter<any>(); // 当前页面数据更新
@Output() selectedChangeEmit = new EventEmitter<any>(); // 勾选事件,抛出数据
@Output() keyboardEmit = new EventEmitter<any>()
constructor(private http: HttpUtilService, private info: UserinfoService, private globalSer: GlobalService,
@Host() private rowSource: GridRowSource) {
}
private restoreRender(item: any) {
if (item.type === 'template') {
const tplName = `tpl-${item.gridId}-${item.colEname}`;
item.template = this.rowSource.getRow(tplName);
if (!item.template) {
console.error(`template类型列配置错误!templateId:${tplName}`);
}
}
}
ngOnInit() {
this.virtualMinBuffer = parseInt(this.selfTableHeight || this.tableHeight, 0);
this.virtualMaxBuffer = this.virtualMinBuffer + 100;
this.curFormId = this.formId || this.info.APPINFO.formId;
if (!this._columns) {
this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId,gridId: this.gridId});
}
this.gloPageSub = this.globalSer.pageNumEmitter.subscribe( (x: any) => {
if (this.curFormId === x.formId) {
this.pageIndex = x.page;
}
});
this.gloColSub = this.globalSer.colChangeEmitter.subscribe(
(res: any) => this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId, gridId: this.gridId})
);
for (const c of this.caculateEnameArr) {
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
if (this.selectedChange) {
this.gloSelectedSub = this.globalSer.tableSelectedChangeEmitter.subscribe(
res => {
if (res.gridId === this.gridId) {
this.checkAll(res.checked)
}
}
);
}
this.globalSer.tableGridIdToSearchForm.emit({'gridId': this.gridId, 'formId': this.curFormId}); // 向查询区域传递gridId;
this.globalSer.routerEvent.subscribe((x: any) => {
if (x.isSys) {
this.nzTableComponent.cdkVirtualScrollViewport.scrollToIndex(0);
}
});
this.globalSer.pageNumEmitter.emit(
{formId: this.curFormId, gridId: this.gridId, page: 1, length: this.pageSize, search: true}
); // 初始化的条数
}
tableHeightFun(data: number) { // 表格自适应抛出数据
this.tableHeight = `${data}px`;
this.currentTableHeightFun.emit(data);
this.virtualMinBuffer = data;
this.virtualMaxBuffer = this.virtualMinBuffer + 100;
}
// 选择逻辑
refreshStatus(data?: any): void {
let currentChecked: boolean;
if (data && !this.isMutli) {
currentChecked = data.checked;
}
this.allData.map((y: any) => !this.isMutli && (y.checked = false));
if (data && !this.isMutli) {
data.checked = currentChecked; // 单选情况下设置checked;
}
this.allCheckBoxStyle();
this.updateData = this.constData.filter((x: any) => x.checked);
this.updateDataResult.emit(this.updateData); // 选中的结果数据
// 统计需要统计的字段
for (const c of this.caculateEnameArr){
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
// for (let elem of this.updateData) {
// // this._calTotalFiled[c.field] += Number(elem[c.field]);
// this._calTotalFiled[c.field] = toDemical(Utils.add(this._calTotalFiled[c.field], elem[c.field]),c.demLength);
// }
// 2019-1-14郑鑫修改, 将每次求和都四舍五入改为将最后结果四舍五入
this._calTotalFiled[c.field] = toDemical(
this.updateData.map(item => Number(item[c.field]) || 0).reduce((acc, cur) => acc + cur, 0), c.demLength
);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
if (data) {
this.selectedChangeEmit.emit([data]);
} else {
this.selectedChangeEmit.emit(this.allData);
}
}
private allCheckBoxStyle() {
const dataArr = this.allData.filter(value => !value.disabled);
const allChecked = dataArr[0] ? dataArr.every(value => value.checked === true) : false;
const allUnChecked = dataArr.every(value => !value.checked);
this.allChecked = allChecked;
this.indeterminate = (!allChecked) && (!allUnChecked);
}
checkAll(value: boolean): void {
this.allData = this.allData ? this.allData : [];
this.allData.forEach(data => {
if (!data.disabled) {
data.checked = value;
}
});
this.refreshStatus();
}
// 表头获取
getUserColumns(param: any): void { // 获取表头
this.http.post(urls.columns, param).then( (res: any) => { // 获取表头
if (res.success) {
let dataHeader: any;
dataHeader = res.data.data;
dataHeader.map((item: any) => {
item.apiParameter = item.apiParameter && JSON.parse(item.apiParameter) || {};
});
this.columnsFilter(dataHeader, 'http');
this.dataHeader.forEach(
item => {
if (item.type === 'select' && item.visible === 'XSBJ10' ) { // 新增可见调用
item.apiParameter.optionList =item.apiParameter.optionList || [];
// 添加noSetCode 属性为true 可以关闭组件内静态数据 的获取
// tslint:disable-next-line: max-line-length 当没有设置valueSetCode时需要staticCode时添加edit条件,需要设置needStaticCode为true
if ((item.apiParameter.valueSetCode || (item.edit === 'BJBJ10' && this.needStaticCode)) && !item.noSetCode && !this.noGetStaticData) {
this.getSelectData(item.apiParameter.optionList,item.apiParameter.valueSetCode, item);
}
}
this.restoreRender(item);
}
);
}
});
}
// 表头获取数据刷选
columnsFilter(dataHeader: any[], type?: string) {
let len = 0, width = 0;
this.dataHeader = dataHeader.filter((x: any) => x.visible === 'XSBJ10');
this.dataHeader.map((item: any) => {
width = parseFloat(item.width) ? parseFloat(item.width) : 120;
len = Utils.add(len, width);
item.width = `${width}px`;
});
const checkBoxWidth = this.isCheckBox ? 50 : 0;
const gridOperateWidth = this._gridOperate.show ? parseFloat(this._gridOperate.width) : 0;
this.tableWidth = `${ Utils.add(Utils.add(len, checkBoxWidth), gridOperateWidth)}px`; // 默认加上选择框的
// 返回表头数据
if (type === 'http') { this.userColumnsEmit.emit(this.dataHeader); }
this.dataHeaderRefresh = true; // 表头是否刷新
window.setTimeout(() => {
this.dataHeaderRefresh = false;
});
}
// 改变页码
pageChange(n: number): void {
this.pageIndex = n;
!this.pageFun ? this.globalSer.pageNumEmitter.emit({formId: this.curFormId, gridId: this.gridId, page: n,
// tslint:disable-next-line:no-unused-expression
length: this.pageSize}) : null;
this.pageIndexEmit.emit(n);
}
// 改变条数
pageSizeFun(n: number): void {
this.pageIndex = 1;
// tslint:disable-next-line:no-unused-expression
!this.pageFun ? this.globalSer.pageNumEmitter.emit({formId: this.curFormId, gridId: this.gridId, page: 1, length: n}) : null;
this.pageSizeEmit.emit(n);
}
// 当前展示数据改变要做的处理
currentPageDataChange(data: any): void {
if (this.currentChange) {
return;
}
this.allChecked = false;
this.indeterminate = false;
this.allCheckBoxStyle();
this.updateData = [];
if (this.constData && this.constData[0]) {
this.updateData = this.constData.filter((x: any) => x.checked);
}
if (!this.trSelectHand) { this._trSelected = []; } // 如果传trSelected属性,则刷新列表手动清除选中样式
this.currentPageDataChangeEmit.emit(data);
for (const c of this.caculateEnameArr) {
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
}
// tab头点击
tabClick(data: any): void { // 不要用
this.tabResultFun.emit(data);
setTimeout( () => {
if (this.updateHeader) {
if (!this._columns) {
this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId, gridId: this.gridId}); // 参数待定
}
}
}, 100);
}
// input失焦事件
inputBlur(data: any, dex: number){
this.inputBlurFun.emit({data: data, index: dex});
}
// input实时改变事件
onChange(val: string, data: any, n: any, h: any, ind: any){
this.modelChange.emit({val: val, data: data, input: n, header: h, index: ind});
}
// list鼠标移入事件
listOverFun(data: any, index: number){
this.listOver.emit({'data': data, 'index': index});
}
// list鼠标移出事件
listLeaveFun(data: any, index: number){
this.listLeave.emit({'data': data, 'index': index});
}
// 表格列表点击事件
tdClick(data: any, index: number, bol: boolean){
if (!bol) {
return;
}
this._trSelected = [];
this._trSelected[index] = true;
this.listClick.emit(data);
this.allCheckBoxStyle();
this.updateData = [];
if (this.constData && this.constData[0]) {
this.updateData = this.constData.filter((x: any) => x.checked);
}
}
// 操作
inpEmitFun(data: any, dex: any, rowid: any, n: any) {
this.inpEmit.emit(
{ inpName: data.inpName || '', inpValue: data.inpValue || '',
selData: data.selData || {}, index: dex, rowid: rowid, eName: n.colEname}
);
}
//
filterChange(bool: Boolean, viewer: any, header: any){
if (!bool) {
viewer.scrollToIndex(0);
const name = header.colEname;
header._checked = [];
this.headerFilter[name].forEach(x => x.checked && header._checked.push(x.text));
this.filterData(header._checked, name);
}
}
// sort 排序
sort(flag: any, index: any) {
// flag 排序类型 null descend ascend
const type = !flag ? 'descend' : flag === 'descend' ? 'ascend' : null;
index._sortType = type;
// if (!type){
// this.allData = [...this.constData];
// return;
// }
this.allData = this.allData.sort((x: any, y: any) => {
let a = x[index.colEname], b = y[index.colEname];
if (index.type === 'number') {
a = Number(a || 0);
b = Number(b || 0);
} else {
a = JSON.stringify(a || null);
b = JSON.stringify(b || null);
if (index.type === 'string') {
return (type === 'ascend') ? a.localeCompare(b, 'zh') : b.localeCompare(a, 'zh');
}
}
return (type === 'ascend') ? ( a > b ? 1 : -1) : (b > a ? 1 : -1);
} );
this.currentChange = true;
this.allData = [...this.allData];
}
// filter 关闭
filterClose(flag: string, header: any){
const name = header.colEname;
header._checked = [];
if (flag === 'confirm') {
this.headerFilter[name].forEach(x => x.checked && header._checked.push(x.text));
} else if (flag === 'reset') {
this.headerFilter[name].forEach(x => x.checked = false);
header._filterVal = null;
}
header._filterVisible = false;
this.filterData( header._checked, name);
}
// filter 刷选
filterData(data: any[]|any, name: string){
this.filterSearch[name] = [...( Array.isArray(data) && data || [])];
if (!Array.isArray(data) || (data.length < 1) ) { // 每列重置
this.updateData.forEach((item: any) => item.checked = false); // 选中重置
this.updateData = [];
this.allChecked = false;
this.indeterminate = false;
this.filterSearch[name] = [];
this.allData = [...this.constData];
} else { // 确定
this.allData = this.constData.filter((x: any) => {
const xName = x[name] !== null && x[name] !== undefined && x[name].toString().trim() || '';
return this.filterSearch[name].indexOf(xName) !== -1;
})
}
// 对别的已选做刷选
const arr = [...this.allData];
this.allData.map((x: any, index: number) => {
Object.keys(this.filterSearch).map((y: string) => {
if (y !== name.trim()) {
const xName = x[y] !== null && x[y] !== undefined && x[y].toString().trim() || '';
if ( (this.filterSearch[y].length > 0) && (this.filterSearch[y].indexOf(xName) === -1) ) {
arr[index] = undefined;
}
}
});
});
this.allData = arr.filter((x: any) => x !== undefined);
this.currentChange = true;
this.headerFilterData(this.allData);
}
// colDrag 列拖拽
coldrag(data: any) {
let tableWidth = parseFloat(this.tableWidth);
const currentWidth = parseFloat(this.dataHeader[data.index].width);
tableWidth = tableWidth - currentWidth;
this.tableWidth = `${tableWidth + data.nex}px`;
this.dataHeader[data.index].width = `${data.nex}px`;
}
ngOnDestroy() {
if (this.gloColSub) { this.gloColSub.unsubscribe(); }
if (this.gloPageSub) { this.gloPageSub.unsubscribe(); }
if (this.gloSelectedSub) { this.gloSelectedSub.unsubscribe(); }
}
// static 静态数据获取
getSelectData(data: Array<any>, valueSetCode: string, item: any) {
if (this.staticCode) {
this.staticCode(valueSetCode, item).subscribe( x => {
Array.prototype.push.apply(data, x);
})
return;
}
this.http.post(urls.static, {valueSetCode: valueSetCode}).then(
(res: any) => {
if (res.success) {
Array.prototype.push.apply(data, res.data.data.data);
}
}
);
}
private headerFilterData(data: any) {
// filter数据,$~$
const hadFilter = Object.keys(this.filterSearch).map((x: string) => (this.filterSearch[x].length > 0) && x);
window.setTimeout(() => {
let colHad = {}, tempStr: string;
this.dataHeader.map((y: any) => {
if (!hadFilter.includes(y.colEname)) { this.headerFilter[y.colEname] = []; }
});
data.map((x: any) => {
this.dataHeader.map((y: any) => {
const yname = (x[y.colEname] === '' || x[y.colEname] == null || x[y.colEname] === undefined) ? '' : x[y.colEname];
tempStr = y.colEname + yname;
if (!colHad[tempStr] && !hadFilter.includes(y.colEname)) {
const xName = yname.toString().trim() || '';
this.headerFilter[y.colEname].push({'text': xName, 'value': xName});
colHad[tempStr] = true;
}
});
});
colHad = undefined; // 清空
});
}
private dataFilterResult() {
let u: any[] = [...this.allData];
let enterBool = false;
for (const i in this.filterSearch) {
if(this.filterSearch[i].length > 0) {
enterBool = true;
u = u.filter((m: any) => this.filterSearch[i].includes(m[i] || ''));
}
}
if (enterBool) { this.allData = [...u]; }
}
/**
* tab change事件
* @param param
*/
tabIndexChangeEvent(param: number) {
this.tabIndexChange.emit(param);
}
keyUp(data:any,tableData:any){
this.keyboardEmit.emit({keyboardData:data,tableData:tableData});
}
isNotSelected(data: any,colEname:string): boolean {
return !this.allData.some(value1 => value1[colEname] === data.value);
}
}
| conditional_block | ||
table-form.component.ts | import {Component, OnInit, Input, Output, EventEmitter, TemplateRef, OnDestroy, ViewChild, Host} from '@angular/core';
// import { trigger, transition, animate,style} from '@angular/animations';
import {NzTabChangeEvent, NzTableComponent} from 'ng-zorro-antd';
import { UserinfoService } from '@service/userinfo-service.service';
import { GlobalService } from '@service/global-service.service';
import { urls } from '@model/url';
import { HttpUtilService } from '@service/http-util.service';
import { Utils } from '@util/utils';
import { toDemical } from '@validator/validator';
import { GridRowSource } from '../simple-page/grid-block/grid-row.directive';
import {Subscription, Observable, Subject} from 'rxjs';
@Component({
selector: 'app-table-form',
templateUrl: `./table-form.component.html`,
styleUrls: ['./table-form.component.css'],
providers: [
GridRowSource,
],
/* animations:[
trigger('gridAnimate',[
transition(':enter',[
style({opacity:0,height:0,transform:'translate(30px,0)'}),
animate('0.3s ease-in',style({opacity:1,height:'auto',transform:'translate(0,0)',background:'#fffeee'}))
]),
transition(':leave',[
animate('0.3s ease-out',style({opacity:0,height:0,transform:'translate(30px,0)'}))
])
])
]*/
})
export class TableFormComponent implements OnInit,OnDestroy {
// tempfindSet: any = { "parameter": "companyName", "parameterSend": "companyId", "name": "发票抬头", "formId": "company_pop" };
// 数据弹框传入参数配置格式
@ViewChild('nzTable') nzTableComponent: NzTableComponent;
private gloPageSub: Subscription;
private gloColSub: Subscription;
private gloSelectedSub: Subscription;
private _columns: any[];
tableHeight = '500px';
tableWidth = '100%';
allChecked = false;
indeterminate = false;
updateData: Array<any> = [];
dataHeader: Array<any> = []; // 表头
dataHeaderRefresh = false; // 判断表头是否刷新
pageIndex = 1; // 当前页码
_trSelected: boolean[] = [];
_gridOperate: any = {}; // 操作{show:false,title:'操作',width:'120px',template:''};
_calTotalFiled: any = {};
_totalPage = 0;
_pageSizeOptions: number[] = [30, 100, 500, 1000, 5000, 1000000]; // 1000000 显示全部
headerFilter: any = {}; // filter数据
filterSearch: any = {}; // filter条件
allData: any[] = [];
curFormId: string;
virtualMinBuffer: number;
virtualMaxBuffer: number;
private constData: any[] = [];
private currentChange = false;
private trSelectHand = false; // 判断是否
@Input() noGetStaticData: boolean; // 不用获取静态数据 设置True
@Input() closeBgTr = true; // tr换行背景色是否显示
@Input() selfTableHeight: string | null | undefined; // 表格高度自适应,如果不需要自适应可设置该参数 自定义table高度
@Input() extraTableHeight: number; // 正常模式页面额外的高度
@Output() currentTableHeightFun = new EventEmitter<number>(); // 如果表格高度是自适应的,抛出表格高度
@Output() inpEmit = new EventEmitter<any>();
@Input() tabArr: any[] = []; // tab头数据 不要用
@Input() tabIndex = 0; // 激活的面板,默认第一个
@Output() tabIndexChange: EventEmitter<number> = new EventEmitter<number>();
@Input() updateHeader = false; // 点击tab是否更新表头
@Output() tabResultFun = new EventEmitter<any>(); // tab头点击事件
@Input() set columns(val: any[]) { // 自定义表头 [{colCname:'中文名',colEname:'英文名',visible:'XSBJ10'|'XSBJ20'}]
this._columns = val;
if (Array.isArray(this._columns)) { this.columnsFilter(this._columns); }
}
get columns() {
return this._columns;
}
@Input() checkBoxWidth: string;
@Input() listWidth: string; // list宽度
@Input() trSelectedShow = false; // 选中是否显示样式
@Input() set trSelected(val: boolean[]) { // index以作区分
if (Array.isArray(val)) {
this._trSelected = val;
this.trSelectHand = true;
}
}
get trSelected() {
return this._trSelected;
}
@Input() formId: string; // formId 优先级高
@Input() gridId: string; // 一个页面多个列表必填,传gridId以进行区分,
@Input() isCheckBox = true; // 是否显示选择框,默认显示
@Input() disabledAllCheckBox:boolean = false;//是否禁止全选,默认允许
@Input() isMutli: boolean; // 选择框是否多选
@Input() nzShowPagination = true; // 列表是否显示分页器 ,默认显示
@Input() showCount = true; // 是否显示选择条数
@Input() pageSize = 100; // 条数 默认100
@Input() paginationRef: TemplateRef<any>;
@Input() tdTemplate:TemplateRef<any>;
@Input() set pageSizeOptions(val: number[]) { // 页码自定义
this._pageSizeOptions = val;
}
get pageSizeOptions() {
return this._pageSizeOptions;
}
@Input() colSet = true; // 列表设置是否显示
@Input() isResetFilter: boolean; // 是否对filte进行重置
// 列表数据
@Input() set dataSet(val: any[]) { // 只接收外部传进的值
if (this.isResetFilter) {
this.filterSearch = {}; // filter重置 已筛选的存储
}
this.currentChange = false;
this.allData = [];
this.constData = Array.isArray(val) ? val : []; // 数量不可变
window.setTimeout(() => {
this.allData = [...this.constData];
this.dataFilterResult();
this.headerFilterData(this.allData);
},300);
}
// 数据总数
@Input() set totalPage(val: number) {
this._totalPage = val;
} // 数据总数
get totalPage() {
return this._totalPage;
}
@Input() listLoading: boolean; // list加载
@Input() pageFun: boolean; // true页码条数自写
@Input() set gridOperate(val: any) { // 表格操作自定义
if (Utils.isObject(val)) {
this._gridOperate.show = val.show;
this._gridOperate.title = val.title || '操作';
this._gridOperate.width = val.width || '120px';
this._gridOperate.template = val.template;
}
} // 操作{show:false,title:'操作',width:'120px',template:''};
@Input() caculateEnameArr: any = [
];
@Input() set refresh(val:any) | refreshStatus();
}
@Input() popData: any;
@Input() popTableData: any = [];
@Input() searchParamFiled: any; // pop弹框调接口要传的参数名
@Input() searchParamFiledNot: any; // pop弹框调接口要传的参数名不必传 {eName:ttrue},格式
@Input() tableTitle: string|TemplateRef<void>; // 表格标题
@Input() tableFooter: string|TemplateRef<void>; // 表格尾部
@Input() selectedChange = false; // 全选事件订阅
@Input() searchListFiled: any; // pop弹窗取当前数据哪个字段的值
@Input() staticCode: (data: string, item?: any) => Observable<any[]>; // 组件内静态数据自定义 [{name: null, value: null}]
@Input() needStaticCode: boolean; // 当没有apiParam时需要获取静态数据时设为true
@Output() updateDataResult = new EventEmitter<any>(); // 选中数据的结果以及点击列表选择框事件
@Output() inputBlurFun = new EventEmitter<any>(); // input Blur失焦
@Output() modelChange = new EventEmitter<any>(); // input值改变事件
@Output() listClick = new EventEmitter<any>(); // list点击事件
@Output() listOver = new EventEmitter<any>(); // list鼠标移入事件
@Output() listLeave = new EventEmitter<any>(); // list鼠标移出事件
@Output() pageIndexEmit = new EventEmitter<any>(); // 页码点击事件
@Output() pageSizeEmit = new EventEmitter<any>(); // 条数点击事件
@Output() userColumnsEmit = new EventEmitter<any>(); // 表头数据返回
@Output() currentPageDataChangeEmit = new EventEmitter<any>(); // 当前页面数据更新
@Output() selectedChangeEmit = new EventEmitter<any>(); // 勾选事件,抛出数据
@Output() keyboardEmit = new EventEmitter<any>()
constructor(private http: HttpUtilService, private info: UserinfoService, private globalSer: GlobalService,
@Host() private rowSource: GridRowSource) {
}
private restoreRender(item: any) {
if (item.type === 'template') {
const tplName = `tpl-${item.gridId}-${item.colEname}`;
item.template = this.rowSource.getRow(tplName);
if (!item.template) {
console.error(`template类型列配置错误!templateId:${tplName}`);
}
}
}
ngOnInit() {
this.virtualMinBuffer = parseInt(this.selfTableHeight || this.tableHeight, 0);
this.virtualMaxBuffer = this.virtualMinBuffer + 100;
this.curFormId = this.formId || this.info.APPINFO.formId;
if (!this._columns) {
this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId,gridId: this.gridId});
}
this.gloPageSub = this.globalSer.pageNumEmitter.subscribe( (x: any) => {
if (this.curFormId === x.formId) {
this.pageIndex = x.page;
}
});
this.gloColSub = this.globalSer.colChangeEmitter.subscribe(
(res: any) => this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId, gridId: this.gridId})
);
for (const c of this.caculateEnameArr) {
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
if (this.selectedChange) {
this.gloSelectedSub = this.globalSer.tableSelectedChangeEmitter.subscribe(
res => {
if (res.gridId === this.gridId) {
this.checkAll(res.checked)
}
}
);
}
this.globalSer.tableGridIdToSearchForm.emit({'gridId': this.gridId, 'formId': this.curFormId}); // 向查询区域传递gridId;
this.globalSer.routerEvent.subscribe((x: any) => {
if (x.isSys) {
this.nzTableComponent.cdkVirtualScrollViewport.scrollToIndex(0);
}
});
this.globalSer.pageNumEmitter.emit(
{formId: this.curFormId, gridId: this.gridId, page: 1, length: this.pageSize, search: true}
); // 初始化的条数
}
tableHeightFun(data: number) { // 表格自适应抛出数据
this.tableHeight = `${data}px`;
this.currentTableHeightFun.emit(data);
this.virtualMinBuffer = data;
this.virtualMaxBuffer = this.virtualMinBuffer + 100;
}
// 选择逻辑
refreshStatus(data?: any): void {
let currentChecked: boolean;
if (data && !this.isMutli) {
currentChecked = data.checked;
}
this.allData.map((y: any) => !this.isMutli && (y.checked = false));
if (data && !this.isMutli) {
data.checked = currentChecked; // 单选情况下设置checked;
}
this.allCheckBoxStyle();
this.updateData = this.constData.filter((x: any) => x.checked);
this.updateDataResult.emit(this.updateData); // 选中的结果数据
// 统计需要统计的字段
for (const c of this.caculateEnameArr){
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
// for (let elem of this.updateData) {
// // this._calTotalFiled[c.field] += Number(elem[c.field]);
// this._calTotalFiled[c.field] = toDemical(Utils.add(this._calTotalFiled[c.field], elem[c.field]),c.demLength);
// }
// 2019-1-14郑鑫修改, 将每次求和都四舍五入改为将最后结果四舍五入
this._calTotalFiled[c.field] = toDemical(
this.updateData.map(item => Number(item[c.field]) || 0).reduce((acc, cur) => acc + cur, 0), c.demLength
);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
if (data) {
this.selectedChangeEmit.emit([data]);
} else {
this.selectedChangeEmit.emit(this.allData);
}
}
private allCheckBoxStyle() {
const dataArr = this.allData.filter(value => !value.disabled);
const allChecked = dataArr[0] ? dataArr.every(value => value.checked === true) : false;
const allUnChecked = dataArr.every(value => !value.checked);
this.allChecked = allChecked;
this.indeterminate = (!allChecked) && (!allUnChecked);
}
checkAll(value: boolean): void {
this.allData = this.allData ? this.allData : [];
this.allData.forEach(data => {
if (!data.disabled) {
data.checked = value;
}
});
this.refreshStatus();
}
// 表头获取
getUserColumns(param: any): void { // 获取表头
this.http.post(urls.columns, param).then( (res: any) => { // 获取表头
if (res.success) {
let dataHeader: any;
dataHeader = res.data.data;
dataHeader.map((item: any) => {
item.apiParameter = item.apiParameter && JSON.parse(item.apiParameter) || {};
});
this.columnsFilter(dataHeader, 'http');
this.dataHeader.forEach(
item => {
if (item.type === 'select' && item.visible === 'XSBJ10' ) { // 新增可见调用
item.apiParameter.optionList =item.apiParameter.optionList || [];
// 添加noSetCode 属性为true 可以关闭组件内静态数据 的获取
// tslint:disable-next-line: max-line-length 当没有设置valueSetCode时需要staticCode时添加edit条件,需要设置needStaticCode为true
if ((item.apiParameter.valueSetCode || (item.edit === 'BJBJ10' && this.needStaticCode)) && !item.noSetCode && !this.noGetStaticData) {
this.getSelectData(item.apiParameter.optionList,item.apiParameter.valueSetCode, item);
}
}
this.restoreRender(item);
}
);
}
});
}
// 表头获取数据刷选
columnsFilter(dataHeader: any[], type?: string) {
let len = 0, width = 0;
this.dataHeader = dataHeader.filter((x: any) => x.visible === 'XSBJ10');
this.dataHeader.map((item: any) => {
width = parseFloat(item.width) ? parseFloat(item.width) : 120;
len = Utils.add(len, width);
item.width = `${width}px`;
});
const checkBoxWidth = this.isCheckBox ? 50 : 0;
const gridOperateWidth = this._gridOperate.show ? parseFloat(this._gridOperate.width) : 0;
this.tableWidth = `${ Utils.add(Utils.add(len, checkBoxWidth), gridOperateWidth)}px`; // 默认加上选择框的
// 返回表头数据
if (type === 'http') { this.userColumnsEmit.emit(this.dataHeader); }
this.dataHeaderRefresh = true; // 表头是否刷新
window.setTimeout(() => {
this.dataHeaderRefresh = false;
});
}
// 改变页码
pageChange(n: number): void {
this.pageIndex = n;
!this.pageFun ? this.globalSer.pageNumEmitter.emit({formId: this.curFormId, gridId: this.gridId, page: n,
// tslint:disable-next-line:no-unused-expression
length: this.pageSize}) : null;
this.pageIndexEmit.emit(n);
}
// 改变条数
pageSizeFun(n: number): void {
this.pageIndex = 1;
// tslint:disable-next-line:no-unused-expression
!this.pageFun ? this.globalSer.pageNumEmitter.emit({formId: this.curFormId, gridId: this.gridId, page: 1, length: n}) : null;
this.pageSizeEmit.emit(n);
}
// 当前展示数据改变要做的处理
currentPageDataChange(data: any): void {
if (this.currentChange) {
return;
}
this.allChecked = false;
this.indeterminate = false;
this.allCheckBoxStyle();
this.updateData = [];
if (this.constData && this.constData[0]) {
this.updateData = this.constData.filter((x: any) => x.checked);
}
if (!this.trSelectHand) { this._trSelected = []; } // 如果传trSelected属性,则刷新列表手动清除选中样式
this.currentPageDataChangeEmit.emit(data);
for (const c of this.caculateEnameArr) {
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
}
// tab头点击
tabClick(data: any): void { // 不要用
this.tabResultFun.emit(data);
setTimeout( () => {
if (this.updateHeader) {
if (!this._columns) {
this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId, gridId: this.gridId}); // 参数待定
}
}
}, 100);
}
// input失焦事件
inputBlur(data: any, dex: number){
this.inputBlurFun.emit({data: data, index: dex});
}
// input实时改变事件
onChange(val: string, data: any, n: any, h: any, ind: any){
this.modelChange.emit({val: val, data: data, input: n, header: h, index: ind});
}
// list鼠标移入事件
listOverFun(data: any, index: number){
this.listOver.emit({'data': data, 'index': index});
}
// list鼠标移出事件
listLeaveFun(data: any, index: number){
this.listLeave.emit({'data': data, 'index': index});
}
// 表格列表点击事件
tdClick(data: any, index: number, bol: boolean){
if (!bol) {
return;
}
this._trSelected = [];
this._trSelected[index] = true;
this.listClick.emit(data);
this.allCheckBoxStyle();
this.updateData = [];
if (this.constData && this.constData[0]) {
this.updateData = this.constData.filter((x: any) => x.checked);
}
}
// 操作
inpEmitFun(data: any, dex: any, rowid: any, n: any) {
this.inpEmit.emit(
{ inpName: data.inpName || '', inpValue: data.inpValue || '',
selData: data.selData || {}, index: dex, rowid: rowid, eName: n.colEname}
);
}
//
filterChange(bool: Boolean, viewer: any, header: any){
if (!bool) {
viewer.scrollToIndex(0);
const name = header.colEname;
header._checked = [];
this.headerFilter[name].forEach(x => x.checked && header._checked.push(x.text));
this.filterData(header._checked, name);
}
}
// sort 排序
sort(flag: any, index: any) {
// flag 排序类型 null descend ascend
const type = !flag ? 'descend' : flag === 'descend' ? 'ascend' : null;
index._sortType = type;
// if (!type){
// this.allData = [...this.constData];
// return;
// }
this.allData = this.allData.sort((x: any, y: any) => {
let a = x[index.colEname], b = y[index.colEname];
if (index.type === 'number') {
a = Number(a || 0);
b = Number(b || 0);
} else {
a = JSON.stringify(a || null);
b = JSON.stringify(b || null);
if (index.type === 'string') {
return (type === 'ascend') ? a.localeCompare(b, 'zh') : b.localeCompare(a, 'zh');
}
}
return (type === 'ascend') ? ( a > b ? 1 : -1) : (b > a ? 1 : -1);
} );
this.currentChange = true;
this.allData = [...this.allData];
}
// filter 关闭
filterClose(flag: string, header: any){
const name = header.colEname;
header._checked = [];
if (flag === 'confirm') {
this.headerFilter[name].forEach(x => x.checked && header._checked.push(x.text));
} else if (flag === 'reset') {
this.headerFilter[name].forEach(x => x.checked = false);
header._filterVal = null;
}
header._filterVisible = false;
this.filterData( header._checked, name);
}
// filter 刷选
filterData(data: any[]|any, name: string){
this.filterSearch[name] = [...( Array.isArray(data) && data || [])];
if (!Array.isArray(data) || (data.length < 1) ) { // 每列重置
this.updateData.forEach((item: any) => item.checked = false); // 选中重置
this.updateData = [];
this.allChecked = false;
this.indeterminate = false;
this.filterSearch[name] = [];
this.allData = [...this.constData];
} else { // 确定
this.allData = this.constData.filter((x: any) => {
const xName = x[name] !== null && x[name] !== undefined && x[name].toString().trim() || '';
return this.filterSearch[name].indexOf(xName) !== -1;
})
}
// 对别的已选做刷选
const arr = [...this.allData];
this.allData.map((x: any, index: number) => {
Object.keys(this.filterSearch).map((y: string) => {
if (y !== name.trim()) {
const xName = x[y] !== null && x[y] !== undefined && x[y].toString().trim() || '';
if ( (this.filterSearch[y].length > 0) && (this.filterSearch[y].indexOf(xName) === -1) ) {
arr[index] = undefined;
}
}
});
});
this.allData = arr.filter((x: any) => x !== undefined);
this.currentChange = true;
this.headerFilterData(this.allData);
}
// colDrag 列拖拽
coldrag(data: any) {
let tableWidth = parseFloat(this.tableWidth);
const currentWidth = parseFloat(this.dataHeader[data.index].width);
tableWidth = tableWidth - currentWidth;
this.tableWidth = `${tableWidth + data.nex}px`;
this.dataHeader[data.index].width = `${data.nex}px`;
}
ngOnDestroy() {
if (this.gloColSub) { this.gloColSub.unsubscribe(); }
if (this.gloPageSub) { this.gloPageSub.unsubscribe(); }
if (this.gloSelectedSub) { this.gloSelectedSub.unsubscribe(); }
}
// static 静态数据获取
getSelectData(data: Array<any>, valueSetCode: string, item: any) {
if (this.staticCode) {
this.staticCode(valueSetCode, item).subscribe( x => {
Array.prototype.push.apply(data, x);
})
return;
}
this.http.post(urls.static, {valueSetCode: valueSetCode}).then(
(res: any) => {
if (res.success) {
Array.prototype.push.apply(data, res.data.data.data);
}
}
);
}
private headerFilterData(data: any) {
// filter数据,$~$
const hadFilter = Object.keys(this.filterSearch).map((x: string) => (this.filterSearch[x].length > 0) && x);
window.setTimeout(() => {
let colHad = {}, tempStr: string;
this.dataHeader.map((y: any) => {
if (!hadFilter.includes(y.colEname)) { this.headerFilter[y.colEname] = []; }
});
data.map((x: any) => {
this.dataHeader.map((y: any) => {
const yname = (x[y.colEname] === '' || x[y.colEname] == null || x[y.colEname] === undefined) ? '' : x[y.colEname];
tempStr = y.colEname + yname;
if (!colHad[tempStr] && !hadFilter.includes(y.colEname)) {
const xName = yname.toString().trim() || '';
this.headerFilter[y.colEname].push({'text': xName, 'value': xName});
colHad[tempStr] = true;
}
});
});
colHad = undefined; // 清空
});
}
private dataFilterResult() {
let u: any[] = [...this.allData];
let enterBool = false;
for (const i in this.filterSearch) {
if(this.filterSearch[i].length > 0) {
enterBool = true;
u = u.filter((m: any) => this.filterSearch[i].includes(m[i] || ''));
}
}
if (enterBool) { this.allData = [...u]; }
}
/**
* tab change事件
* @param param
*/
tabIndexChangeEvent(param: number) {
this.tabIndexChange.emit(param);
}
keyUp(data:any,tableData:any){
this.keyboardEmit.emit({keyboardData:data,tableData:tableData});
}
isNotSelected(data: any,colEname:string): boolean {
return !this.allData.some(value1 => value1[colEname] === data.value);
}
}
| { //用于初始化表格中存在已选数据,选中条数的变化,想触发必须更改成不同值
this. | identifier_body |
table-form.component.ts | import {Component, OnInit, Input, Output, EventEmitter, TemplateRef, OnDestroy, ViewChild, Host} from '@angular/core';
// import { trigger, transition, animate,style} from '@angular/animations';
import {NzTabChangeEvent, NzTableComponent} from 'ng-zorro-antd';
import { UserinfoService } from '@service/userinfo-service.service';
import { GlobalService } from '@service/global-service.service';
import { urls } from '@model/url';
import { HttpUtilService } from '@service/http-util.service';
import { Utils } from '@util/utils';
import { toDemical } from '@validator/validator';
import { GridRowSource } from '../simple-page/grid-block/grid-row.directive';
import {Subscription, Observable, Subject} from 'rxjs';
@Component({
selector: 'app-table-form',
templateUrl: `./table-form.component.html`,
styleUrls: ['./table-form.component.css'],
providers: [
GridRowSource,
],
/* animations:[
trigger('gridAnimate',[
transition(':enter',[ | ])
])
]*/
})
export class TableFormComponent implements OnInit,OnDestroy {
// tempfindSet: any = { "parameter": "companyName", "parameterSend": "companyId", "name": "发票抬头", "formId": "company_pop" };
// 数据弹框传入参数配置格式
@ViewChild('nzTable') nzTableComponent: NzTableComponent;
private gloPageSub: Subscription;
private gloColSub: Subscription;
private gloSelectedSub: Subscription;
private _columns: any[];
tableHeight = '500px';
tableWidth = '100%';
allChecked = false;
indeterminate = false;
updateData: Array<any> = [];
dataHeader: Array<any> = []; // 表头
dataHeaderRefresh = false; // 判断表头是否刷新
pageIndex = 1; // 当前页码
_trSelected: boolean[] = [];
_gridOperate: any = {}; // 操作{show:false,title:'操作',width:'120px',template:''};
_calTotalFiled: any = {};
_totalPage = 0;
_pageSizeOptions: number[] = [30, 100, 500, 1000, 5000, 1000000]; // 1000000 显示全部
headerFilter: any = {}; // filter数据
filterSearch: any = {}; // filter条件
allData: any[] = [];
curFormId: string;
virtualMinBuffer: number;
virtualMaxBuffer: number;
private constData: any[] = [];
private currentChange = false;
private trSelectHand = false; // 判断是否
@Input() noGetStaticData: boolean; // 不用获取静态数据 设置True
@Input() closeBgTr = true; // tr换行背景色是否显示
@Input() selfTableHeight: string | null | undefined; // 表格高度自适应,如果不需要自适应可设置该参数 自定义table高度
@Input() extraTableHeight: number; // 正常模式页面额外的高度
@Output() currentTableHeightFun = new EventEmitter<number>(); // 如果表格高度是自适应的,抛出表格高度
@Output() inpEmit = new EventEmitter<any>();
@Input() tabArr: any[] = []; // tab头数据 不要用
@Input() tabIndex = 0; // 激活的面板,默认第一个
@Output() tabIndexChange: EventEmitter<number> = new EventEmitter<number>();
@Input() updateHeader = false; // 点击tab是否更新表头
@Output() tabResultFun = new EventEmitter<any>(); // tab头点击事件
@Input() set columns(val: any[]) { // 自定义表头 [{colCname:'中文名',colEname:'英文名',visible:'XSBJ10'|'XSBJ20'}]
this._columns = val;
if (Array.isArray(this._columns)) { this.columnsFilter(this._columns); }
}
get columns() {
return this._columns;
}
@Input() checkBoxWidth: string;
@Input() listWidth: string; // list宽度
@Input() trSelectedShow = false; // 选中是否显示样式
@Input() set trSelected(val: boolean[]) { // index以作区分
if (Array.isArray(val)) {
this._trSelected = val;
this.trSelectHand = true;
}
}
get trSelected() {
return this._trSelected;
}
@Input() formId: string; // formId 优先级高
@Input() gridId: string; // 一个页面多个列表必填,传gridId以进行区分,
@Input() isCheckBox = true; // 是否显示选择框,默认显示
@Input() disabledAllCheckBox:boolean = false;//是否禁止全选,默认允许
@Input() isMutli: boolean; // 选择框是否多选
@Input() nzShowPagination = true; // 列表是否显示分页器 ,默认显示
@Input() showCount = true; // 是否显示选择条数
@Input() pageSize = 100; // 条数 默认100
@Input() paginationRef: TemplateRef<any>;
@Input() tdTemplate:TemplateRef<any>;
@Input() set pageSizeOptions(val: number[]) { // 页码自定义
this._pageSizeOptions = val;
}
get pageSizeOptions() {
return this._pageSizeOptions;
}
@Input() colSet = true; // 列表设置是否显示
@Input() isResetFilter: boolean; // 是否对filte进行重置
// 列表数据
@Input() set dataSet(val: any[]) { // 只接收外部传进的值
if (this.isResetFilter) {
this.filterSearch = {}; // filter重置 已筛选的存储
}
this.currentChange = false;
this.allData = [];
this.constData = Array.isArray(val) ? val : []; // 数量不可变
window.setTimeout(() => {
this.allData = [...this.constData];
this.dataFilterResult();
this.headerFilterData(this.allData);
},300);
}
// 数据总数
@Input() set totalPage(val: number) {
this._totalPage = val;
} // 数据总数
get totalPage() {
return this._totalPage;
}
@Input() listLoading: boolean; // list加载
@Input() pageFun: boolean; // true页码条数自写
@Input() set gridOperate(val: any) { // 表格操作自定义
if (Utils.isObject(val)) {
this._gridOperate.show = val.show;
this._gridOperate.title = val.title || '操作';
this._gridOperate.width = val.width || '120px';
this._gridOperate.template = val.template;
}
} // 操作{show:false,title:'操作',width:'120px',template:''};
@Input() caculateEnameArr: any = [
];
@Input() set refresh(val:any){ //用于初始化表格中存在已选数据,选中条数的变化,想触发必须更改成不同值
this.refreshStatus();
}
@Input() popData: any;
@Input() popTableData: any = [];
@Input() searchParamFiled: any; // pop弹框调接口要传的参数名
@Input() searchParamFiledNot: any; // pop弹框调接口要传的参数名不必传 {eName:ttrue},格式
@Input() tableTitle: string|TemplateRef<void>; // 表格标题
@Input() tableFooter: string|TemplateRef<void>; // 表格尾部
@Input() selectedChange = false; // 全选事件订阅
@Input() searchListFiled: any; // pop弹窗取当前数据哪个字段的值
@Input() staticCode: (data: string, item?: any) => Observable<any[]>; // 组件内静态数据自定义 [{name: null, value: null}]
@Input() needStaticCode: boolean; // 当没有apiParam时需要获取静态数据时设为true
@Output() updateDataResult = new EventEmitter<any>(); // 选中数据的结果以及点击列表选择框事件
@Output() inputBlurFun = new EventEmitter<any>(); // input Blur失焦
@Output() modelChange = new EventEmitter<any>(); // input值改变事件
@Output() listClick = new EventEmitter<any>(); // list点击事件
@Output() listOver = new EventEmitter<any>(); // list鼠标移入事件
@Output() listLeave = new EventEmitter<any>(); // list鼠标移出事件
@Output() pageIndexEmit = new EventEmitter<any>(); // 页码点击事件
@Output() pageSizeEmit = new EventEmitter<any>(); // 条数点击事件
@Output() userColumnsEmit = new EventEmitter<any>(); // 表头数据返回
@Output() currentPageDataChangeEmit = new EventEmitter<any>(); // 当前页面数据更新
@Output() selectedChangeEmit = new EventEmitter<any>(); // 勾选事件,抛出数据
@Output() keyboardEmit = new EventEmitter<any>()
constructor(private http: HttpUtilService, private info: UserinfoService, private globalSer: GlobalService,
@Host() private rowSource: GridRowSource) {
}
private restoreRender(item: any) {
if (item.type === 'template') {
const tplName = `tpl-${item.gridId}-${item.colEname}`;
item.template = this.rowSource.getRow(tplName);
if (!item.template) {
console.error(`template类型列配置错误!templateId:${tplName}`);
}
}
}
ngOnInit() {
this.virtualMinBuffer = parseInt(this.selfTableHeight || this.tableHeight, 0);
this.virtualMaxBuffer = this.virtualMinBuffer + 100;
this.curFormId = this.formId || this.info.APPINFO.formId;
if (!this._columns) {
this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId,gridId: this.gridId});
}
this.gloPageSub = this.globalSer.pageNumEmitter.subscribe( (x: any) => {
if (this.curFormId === x.formId) {
this.pageIndex = x.page;
}
});
this.gloColSub = this.globalSer.colChangeEmitter.subscribe(
(res: any) => this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId, gridId: this.gridId})
);
for (const c of this.caculateEnameArr) {
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
if (this.selectedChange) {
this.gloSelectedSub = this.globalSer.tableSelectedChangeEmitter.subscribe(
res => {
if (res.gridId === this.gridId) {
this.checkAll(res.checked)
}
}
);
}
this.globalSer.tableGridIdToSearchForm.emit({'gridId': this.gridId, 'formId': this.curFormId}); // 向查询区域传递gridId;
this.globalSer.routerEvent.subscribe((x: any) => {
if (x.isSys) {
this.nzTableComponent.cdkVirtualScrollViewport.scrollToIndex(0);
}
});
this.globalSer.pageNumEmitter.emit(
{formId: this.curFormId, gridId: this.gridId, page: 1, length: this.pageSize, search: true}
); // 初始化的条数
}
tableHeightFun(data: number) { // 表格自适应抛出数据
this.tableHeight = `${data}px`;
this.currentTableHeightFun.emit(data);
this.virtualMinBuffer = data;
this.virtualMaxBuffer = this.virtualMinBuffer + 100;
}
// 选择逻辑
refreshStatus(data?: any): void {
let currentChecked: boolean;
if (data && !this.isMutli) {
currentChecked = data.checked;
}
this.allData.map((y: any) => !this.isMutli && (y.checked = false));
if (data && !this.isMutli) {
data.checked = currentChecked; // 单选情况下设置checked;
}
this.allCheckBoxStyle();
this.updateData = this.constData.filter((x: any) => x.checked);
this.updateDataResult.emit(this.updateData); // 选中的结果数据
// 统计需要统计的字段
for (const c of this.caculateEnameArr){
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
// for (let elem of this.updateData) {
// // this._calTotalFiled[c.field] += Number(elem[c.field]);
// this._calTotalFiled[c.field] = toDemical(Utils.add(this._calTotalFiled[c.field], elem[c.field]),c.demLength);
// }
// 2019-1-14郑鑫修改, 将每次求和都四舍五入改为将最后结果四舍五入
this._calTotalFiled[c.field] = toDemical(
this.updateData.map(item => Number(item[c.field]) || 0).reduce((acc, cur) => acc + cur, 0), c.demLength
);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
if (data) {
this.selectedChangeEmit.emit([data]);
} else {
this.selectedChangeEmit.emit(this.allData);
}
}
private allCheckBoxStyle() {
const dataArr = this.allData.filter(value => !value.disabled);
const allChecked = dataArr[0] ? dataArr.every(value => value.checked === true) : false;
const allUnChecked = dataArr.every(value => !value.checked);
this.allChecked = allChecked;
this.indeterminate = (!allChecked) && (!allUnChecked);
}
checkAll(value: boolean): void {
this.allData = this.allData ? this.allData : [];
this.allData.forEach(data => {
if (!data.disabled) {
data.checked = value;
}
});
this.refreshStatus();
}
// 表头获取
getUserColumns(param: any): void { // 获取表头
this.http.post(urls.columns, param).then( (res: any) => { // 获取表头
if (res.success) {
let dataHeader: any;
dataHeader = res.data.data;
dataHeader.map((item: any) => {
item.apiParameter = item.apiParameter && JSON.parse(item.apiParameter) || {};
});
this.columnsFilter(dataHeader, 'http');
this.dataHeader.forEach(
item => {
if (item.type === 'select' && item.visible === 'XSBJ10' ) { // 新增可见调用
item.apiParameter.optionList =item.apiParameter.optionList || [];
// 添加noSetCode 属性为true 可以关闭组件内静态数据 的获取
// tslint:disable-next-line: max-line-length 当没有设置valueSetCode时需要staticCode时添加edit条件,需要设置needStaticCode为true
if ((item.apiParameter.valueSetCode || (item.edit === 'BJBJ10' && this.needStaticCode)) && !item.noSetCode && !this.noGetStaticData) {
this.getSelectData(item.apiParameter.optionList,item.apiParameter.valueSetCode, item);
}
}
this.restoreRender(item);
}
);
}
});
}
// 表头获取数据刷选
columnsFilter(dataHeader: any[], type?: string) {
let len = 0, width = 0;
this.dataHeader = dataHeader.filter((x: any) => x.visible === 'XSBJ10');
this.dataHeader.map((item: any) => {
width = parseFloat(item.width) ? parseFloat(item.width) : 120;
len = Utils.add(len, width);
item.width = `${width}px`;
});
const checkBoxWidth = this.isCheckBox ? 50 : 0;
const gridOperateWidth = this._gridOperate.show ? parseFloat(this._gridOperate.width) : 0;
this.tableWidth = `${ Utils.add(Utils.add(len, checkBoxWidth), gridOperateWidth)}px`; // 默认加上选择框的
// 返回表头数据
if (type === 'http') { this.userColumnsEmit.emit(this.dataHeader); }
this.dataHeaderRefresh = true; // 表头是否刷新
window.setTimeout(() => {
this.dataHeaderRefresh = false;
});
}
// 改变页码
pageChange(n: number): void {
this.pageIndex = n;
!this.pageFun ? this.globalSer.pageNumEmitter.emit({formId: this.curFormId, gridId: this.gridId, page: n,
// tslint:disable-next-line:no-unused-expression
length: this.pageSize}) : null;
this.pageIndexEmit.emit(n);
}
// 改变条数
pageSizeFun(n: number): void {
this.pageIndex = 1;
// tslint:disable-next-line:no-unused-expression
!this.pageFun ? this.globalSer.pageNumEmitter.emit({formId: this.curFormId, gridId: this.gridId, page: 1, length: n}) : null;
this.pageSizeEmit.emit(n);
}
// 当前展示数据改变要做的处理
currentPageDataChange(data: any): void {
if (this.currentChange) {
return;
}
this.allChecked = false;
this.indeterminate = false;
this.allCheckBoxStyle();
this.updateData = [];
if (this.constData && this.constData[0]) {
this.updateData = this.constData.filter((x: any) => x.checked);
}
if (!this.trSelectHand) { this._trSelected = []; } // 如果传trSelected属性,则刷新列表手动清除选中样式
this.currentPageDataChangeEmit.emit(data);
for (const c of this.caculateEnameArr) {
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
}
// tab头点击
tabClick(data: any): void { // 不要用
this.tabResultFun.emit(data);
setTimeout( () => {
if (this.updateHeader) {
if (!this._columns) {
this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId, gridId: this.gridId}); // 参数待定
}
}
}, 100);
}
// input失焦事件
inputBlur(data: any, dex: number){
this.inputBlurFun.emit({data: data, index: dex});
}
// input实时改变事件
onChange(val: string, data: any, n: any, h: any, ind: any){
this.modelChange.emit({val: val, data: data, input: n, header: h, index: ind});
}
// list鼠标移入事件
listOverFun(data: any, index: number){
this.listOver.emit({'data': data, 'index': index});
}
// list鼠标移出事件
listLeaveFun(data: any, index: number){
this.listLeave.emit({'data': data, 'index': index});
}
// 表格列表点击事件
tdClick(data: any, index: number, bol: boolean){
if (!bol) {
return;
}
this._trSelected = [];
this._trSelected[index] = true;
this.listClick.emit(data);
this.allCheckBoxStyle();
this.updateData = [];
if (this.constData && this.constData[0]) {
this.updateData = this.constData.filter((x: any) => x.checked);
}
}
// 操作
inpEmitFun(data: any, dex: any, rowid: any, n: any) {
this.inpEmit.emit(
{ inpName: data.inpName || '', inpValue: data.inpValue || '',
selData: data.selData || {}, index: dex, rowid: rowid, eName: n.colEname}
);
}
//
filterChange(bool: Boolean, viewer: any, header: any){
if (!bool) {
viewer.scrollToIndex(0);
const name = header.colEname;
header._checked = [];
this.headerFilter[name].forEach(x => x.checked && header._checked.push(x.text));
this.filterData(header._checked, name);
}
}
// sort 排序
sort(flag: any, index: any) {
// flag 排序类型 null descend ascend
const type = !flag ? 'descend' : flag === 'descend' ? 'ascend' : null;
index._sortType = type;
// if (!type){
// this.allData = [...this.constData];
// return;
// }
this.allData = this.allData.sort((x: any, y: any) => {
let a = x[index.colEname], b = y[index.colEname];
if (index.type === 'number') {
a = Number(a || 0);
b = Number(b || 0);
} else {
a = JSON.stringify(a || null);
b = JSON.stringify(b || null);
if (index.type === 'string') {
return (type === 'ascend') ? a.localeCompare(b, 'zh') : b.localeCompare(a, 'zh');
}
}
return (type === 'ascend') ? ( a > b ? 1 : -1) : (b > a ? 1 : -1);
} );
this.currentChange = true;
this.allData = [...this.allData];
}
// filter 关闭
filterClose(flag: string, header: any){
const name = header.colEname;
header._checked = [];
if (flag === 'confirm') {
this.headerFilter[name].forEach(x => x.checked && header._checked.push(x.text));
} else if (flag === 'reset') {
this.headerFilter[name].forEach(x => x.checked = false);
header._filterVal = null;
}
header._filterVisible = false;
this.filterData( header._checked, name);
}
// filter 刷选
filterData(data: any[]|any, name: string){
this.filterSearch[name] = [...( Array.isArray(data) && data || [])];
if (!Array.isArray(data) || (data.length < 1) ) { // 每列重置
this.updateData.forEach((item: any) => item.checked = false); // 选中重置
this.updateData = [];
this.allChecked = false;
this.indeterminate = false;
this.filterSearch[name] = [];
this.allData = [...this.constData];
} else { // 确定
this.allData = this.constData.filter((x: any) => {
const xName = x[name] !== null && x[name] !== undefined && x[name].toString().trim() || '';
return this.filterSearch[name].indexOf(xName) !== -1;
})
}
// 对别的已选做刷选
const arr = [...this.allData];
this.allData.map((x: any, index: number) => {
Object.keys(this.filterSearch).map((y: string) => {
if (y !== name.trim()) {
const xName = x[y] !== null && x[y] !== undefined && x[y].toString().trim() || '';
if ( (this.filterSearch[y].length > 0) && (this.filterSearch[y].indexOf(xName) === -1) ) {
arr[index] = undefined;
}
}
});
});
this.allData = arr.filter((x: any) => x !== undefined);
this.currentChange = true;
this.headerFilterData(this.allData);
}
// colDrag 列拖拽
coldrag(data: any) {
let tableWidth = parseFloat(this.tableWidth);
const currentWidth = parseFloat(this.dataHeader[data.index].width);
tableWidth = tableWidth - currentWidth;
this.tableWidth = `${tableWidth + data.nex}px`;
this.dataHeader[data.index].width = `${data.nex}px`;
}
ngOnDestroy() {
if (this.gloColSub) { this.gloColSub.unsubscribe(); }
if (this.gloPageSub) { this.gloPageSub.unsubscribe(); }
if (this.gloSelectedSub) { this.gloSelectedSub.unsubscribe(); }
}
// static 静态数据获取
getSelectData(data: Array<any>, valueSetCode: string, item: any) {
if (this.staticCode) {
this.staticCode(valueSetCode, item).subscribe( x => {
Array.prototype.push.apply(data, x);
})
return;
}
this.http.post(urls.static, {valueSetCode: valueSetCode}).then(
(res: any) => {
if (res.success) {
Array.prototype.push.apply(data, res.data.data.data);
}
}
);
}
private headerFilterData(data: any) {
// filter数据,$~$
const hadFilter = Object.keys(this.filterSearch).map((x: string) => (this.filterSearch[x].length > 0) && x);
window.setTimeout(() => {
let colHad = {}, tempStr: string;
this.dataHeader.map((y: any) => {
if (!hadFilter.includes(y.colEname)) { this.headerFilter[y.colEname] = []; }
});
data.map((x: any) => {
this.dataHeader.map((y: any) => {
const yname = (x[y.colEname] === '' || x[y.colEname] == null || x[y.colEname] === undefined) ? '' : x[y.colEname];
tempStr = y.colEname + yname;
if (!colHad[tempStr] && !hadFilter.includes(y.colEname)) {
const xName = yname.toString().trim() || '';
this.headerFilter[y.colEname].push({'text': xName, 'value': xName});
colHad[tempStr] = true;
}
});
});
colHad = undefined; // 清空
});
}
private dataFilterResult() {
let u: any[] = [...this.allData];
let enterBool = false;
for (const i in this.filterSearch) {
if(this.filterSearch[i].length > 0) {
enterBool = true;
u = u.filter((m: any) => this.filterSearch[i].includes(m[i] || ''));
}
}
if (enterBool) { this.allData = [...u]; }
}
/**
* tab change事件
* @param param
*/
tabIndexChangeEvent(param: number) {
this.tabIndexChange.emit(param);
}
keyUp(data:any,tableData:any){
this.keyboardEmit.emit({keyboardData:data,tableData:tableData});
}
isNotSelected(data: any,colEname:string): boolean {
return !this.allData.some(value1 => value1[colEname] === data.value);
}
} | style({opacity:0,height:0,transform:'translate(30px,0)'}),
animate('0.3s ease-in',style({opacity:1,height:'auto',transform:'translate(0,0)',background:'#fffeee'}))
]),
transition(':leave',[
animate('0.3s ease-out',style({opacity:0,height:0,transform:'translate(30px,0)'})) | random_line_split |
table-form.component.ts | import {Component, OnInit, Input, Output, EventEmitter, TemplateRef, OnDestroy, ViewChild, Host} from '@angular/core';
// import { trigger, transition, animate,style} from '@angular/animations';
import {NzTabChangeEvent, NzTableComponent} from 'ng-zorro-antd';
import { UserinfoService } from '@service/userinfo-service.service';
import { GlobalService } from '@service/global-service.service';
import { urls } from '@model/url';
import { HttpUtilService } from '@service/http-util.service';
import { Utils } from '@util/utils';
import { toDemical } from '@validator/validator';
import { GridRowSource } from '../simple-page/grid-block/grid-row.directive';
import {Subscription, Observable, Subject} from 'rxjs';
@Component({
selector: 'app-table-form',
templateUrl: `./table-form.component.html`,
styleUrls: ['./table-form.component.css'],
providers: [
GridRowSource,
],
/* animations:[
trigger('gridAnimate',[
transition(':enter',[
style({opacity:0,height:0,transform:'translate(30px,0)'}),
animate('0.3s ease-in',style({opacity:1,height:'auto',transform:'translate(0,0)',background:'#fffeee'}))
]),
transition(':leave',[
animate('0.3s ease-out',style({opacity:0,height:0,transform:'translate(30px,0)'}))
])
])
]*/
})
export class TableFormComponent implements OnInit,OnDestroy {
// tempfindSet: any = { "parameter": "companyName", "parameterSend": "companyId", "name": "发票抬头", "formId": "company_pop" };
// 数据弹框传入参数配置格式
@ViewChild('nzTable') nzTableComponent: NzTableComponent;
private gloPageSub: Subscription;
private gloColSub: Subscription;
private gloSelectedSub: Subscription;
private _columns: any[];
tableHeight = '500px';
tableWidth = '100%';
allChecked = false;
indeterminate = false;
updateData: Array<any> = [];
dataHeader: Array<any> = []; // 表头
dataHeaderRefresh = false; // 判断表头是否刷新
pageIndex = 1; // 当前页码
_trSelected: boolean[] = [];
_gridOperate: any = {}; // 操作{show:false,title:'操作',width:'120px',template:''};
_calTotalFiled: any = {};
_totalPage = 0;
_pageSizeOptions: number[] = [30, 100, 500, 1000, 5000, 1000000]; // 1000000 显示全部
headerFilter: any = {}; // filter数据
filterSearch: any = {}; // filter条件
allData: any[] = [];
curFormId: string;
virtualMinBuffer: number;
virtualMaxBuffer: number;
private constData: any[] = [];
private currentChange = false;
private trSelectHand = false; // 判断是否
@Input() noGetStaticData: boolean; // 不用获取静态数据 设置True
@Input() closeBgTr = true; // tr换行背景色是否显示
@Input() selfTableHeight: string | null | undefined; // 表格高度自适应,如果不需要自适应可设置该参数 自定义table高度
@Input() extraTableHeight: number; // 正常模式页面额外的高度
@Output() currentTableHeightFun = new EventEmitter<number>(); // 如果表格高度是自适应的,抛出表格高度
@Output() inpEmit = new EventEmitter<any>();
@Input() tabArr: any[] = []; // tab头数据 不要用
@Input() tabIndex = 0; // 激活的面板,默认第一个
@Output() tabIndexChange: EventEmitter<number> = new EventEmitter<number>();
@Input() updateHeader = false; // 点击tab是否更新表头
@Output() tabResultFun = new EventEmitter<any>(); // tab头点击事件
@Input() set columns(val: any[]) { // 自定义表头 [{colCname:'中文名',colEname:'英文名',visible:'XSBJ10'|'XSBJ20'}]
this._columns = val;
if (Array.isArray(this._columns)) { this.columnsFilter(this._columns); }
}
get columns() {
return this._columns;
}
@Input() checkBoxWidth: string;
@Input() listWidth: string; // list宽度
@Input() trSelectedShow = false; // 选中是否显示样式
@Input() set trSelected(val: boolean[]) { // index以作区分
if (Array.isArray(val)) {
this._trSelected = val;
this.trSelectHand = true;
}
}
get trSelected() {
return this._trSelected;
}
@Input() formId: string; // formId 优先级高
@Input() gridId: string; // 一个页面多个列表必填,传gridId以进行区分,
@Input() isCheckBox = true; // 是否显示选择框,默认显示
@Input() disabledAllCheckBox:boolean = false;//是否禁止全选,默认允许
@Input() isMutli: boolean; // 选择框是否多选
@Input() nzShowPagination = true; // 列表是否显示分页器 ,默认显示
@Input() showCount = true; // 是否显示选择条数
@Input() pageSize = 100; // 条数 默认100
@Input() paginationRef: TemplateRef<any>;
@Input() tdTemplate:TemplateRef<any>;
@Input() set pageSizeOptions(val: number[]) { // 页码自定义
this._pageSizeOptions = val;
}
get pageSizeOptions() {
return this._pageSizeOptions;
}
@Input() colSet = true; // 列表设置是否显示
@Input() isResetFilter: boolean; // 是否对filte进行重置
// 列表数据
@Input() set dataSet(val: any[]) { // 只接收外部传进的值
if (this.isResetFilter) {
this.filterSearch = {}; // filter重置 已筛选的存储
}
this.currentChange = false;
this.allData = [];
this.constData = Array.isArray(val) ? val : []; // 数量不可变
window.setTimeout(() => {
this.allData = [...this.constData];
this.dataFilterResult();
this.headerFilterData(this.allData);
},300);
}
// 数据总数
@Input() set totalPage(val: number) {
this._totalPage = val;
} // 数据总数
get totalPage() {
return this._totalPage;
}
@Input() listLoading: boolean; // list加载
@Input() pageFun: boolean; // true页码条数自写
@Input() set gridOperate(val: any) { // 表格操作自定义
if (Utils.isObject(val)) {
this._gridOperate.show = val.show;
this._gridOperate.title = val.title || '操作';
this._gridOperate.width = val.width || '120px';
this._gridOperate.template = val.template;
}
} // 操作{show:false,title:'操作',width:'120px',template:''};
@Input() caculateEnameArr: any = [
];
@Input() set refresh(val:any){ //用于初始化表格中存在已选数据,选中条数的变化,想触发必须更改成不同值
this.refresh |
}
@Input() popData: any;
@Input() popTableData: any = [];
@Input() searchParamFiled: any; // pop弹框调接口要传的参数名
@Input() searchParamFiledNot: any; // pop弹框调接口要传的参数名不必传 {eName:ttrue},格式
@Input() tableTitle: string|TemplateRef<void>; // 表格标题
@Input() tableFooter: string|TemplateRef<void>; // 表格尾部
@Input() selectedChange = false; // 全选事件订阅
@Input() searchListFiled: any; // pop弹窗取当前数据哪个字段的值
@Input() staticCode: (data: string, item?: any) => Observable<any[]>; // 组件内静态数据自定义 [{name: null, value: null}]
@Input() needStaticCode: boolean; // 当没有apiParam时需要获取静态数据时设为true
@Output() updateDataResult = new EventEmitter<any>(); // 选中数据的结果以及点击列表选择框事件
@Output() inputBlurFun = new EventEmitter<any>(); // input Blur失焦
@Output() modelChange = new EventEmitter<any>(); // input值改变事件
@Output() listClick = new EventEmitter<any>(); // list点击事件
@Output() listOver = new EventEmitter<any>(); // list鼠标移入事件
@Output() listLeave = new EventEmitter<any>(); // list鼠标移出事件
@Output() pageIndexEmit = new EventEmitter<any>(); // 页码点击事件
@Output() pageSizeEmit = new EventEmitter<any>(); // 条数点击事件
@Output() userColumnsEmit = new EventEmitter<any>(); // 表头数据返回
@Output() currentPageDataChangeEmit = new EventEmitter<any>(); // 当前页面数据更新
@Output() selectedChangeEmit = new EventEmitter<any>(); // 勾选事件,抛出数据
@Output() keyboardEmit = new EventEmitter<any>()
constructor(private http: HttpUtilService, private info: UserinfoService, private globalSer: GlobalService,
@Host() private rowSource: GridRowSource) {
}
private restoreRender(item: any) {
if (item.type === 'template') {
const tplName = `tpl-${item.gridId}-${item.colEname}`;
item.template = this.rowSource.getRow(tplName);
if (!item.template) {
console.error(`template类型列配置错误!templateId:${tplName}`);
}
}
}
ngOnInit() {
this.virtualMinBuffer = parseInt(this.selfTableHeight || this.tableHeight, 0);
this.virtualMaxBuffer = this.virtualMinBuffer + 100;
this.curFormId = this.formId || this.info.APPINFO.formId;
if (!this._columns) {
this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId,gridId: this.gridId});
}
this.gloPageSub = this.globalSer.pageNumEmitter.subscribe( (x: any) => {
if (this.curFormId === x.formId) {
this.pageIndex = x.page;
}
});
this.gloColSub = this.globalSer.colChangeEmitter.subscribe(
(res: any) => this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId, gridId: this.gridId})
);
for (const c of this.caculateEnameArr) {
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
if (this.selectedChange) {
this.gloSelectedSub = this.globalSer.tableSelectedChangeEmitter.subscribe(
res => {
if (res.gridId === this.gridId) {
this.checkAll(res.checked)
}
}
);
}
this.globalSer.tableGridIdToSearchForm.emit({'gridId': this.gridId, 'formId': this.curFormId}); // 向查询区域传递gridId;
this.globalSer.routerEvent.subscribe((x: any) => {
if (x.isSys) {
this.nzTableComponent.cdkVirtualScrollViewport.scrollToIndex(0);
}
});
this.globalSer.pageNumEmitter.emit(
{formId: this.curFormId, gridId: this.gridId, page: 1, length: this.pageSize, search: true}
); // 初始化的条数
}
tableHeightFun(data: number) { // 表格自适应抛出数据
this.tableHeight = `${data}px`;
this.currentTableHeightFun.emit(data);
this.virtualMinBuffer = data;
this.virtualMaxBuffer = this.virtualMinBuffer + 100;
}
// 选择逻辑
refreshStatus(data?: any): void {
let currentChecked: boolean;
if (data && !this.isMutli) {
currentChecked = data.checked;
}
this.allData.map((y: any) => !this.isMutli && (y.checked = false));
if (data && !this.isMutli) {
data.checked = currentChecked; // 单选情况下设置checked;
}
this.allCheckBoxStyle();
this.updateData = this.constData.filter((x: any) => x.checked);
this.updateDataResult.emit(this.updateData); // 选中的结果数据
// 统计需要统计的字段
for (const c of this.caculateEnameArr){
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
// for (let elem of this.updateData) {
// // this._calTotalFiled[c.field] += Number(elem[c.field]);
// this._calTotalFiled[c.field] = toDemical(Utils.add(this._calTotalFiled[c.field], elem[c.field]),c.demLength);
// }
// 2019-1-14郑鑫修改, 将每次求和都四舍五入改为将最后结果四舍五入
this._calTotalFiled[c.field] = toDemical(
this.updateData.map(item => Number(item[c.field]) || 0).reduce((acc, cur) => acc + cur, 0), c.demLength
);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
if (data) {
this.selectedChangeEmit.emit([data]);
} else {
this.selectedChangeEmit.emit(this.allData);
}
}
// Derives the header checkbox state from the selectable (non-disabled) rows:
// fully checked, fully unchecked, or indeterminate (partially checked).
private allCheckBoxStyle() {
  const selectable = this.allData.filter((row: any) => !row.disabled);
  const everyChecked = selectable[0] ? selectable.every((row: any) => row.checked === true) : false;
  const noneChecked = selectable.every((row: any) => !row.checked);
  this.allChecked = everyChecked;
  this.indeterminate = !everyChecked && !noneChecked;
}
checkAll(value: boolean): void {
this.allData = this.allData ? this.allData : [];
this.allData.forEach(data => {
if (!data.disabled) {
data.checked = value;
}
});
this.refreshStatus();
}
// 表头获取
// Fetches the column-header definitions for this grid from the server,
// parses each column's JSON `apiParameter`, runs them through
// `columnsFilter`, and lazily loads option lists for visible select columns.
getUserColumns(param: any): void { // fetch table headers
this.http.post(urls.columns, param).then( (res: any) => { // fetch table headers
if (res.success) {
let dataHeader: any;
dataHeader = res.data.data;
// `apiParameter` arrives as a JSON string (or empty) — parse it in place.
dataHeader.map((item: any) => {
item.apiParameter = item.apiParameter && JSON.parse(item.apiParameter) || {};
});
this.columnsFilter(dataHeader, 'http');
this.dataHeader.forEach(
item => {
if (item.type === 'select' && item.visible === 'XSBJ10' ) { // visible select column: load its options
item.apiParameter.optionList =item.apiParameter.optionList || [];
// Setting `noSetCode: true` disables the static-data fetch inside the component.
// tslint:disable-next-line: max-line-length When no valueSetCode is set but staticCode is needed, the `edit` condition applies and `needStaticCode` must be true
if ((item.apiParameter.valueSetCode || (item.edit === 'BJBJ10' && this.needStaticCode)) && !item.noSetCode && !this.noGetStaticData) {
this.getSelectData(item.apiParameter.optionList,item.apiParameter.valueSetCode, item);
}
}
this.restoreRender(item);
}
);
}
});
}
// 表头获取数据刷选
// Filters the raw header list down to visible columns ('XSBJ10'), normalizes
// each column width to `<n>px` (default 120), and recomputes the total table
// width including the checkbox and operation columns. When the headers came
// from the server (`type === 'http'`) they are re-emitted to consumers.
columnsFilter(dataHeader: any[], type?: string) {
let len = 0, width = 0;
this.dataHeader = dataHeader.filter((x: any) => x.visible === 'XSBJ10');
this.dataHeader.map((item: any) => {
width = parseFloat(item.width) ? parseFloat(item.width) : 120;
len = Utils.add(len, width);
item.width = `${width}px`;
});
const checkBoxWidth = this.isCheckBox ? 50 : 0;
const gridOperateWidth = this._gridOperate.show ? parseFloat(this._gridOperate.width) : 0;
this.tableWidth = `${ Utils.add(Utils.add(len, checkBoxWidth), gridOperateWidth)}px`; // include the selection checkbox column by default
// Emit the processed header data back to the caller.
if (type === 'http') { this.userColumnsEmit.emit(this.dataHeader); }
this.dataHeaderRefresh = true; // flag: headers were just refreshed
// Clear the refresh flag on the next tick so change detection sees a pulse.
window.setTimeout(() => {
this.dataHeaderRefresh = false;
});
}
// 改变页码
pageChange(n: number): void {
this.pageIndex = n;
!this.pageFun ? this.globalSer.pageNumEmitter.emit({formId: this.curFormId, gridId: this.gridId, page: n,
// tslint:disable-next-line:no-unused-expression
length: this.pageSize}) : null;
this.pageIndexEmit.emit(n);
}
// 改变条数
pageSizeFun(n: number): void {
this.pageIndex = 1;
// tslint:disable-next-line:no-unused-expression
!this.pageFun ? this.globalSer.pageNumEmitter.emit({formId: this.curFormId, gridId: this.gridId, page: 1, length: n}) : null;
this.pageSizeEmit.emit(n);
}
// 当前展示数据改变要做的处理
// Resets selection-related state whenever the visible page data changes
// (new page, new data load). Skipped entirely when `currentChange` is set,
// i.e. when the change was triggered internally (sort/filter) rather than by
// fresh data. Also zeroes the per-column summary totals.
currentPageDataChange(data: any): void {
if (this.currentChange) {
return;
}
this.allChecked = false;
this.indeterminate = false;
this.allCheckBoxStyle();
this.updateData = [];
if (this.constData && this.constData[0]) {
this.updateData = this.constData.filter((x: any) => x.checked);
}
if (!this.trSelectHand) { this._trSelected = []; } // when the trSelected input is not managed by the caller, clear row-highlight styles on refresh
this.currentPageDataChangeEmit.emit(data);
// Reset all summary-column totals to zero (formatted to their precision).
for (const c of this.caculateEnameArr) {
this._calTotalFiled[c.field] = toDemical(0, c.demLength);
c.revStr = `${c.tipInfo} ${this._calTotalFiled[c.field]} ${c.tipInfoType}`;
}
}
// tab头点击
// Tab-header click handler (marked "do not use" by the original author).
// Emits the clicked tab, then — after a short delay — refetches the column
// headers when `updateHeader` is set and no explicit `_columns` input exists.
tabClick(data: any): void { // do not use
this.tabResultFun.emit(data);
// Delay so the tab switch settles before a potential header refetch.
setTimeout( () => {
if (this.updateHeader) {
if (!this._columns) {
this.getUserColumns({formId: this.curFormId, userId: this.info.APPINFO.USER.userId, gridId: this.gridId}); // parameters TBD
}
}
}, 100);
}
// input失焦事件
// Relays an input blur event together with the row index it occurred on.
inputBlur(data: any, dex: number) {
  this.inputBlurFun.emit({ data, index: dex });
}
// input实时改变事件
// Relays a live model change from a cell editor, with its column header,
// input reference, and row index.
onChange(val: string, data: any, n: any, h: any, ind: any) {
  const payload = { val, data, input: n, header: h, index: ind };
  this.modelChange.emit(payload);
}
// list鼠标移入事件
// Relays a mouse-enter event on a list row.
listOverFun(data: any, index: number) {
  this.listOver.emit({ data, index });
}
// list鼠标移出事件
// Relays a mouse-leave event on a list row.
listLeaveFun(data: any, index: number) {
  this.listLeave.emit({ data, index });
}
// 表格列表点击事件
// Handles a table-cell click: highlight exactly the clicked row, emit the
// row data, and refresh the checkbox/selection bookkeeping. Ignored when
// `bol` is false (click-selection disabled for this cell).
tdClick(data: any, index: number, bol: boolean) {
  if (!bol) {
    return;
  }
  const highlight: boolean[] = [];
  highlight[index] = true;
  this._trSelected = highlight;
  this.listClick.emit(data);
  this.allCheckBoxStyle();
  const hasRows = this.constData && this.constData[0];
  this.updateData = hasRows ? this.constData.filter((row: any) => row.checked) : [];
}
// 操作
// Relays an embedded-input event with safe defaults for missing fields,
// tagged with its row index, row id, and column english name.
inpEmitFun(data: any, dex: any, rowid: any, n: any) {
  const payload = {
    inpName: data.inpName || '',
    inpValue: data.inpValue || '',
    selData: data.selData || {},
    index: dex,
    rowid,
    eName: n.colEname,
  };
  this.inpEmit.emit(payload);
}
//
// Runs when a column filter dropdown closes (`bool` is the open state).
// Collects the checked filter values for that column and applies them,
// scrolling the virtual viewport back to the top first.
filterChange(bool: Boolean, viewer: any, header: any){
if (!bool) {
viewer.scrollToIndex(0);
const name = header.colEname;
header._checked = [];
this.headerFilter[name].forEach(x => x.checked && header._checked.push(x.text));
this.filterData(header._checked, name);
}
}
// sort 排序
// Column sort handler. Cycles the sort direction (null -> descend ->
// ascend -> null) and re-sorts `allData` in place by the column's english
// name. Numbers compare numerically; strings use a locale-aware ('zh')
// compare; other values fall back to JSON-string comparison.
//
// Fix: the fallback comparator never returned 0 for equal keys (it returned
// +1/-1 arbitrarily), violating the Array.prototype.sort comparator
// contract and making the ordering of equal elements engine-dependent.
sort(flag: any, index: any) {
  // flag is the current sort type: null | 'descend' | 'ascend'
  const type = !flag ? 'descend' : flag === 'descend' ? 'ascend' : null;
  index._sortType = type;
  // if (!type){
  //   this.allData = [...this.constData];
  //   return;
  // }
  this.allData = this.allData.sort((x: any, y: any) => {
    let a = x[index.colEname], b = y[index.colEname];
    if (index.type === 'number') {
      a = Number(a || 0);
      b = Number(b || 0);
    } else {
      a = JSON.stringify(a || null);
      b = JSON.stringify(b || null);
      if (index.type === 'string') {
        return (type === 'ascend') ? a.localeCompare(b, 'zh') : b.localeCompare(a, 'zh');
      }
    }
    if (a === b) {
      return 0; // honor the comparator contract for equal keys
    }
    return (type === 'ascend') ? (a > b ? 1 : -1) : (b > a ? 1 : -1);
  });
  this.currentChange = true;
  this.allData = [...this.allData]; // new reference so change detection fires
}
// filter 关闭
// Closes a column filter popup. On 'confirm', collects the checked values
// and applies them; on 'reset', clears every checkmark and the search box
// (which applies an empty filter, restoring all rows).
filterClose(flag: string, header: any){
const name = header.colEname;
header._checked = [];
if (flag === 'confirm') {
this.headerFilter[name].forEach(x => x.checked && header._checked.push(x.text));
} else if (flag === 'reset') {
this.headerFilter[name].forEach(x => x.checked = false);
header._filterVal = null;
}
header._filterVisible = false;
this.filterData( header._checked, name);
}
// filter 刷选
// Applies a column filter. `data` is the list of selected filter values for
// column `name` (an empty/invalid list resets that column's filter and the
// selection state). After filtering the target column against `constData`,
// it re-applies every OTHER column's active filter so multi-column filters
// intersect, then rebuilds the header filter options from the surviving rows.
filterData(data: any[]|any, name: string){
this.filterSearch[name] = [...( Array.isArray(data) && data || [])];
if (!Array.isArray(data) || (data.length < 1) ) { // reset this column
this.updateData.forEach((item: any) => item.checked = false); // reset selection
this.updateData = [];
this.allChecked = false;
this.indeterminate = false;
this.filterSearch[name] = [];
this.allData = [...this.constData];
} else { // confirm: keep rows whose (trimmed, stringified) value is selected
this.allData = this.constData.filter((x: any) => {
const xName = x[name] !== null && x[name] !== undefined && x[name].toString().trim() || '';
return this.filterSearch[name].indexOf(xName) !== -1;
})
}
// Intersect with the filters already active on the other columns.
const arr = [...this.allData];
this.allData.map((x: any, index: number) => {
Object.keys(this.filterSearch).map((y: string) => {
if (y !== name.trim()) {
const xName = x[y] !== null && x[y] !== undefined && x[y].toString().trim() || '';
if ( (this.filterSearch[y].length > 0) && (this.filterSearch[y].indexOf(xName) === -1) ) {
arr[index] = undefined; // marked for removal below
}
}
});
});
this.allData = arr.filter((x: any) => x !== undefined);
this.currentChange = true;
this.headerFilterData(this.allData);
}
// colDrag 列拖拽
// Column-drag resize: swap the dragged column's old width for the new one
// (`data.nex`, px) and adjust the total table width by the difference.
coldrag(data: any) {
  const oldColWidth = parseFloat(this.dataHeader[data.index].width);
  const newTableWidth = parseFloat(this.tableWidth) - oldColWidth + data.nex;
  this.tableWidth = `${newTableWidth}px`;
  this.dataHeader[data.index].width = `${data.nex}px`;
}
// Tear down the global-service subscriptions created during init.
ngOnDestroy() {
  for (const sub of [this.gloColSub, this.gloPageSub, this.gloSelectedSub]) {
    if (sub) {
      sub.unsubscribe();
    }
  }
}
// static 静态数据获取
// Populates a select column's option list. When the host supplied a
// `staticCode` provider it is used (observable-based); otherwise the options
// are fetched from the static-data endpoint by `valueSetCode`. Results are
// appended into the existing `data` array in place so references held by
// the template stay valid.
getSelectData(data: Array<any>, valueSetCode: string, item: any) {
if (this.staticCode) {
this.staticCode(valueSetCode, item).subscribe( x => {
Array.prototype.push.apply(data, x);
})
return;
}
this.http.post(urls.static, {valueSetCode: valueSetCode}).then(
(res: any) => {
if (res.success) {
Array.prototype.push.apply(data, res.data.data.data);
}
}
);
}
// Rebuilds the per-column filter option lists from the given rows,
// deduplicating values via a temporary `colEname + value` lookup. Columns
// that already have an active filter are left untouched so their option
// lists keep showing the full choice set. Runs on the next tick.
private headerFilterData(data: any) {
// filter data, $~$
const hadFilter = Object.keys(this.filterSearch).map((x: string) => (this.filterSearch[x].length > 0) && x);
window.setTimeout(() => {
let colHad = {}, tempStr: string;
// Reset option lists only for columns without an active filter.
this.dataHeader.map((y: any) => {
if (!hadFilter.includes(y.colEname)) { this.headerFilter[y.colEname] = []; }
});
data.map((x: any) => {
this.dataHeader.map((y: any) => {
const yname = (x[y.colEname] === '' || x[y.colEname] == null || x[y.colEname] === undefined) ? '' : x[y.colEname];
tempStr = y.colEname + yname;
if (!colHad[tempStr] && !hadFilter.includes(y.colEname)) {
const xName = yname.toString().trim() || '';
this.headerFilter[y.colEname].push({'text': xName, 'value': xName});
colHad[tempStr] = true;
}
});
});
colHad = undefined; // release the dedupe lookup
});
}
// Re-applies every active column filter to `allData`, intersecting them.
// Leaves `allData` untouched when no filter is active.
private dataFilterResult() {
  let filtered: any[] = [...this.allData];
  let anyFilterActive = false;
  for (const field in this.filterSearch) {
    if (this.filterSearch[field].length > 0) {
      anyFilterActive = true;
      filtered = filtered.filter((row: any) => this.filterSearch[field].includes(row[field] || ''));
    }
  }
  if (anyFilterActive) {
    this.allData = [...filtered];
  }
}
/**
* tab change事件
* @param param
*/
/** Re-emits a tab index change to this component's consumers. */
tabIndexChangeEvent(param: number) {
  const nextIndex = param;
  this.tabIndexChange.emit(nextIndex);
}
// Relays a keyboard event together with the current table data.
keyUp(data: any, tableData: any) {
  this.keyboardEmit.emit({ keyboardData: data, tableData });
}
// True when no row's `colEname` field equals the given option's value,
// i.e. the option is not yet used anywhere in the table.
isNotSelected(data: any, colEname: string): boolean {
  const matchesOption = (row: any) => row[colEname] === data.value;
  return !this.allData.some(matchesOption);
}
}
| Status(); | identifier_name |
itemUpdates.ts | /**
* @file
* Support for listening in on new items
*/
import { Store } from 'redux';
import * as _ from 'lodash';
import { LangType } from '../api/apiUrls';
import * as schemas from '../api/schemas';
import * as equipmentSchemas from '../api/schemas/equipment';
import * as gachaSchemas from '../api/schemas/gacha';
import { enlir } from '../data';
import { dressRecordsById } from '../data/dressRecords';
import { EnlirLegendMateria, EnlirRealm, EnlirRelic, EnlirSoulBreak } from '../data/enlir';
import { items, ItemType, ItemTypeLookup } from '../data/items';
import { IState } from '../reducers';
import { logger } from '../utils/logger';
import { getRequestLang, Handler, HandlerRequest } from './common';
/**
* Locally updated items. These are mainly tracked so that we can avoid
* displaying item updates more than once. As a hack, the drop item tracker
* also checks this so that it can handle items like Rush Tickets from the Fat
* Black Chocobo events; those are added too frequently for our manual items.ts
* updates to necessarily keep up.
*
* Note that these are kept in memory only; persisting it would add
* complications for knowing when to expire entries, when to use it versus the
* authoritative items.ts data, etc.
*
* To limit the hack, this should only be accessed from the proxy back-end, not
* from the Electron front-end.
*/
let localItems = _.clone(items);
export const localItemsById = _.zipObject(
items.map((i) => i.id),
localItems,
);
interface PrizeItem {
type_name: schemas.ItemTypeName;
name: string;
id: number;
image_path: string;
}
/** Logs a warning for an item that is absent from our static data. */
function showUnknownItem(item: PrizeItem) {
  const kind = item.type_name.toLowerCase();
  logger.warn(`Unknown ${kind}: ${item.name} (ID ${item.id})`);
}
/** Records a newly seen item in the in-memory item list and its ID index. */
function addLocalItem({ name, type_name, id }: PrizeItem) {
  const entry = { name, type: type_name.toLowerCase() as ItemType, id };
  localItemsById[id] = entry;
  localItems = _.sortBy([...localItems, entry], 'id');
}
/** Logs the items.ts snippet to paste for a newly discovered item. */
function showLocalItem(item: PrizeItem) {
  const type = ItemTypeLookup[item.type_name] || item.type_name;
  const snippet = `{\n name: "${item.name}",\n type: ItemType.${type},\n id: ${item.id}\n},`;
  logger.info('New (previously unknown) item:\n' + snippet);
}
/** Logs the dressRecords.ts snippet to paste for a newly discovered dress record. */
function showLocalDressRecord(record: {
  dress_record_id: number;
  name: string;
  buddy_id: number;
}) {
  const { dress_record_id, name, buddy_id } = record;
  const snippet = `{\n name: "${name}",\n id: ${dress_record_id},\n characterId: ${buddy_id},\n},`;
  logger.info('New (previously unknown) dress record:\n' + snippet);
}
/** Warns if none of the given Enlir data sources contains this item's ID. */
function checkKnownEnlir(item: PrizeItem, ...enlirData: any) {
  const known = _.some(enlirData, (source) => source[item.id] != null);
  if (!known) {
    showUnknownItem(item);
  }
}
/**
 * Reports a dress record we do not know yet, recovering the owning
 * character's ID from the image path (first numeric path segment).
 */
function checkKnownDressRecord(item: PrizeItem) {
  if (dressRecordsById[item.id] != null) {
    return;
  }
  const match = item.image_path.match(/(\d+)\/\d+\/\d+\.png/);
  showLocalDressRecord({
    dress_record_id: item.id,
    name: item.name,
    buddy_id: match ? +match[1] : 0,
  });
}
/** Warns about, records, and prints a snippet for an item missing from items.ts. */
function checkKnownItems(item: PrizeItem) {
  if (localItemsById[item.id] != null) {
    return;
  }
  // Music tickets are regularly released and are easy to dynamically add,
  // so we won't try tracking them ourselves.
  if (item.type_name === 'MUSIC_TICKET') {
    return;
  }
  showUnknownItem(item);
  addLocalItem(item);
  showLocalItem(item);
}
function | (item: PrizeItem) {
if (item.type_name === 'BEAST') {
checkKnownEnlir(item, enlir.magicites);
} else if (item.type_name === 'EQUIPMENT') {
checkKnownEnlir(item, enlir.relics, enlir.heroArtifacts);
} else if (item.type_name === 'ABILITY') {
checkKnownEnlir(item, enlir.abilities);
} else if (
item.type_name === 'BUDDY' ||
item.type_name === 'MEMORY_CRYSTAL' ||
item.type_name === 'RECORD_MATERIA'
) {
// FIXME: Need an internal-id-indexed version of characters, memory crystals, record materia
} else if (item.type_name === 'DRESS_RECORD') {
checkKnownDressRecord(item);
} else {
checkKnownItems(item);
}
}
/** Runs the known-item check over a party inventory list, in ID order. */
function checkPartyItems(
  partyItems: Array<{ name: string; id: number; image_path: string }>,
  type: ItemType,
) {
  const typeName = type.toUpperCase() as schemas.ItemTypeName;
  for (const entry of _.sortBy(partyItems, 'id')) {
    checkKnownItems({
      name: entry.name,
      id: entry.id,
      type_name: typeName,
      image_path: entry.image_path,
    });
  }
}
/** Reports any dress record in the party list that our static data lacks. */
function checkPartyDressRecords(data: schemas.PartyList | schemas.PartyListOther) {
  const ordered = _.sortBy(data.dress_records, 'dress_record_id');
  for (const record of ordered) {
    if (dressRecordsById[record.dress_record_id] == null) {
      showLocalDressRecord(record);
    }
  }
}
/** Runs the known-item checks over every inventory category of a party list. */
function checkAllPartyItems(data: schemas.PartyList | schemas.PartyListOther) {
  // No need to check isRecordDungeonPartyList, as long as we're only
  // looking for new / previously unknown items.
  const categories: Array<[any[], ItemType]> = [
    [data.equipment_hyper_evolve_materials, ItemType.DarkMatter],
    [data.equipment_sp_materials, ItemType.UpgradeMaterial],
    [data.materials, ItemType.Orb],
    [data.grow_eggs, ItemType.GrowthEgg],
    [data.sphere_materials, ItemType.Mote],
  ];
  for (const [list, type] of categories) {
    checkPartyItems(list, type);
  }
  checkPartyDressRecords(data);
}
/** Checks every prize awarded by a battle win against our known items. */
function handleWinBattle(data: schemas.WinBattle) {
  _.forEach(data.result.prize_master, (prize) => {
    const normalized = {
      id: +prize.item_id,
      type_name: prize.type_name,
      name: prize.name,
      image_path: prize.image_path,
    };
    checkItem(normalized);
  });
}
interface EnlirEntity {
id: number;
name: string;
gl: boolean;
realm: EnlirRealm | null;
}
interface CheckedEntity<T extends EnlirEntity> {
enlirItem: T;
updateRelease: boolean;
updateName: string | undefined;
}
/**
 * Compares one GL game entity against its Enlir record and reports
 * discrepancies via `callback`.
 *
 * @param callback Receives one human-readable message per discrepancy.
 * @param item     The entity as seen in GL game data.
 * @param enlirItems Enlir records indexed by the same ID space.
 * @param description Entity kind for messages (e.g. 'relic', 'soul break').
 * @param source   Where the entity was seen (for the unknown-ID message).
 * @param trim     Optional name normalizer applied before comparing names
 *                 (default: trailing-whitespace trim).
 * @returns The matched Enlir record plus flags for what needs updating,
 *          or null when the ID is unknown to Enlir.
 */
function compareGlEntity<T1 extends { id: number; name: string }, T2 extends EnlirEntity>(
callback: (message: string) => void,
item: T1,
enlirItems: { [id: number]: T2 },
description: string,
source: string,
trim?: (name: string) => string,
): CheckedEntity<T2> | null {
const enlirItem = enlirItems[item.id];
if (!enlirItem) {
callback(`Item update: Unknown ${description} ID ${item.id}, ${item.name}, from ${source}`);
return null;
}
// Seen in GL data but Enlir still marks it JP-only: flag for release.
let updateRelease = false;
if (!enlirItem.gl) {
callback(`Item update: ${description} ID ${item.id}, ${item.name}, is now released in global`);
updateRelease = true;
}
const trimmedName = trim ? trim(item.name) : item.name.trimRight();
// Names differ after normalization: flag for rename.
let updateName: string | undefined;
if (enlirItem.name !== trimmedName) {
callback(
`Item update: ${description} ID ${item.id}, ${item.name}, ` +
`is named ${enlirItem.name} in Enlir`,
);
updateName = item.name;
}
return {
enlirItem,
updateRelease,
updateName,
};
}
/** Strips a trailing " (Realm)" suffix matching the entity's realm, if any. */
function removeRealm<T extends EnlirEntity>(enlirItem: T, name: string) {
  const { realm } = enlirItem;
  if (!realm) {
    return name;
  }
  const suffix = new RegExp(' \\(' + _.escapeRegExp(realm) + '\\)$');
  return name.replace(suffix, '');
}
/**
 * Emits ready-to-run `update-enlir.ts` command lines for the checked
 * entities: one `releaseInGl` command for all entities newly seen in GL,
 * and one `rename` command listing id/"name" pairs for entities whose
 * in-game name differs from Enlir's (realm suffix stripped first).
 */
function showUpdateCommands<T extends EnlirEntity>(
checked: Array<CheckedEntity<T>>,
tabName: string,
callback: (message: string) => void,
) {
const releaseIds = checked.filter((i) => i.updateRelease).map((i) => i.enlirItem.id);
if (releaseIds.length) {
callback(`update-enlir.ts releaseInGl ${tabName} ${releaseIds.join(' ')}`);
}
// Pairs of [id, "new name"], flattened into the command's argument list.
const renames = checked
.filter((i) => i.updateName)
.map(
(i) =>
[i.enlirItem.id, '"' + removeRealm(i.enlirItem, i.updateName!) + '"'] as [number, string],
);
if (renames.length) {
callback(`update-enlir.ts rename ${tabName} ${_.flatten(renames).join(' ')}`);
}
}
/**
 * Compares a GL relic-draw equipment list (plus each relic's soul break and
 * legend materia) against Enlir, reporting discrepancies and then emitting
 * update-enlir.ts commands per category via `callback`. A relic unknown to
 * Enlir is skipped entirely (its soul break / legend materia are not checked).
 */
function checkGlRelicDrawEquipment(
equipmentList: equipmentSchemas.Equipment[],
callback: (message: string) => void,
) {
const checkedRelics: Array<CheckedEntity<EnlirRelic>> = [];
const checkedSoulBreaks: Array<CheckedEntity<EnlirSoulBreak>> = [];
const checkedLegendMateria: Array<CheckedEntity<EnlirLegendMateria>> = [];
// Normalize GL names: drop a trailing realm tag like " (XII)" / " (Type-0)",
// and collapse " (XII-Something)" to " (Something)".
const trimRealm = (name: string) => {
return name
.replace(/ \(([IVX]+|Type-0|FFT|Beyond)\) *$/, '')
.replace(/ \([IVX]+-(.*?)\) *$/, ' ($1)');
};
for (const equipment of equipmentList) {
const { id, name, soul_strike, legend_materia } = equipment;
const relicName = `relic ${name} (ID ${id})`;
const compareRelic = compareGlEntity(
callback,
equipment,
enlir.relics,
'relic',
relicName,
trimRealm,
);
if (!compareRelic) {
continue;
}
checkedRelics.push(compareRelic);
if (soul_strike) {
const compareSoulBreak = compareGlEntity(
callback,
soul_strike,
enlir.soulBreaks,
'soul break',
relicName,
);
if (compareSoulBreak) {
checkedSoulBreaks.push(compareSoulBreak);
}
}
if (legend_materia) {
const compareLegendMateria = compareGlEntity(
callback,
legend_materia,
enlir.legendMateria,
'legend materia',
relicName,
trimRealm,
);
if (compareLegendMateria) {
checkedLegendMateria.push(compareLegendMateria);
}
}
}
showUpdateCommands(checkedRelics, 'relics', callback);
showUpdateCommands(checkedSoulBreaks, 'soulBreaks', callback);
showUpdateCommands(checkedLegendMateria, 'legendMateria', callback);
}
/**
 * Collects the featured equipment from every already-opened banner in a
 * gacha/show response. `opened_at` is in seconds; `currentTime` is in ms.
 */
function getGachaShowEquipment(data: gachaSchemas.GachaShow, currentTime: number) {
  const result: equipmentSchemas.Equipment[] = [];
  const nowSeconds = currentTime / 1000;
  for (const series of data.series_list) {
    if (series.opened_at > nowSeconds) {
      continue; // banner not yet open
    }
    for (const banner of series.banner_list) {
      if (banner.equipment) {
        result.push(banner.equipment);
      }
    }
  }
  return result;
}
/** Flattens all equipment lists out of a gacha/probability response. */
function getGachaProbabilitiesEquipment(data: gachaSchemas.GachaProbability) {
  const withEquipment = _.values(data).filter((entry) => entry.equipments);
  return _.flatten(withEquipment.map((entry) => entry.equipments));
}
/**
 * Runs the GL-vs-Enlir equipment comparison for a request, collecting all
 * messages first so they can be logged in sorted order. JP requests are
 * ignored — Enlir release tracking only applies to global.
 */
function handleGlRelicDrawEquipment(
  request: HandlerRequest,
  getEquipment: () => equipmentSchemas.Equipment[],
) {
  if (getRequestLang(request) !== LangType.Gl) {
    return;
  }
  const messages: string[] = [];
  checkGlRelicDrawEquipment(getEquipment(), (message) => messages.push(message));
  for (const line of messages.sort()) {
    logger.info(line);
  }
}
// Proxy handler map: each key is an intercepted FFRK endpoint; each value
// inspects the response for new/unknown items or GL releases.
const itemUpdatesHandler: Handler = {
// Dungeon listings: check both prize tables and battle drop items.
dungeons(data: schemas.Dungeons) {
for (const d of data.dungeons) {
_.forEach(d.prizes, (prizeList) => {
for (const prize of prizeList) {
checkItem(prize);
}
});
for (const dropItem of d.battle_drop_items) {
checkItem(dropItem);
}
}
},
'party/list': checkAllPartyItems,
'party/list_other': checkAllPartyItems,
// The battle-win endpoint has appeared under several names across versions.
win_battle: handleWinBattle,
battle_win: handleWinBattle,
'battle/win': handleWinBattle,
// GL banner listings: compare featured equipment against Enlir.
'gacha/show'(data: gachaSchemas.GachaShow, store: Store<IState>, request: HandlerRequest) {
handleGlRelicDrawEquipment(request, () =>
getGachaShowEquipment(data, store.getState().timeState.currentTime),
);
},
'gacha/probability'(
data: gachaSchemas.GachaProbability,
store: Store<IState>,
request: HandlerRequest,
) {
handleGlRelicDrawEquipment(request, () => getGachaProbabilitiesEquipment(data));
},
};
| checkItem | identifier_name |
itemUpdates.ts | /**
* @file
* Support for listening in on new items
*/
import { Store } from 'redux';
import * as _ from 'lodash';
import { LangType } from '../api/apiUrls';
import * as schemas from '../api/schemas';
import * as equipmentSchemas from '../api/schemas/equipment';
import * as gachaSchemas from '../api/schemas/gacha';
import { enlir } from '../data';
import { dressRecordsById } from '../data/dressRecords';
import { EnlirLegendMateria, EnlirRealm, EnlirRelic, EnlirSoulBreak } from '../data/enlir';
import { items, ItemType, ItemTypeLookup } from '../data/items';
import { IState } from '../reducers';
import { logger } from '../utils/logger';
import { getRequestLang, Handler, HandlerRequest } from './common';
/**
* Locally updated items. These are mainly tracked so that we can avoid
* displaying item updates more than once. As a hack, the drop item tracker
* also checks this so that it can handle items like Rush Tickets from the Fat
* Black Chocobo events; those are added too frequently for our manual items.ts
* updates to necessarily keep up.
*
* Note that these are kept in memory only; persisting it would add
* complications for knowing when to expire entries, when to use it versus the
* authoritative items.ts data, etc.
*
* To limit the hack, this should only be accessed from the proxy back-end, not
* from the Electron front-end.
*/
let localItems = _.clone(items);
export const localItemsById = _.zipObject(
items.map((i) => i.id),
localItems,
);
interface PrizeItem {
type_name: schemas.ItemTypeName;
name: string;
id: number;
image_path: string;
}
function showUnknownItem(item: PrizeItem) {
logger.warn(`Unknown ${item.type_name.toLowerCase()}: ${item.name} (ID ${item.id})`);
}
function addLocalItem({ name, type_name, id }: PrizeItem) {
const newItem = { name, type: type_name.toLowerCase() as ItemType, id };
localItems.push(newItem);
localItems = _.sortBy(localItems, 'id');
localItemsById[id] = newItem;
}
function showLocalItem(item: PrizeItem) {
const type = ItemTypeLookup[item.type_name] || item.type_name;
logger.info(
'New (previously unknown) item:\n' +
`{\n name: "${item.name}",\n type: ItemType.${type},\n id: ${item.id}\n},`,
);
}
function showLocalDressRecord({
dress_record_id,
name,
buddy_id,
}: {
dress_record_id: number;
name: string;
buddy_id: number;
}) {
logger.info(
'New (previously unknown) dress record:\n' +
`{\n name: "${name}",\n id: ${dress_record_id},\n characterId: ${buddy_id},\n},`,
);
}
function checkKnownEnlir(item: PrizeItem, ...enlirData: any) |
function checkKnownDressRecord(item: PrizeItem) {
if (dressRecordsById[item.id] == null) {
const match = item.image_path.match(/(\d+)\/\d+\/\d+\.png/);
const buddyId = match ? +match[1] : 0;
showLocalDressRecord({ dress_record_id: item.id, name: item.name, buddy_id: buddyId });
}
}
function checkKnownItems(item: PrizeItem) {
if (localItemsById[item.id] == null) {
if (item.type_name === 'MUSIC_TICKET') {
// Music tickets are regularly released and are easy to dynamically add,
// so we won't try tracking them ourselves.
return;
}
showUnknownItem(item);
addLocalItem(item);
showLocalItem(item);
}
}
function checkItem(item: PrizeItem) {
if (item.type_name === 'BEAST') {
checkKnownEnlir(item, enlir.magicites);
} else if (item.type_name === 'EQUIPMENT') {
checkKnownEnlir(item, enlir.relics, enlir.heroArtifacts);
} else if (item.type_name === 'ABILITY') {
checkKnownEnlir(item, enlir.abilities);
} else if (
item.type_name === 'BUDDY' ||
item.type_name === 'MEMORY_CRYSTAL' ||
item.type_name === 'RECORD_MATERIA'
) {
// FIXME: Need an internal-id-indexed version of characters, memory crystals, record materia
} else if (item.type_name === 'DRESS_RECORD') {
checkKnownDressRecord(item);
} else {
checkKnownItems(item);
}
}
function checkPartyItems(
partyItems: Array<{ name: string; id: number; image_path: string }>,
type: ItemType,
) {
for (const i of _.sortBy(partyItems, 'id')) {
checkKnownItems({
name: i.name,
id: i.id,
type_name: type.toUpperCase() as schemas.ItemTypeName,
image_path: i.image_path,
});
}
}
function checkPartyDressRecords(data: schemas.PartyList | schemas.PartyListOther) {
for (const i of _.sortBy(data.dress_records, 'dress_record_id')) {
if (dressRecordsById[i.dress_record_id] == null) {
showLocalDressRecord(i);
}
}
}
function checkAllPartyItems(data: schemas.PartyList | schemas.PartyListOther) {
// No need to check isRecordDungeonPartyList, as long as we're only
// looking for new / previously unknown items.
checkPartyItems(data.equipment_hyper_evolve_materials, ItemType.DarkMatter);
checkPartyItems(data.equipment_sp_materials, ItemType.UpgradeMaterial);
checkPartyItems(data.materials, ItemType.Orb);
checkPartyItems(data.grow_eggs, ItemType.GrowthEgg);
checkPartyItems(data.sphere_materials, ItemType.Mote);
checkPartyDressRecords(data);
}
function handleWinBattle(data: schemas.WinBattle) {
_.forEach(data.result.prize_master, (item) => {
checkItem({
id: +item.item_id,
type_name: item.type_name,
name: item.name,
image_path: item.image_path,
});
});
}
interface EnlirEntity {
id: number;
name: string;
gl: boolean;
realm: EnlirRealm | null;
}
interface CheckedEntity<T extends EnlirEntity> {
enlirItem: T;
updateRelease: boolean;
updateName: string | undefined;
}
function compareGlEntity<T1 extends { id: number; name: string }, T2 extends EnlirEntity>(
callback: (message: string) => void,
item: T1,
enlirItems: { [id: number]: T2 },
description: string,
source: string,
trim?: (name: string) => string,
): CheckedEntity<T2> | null {
const enlirItem = enlirItems[item.id];
if (!enlirItem) {
callback(`Item update: Unknown ${description} ID ${item.id}, ${item.name}, from ${source}`);
return null;
}
let updateRelease = false;
if (!enlirItem.gl) {
callback(`Item update: ${description} ID ${item.id}, ${item.name}, is now released in global`);
updateRelease = true;
}
const trimmedName = trim ? trim(item.name) : item.name.trimRight();
let updateName: string | undefined;
if (enlirItem.name !== trimmedName) {
callback(
`Item update: ${description} ID ${item.id}, ${item.name}, ` +
`is named ${enlirItem.name} in Enlir`,
);
updateName = item.name;
}
return {
enlirItem,
updateRelease,
updateName,
};
}
function removeRealm<T extends EnlirEntity>(enlirItem: T, name: string) {
if (!enlirItem.realm) {
return name;
}
const re = new RegExp(' \\(' + _.escapeRegExp(enlirItem.realm) + '\\)$');
return name.replace(re, '');
}
function showUpdateCommands<T extends EnlirEntity>(
checked: Array<CheckedEntity<T>>,
tabName: string,
callback: (message: string) => void,
) {
const releaseIds = checked.filter((i) => i.updateRelease).map((i) => i.enlirItem.id);
if (releaseIds.length) {
callback(`update-enlir.ts releaseInGl ${tabName} ${releaseIds.join(' ')}`);
}
const renames = checked
.filter((i) => i.updateName)
.map(
(i) =>
[i.enlirItem.id, '"' + removeRealm(i.enlirItem, i.updateName!) + '"'] as [number, string],
);
if (renames.length) {
callback(`update-enlir.ts rename ${tabName} ${_.flatten(renames).join(' ')}`);
}
}
function checkGlRelicDrawEquipment(
equipmentList: equipmentSchemas.Equipment[],
callback: (message: string) => void,
) {
const checkedRelics: Array<CheckedEntity<EnlirRelic>> = [];
const checkedSoulBreaks: Array<CheckedEntity<EnlirSoulBreak>> = [];
const checkedLegendMateria: Array<CheckedEntity<EnlirLegendMateria>> = [];
const trimRealm = (name: string) => {
return name
.replace(/ \(([IVX]+|Type-0|FFT|Beyond)\) *$/, '')
.replace(/ \([IVX]+-(.*?)\) *$/, ' ($1)');
};
for (const equipment of equipmentList) {
const { id, name, soul_strike, legend_materia } = equipment;
const relicName = `relic ${name} (ID ${id})`;
const compareRelic = compareGlEntity(
callback,
equipment,
enlir.relics,
'relic',
relicName,
trimRealm,
);
if (!compareRelic) {
continue;
}
checkedRelics.push(compareRelic);
if (soul_strike) {
const compareSoulBreak = compareGlEntity(
callback,
soul_strike,
enlir.soulBreaks,
'soul break',
relicName,
);
if (compareSoulBreak) {
checkedSoulBreaks.push(compareSoulBreak);
}
}
if (legend_materia) {
const compareLegendMateria = compareGlEntity(
callback,
legend_materia,
enlir.legendMateria,
'legend materia',
relicName,
trimRealm,
);
if (compareLegendMateria) {
checkedLegendMateria.push(compareLegendMateria);
}
}
}
showUpdateCommands(checkedRelics, 'relics', callback);
showUpdateCommands(checkedSoulBreaks, 'soulBreaks', callback);
showUpdateCommands(checkedLegendMateria, 'legendMateria', callback);
}
function getGachaShowEquipment(data: gachaSchemas.GachaShow, currentTime: number) {
const equipmentList: equipmentSchemas.Equipment[] = [];
for (const i of data.series_list) {
if (i.opened_at > currentTime / 1000) {
continue;
}
for (const { equipment } of i.banner_list) {
if (equipment) {
equipmentList.push(equipment);
}
}
}
return equipmentList;
}
function getGachaProbabilitiesEquipment(data: gachaSchemas.GachaProbability) {
return _.flatten(
_.values(data)
.filter((i) => i.equipments)
.map((i) => i.equipments),
);
}
function handleGlRelicDrawEquipment(
request: HandlerRequest,
getEquipment: () => equipmentSchemas.Equipment[],
) {
if (getRequestLang(request) !== LangType.Gl) {
return;
}
const results: string[] = [];
const callback = (message: string) => results.push(message);
checkGlRelicDrawEquipment(getEquipment(), callback);
results.sort().forEach((i) => logger.info(i));
}
const itemUpdatesHandler: Handler = {
dungeons(data: schemas.Dungeons) {
for (const d of data.dungeons) {
_.forEach(d.prizes, (prizeList) => {
for (const prize of prizeList) {
checkItem(prize);
}
});
for (const dropItem of d.battle_drop_items) {
checkItem(dropItem);
}
}
},
'party/list': checkAllPartyItems,
'party/list_other': checkAllPartyItems,
win_battle: handleWinBattle,
battle_win: handleWinBattle,
'battle/win': handleWinBattle,
'gacha/show'(data: gachaSchemas.GachaShow, store: Store<IState>, request: HandlerRequest) {
handleGlRelicDrawEquipment(request, () =>
getGachaShowEquipment(data, store.getState().timeState.currentTime),
);
},
'gacha/probability'(
data: gachaSchemas.GachaProbability,
store: Store<IState>,
request: HandlerRequest,
) {
handleGlRelicDrawEquipment(request, () => getGachaProbabilitiesEquipment(data));
},
};
export default itemUpdatesHandler;
| {
if (_.every(enlirData, (i) => i[item.id] == null)) {
showUnknownItem(item);
}
} | identifier_body |
itemUpdates.ts | /**
* @file
* Support for listening in on new items
*/
import { Store } from 'redux';
import * as _ from 'lodash';
import { LangType } from '../api/apiUrls';
import * as schemas from '../api/schemas';
import * as equipmentSchemas from '../api/schemas/equipment';
import * as gachaSchemas from '../api/schemas/gacha';
import { enlir } from '../data';
import { dressRecordsById } from '../data/dressRecords';
import { EnlirLegendMateria, EnlirRealm, EnlirRelic, EnlirSoulBreak } from '../data/enlir';
import { items, ItemType, ItemTypeLookup } from '../data/items';
import { IState } from '../reducers';
import { logger } from '../utils/logger';
import { getRequestLang, Handler, HandlerRequest } from './common';
/**
* Locally updated items. These are mainly tracked so that we can avoid
* displaying item updates more than once. As a hack, the drop item tracker
* also checks this so that it can handle items like Rush Tickets from the Fat
* Black Chocobo events; those are added too frequently for our manual items.ts
* updates to necessarily keep up.
*
* Note that these are kept in memory only; persisting it would add
* complications for knowing when to expire entries, when to use it versus the
* authoritative items.ts data, etc.
*
* To limit the hack, this should only be accessed from the proxy back-end, not
* from the Electron front-end.
*/
let localItems = _.clone(items);
export const localItemsById = _.zipObject(
items.map((i) => i.id),
localItems,
);
interface PrizeItem {
type_name: schemas.ItemTypeName;
name: string;
id: number;
image_path: string;
}
function showUnknownItem(item: PrizeItem) {
logger.warn(`Unknown ${item.type_name.toLowerCase()}: ${item.name} (ID ${item.id})`);
}
function addLocalItem({ name, type_name, id }: PrizeItem) {
const newItem = { name, type: type_name.toLowerCase() as ItemType, id };
localItems.push(newItem);
localItems = _.sortBy(localItems, 'id');
localItemsById[id] = newItem;
}
function showLocalItem(item: PrizeItem) {
const type = ItemTypeLookup[item.type_name] || item.type_name;
logger.info(
'New (previously unknown) item:\n' +
`{\n name: "${item.name}",\n type: ItemType.${type},\n id: ${item.id}\n},`,
);
}
function showLocalDressRecord({
dress_record_id,
name,
buddy_id,
}: {
dress_record_id: number;
name: string;
buddy_id: number;
}) {
logger.info(
'New (previously unknown) dress record:\n' +
`{\n name: "${name}",\n id: ${dress_record_id},\n characterId: ${buddy_id},\n},`,
);
}
function checkKnownEnlir(item: PrizeItem, ...enlirData: any) {
if (_.every(enlirData, (i) => i[item.id] == null)) {
showUnknownItem(item);
}
}
function checkKnownDressRecord(item: PrizeItem) {
if (dressRecordsById[item.id] == null) {
const match = item.image_path.match(/(\d+)\/\d+\/\d+\.png/);
const buddyId = match ? +match[1] : 0;
showLocalDressRecord({ dress_record_id: item.id, name: item.name, buddy_id: buddyId });
}
}
function checkKnownItems(item: PrizeItem) {
if (localItemsById[item.id] == null) {
if (item.type_name === 'MUSIC_TICKET') {
// Music tickets are regularly released and are easy to dynamically add,
// so we won't try tracking them ourselves.
return;
}
showUnknownItem(item);
addLocalItem(item);
showLocalItem(item);
}
}
function checkItem(item: PrizeItem) {
if (item.type_name === 'BEAST') {
checkKnownEnlir(item, enlir.magicites);
} else if (item.type_name === 'EQUIPMENT') {
checkKnownEnlir(item, enlir.relics, enlir.heroArtifacts);
} else if (item.type_name === 'ABILITY') {
checkKnownEnlir(item, enlir.abilities);
} else if (
item.type_name === 'BUDDY' ||
item.type_name === 'MEMORY_CRYSTAL' ||
item.type_name === 'RECORD_MATERIA'
) {
// FIXME: Need an internal-id-indexed version of characters, memory crystals, record materia
} else if (item.type_name === 'DRESS_RECORD') {
checkKnownDressRecord(item);
} else {
checkKnownItems(item);
}
}
function checkPartyItems(
partyItems: Array<{ name: string; id: number; image_path: string }>,
type: ItemType,
) {
for (const i of _.sortBy(partyItems, 'id')) {
checkKnownItems({
name: i.name,
id: i.id,
type_name: type.toUpperCase() as schemas.ItemTypeName,
image_path: i.image_path,
});
}
}
function checkPartyDressRecords(data: schemas.PartyList | schemas.PartyListOther) {
for (const i of _.sortBy(data.dress_records, 'dress_record_id')) {
if (dressRecordsById[i.dress_record_id] == null) {
showLocalDressRecord(i);
}
}
}
function checkAllPartyItems(data: schemas.PartyList | schemas.PartyListOther) {
// No need to check isRecordDungeonPartyList, as long as we're only
// looking for new / previously unknown items.
checkPartyItems(data.equipment_hyper_evolve_materials, ItemType.DarkMatter);
checkPartyItems(data.equipment_sp_materials, ItemType.UpgradeMaterial);
checkPartyItems(data.materials, ItemType.Orb);
checkPartyItems(data.grow_eggs, ItemType.GrowthEgg);
checkPartyItems(data.sphere_materials, ItemType.Mote);
checkPartyDressRecords(data);
}
function handleWinBattle(data: schemas.WinBattle) {
_.forEach(data.result.prize_master, (item) => {
checkItem({
id: +item.item_id,
type_name: item.type_name,
name: item.name,
image_path: item.image_path,
});
});
}
interface EnlirEntity {
id: number;
name: string;
gl: boolean;
realm: EnlirRealm | null;
}
interface CheckedEntity<T extends EnlirEntity> {
enlirItem: T;
updateRelease: boolean;
updateName: string | undefined;
}
function compareGlEntity<T1 extends { id: number; name: string }, T2 extends EnlirEntity>(
callback: (message: string) => void,
item: T1,
enlirItems: { [id: number]: T2 },
description: string,
source: string,
trim?: (name: string) => string,
): CheckedEntity<T2> | null {
const enlirItem = enlirItems[item.id];
if (!enlirItem) {
callback(`Item update: Unknown ${description} ID ${item.id}, ${item.name}, from ${source}`);
return null;
}
let updateRelease = false;
if (!enlirItem.gl) {
callback(`Item update: ${description} ID ${item.id}, ${item.name}, is now released in global`);
updateRelease = true;
}
const trimmedName = trim ? trim(item.name) : item.name.trimRight();
let updateName: string | undefined;
if (enlirItem.name !== trimmedName) {
callback(
`Item update: ${description} ID ${item.id}, ${item.name}, ` +
`is named ${enlirItem.name} in Enlir`,
);
updateName = item.name;
}
return {
enlirItem,
updateRelease,
updateName,
};
}
function removeRealm<T extends EnlirEntity>(enlirItem: T, name: string) {
if (!enlirItem.realm) {
return name;
}
const re = new RegExp(' \\(' + _.escapeRegExp(enlirItem.realm) + '\\)$');
return name.replace(re, '');
}
function showUpdateCommands<T extends EnlirEntity>(
checked: Array<CheckedEntity<T>>,
tabName: string,
callback: (message: string) => void,
) {
const releaseIds = checked.filter((i) => i.updateRelease).map((i) => i.enlirItem.id);
if (releaseIds.length) {
callback(`update-enlir.ts releaseInGl ${tabName} ${releaseIds.join(' ')}`);
}
const renames = checked
.filter((i) => i.updateName)
.map(
(i) =>
[i.enlirItem.id, '"' + removeRealm(i.enlirItem, i.updateName!) + '"'] as [number, string],
);
if (renames.length) {
callback(`update-enlir.ts rename ${tabName} ${_.flatten(renames).join(' ')}`);
}
}
function checkGlRelicDrawEquipment(
equipmentList: equipmentSchemas.Equipment[],
callback: (message: string) => void,
) {
const checkedRelics: Array<CheckedEntity<EnlirRelic>> = [];
const checkedSoulBreaks: Array<CheckedEntity<EnlirSoulBreak>> = [];
const checkedLegendMateria: Array<CheckedEntity<EnlirLegendMateria>> = [];
const trimRealm = (name: string) => {
return name
.replace(/ \(([IVX]+|Type-0|FFT|Beyond)\) *$/, '')
.replace(/ \([IVX]+-(.*?)\) *$/, ' ($1)');
};
for (const equipment of equipmentList) {
const { id, name, soul_strike, legend_materia } = equipment;
const relicName = `relic ${name} (ID ${id})`;
const compareRelic = compareGlEntity(
callback,
equipment,
enlir.relics,
'relic',
relicName,
trimRealm,
);
if (!compareRelic) {
continue;
}
checkedRelics.push(compareRelic);
if (soul_strike) {
const compareSoulBreak = compareGlEntity(
callback,
soul_strike,
enlir.soulBreaks,
'soul break',
relicName,
);
if (compareSoulBreak) {
checkedSoulBreaks.push(compareSoulBreak);
}
}
if (legend_materia) {
const compareLegendMateria = compareGlEntity(
callback,
legend_materia,
enlir.legendMateria,
'legend materia',
relicName,
trimRealm,
);
if (compareLegendMateria) {
checkedLegendMateria.push(compareLegendMateria);
}
}
}
showUpdateCommands(checkedRelics, 'relics', callback);
showUpdateCommands(checkedSoulBreaks, 'soulBreaks', callback); | for (const i of data.series_list) {
if (i.opened_at > currentTime / 1000) {
continue;
}
for (const { equipment } of i.banner_list) {
if (equipment) {
equipmentList.push(equipment);
}
}
}
return equipmentList;
}
function getGachaProbabilitiesEquipment(data: gachaSchemas.GachaProbability) {
return _.flatten(
_.values(data)
.filter((i) => i.equipments)
.map((i) => i.equipments),
);
}
function handleGlRelicDrawEquipment(
request: HandlerRequest,
getEquipment: () => equipmentSchemas.Equipment[],
) {
if (getRequestLang(request) !== LangType.Gl) {
return;
}
const results: string[] = [];
const callback = (message: string) => results.push(message);
checkGlRelicDrawEquipment(getEquipment(), callback);
results.sort().forEach((i) => logger.info(i));
}
const itemUpdatesHandler: Handler = {
dungeons(data: schemas.Dungeons) {
for (const d of data.dungeons) {
_.forEach(d.prizes, (prizeList) => {
for (const prize of prizeList) {
checkItem(prize);
}
});
for (const dropItem of d.battle_drop_items) {
checkItem(dropItem);
}
}
},
'party/list': checkAllPartyItems,
'party/list_other': checkAllPartyItems,
win_battle: handleWinBattle,
battle_win: handleWinBattle,
'battle/win': handleWinBattle,
'gacha/show'(data: gachaSchemas.GachaShow, store: Store<IState>, request: HandlerRequest) {
handleGlRelicDrawEquipment(request, () =>
getGachaShowEquipment(data, store.getState().timeState.currentTime),
);
},
'gacha/probability'(
data: gachaSchemas.GachaProbability,
store: Store<IState>,
request: HandlerRequest,
) {
handleGlRelicDrawEquipment(request, () => getGachaProbabilitiesEquipment(data));
},
};
export default itemUpdatesHandler; | showUpdateCommands(checkedLegendMateria, 'legendMateria', callback);
}
function getGachaShowEquipment(data: gachaSchemas.GachaShow, currentTime: number) {
const equipmentList: equipmentSchemas.Equipment[] = []; | random_line_split |
goog-varint.ts | // Copyright 2008 Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Code generated by the Protocol Buffer compiler is owned by the owner
// of the input file used when generating it. This code is not
// standalone and requires a support library to be linked with it. This
// support library is itself covered by the above license.
/**
* Read a 64 bit varint as two JS numbers.
*
* Returns tuple:
* [0]: low bits
* [0]: high bits
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175
*/
export function varint64read(this: ReaderLike): [number, number] {
let lowBits = 0;
let highBits = 0;
for (let shift = 0; shift < 28; shift += 7) {
let b = this.buf[this.pos++];
lowBits |= (b & 0x7F) << shift;
if ((b & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
}
let middleByte = this.buf[this.pos++];
// last four bits of the first 32 bit number
lowBits |= (middleByte & 0x0F) << 28;
// 3 upper bits are part of the next 32 bit number
highBits = (middleByte & 0x70) >> 4;
if ((middleByte & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
for (let shift = 3; shift <= 31; shift += 7) {
let b = this.buf[this.pos++];
highBits |= (b & 0x7F) << shift;
if ((b & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
}
throw new Error('invalid varint');
}
/**
* Write a 64 bit varint, given as two JS numbers, to the given bytes array.
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344
*/
export function varint64write(lo: number, hi: number, bytes: number[]): void {
for (let i = 0; i < 28; i = i + 7) {
const shift = lo >>> i;
const hasNext = !((shift >>> 7) == 0 && hi == 0);
const byte = (hasNext ? shift | 0x80 : shift) & 0xFF;
bytes.push(byte);
if (!hasNext) {
return;
}
}
const splitBits = ((lo >>> 28) & 0x0F) | ((hi & 0x07) << 4);
const hasMoreBits = !((hi >> 3) == 0);
bytes.push(
(hasMoreBits ? splitBits | 0x80 : splitBits) & 0xFF);
if (!hasMoreBits) {
return;
}
for (let i = 3; i < 31; i = i + 7) {
const shift = hi >>> i;
const hasNext = !((shift >>> 7) == 0);
const byte = (hasNext ? shift | 0x80 : shift) & 0xFF;
bytes.push(byte);
if (!hasNext) {
return;
}
}
bytes.push((hi >>> 31) & 0x01);
}
// constants for binary math
const TWO_PWR_32_DBL = (1 << 16) * (1 << 16);
/**
* Parse decimal string of 64 bit integer value as two JS numbers.
*
* Returns tuple:
* [0]: minus sign?
* [1]: low bits
* [2]: high bits
*
* Copyright 2008 Google Inc.
*/
export function int64fromString(dec: string): [boolean, number, number] {
// Check for minus sign.
let minus = dec[0] == '-';
if (minus)
dec = dec.slice(1);
// Work 6 decimal digits at a time, acting like we're converting base 1e6
// digits to binary. This is safe to do with floating point math because
// Number.isSafeInteger(ALL_32_BITS * 1e6) == true.
const base = 1e6;
let lowBits = 0;
let highBits = 0;
function add1e6digit(begin: number, end?: number) {
// Note: Number('') is 0.
const digit1e6 = Number(dec.slice(begin, end));
highBits *= base;
lowBits = lowBits * base + digit1e6;
// Carry bits from lowBits to
if (lowBits >= TWO_PWR_32_DBL) {
highBits = highBits + ((lowBits / TWO_PWR_32_DBL) | 0);
lowBits = lowBits % TWO_PWR_32_DBL;
}
}
add1e6digit(-24, -18);
add1e6digit(-18, -12);
add1e6digit(-12, -6);
add1e6digit(-6);
return [minus, lowBits, highBits];
}
/**
* Format 64 bit integer value (as two JS numbers) to decimal string.
*
* Copyright 2008 Google Inc.
*/
export function int64toString(bitsLow: number, bitsHigh: number): string {
// Skip the expensive conversion if the number is small enough to use the
// built-in conversions.
if (bitsHigh <= 0x1FFFFF) {
return '' + (TWO_PWR_32_DBL * bitsHigh + bitsLow);
}
// What this code is doing is essentially converting the input number from
// base-2 to base-1e7, which allows us to represent the 64-bit range with
// only 3 (very large) digits. Those digits are then trivial to convert to
// a base-10 string.
// The magic numbers used here are -
// 2^24 = 16777216 = (1,6777216) in base-1e7.
// 2^48 = 281474976710656 = (2,8147497,6710656) in base-1e7.
// Split 32:32 representation into 16:24:24 representation so our
// intermediate digits don't overflow.
let low = bitsLow & 0xFFFFFF;
let mid = (((bitsLow >>> 24) | (bitsHigh << 8)) >>> 0) & 0xFFFFFF;
let high = (bitsHigh >> 16) & 0xFFFF;
// Assemble our three base-1e7 digits, ignoring carries. The maximum
// value in a digit at this step is representable as a 48-bit integer, which
// can be stored in a 64-bit floating point number.
let digitA = low + (mid * 6777216) + (high * 6710656);
let digitB = mid + (high * 8147497);
let digitC = (high * 2);
// Apply carries from A to B and from B to C.
let base = 10000000;
if (digitA >= base) {
digitB += Math.floor(digitA / base);
digitA %= base;
}
if (digitB >= base) {
digitC += Math.floor(digitB / base);
digitB %= base;
}
// Convert base-1e7 digits to base-10, with optional leading zeroes.
function decimalFrom1e7(digit1e7: number, needLeadingZeros: number) {
let partial = digit1e7 ? String(digit1e7) : '';
if (needLeadingZeros) {
return '0000000'.slice(partial.length) + partial;
}
return partial;
}
return decimalFrom1e7(digitC, /*needLeadingZeros=*/ 0) +
decimalFrom1e7(digitB, /*needLeadingZeros=*/ digitC) +
// If the final 1e7 digit didn't need leading zeros, we would have
// returned via the trivial code path at the top.
decimalFrom1e7(digitA, /*needLeadingZeros=*/ 1);
}
/**
* Write a 32 bit varint, signed or unsigned. Same as `varint64write(0, value, bytes)`
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/1b18833f4f2a2f681f4e4a25cdf3b0a43115ec26/js/binary/encoder.js#L144
*/
export function varint32write(value: number, bytes: number[]): void |
/**
* Read an unsigned 32 bit varint.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220
*/
export function varint32read(this: ReaderLike): number {
let b = this.buf[this.pos++];
let result = b & 0x7F;
if ((b & 0x80) == 0) {
this.assertBounds();
return result;
}
b = this.buf[this.pos++];
result |= (b & 0x7F) << 7;
if ((b & 0x80) == 0){
this.assertBounds();
return result;
}
b = this.buf[this.pos++];
result |= (b & 0x7F) << 14;
if ((b & 0x80) == 0){
this.assertBounds();
return result;
}
b = this.buf[this.pos++];
result |= (b & 0x7F) << 21;
if ((b & 0x80) == 0){
this.assertBounds();
return result;
}
// Extract only last 4 bits
b = this.buf[this.pos++];
result |= (b & 0x0F) << 28;
for (let readBytes = 5; ((b & 0x80) !== 0) && readBytes < 10; readBytes++)
b = this.buf[this.pos++];
if ((b & 0x80) != 0)
throw new Error('invalid varint');
this.assertBounds();
// Result can have 32 bits, convert it to unsigned
return result >>> 0;
}
type ReaderLike = {
buf: Uint8Array;
pos: number;
len: number;
assertBounds(): void;
}
| {
if (value >= 0) {
// write value as varint 32
while (value > 0x7f) {
bytes.push((value & 0x7f) | 0x80);
value = value >>> 7;
}
bytes.push(value);
} else {
for (let i = 0; i < 9; i++) {
bytes.push(value & 127 | 128);
value = value >> 7;
}
bytes.push(1);
}
} | identifier_body |
goog-varint.ts | // Copyright 2008 Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Code generated by the Protocol Buffer compiler is owned by the owner
// of the input file used when generating it. This code is not
// standalone and requires a support library to be linked with it. This
// support library is itself covered by the above license.
/**
* Read a 64 bit varint as two JS numbers.
*
* Returns tuple:
* [0]: low bits
* [0]: high bits
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175
*/
export function varint64read(this: ReaderLike): [number, number] {
let lowBits = 0;
let highBits = 0;
for (let shift = 0; shift < 28; shift += 7) {
let b = this.buf[this.pos++];
lowBits |= (b & 0x7F) << shift;
if ((b & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
}
let middleByte = this.buf[this.pos++];
// last four bits of the first 32 bit number
lowBits |= (middleByte & 0x0F) << 28;
// 3 upper bits are part of the next 32 bit number
highBits = (middleByte & 0x70) >> 4;
if ((middleByte & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
for (let shift = 3; shift <= 31; shift += 7) {
let b = this.buf[this.pos++];
highBits |= (b & 0x7F) << shift;
if ((b & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
}
throw new Error('invalid varint');
}
/**
* Write a 64 bit varint, given as two JS numbers, to the given bytes array.
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344
*/
export function varint64write(lo: number, hi: number, bytes: number[]): void {
for (let i = 0; i < 28; i = i + 7) {
const shift = lo >>> i;
const hasNext = !((shift >>> 7) == 0 && hi == 0);
const byte = (hasNext ? shift | 0x80 : shift) & 0xFF;
bytes.push(byte);
if (!hasNext) {
return;
}
}
const splitBits = ((lo >>> 28) & 0x0F) | ((hi & 0x07) << 4);
const hasMoreBits = !((hi >> 3) == 0);
bytes.push(
(hasMoreBits ? splitBits | 0x80 : splitBits) & 0xFF);
if (!hasMoreBits) {
return;
}
for (let i = 3; i < 31; i = i + 7) {
const shift = hi >>> i;
const hasNext = !((shift >>> 7) == 0);
const byte = (hasNext ? shift | 0x80 : shift) & 0xFF;
bytes.push(byte);
if (!hasNext) {
return;
}
}
| // constants for binary math
const TWO_PWR_32_DBL = (1 << 16) * (1 << 16);
/**
* Parse decimal string of 64 bit integer value as two JS numbers.
*
* Returns tuple:
* [0]: minus sign?
* [1]: low bits
* [2]: high bits
*
* Copyright 2008 Google Inc.
*/
export function int64fromString(dec: string): [boolean, number, number] {
// Check for minus sign.
let minus = dec[0] == '-';
if (minus)
dec = dec.slice(1);
// Work 6 decimal digits at a time, acting like we're converting base 1e6
// digits to binary. This is safe to do with floating point math because
// Number.isSafeInteger(ALL_32_BITS * 1e6) == true.
const base = 1e6;
let lowBits = 0;
let highBits = 0;
function add1e6digit(begin: number, end?: number) {
// Note: Number('') is 0.
const digit1e6 = Number(dec.slice(begin, end));
highBits *= base;
lowBits = lowBits * base + digit1e6;
// Carry bits from lowBits to
if (lowBits >= TWO_PWR_32_DBL) {
highBits = highBits + ((lowBits / TWO_PWR_32_DBL) | 0);
lowBits = lowBits % TWO_PWR_32_DBL;
}
}
add1e6digit(-24, -18);
add1e6digit(-18, -12);
add1e6digit(-12, -6);
add1e6digit(-6);
return [minus, lowBits, highBits];
}
/**
* Format 64 bit integer value (as two JS numbers) to decimal string.
*
* Copyright 2008 Google Inc.
*/
export function int64toString(bitsLow: number, bitsHigh: number): string {
// Skip the expensive conversion if the number is small enough to use the
// built-in conversions.
if (bitsHigh <= 0x1FFFFF) {
return '' + (TWO_PWR_32_DBL * bitsHigh + bitsLow);
}
// What this code is doing is essentially converting the input number from
// base-2 to base-1e7, which allows us to represent the 64-bit range with
// only 3 (very large) digits. Those digits are then trivial to convert to
// a base-10 string.
// The magic numbers used here are -
// 2^24 = 16777216 = (1,6777216) in base-1e7.
// 2^48 = 281474976710656 = (2,8147497,6710656) in base-1e7.
// Split 32:32 representation into 16:24:24 representation so our
// intermediate digits don't overflow.
let low = bitsLow & 0xFFFFFF;
let mid = (((bitsLow >>> 24) | (bitsHigh << 8)) >>> 0) & 0xFFFFFF;
let high = (bitsHigh >> 16) & 0xFFFF;
// Assemble our three base-1e7 digits, ignoring carries. The maximum
// value in a digit at this step is representable as a 48-bit integer, which
// can be stored in a 64-bit floating point number.
let digitA = low + (mid * 6777216) + (high * 6710656);
let digitB = mid + (high * 8147497);
let digitC = (high * 2);
// Apply carries from A to B and from B to C.
let base = 10000000;
if (digitA >= base) {
digitB += Math.floor(digitA / base);
digitA %= base;
}
if (digitB >= base) {
digitC += Math.floor(digitB / base);
digitB %= base;
}
// Convert base-1e7 digits to base-10, with optional leading zeroes.
function decimalFrom1e7(digit1e7: number, needLeadingZeros: number) {
let partial = digit1e7 ? String(digit1e7) : '';
if (needLeadingZeros) {
return '0000000'.slice(partial.length) + partial;
}
return partial;
}
return decimalFrom1e7(digitC, /*needLeadingZeros=*/ 0) +
decimalFrom1e7(digitB, /*needLeadingZeros=*/ digitC) +
// If the final 1e7 digit didn't need leading zeros, we would have
// returned via the trivial code path at the top.
decimalFrom1e7(digitA, /*needLeadingZeros=*/ 1);
}
/**
* Write a 32 bit varint, signed or unsigned. Same as `varint64write(0, value, bytes)`
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/1b18833f4f2a2f681f4e4a25cdf3b0a43115ec26/js/binary/encoder.js#L144
*/
export function varint32write(value: number, bytes: number[]): void {
if (value >= 0) {
// write value as varint 32
while (value > 0x7f) {
bytes.push((value & 0x7f) | 0x80);
value = value >>> 7;
}
bytes.push(value);
} else {
for (let i = 0; i < 9; i++) {
bytes.push(value & 127 | 128);
value = value >> 7;
}
bytes.push(1);
}
}
/**
* Read an unsigned 32 bit varint.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220
*/
export function varint32read(this: ReaderLike): number {
let b = this.buf[this.pos++];
let result = b & 0x7F;
if ((b & 0x80) == 0) {
this.assertBounds();
return result;
}
b = this.buf[this.pos++];
result |= (b & 0x7F) << 7;
if ((b & 0x80) == 0){
this.assertBounds();
return result;
}
b = this.buf[this.pos++];
result |= (b & 0x7F) << 14;
if ((b & 0x80) == 0){
this.assertBounds();
return result;
}
b = this.buf[this.pos++];
result |= (b & 0x7F) << 21;
if ((b & 0x80) == 0){
this.assertBounds();
return result;
}
// Extract only last 4 bits
b = this.buf[this.pos++];
result |= (b & 0x0F) << 28;
for (let readBytes = 5; ((b & 0x80) !== 0) && readBytes < 10; readBytes++)
b = this.buf[this.pos++];
if ((b & 0x80) != 0)
throw new Error('invalid varint');
this.assertBounds();
// Result can have 32 bits, convert it to unsigned
return result >>> 0;
}
type ReaderLike = {
buf: Uint8Array;
pos: number;
len: number;
assertBounds(): void;
} | bytes.push((hi >>> 31) & 0x01);
}
| random_line_split |
goog-varint.ts | // Copyright 2008 Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Code generated by the Protocol Buffer compiler is owned by the owner
// of the input file used when generating it. This code is not
// standalone and requires a support library to be linked with it. This
// support library is itself covered by the above license.
/**
* Read a 64 bit varint as two JS numbers.
*
* Returns tuple:
* [0]: low bits
* [0]: high bits
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175
*/
export function varint64read(this: ReaderLike): [number, number] {
let lowBits = 0;
let highBits = 0;
for (let shift = 0; shift < 28; shift += 7) {
let b = this.buf[this.pos++];
lowBits |= (b & 0x7F) << shift;
if ((b & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
}
let middleByte = this.buf[this.pos++];
// last four bits of the first 32 bit number
lowBits |= (middleByte & 0x0F) << 28;
// 3 upper bits are part of the next 32 bit number
highBits = (middleByte & 0x70) >> 4;
if ((middleByte & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
for (let shift = 3; shift <= 31; shift += 7) {
let b = this.buf[this.pos++];
highBits |= (b & 0x7F) << shift;
if ((b & 0x80) == 0) {
this.assertBounds();
return [lowBits, highBits];
}
}
throw new Error('invalid varint');
}
/**
* Write a 64 bit varint, given as two JS numbers, to the given bytes array.
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344
*/
export function | (lo: number, hi: number, bytes: number[]): void {
for (let i = 0; i < 28; i = i + 7) {
const shift = lo >>> i;
const hasNext = !((shift >>> 7) == 0 && hi == 0);
const byte = (hasNext ? shift | 0x80 : shift) & 0xFF;
bytes.push(byte);
if (!hasNext) {
return;
}
}
const splitBits = ((lo >>> 28) & 0x0F) | ((hi & 0x07) << 4);
const hasMoreBits = !((hi >> 3) == 0);
bytes.push(
(hasMoreBits ? splitBits | 0x80 : splitBits) & 0xFF);
if (!hasMoreBits) {
return;
}
for (let i = 3; i < 31; i = i + 7) {
const shift = hi >>> i;
const hasNext = !((shift >>> 7) == 0);
const byte = (hasNext ? shift | 0x80 : shift) & 0xFF;
bytes.push(byte);
if (!hasNext) {
return;
}
}
bytes.push((hi >>> 31) & 0x01);
}
// constants for binary math
const TWO_PWR_32_DBL = (1 << 16) * (1 << 16);
/**
* Parse decimal string of 64 bit integer value as two JS numbers.
*
* Returns tuple:
* [0]: minus sign?
* [1]: low bits
* [2]: high bits
*
* Copyright 2008 Google Inc.
*/
export function int64fromString(dec: string): [boolean, number, number] {
// Check for minus sign.
let minus = dec[0] == '-';
if (minus)
dec = dec.slice(1);
// Work 6 decimal digits at a time, acting like we're converting base 1e6
// digits to binary. This is safe to do with floating point math because
// Number.isSafeInteger(ALL_32_BITS * 1e6) == true.
const base = 1e6;
let lowBits = 0;
let highBits = 0;
function add1e6digit(begin: number, end?: number) {
// Note: Number('') is 0.
const digit1e6 = Number(dec.slice(begin, end));
highBits *= base;
lowBits = lowBits * base + digit1e6;
// Carry bits from lowBits to
if (lowBits >= TWO_PWR_32_DBL) {
highBits = highBits + ((lowBits / TWO_PWR_32_DBL) | 0);
lowBits = lowBits % TWO_PWR_32_DBL;
}
}
add1e6digit(-24, -18);
add1e6digit(-18, -12);
add1e6digit(-12, -6);
add1e6digit(-6);
return [minus, lowBits, highBits];
}
/**
 * Format 64 bit integer value (as two JS numbers) to decimal string.
 *
 * Copyright 2008 Google Inc.
 */
export function int64toString(bitsLow: number, bitsHigh: number): string {
    // Skip the expensive conversion if the number is small enough to use the
    // built-in conversions. 0x1FFFFF * 2^32 + ALL_32_BITS is still below
    // 2^53, so plain double arithmetic is exact on this path.
    if (bitsHigh <= 0x1FFFFF) {
        return '' + (TWO_PWR_32_DBL * bitsHigh + bitsLow);
    }
    // What this code is doing is essentially converting the input number from
    // base-2 to base-1e7, which allows us to represent the 64-bit range with
    // only 3 (very large) digits. Those digits are then trivial to convert to
    // a base-10 string.
    // The magic numbers used here are -
    // 2^24 = 16777216 = (1,6777216) in base-1e7.
    // 2^48 = 281474976710656 = (2,8147497,6710656) in base-1e7.
    // Split 32:32 representation into 16:24:24 representation so our
    // intermediate digits don't overflow.
    let low = bitsLow & 0xFFFFFF;
    let mid = (((bitsLow >>> 24) | (bitsHigh << 8)) >>> 0) & 0xFFFFFF;
    let high = (bitsHigh >> 16) & 0xFFFF;
    // Assemble our three base-1e7 digits, ignoring carries. The maximum
    // value in a digit at this step is representable as a 48-bit integer, which
    // can be stored in a 64-bit floating point number.
    let digitA = low + (mid * 6777216) + (high * 6710656);
    let digitB = mid + (high * 8147497);
    let digitC = (high * 2);
    // Apply carries from A to B and from B to C.
    let base = 10000000;
    if (digitA >= base) {
        digitB += Math.floor(digitA / base);
        digitA %= base;
    }
    if (digitB >= base) {
        digitC += Math.floor(digitB / base);
        digitB %= base;
    }
    // Convert base-1e7 digits to base-10, with optional leading zeroes.
    function decimalFrom1e7(digit1e7: number, needLeadingZeros: number) {
        let partial = digit1e7 ? String(digit1e7) : '';
        if (needLeadingZeros) {
            return '0000000'.slice(partial.length) + partial;
        }
        return partial;
    }
    return decimalFrom1e7(digitC, /*needLeadingZeros=*/ 0) +
        decimalFrom1e7(digitB, /*needLeadingZeros=*/ digitC) +
        // If the final 1e7 digit didn't need leading zeros, we would have
        // returned via the trivial code path at the top.
        decimalFrom1e7(digitA, /*needLeadingZeros=*/ 1);
}
/**
 * Write a 32 bit varint, signed or unsigned. Same as `varint64write(0, value, bytes)`
 *
 * Copyright 2008 Google Inc. All rights reserved.
 *
 * See https://github.com/protocolbuffers/protobuf/blob/1b18833f4f2a2f681f4e4a25cdf3b0a43115ec26/js/binary/encoder.js#L144
 */
export function varint32write(value: number, bytes: number[]): void {
    if (value < 0) {
        // Negative values sign-extend to 64 bits, which always produces the
        // maximum 10-byte varint: nine continuation bytes plus a final 1.
        for (let count = 0; count < 9; count++) {
            bytes.push((value & 0x7f) | 0x80);
            value >>= 7;
        }
        bytes.push(1);
        return;
    }
    // Non-negative: emit 7 bits per byte, continuation bit set while more remain.
    while (value > 0x7f) {
        bytes.push((value & 0x7f) | 0x80);
        value >>>= 7;
    }
    bytes.push(value);
}
/**
 * Read an unsigned 32 bit varint.
 *
 * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220
 */
export function varint32read(this: ReaderLike): number {
    // Byte 1: bits 0-6.
    let b = this.buf[this.pos++];
    let result = b & 0x7F;
    if ((b & 0x80) == 0) {
        this.assertBounds();
        return result;
    }
    // Byte 2: bits 7-13.
    b = this.buf[this.pos++];
    result |= (b & 0x7F) << 7;
    if ((b & 0x80) == 0){
        this.assertBounds();
        return result;
    }
    // Byte 3: bits 14-20.
    b = this.buf[this.pos++];
    result |= (b & 0x7F) << 14;
    if ((b & 0x80) == 0){
        this.assertBounds();
        return result;
    }
    // Byte 4: bits 21-27.
    b = this.buf[this.pos++];
    result |= (b & 0x7F) << 21;
    if ((b & 0x80) == 0){
        this.assertBounds();
        return result;
    }
    // Extract only last 4 bits
    b = this.buf[this.pos++];
    result |= (b & 0x0F) << 28;
    // Skip any remaining continuation bytes of a 64 bit varint (10 bytes max).
    for (let readBytes = 5; ((b & 0x80) !== 0) && readBytes < 10; readBytes++)
        b = this.buf[this.pos++];
    if ((b & 0x80) != 0)
        throw new Error('invalid varint');
    this.assertBounds();
    // Result can have 32 bits, convert it to unsigned
    return result >>> 0;
}
// Minimal reader state required by the varint read functions: the byte
// buffer, a cursor, the logical length, and a bounds assertion invoked
// after each successful read.
type ReaderLike = {
    buf: Uint8Array;
    pos: number;
    len: number;
    assertBounds(): void;
}
| varint64write | identifier_name |
goog-varint.ts | // Copyright 2008 Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Code generated by the Protocol Buffer compiler is owned by the owner
// of the input file used when generating it. This code is not
// standalone and requires a support library to be linked with it. This
// support library is itself covered by the above license.
/**
 * Read a 64 bit varint as two JS numbers.
 *
 * Returns tuple:
 * [0]: low bits
 * [1]: high bits
 *
 * Copyright 2008 Google Inc. All rights reserved.
 *
 * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175
 */
export function varint64read(this: ReaderLike): [number, number] {
    let lowBits = 0;
    let highBits = 0;
    // The first four bytes contribute bits 0-27 of the low word.
    for (let shift = 0; shift < 28; shift += 7) {
        let b = this.buf[this.pos++];
        lowBits |= (b & 0x7F) << shift;
        if ((b & 0x80) == 0) {
            this.assertBounds();
            return [lowBits, highBits];
        }
    }
    // Byte 5 straddles the two words.
    let middleByte = this.buf[this.pos++];
    // last four bits of the first 32 bit number
    lowBits |= (middleByte & 0x0F) << 28;
    // 3 upper bits are part of the next 32 bit number
    highBits = (middleByte & 0x70) >> 4;
    if ((middleByte & 0x80) == 0) {
        this.assertBounds();
        return [lowBits, highBits];
    }
    // The remaining bytes contribute bits 3-31 of the high word.
    for (let shift = 3; shift <= 31; shift += 7) {
        let b = this.buf[this.pos++];
        highBits |= (b & 0x7F) << shift;
        if ((b & 0x80) == 0) {
            this.assertBounds();
            return [lowBits, highBits];
        }
    }
    throw new Error('invalid varint');
}
/**
* Write a 64 bit varint, given as two JS numbers, to the given bytes array.
*
* Copyright 2008 Google Inc. All rights reserved.
*
* See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344
*/
export function varint64write(lo: number, hi: number, bytes: number[]): void {
for (let i = 0; i < 28; i = i + 7) {
const shift = lo >>> i;
const hasNext = !((shift >>> 7) == 0 && hi == 0);
const byte = (hasNext ? shift | 0x80 : shift) & 0xFF;
bytes.push(byte);
if (!hasNext) {
return;
}
}
const splitBits = ((lo >>> 28) & 0x0F) | ((hi & 0x07) << 4);
const hasMoreBits = !((hi >> 3) == 0);
bytes.push(
(hasMoreBits ? splitBits | 0x80 : splitBits) & 0xFF);
if (!hasMoreBits) {
return;
}
for (let i = 3; i < 31; i = i + 7) {
const shift = hi >>> i;
const hasNext = !((shift >>> 7) == 0);
const byte = (hasNext ? shift | 0x80 : shift) & 0xFF;
bytes.push(byte);
if (!hasNext) |
}
bytes.push((hi >>> 31) & 0x01);
}
// constants for binary math
// 2^32 as a double; exact because it is far below 2^53.
const TWO_PWR_32_DBL = (1 << 16) * (1 << 16);
/**
 * Parse decimal string of 64 bit integer value as two JS numbers.
 *
 * Returns tuple:
 * [0]: minus sign?
 * [1]: low bits
 * [2]: high bits
 *
 * Copyright 2008 Google Inc.
 */
export function int64fromString(dec: string): [boolean, number, number] {
    // Strip a leading minus sign, remembering it for the result tuple.
    const minus = dec[0] == '-';
    if (minus) {
        dec = dec.slice(1);
    }
    // Consume the decimal digits in groups of six, i.e. treat the string as a
    // base-1e6 number. This is safe in floating point because
    // Number.isSafeInteger(ALL_32_BITS * 1e6) == true.
    const base = 1e6;
    let lowBits = 0;
    let highBits = 0;
    // Shift the accumulator up by one base-1e6 digit, then add the group
    // found at dec[begin:end]. Note: Number('') is 0, so short inputs work.
    const consumeGroup = (begin: number, end?: number) => {
        const group = Number(dec.slice(begin, end));
        highBits *= base;
        lowBits = lowBits * base + group;
        // Move any overflow out of the low 32 bits into the high word.
        if (lowBits >= TWO_PWR_32_DBL) {
            highBits = highBits + ((lowBits / TWO_PWR_32_DBL) | 0);
            lowBits = lowBits % TWO_PWR_32_DBL;
        }
    };
    consumeGroup(-24, -18);
    consumeGroup(-18, -12);
    consumeGroup(-12, -6);
    consumeGroup(-6);
    return [minus, lowBits, highBits];
}
/**
 * Format 64 bit integer value (as two JS numbers) to decimal string.
 *
 * Copyright 2008 Google Inc.
 */
export function int64toString(bitsLow: number, bitsHigh: number): string {
    // Skip the expensive conversion if the number is small enough to use the
    // built-in conversions. 0x1FFFFF * 2^32 + ALL_32_BITS is still below
    // 2^53, so plain double arithmetic is exact on this path.
    if (bitsHigh <= 0x1FFFFF) {
        return '' + (TWO_PWR_32_DBL * bitsHigh + bitsLow);
    }
    // What this code is doing is essentially converting the input number from
    // base-2 to base-1e7, which allows us to represent the 64-bit range with
    // only 3 (very large) digits. Those digits are then trivial to convert to
    // a base-10 string.
    // The magic numbers used here are -
    // 2^24 = 16777216 = (1,6777216) in base-1e7.
    // 2^48 = 281474976710656 = (2,8147497,6710656) in base-1e7.
    // Split 32:32 representation into 16:24:24 representation so our
    // intermediate digits don't overflow.
    let low = bitsLow & 0xFFFFFF;
    let mid = (((bitsLow >>> 24) | (bitsHigh << 8)) >>> 0) & 0xFFFFFF;
    let high = (bitsHigh >> 16) & 0xFFFF;
    // Assemble our three base-1e7 digits, ignoring carries. The maximum
    // value in a digit at this step is representable as a 48-bit integer, which
    // can be stored in a 64-bit floating point number.
    let digitA = low + (mid * 6777216) + (high * 6710656);
    let digitB = mid + (high * 8147497);
    let digitC = (high * 2);
    // Apply carries from A to B and from B to C.
    let base = 10000000;
    if (digitA >= base) {
        digitB += Math.floor(digitA / base);
        digitA %= base;
    }
    if (digitB >= base) {
        digitC += Math.floor(digitB / base);
        digitB %= base;
    }
    // Convert base-1e7 digits to base-10, with optional leading zeroes.
    function decimalFrom1e7(digit1e7: number, needLeadingZeros: number) {
        let partial = digit1e7 ? String(digit1e7) : '';
        if (needLeadingZeros) {
            return '0000000'.slice(partial.length) + partial;
        }
        return partial;
    }
    return decimalFrom1e7(digitC, /*needLeadingZeros=*/ 0) +
        decimalFrom1e7(digitB, /*needLeadingZeros=*/ digitC) +
        // If the final 1e7 digit didn't need leading zeros, we would have
        // returned via the trivial code path at the top.
        decimalFrom1e7(digitA, /*needLeadingZeros=*/ 1);
}
/**
 * Write a 32 bit varint, signed or unsigned. Same as `varint64write(0, value, bytes)`
 *
 * Copyright 2008 Google Inc. All rights reserved.
 *
 * See https://github.com/protocolbuffers/protobuf/blob/1b18833f4f2a2f681f4e4a25cdf3b0a43115ec26/js/binary/encoder.js#L144
 */
export function varint32write(value: number, bytes: number[]): void {
    if (value < 0) {
        // Negative values sign-extend to 64 bits: always ten bytes, the
        // last of which is 1.
        for (let count = 0; count < 9; count++) {
            bytes.push((value & 0x7f) | 0x80);
            value >>= 7;
        }
        bytes.push(1);
        return;
    }
    // Non-negative: 7 bits per byte with the continuation bit set while
    // more significant bits remain.
    while (value > 0x7f) {
        bytes.push((value & 0x7f) | 0x80);
        value >>>= 7;
    }
    bytes.push(value);
}
/**
 * Read an unsigned 32 bit varint.
 *
 * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220
 */
export function varint32read(this: ReaderLike): number {
    let b = this.buf[this.pos++];
    let result = b & 0x7F;
    // Bytes 2-4 each contribute a full 7-bit group while the previous
    // byte's continuation bit is set.
    for (let shift = 7; shift <= 21; shift += 7) {
        if ((b & 0x80) == 0) {
            this.assertBounds();
            return result;
        }
        b = this.buf[this.pos++];
        result |= (b & 0x7F) << shift;
    }
    if ((b & 0x80) == 0) {
        this.assertBounds();
        return result;
    }
    // Fifth byte: only its low 4 bits fit into the 32 bit result.
    b = this.buf[this.pos++];
    result |= (b & 0x0F) << 28;
    // Skip any remaining continuation bytes (10 bytes maximum overall).
    for (let readBytes = 5; ((b & 0x80) !== 0) && readBytes < 10; readBytes++)
        b = this.buf[this.pos++];
    if ((b & 0x80) != 0)
        throw new Error('invalid varint');
    this.assertBounds();
    // Result can have 32 bits, convert it to unsigned.
    return result >>> 0;
}
// Minimal reader state required by the varint read functions: the byte
// buffer, a cursor, the logical length, and a bounds assertion invoked
// after each successful read.
type ReaderLike = {
    buf: Uint8Array;
    pos: number;
    len: number;
    assertBounds(): void;
}
| {
return;
} | conditional_block |
daemon.go | // Copyright (c) 2014-2020 Canonical Ltd
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License version 3 as
// published by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package daemon
import (
"context"
"fmt"
"net"
"net/http"
"os"
"os/exec"
"os/signal"
"runtime"
"strings"
"sync"
"syscall"
"time"
"gopkg.in/tomb.v2"
"github.com/gorilla/mux"
"github.com/canonical/pebble/internal/logger"
"github.com/canonical/pebble/internal/osutil"
"github.com/canonical/pebble/internal/osutil/sys"
"github.com/canonical/pebble/internal/overlord"
"github.com/canonical/pebble/internal/overlord/standby"
"github.com/canonical/pebble/internal/overlord/state"
"github.com/canonical/pebble/internal/systemd"
)
var (
	// ErrRestartSocket is returned by Stop when the daemon should exit so
	// that systemd can bring it back up via socket activation.
	ErrRestartSocket = fmt.Errorf("daemon stop requested to wait for socket activation")

	// Indirections over systemd/sys calls — presumably so tests can stub
	// them out; TODO confirm against the test suite.
	systemdSdNotify = systemd.SdNotify
	sysGetuid       = sys.Getuid
)
// Options holds the daemon setup required for the initialization of a new daemon.
type Options struct {
// Dir is the pebble directory where all setup is found. Defaults to /var/lib/pebble/default.
Dir string
// SocketPath is an optional path for the unix socket used for the client
// to communicate with the daemon. Defaults to a hidden (dotted) name inside
// the pebble directory.
SocketPath string
}
// A Daemon listens for requests and routes them to the right command
type Daemon struct {
	Version   string
	StartTime time.Time

	// Filesystem and socket configuration (see Options).
	pebbleDir           string
	normalSocketPath    string
	untrustedSocketPath string

	// Core subsystems.
	overlord *overlord.Overlord
	state    *state.State

	// Listeners for the trusted (general) and untrusted sockets; the
	// untrusted listener may be nil when its socket is unavailable (see Init).
	generalListener   net.Listener
	untrustedListener net.Listener
	connTracker       *connTracker
	serve             *http.Server
	tomb              tomb.Tomb
	router            *mux.Router
	standbyOpinions   *standby.StandbyOpinions

	// set to remember we need to restart the system
	restartSystem bool
	// set to remember that we need to exit the daemon in a way that
	// prevents systemd from restarting it
	restartSocket bool
	// degradedErr is set when the daemon is in degraded mode
	degradedErr error

	// set when an expected system reboot did not happen (see RebootIsMissing).
	rebootIsMissing bool

	// mu guards restartSystem and restartSocket.
	mu sync.Mutex
}
// XXX Placeholder for now: authenticated-user state is not implemented yet.
type userState struct{}

// A ResponseFunc handles one of the individual verbs for a method
type ResponseFunc func(*Command, *http.Request, *userState) Response

// A Command routes a request to an individual per-verb ResponseFunc
type Command struct {
	Path       string
	PathPrefix string
	// Per-verb handlers; a nil entry means "method not allowed".
	GET    ResponseFunc
	PUT    ResponseFunc
	POST   ResponseFunc
	DELETE ResponseFunc
	// Access-control flags; see canAccess for how they combine.
	GuestOK     bool
	UserOK      bool
	UntrustedOK bool
	AdminOnly   bool

	d *Daemon
}

// accessResult is the outcome of a canAccess check.
type accessResult int

const (
	accessOK accessResult = iota
	accessUnauthorized
	accessForbidden
)
// canAccess checks the following properties:
//
// - if the user is `root` everything is allowed
// - if a user is logged in and the command doesn't have AdminOnly, everything is allowed
// - POST/PUT/DELETE all require the admin, or just login if not AdminOnly
//
// Otherwise for GET requests the following parameters are honored:
// - GuestOK: anyone can access GET
// - UserOK: any uid on the local system can access GET
// - AdminOnly: only the administrator can access this
// - UntrustedOK: can access this via the untrusted socket
func (c *Command) canAccess(r *http.Request, user *userState) accessResult {
	if c.AdminOnly && (c.UserOK || c.GuestOK || c.UntrustedOK) {
		logger.Panicf("internal error: command cannot have AdminOnly together with any *OK flag")
	}
	if user != nil && !c.AdminOnly {
		// Authenticated users do anything not requiring explicit admin.
		return accessOK
	}
	// isUser means we have a UID for the request
	isUser := false
	pid, uid, socket, err := ucrednetGet(r.RemoteAddr)
	if err == nil {
		isUser = true
	} else if err != errNoID {
		logger.Noticef("unexpected error when attempting to get UID: %s", err)
		return accessForbidden
	}
	// Requests arriving over the untrusted socket are only allowed on
	// commands explicitly marked UntrustedOK.
	isUntrusted := (socket == c.d.untrustedSocketPath)
	_ = pid
	_ = uid
	if isUntrusted {
		if c.UntrustedOK {
			return accessOK
		}
		return accessUnauthorized
	}
	// the !AdminOnly check is redundant, but belt-and-suspenders
	if r.Method == "GET" && !c.AdminOnly {
		// Guest and user access restricted to GET requests
		if c.GuestOK {
			return accessOK
		}
		if isUser && c.UserOK {
			return accessOK
		}
	}
	// Remaining admin checks rely on identifying peer uid
	if !isUser {
		return accessUnauthorized
	}
	if uid == 0 || sys.UserID(uid) == sysGetuid() {
		// Superuser and process owner can do anything.
		return accessOK
	}
	// NOTE(review): this branch and the fallthrough below return the same
	// value, so the AdminOnly check here is redundant.
	if c.AdminOnly {
		return accessUnauthorized
	}
	return accessUnauthorized
}
// userFromRequest maps an HTTP request to an authenticated user. Currently a
// stub that always returns a nil user with no error (no auth implemented).
func userFromRequest(state interface{}, r *http.Request) (*userState, error) {
	return nil, nil
}
// ServeHTTP routes the request to the matching per-verb handler after
// applying authentication, degraded-mode, and access checks, and decorates
// the response with maintenance/warning metadata where applicable.
func (c *Command) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	st := c.d.state
	st.Lock()
	user, err := userFromRequest(st, r)
	// Release the state lock before any early return: the previous code
	// returned on the error path while still holding it, which would
	// deadlock every subsequent request.
	st.Unlock()
	if err != nil {
		statusForbidden("forbidden").ServeHTTP(w, r)
		return
	}
	// check if we are in degradedMode
	if c.d.degradedErr != nil && r.Method != "GET" {
		statusInternalError(c.d.degradedErr.Error()).ServeHTTP(w, r)
		return
	}
	switch c.canAccess(r, user) {
	case accessOK:
		// nothing
	case accessUnauthorized:
		statusUnauthorized("access denied").ServeHTTP(w, r)
		return
	case accessForbidden:
		statusForbidden("forbidden").ServeHTTP(w, r)
		return
	}
	// Pick the verb handler; fall back to 405 when none is registered.
	var rspf ResponseFunc
	var rsp = statusMethodNotAllowed("method %q not allowed", r.Method)
	switch r.Method {
	case "GET":
		rspf = c.GET
	case "PUT":
		rspf = c.PUT
	case "POST":
		rspf = c.POST
	case "DELETE":
		rspf = c.DELETE
	}
	if rspf != nil {
		rsp = rspf(c, r, user)
	}
	if rsp, ok := rsp.(*resp); ok {
		// Attach maintenance info when a restart is pending.
		_, rst := st.Restarting()
		switch rst {
		case state.RestartSystem:
			rsp.transmitMaintenance(errorKindSystemRestart, "system is restarting")
		case state.RestartDaemon:
			rsp.transmitMaintenance(errorKindDaemonRestart, "daemon is restarting")
		case state.RestartSocket:
			rsp.transmitMaintenance(errorKindDaemonRestart, "daemon is stopping to wait for socket activation")
		}
		if rsp.Type != ResponseTypeError {
			st.Lock()
			count, stamp := st.WarningsSummary()
			st.Unlock()
			rsp.addWarningsToMeta(count, stamp)
		}
	}
	rsp.ServeHTTP(w, r)
}
type wrappedWriter struct {
w http.ResponseWriter
s int
}
func (w *wrappedWriter) Header() http.Header {
return w.w.Header()
}
func (w *wrappedWriter) Write(bs []byte) (int, error) {
return w.w.Write(bs)
}
func (w *wrappedWriter) WriteHeader(s int) {
w.w.WriteHeader(s)
w.s = s
}
func (w *wrappedWriter) Flush() {
if f, ok := w.w.(http.Flusher); ok {
f.Flush()
}
}
func logit(handler http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ww := &wrappedWriter{w: w}
t0 := time.Now()
handler.ServeHTTP(ww, r)
t := time.Now().Sub(t0)
if !strings.Contains(r.URL.String(), "/v1/changes/") {
if strings.HasSuffix(r.RemoteAddr, ";") | else {
logger.Noticef("%s %s %s %s %d", r.RemoteAddr, r.Method, r.URL, t, ww.s)
}
}
})
}
// Init sets up the Daemon's internal workings.
// Don't call more than once.
func (d *Daemon) Init() error {
	listenerMap := make(map[string]net.Listener)
	// The general (trusted) socket is mandatory: fail hard if unavailable.
	if listener, err := getListener(d.normalSocketPath, listenerMap); err == nil {
		d.generalListener = &ucrednetListener{Listener: listener}
	} else {
		return fmt.Errorf("when trying to listen on %s: %v", d.normalSocketPath, err)
	}
	// The untrusted socket is optional: failure only logs at debug level
	// and leaves d.untrustedListener nil.
	if listener, err := getListener(d.untrustedSocketPath, listenerMap); err == nil {
		// This listener may also be nil if that socket wasn't among
		// the listeners, so check it before using it.
		d.untrustedListener = &ucrednetListener{Listener: listener}
	} else {
		logger.Debugf("cannot get listener for %q: %v", d.untrustedSocketPath, err)
	}
	d.addRoutes()
	logger.Noticef("Started daemon.")
	return nil
}
// SetDegradedMode puts the daemon into a degraded mode in which it will
// return the error given in the "err" argument for commands that are not
// marked as readonlyOK.
//
// This is useful to report errors to the client when the daemon
// cannot work because e.g. a sanity check failed or the system is out
// of diskspace.
//
// When the system is fine again calling "SetDegradedMode(nil)" is enough
// to put the daemon into full operation again.
func (d *Daemon) SetDegradedMode(err error) {
	d.degradedErr = err
}
// addRoutes builds the HTTP router from the package-level api command list,
// naming each route after its path pattern.
func (d *Daemon) addRoutes() {
	d.router = mux.NewRouter()
	for _, c := range api {
		c.d = d
		// Exact-path commands and prefix-matched commands register differently.
		if c.PathPrefix == "" {
			d.router.Handle(c.Path, c).Name(c.Path)
		} else {
			d.router.PathPrefix(c.PathPrefix).Handler(c).Name(c.PathPrefix)
		}
	}
	// also maybe add a /favicon.ico handler...
	d.router.NotFoundHandler = statusNotFound("invalid API endpoint requested")
}
type connTracker struct {
mu sync.Mutex
conns map[net.Conn]struct{}
}
func (ct *connTracker) CanStandby() bool {
ct.mu.Lock()
defer ct.mu.Unlock()
return len(ct.conns) == 0
}
func (ct *connTracker) trackConn(conn net.Conn, state http.ConnState) {
ct.mu.Lock()
defer ct.mu.Unlock()
// we ignore hijacked connections, if we do things with websockets
// we'll need custom shutdown handling for them
if state == http.StateNew || state == http.StateActive {
ct.conns[conn] = struct{}{}
} else {
delete(ct.conns, conn)
}
}
// CanStandby reports whether the daemon itself permits going into
// socket-activation standby: only when systemd socket activation is available.
func (d *Daemon) CanStandby() bool {
	return systemd.SocketAvailable()
}

// initStandbyHandling registers all standby opinionators (daemon, connection
// tracker, overlord) and starts the standby machinery.
func (d *Daemon) initStandbyHandling() {
	d.standbyOpinions = standby.New(d.state)
	d.standbyOpinions.AddOpinion(d)
	d.standbyOpinions.AddOpinion(d.connTracker)
	d.standbyOpinions.AddOpinion(d.overlord)
	d.standbyOpinions.Start()
}
// Start brings the daemon up: it starts the overlord loop, begins serving on
// the configured sockets inside the tomb, and notifies systemd of readiness.
func (d *Daemon) Start() {
	if d.rebootIsMissing {
		// we need to schedule and wait for a system restart
		d.tomb.Kill(nil)
		// avoid systemd killing us again while we wait
		systemdSdNotify("READY=1")
		return
	}
	if d.overlord == nil {
		panic("internal error: no Overlord")
	}
	d.StartTime = time.Now()
	d.connTracker = &connTracker{conns: make(map[net.Conn]struct{})}
	d.serve = &http.Server{
		Handler:   logit(d.router),
		ConnState: d.connTracker.trackConn,
	}
	d.initStandbyHandling()
	d.overlord.Loop()
	d.tomb.Go(func() error {
		// Serve the untrusted socket (when present) in its own goroutine.
		if d.untrustedListener != nil {
			d.tomb.Go(func() error {
				if err := d.serve.Serve(d.untrustedListener); err != http.ErrServerClosed && d.tomb.Err() == tomb.ErrStillAlive {
					return err
				}
				return nil
			})
		}
		// http.ErrServerClosed is the normal shutdown path; any other error
		// while the tomb is still alive is a genuine failure.
		if err := d.serve.Serve(d.generalListener); err != http.ErrServerClosed && d.tomb.Err() == tomb.ErrStillAlive {
			return err
		}
		return nil
	})
	// notify systemd that we are ready
	systemdSdNotify("READY=1")
}
// HandleRestart implements overlord.RestartBehavior.
func (d *Daemon) HandleRestart(t state.RestartType) {
	// die when asked to restart (systemd should get us back up!) etc
	switch t {
	case state.RestartDaemon:
		// Nothing extra to record; killing the tomb below is sufficient.
	case state.RestartSystem:
		// try to schedule a fallback slow reboot already here
		// in case we get stuck shutting down
		if err := reboot(rebootWaitTimeout); err != nil {
			logger.Noticef("%s", err)
		}
		d.mu.Lock()
		defer d.mu.Unlock()
		// remember we need to restart the system
		d.restartSystem = true
	case state.RestartSocket:
		d.mu.Lock()
		defer d.mu.Unlock()
		d.restartSocket = true
	default:
		logger.Noticef("internal error: restart handler called with unknown restart type: %v", t)
	}
	// In every case the tomb is killed so the serving goroutines wind down
	// and Stop can complete the shutdown.
	d.tomb.Kill(nil)
}
var (
	// Grace period for polling clients to notice a pending restart.
	rebootNoticeWait = 3 * time.Second
	// Fallback reboot deadline scheduled when a system restart is requested.
	rebootWaitTimeout = 10 * time.Minute
	// Wait used when re-scheduling a reboot that did not happen.
	rebootRetryWaitTimeout = 5 * time.Minute
	// Number of times to retry waiting for a missing reboot before giving up.
	rebootMaxTentatives = 3
)

// Grace period for in-flight HTTP requests during server Shutdown.
var shutdownTimeout = 25 * time.Second

// Stop shuts down the Daemon.
func (d *Daemon) Stop(sigCh chan<- os.Signal) error {
	if d.rebootIsMissing {
		// we need to schedule/wait for a system restart again
		return d.doReboot(sigCh, rebootRetryWaitTimeout)
	}
	if d.overlord == nil {
		return fmt.Errorf("internal error: no Overlord")
	}
	d.tomb.Kill(nil)
	// Snapshot the restart flags under the lock; HandleRestart sets them.
	d.mu.Lock()
	restartSystem := d.restartSystem
	restartSocket := d.restartSocket
	d.mu.Unlock()
	d.generalListener.Close()
	d.standbyOpinions.Stop()
	if d.untrustedListener != nil {
		d.untrustedListener.Close()
	}
	if restartSystem {
		// give time to polling clients to notice restart
		time.Sleep(rebootNoticeWait)
	}
	// We're using the background context here because the tomb's
	// context will likely already have been cancelled when we are
	// called.
	ctx, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
	d.tomb.Kill(d.serve.Shutdown(ctx))
	cancel()
	if !restartSystem {
		// tell systemd that we are stopping
		systemdSdNotify("STOPPING=1")
	}
	if restartSocket {
		// At this point we processed all open requests (and
		// stopped accepting new requests) - before going into
		// socket activated mode we need to check if any of
		// those open requests resulted in something that
		// prevents us from going into socket activation mode.
		//
		// If this is the case we do a "normal" snapd restart
		// to process the new changes.
		if !d.standbyOpinions.CanStandby() {
			d.restartSocket = false
		}
	}
	d.overlord.Stop()
	err := d.tomb.Wait()
	if err != nil {
		// do not stop the shutdown even if the tomb errors
		// because we already scheduled a slow shutdown and
		// exiting here will just restart snapd (via systemd)
		// which will lead to confusing results.
		if restartSystem {
			logger.Noticef("WARNING: cannot stop daemon: %v", err)
		} else {
			return err
		}
	}
	if restartSystem {
		return d.doReboot(sigCh, rebootWaitTimeout)
	}
	if d.restartSocket {
		return ErrRestartSocket
	}
	return nil
}
// rebootDelay returns how long to wait before the scheduled system reboot.
// If no reboot is pending yet, it schedules one (default one minute out,
// overridable via SNAPD_REBOOT_DELAY for tests) and persists the deadline
// in state under "daemon-system-restart-at".
func (d *Daemon) rebootDelay() (time.Duration, error) {
	d.state.Lock()
	defer d.state.Unlock()
	now := time.Now()
	// see whether a reboot had already been scheduled
	var rebootAt time.Time
	err := d.state.Get("daemon-system-restart-at", &rebootAt)
	if err != nil && err != state.ErrNoState {
		return 0, err
	}
	rebootDelay := 1 * time.Minute
	if err == nil {
		// A reboot is already scheduled; wait out the remaining time.
		rebootDelay = rebootAt.Sub(now)
	} else {
		ovr := os.Getenv("SNAPD_REBOOT_DELAY") // for tests
		if ovr != "" {
			// Renamed from "d": the original shadowed the method receiver.
			dur, err := time.ParseDuration(ovr)
			if err == nil {
				rebootDelay = dur
			}
		}
		rebootAt = now.Add(rebootDelay)
		d.state.Set("daemon-system-restart-at", rebootAt)
	}
	return rebootDelay, nil
}
// doReboot schedules a system reboot and blocks waiting for it to happen,
// returning an error if the system has not rebooted within waitTimeout.
func (d *Daemon) doReboot(sigCh chan<- os.Signal, waitTimeout time.Duration) error {
	rebootDelay, err := d.rebootDelay()
	if err != nil {
		return err
	}
	// ask for shutdown and wait for it to happen.
	// if we exit snapd will be restared by systemd
	if err := reboot(rebootDelay); err != nil {
		return err
	}
	// wait for reboot to happen
	logger.Noticef("Waiting for system reboot")
	if sigCh != nil {
		// Stop signal delivery so a pending signal doesn't interrupt the wait.
		signal.Stop(sigCh)
		if len(sigCh) > 0 {
			// a signal arrived in between
			return nil
		}
		close(sigCh)
	}
	time.Sleep(waitTimeout)
	return fmt.Errorf("expected reboot did not happen")
}
// shutdownMsg is the wall message shown when the reboot is scheduled.
var shutdownMsg = "reboot scheduled to update the system"

// rebootImpl schedules a system reboot via shutdown(8) after rebootDelay,
// rounded down to whole minutes; negative delays mean "immediately".
func rebootImpl(rebootDelay time.Duration) error {
	if rebootDelay < 0 {
		rebootDelay = 0
	}
	mins := int64(rebootDelay / time.Minute)
	out, err := exec.Command("shutdown", "-r", fmt.Sprintf("+%d", mins), shutdownMsg).CombinedOutput()
	if err != nil {
		return osutil.OutputErr(out, err)
	}
	return nil
}

// reboot is indirected through a variable so it can be replaced in tests.
var reboot = rebootImpl
// Dying returns a channel that is closed when the daemon starts shutting down.
func (d *Daemon) Dying() <-chan struct{} {
	return d.tomb.Dying()
}

// clearReboot drops any pending scheduled-reboot bookkeeping from state.
func clearReboot(st *state.State) {
	// FIXME See notes in the state package. This logic should be
	// centralized in the overlord which is the orchestrator. Right
	// now we have the daemon, the overlord, and even the state
	// itself all knowing about such details.
	st.Set("daemon-system-restart-at", nil)
	st.Set("daemon-system-restart-tentative", nil)
}

// RebootIsFine implements part of overlord.RestartBehavior.
func (d *Daemon) RebootIsFine(st *state.State) error {
	clearReboot(st)
	return nil
}
// RebootIsMissing implements part of overlord.RestartBehavior (the previous
// comment referred to it by the stale name "RebootDidNotHappen"): it is
// called when an expected system restart did not happen, and retries the
// schedule-and-wait cycle up to rebootMaxTentatives times before giving up.
func (d *Daemon) RebootIsMissing(st *state.State) error {
	var nTentative int
	err := st.Get("daemon-system-restart-tentative", &nTentative)
	if err != nil && err != state.ErrNoState {
		return err
	}
	nTentative++
	if nTentative > rebootMaxTentatives {
		// giving up, proceed normally, some in-progress refresh
		// might get rolled back!!
		st.ClearReboot()
		clearReboot(st)
		logger.Noticef("snapd was restarted while a system restart was expected, snapd retried to schedule and waited again for a system restart %d times and is giving up", rebootMaxTentatives)
		return nil
	}
	st.Set("daemon-system-restart-tentative", nTentative)
	d.state = st
	// NOTE(review): "tenative" below is a typo in the log message; left
	// untouched here to keep this change comment-only.
	logger.Noticef("snapd was restarted while a system restart was expected, snapd will try to schedule and wait for a system restart again (tenative %d/%d)", nTentative, rebootMaxTentatives)
	return state.ErrExpectedReboot
}
// New creates a Daemon from the given options, constructing the overlord and
// state. When a previously expected reboot did not happen, the daemon is
// returned in "reboot is missing" mode rather than as an error.
func New(opts *Options) (*Daemon, error) {
	d := &Daemon{
		pebbleDir:           opts.Dir,
		normalSocketPath:    opts.SocketPath,
		untrustedSocketPath: opts.SocketPath + ".untrusted",
	}
	ovld, err := overlord.New(opts.Dir, d)
	if err == state.ErrExpectedReboot {
		// we proceed without overlord until we reach Stop
		// where we will schedule and wait again for a system restart.
		// ATM we cannot do that in New because we need to satisfy
		// systemd notify mechanisms.
		d.rebootIsMissing = true
		return d, nil
	}
	if err != nil {
		return nil, err
	}
	d.overlord = ovld
	d.state = ovld.State()
	return d, nil
}
// getListener tries to get a listener for the given socket path from
// the listener map, and if it fails it tries to set it up directly.
func getListener(socketPath string, listenerMap map[string]net.Listener) (net.Listener, error) {
	if listener, ok := listenerMap[socketPath]; ok {
		return listener, nil
	}
	// Refuse to steal a socket something is already serving on.
	if c, err := net.Dial("unix", socketPath); err == nil {
		c.Close()
		return nil, fmt.Errorf("socket %q already in use", socketPath)
	}
	// Remove a stale socket file left over from a previous run.
	if err := os.Remove(socketPath); err != nil && !os.IsNotExist(err) {
		return nil, err
	}
	address, err := net.ResolveUnixAddr("unix", socketPath)
	if err != nil {
		return nil, err
	}
	// Tighten the umask while creating the socket so it is not created with
	// execute bits. NOTE(review): umask is process-wide, so LockOSThread
	// alone does not isolate this from other goroutines — presumably this
	// mirrors the upstream snapd dance; confirm intent.
	runtime.LockOSThread()
	oldmask := syscall.Umask(0111)
	listener, err := net.ListenUnix("unix", address)
	syscall.Umask(oldmask)
	runtime.UnlockOSThread()
	if err != nil {
		return nil, err
	}
	logger.Debugf("socket %q was not activated; listening", socketPath)
	return listener, nil
}
| {
logger.Debugf("%s %s %s %s %d", r.RemoteAddr, r.Method, r.URL, t, ww.s)
logger.Noticef("%s %s %s %d", r.Method, r.URL, t, ww.s)
} | conditional_block |
daemon.go | // Copyright (c) 2014-2020 Canonical Ltd
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License version 3 as
// published by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package daemon
import (
"context"
"fmt"
"net"
"net/http"
"os"
"os/exec"
"os/signal"
"runtime"
"strings"
"sync"
"syscall"
"time"
"gopkg.in/tomb.v2"
"github.com/gorilla/mux"
"github.com/canonical/pebble/internal/logger"
"github.com/canonical/pebble/internal/osutil"
"github.com/canonical/pebble/internal/osutil/sys"
"github.com/canonical/pebble/internal/overlord"
"github.com/canonical/pebble/internal/overlord/standby"
"github.com/canonical/pebble/internal/overlord/state"
"github.com/canonical/pebble/internal/systemd"
)
var (
ErrRestartSocket = fmt.Errorf("daemon stop requested to wait for socket activation")
systemdSdNotify = systemd.SdNotify
sysGetuid = sys.Getuid
)
// Options holds the daemon setup required for the initialization of a new daemon.
type Options struct {
// Dir is the pebble directory where all setup is found. Defaults to /var/lib/pebble/default.
Dir string
// SocketPath is an optional path for the unix socket used for the client
// to communicate with the daemon. Defaults to a hidden (dotted) name inside
// the pebble directory.
SocketPath string
}
// A Daemon listens for requests and routes them to the right command
type Daemon struct {
Version string
StartTime time.Time
pebbleDir string
normalSocketPath string
untrustedSocketPath string
overlord *overlord.Overlord
state *state.State
generalListener net.Listener
untrustedListener net.Listener
connTracker *connTracker
serve *http.Server
tomb tomb.Tomb
router *mux.Router
standbyOpinions *standby.StandbyOpinions
// set to remember we need to restart the system
restartSystem bool
// set to remember that we need to exit the daemon in a way that
// prevents systemd from restarting it
restartSocket bool
// degradedErr is set when the daemon is in degraded mode
degradedErr error
rebootIsMissing bool
mu sync.Mutex
}
// XXX Placeholder for now: authenticated-user state is not implemented yet.
type userState struct{}

// A ResponseFunc handles one of the individual verbs for a method
type ResponseFunc func(*Command, *http.Request, *userState) Response
// A Command routes a request to an individual per-verb ResponseFUnc
type Command struct {
Path string
PathPrefix string
//
GET ResponseFunc
PUT ResponseFunc
POST ResponseFunc
DELETE ResponseFunc
GuestOK bool
UserOK bool
UntrustedOK bool
AdminOnly bool
d *Daemon
}
type accessResult int
const (
accessOK accessResult = iota
accessUnauthorized
accessForbidden
)
// canAccess checks the following properties:
//
// - if the user is `root` everything is allowed
// - if a user is logged in and the command doesn't have AdminOnly, everything is allowed
// - POST/PUT/DELETE all require the admin, or just login if not AdminOnly
//
// Otherwise for GET requests the following parameters are honored:
// - GuestOK: anyone can access GET
// - UserOK: any uid on the local system can access GET
// - AdminOnly: only the administrator can access this
// - UntrustedOK: can access this via the untrusted socket
func (c *Command) canAccess(r *http.Request, user *userState) accessResult {
if c.AdminOnly && (c.UserOK || c.GuestOK || c.UntrustedOK) {
logger.Panicf("internal error: command cannot have AdminOnly together with any *OK flag")
}
if user != nil && !c.AdminOnly {
// Authenticated users do anything not requiring explicit admin.
return accessOK
}
// isUser means we have a UID for the request
isUser := false
pid, uid, socket, err := ucrednetGet(r.RemoteAddr)
if err == nil {
isUser = true
} else if err != errNoID {
logger.Noticef("unexpected error when attempting to get UID: %s", err)
return accessForbidden
}
isUntrusted := (socket == c.d.untrustedSocketPath)
_ = pid
_ = uid
if isUntrusted {
if c.UntrustedOK {
return accessOK
}
return accessUnauthorized
}
// the !AdminOnly check is redundant, but belt-and-suspenders
if r.Method == "GET" && !c.AdminOnly {
// Guest and user access restricted to GET requests
if c.GuestOK {
return accessOK
}
if isUser && c.UserOK {
return accessOK
}
}
// Remaining admin checks rely on identifying peer uid
if !isUser {
return accessUnauthorized
}
if uid == 0 || sys.UserID(uid) == sysGetuid() {
// Superuser and process owner can do anything.
return accessOK
}
if c.AdminOnly {
return accessUnauthorized
}
return accessUnauthorized
}
func userFromRequest(state interface{}, r *http.Request) (*userState, error) {
return nil, nil
}
func (c *Command) ServeHTTP(w http.ResponseWriter, r *http.Request) {
st := c.d.state
st.Lock()
user, err := userFromRequest(st, r)
if err != nil {
statusForbidden("forbidden").ServeHTTP(w, r)
return
}
st.Unlock()
// check if we are in degradedMode
if c.d.degradedErr != nil && r.Method != "GET" {
statusInternalError(c.d.degradedErr.Error()).ServeHTTP(w, r)
return
}
switch c.canAccess(r, user) {
case accessOK:
// nothing
case accessUnauthorized:
statusUnauthorized("access denied").ServeHTTP(w, r)
return
case accessForbidden:
statusForbidden("forbidden").ServeHTTP(w, r)
return
}
var rspf ResponseFunc
var rsp = statusMethodNotAllowed("method %q not allowed", r.Method)
switch r.Method {
case "GET":
rspf = c.GET
case "PUT":
rspf = c.PUT
case "POST":
rspf = c.POST
case "DELETE":
rspf = c.DELETE
}
if rspf != nil {
rsp = rspf(c, r, user)
}
if rsp, ok := rsp.(*resp); ok {
_, rst := st.Restarting()
switch rst {
case state.RestartSystem:
rsp.transmitMaintenance(errorKindSystemRestart, "system is restarting")
case state.RestartDaemon:
rsp.transmitMaintenance(errorKindDaemonRestart, "daemon is restarting")
case state.RestartSocket:
rsp.transmitMaintenance(errorKindDaemonRestart, "daemon is stopping to wait for socket activation")
}
if rsp.Type != ResponseTypeError {
st.Lock()
count, stamp := st.WarningsSummary()
st.Unlock()
rsp.addWarningsToMeta(count, stamp)
}
}
rsp.ServeHTTP(w, r)
}
type wrappedWriter struct {
w http.ResponseWriter
s int
}
func (w *wrappedWriter) Header() http.Header {
return w.w.Header()
}
func (w *wrappedWriter) Write(bs []byte) (int, error) {
return w.w.Write(bs)
}
func (w *wrappedWriter) WriteHeader(s int) {
w.w.WriteHeader(s)
w.s = s
}
func (w *wrappedWriter) Flush() {
if f, ok := w.w.(http.Flusher); ok {
f.Flush()
}
}
func logit(handler http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ww := &wrappedWriter{w: w}
t0 := time.Now()
handler.ServeHTTP(ww, r)
t := time.Now().Sub(t0)
if !strings.Contains(r.URL.String(), "/v1/changes/") {
if strings.HasSuffix(r.RemoteAddr, ";") {
logger.Debugf("%s %s %s %s %d", r.RemoteAddr, r.Method, r.URL, t, ww.s)
logger.Noticef("%s %s %s %d", r.Method, r.URL, t, ww.s)
} else {
logger.Noticef("%s %s %s %s %d", r.RemoteAddr, r.Method, r.URL, t, ww.s)
}
}
})
}
// Init sets up the Daemon's internal workings.
// Don't call more than once.
func (d *Daemon) Init() error {
listenerMap := make(map[string]net.Listener)
if listener, err := getListener(d.normalSocketPath, listenerMap); err == nil {
d.generalListener = &ucrednetListener{Listener: listener}
} else {
return fmt.Errorf("when trying to listen on %s: %v", d.normalSocketPath, err)
}
if listener, err := getListener(d.untrustedSocketPath, listenerMap); err == nil {
// This listener may also be nil if that socket wasn't among
// the listeners, so check it before using it.
d.untrustedListener = &ucrednetListener{Listener: listener}
} else {
logger.Debugf("cannot get listener for %q: %v", d.untrustedSocketPath, err)
}
d.addRoutes()
logger.Noticef("Started daemon.")
return nil
}
// SetDegradedMode puts the daemon into an degraded mode which will the
// error given in the "err" argument for commands that are not marked
// as readonlyOK.
//
// This is useful to report errors to the client when the daemon
// cannot work because e.g. a sanity check failed or the system is out
// of diskspace.
//
// When the system is fine again calling "DegradedMode(nil)" is enough
// to put the daemon into full operation again.
func (d *Daemon) SetDegradedMode(err error) {
d.degradedErr = err
}
func (d *Daemon) addRoutes() {
d.router = mux.NewRouter()
for _, c := range api {
c.d = d
if c.PathPrefix == "" {
d.router.Handle(c.Path, c).Name(c.Path)
} else {
d.router.PathPrefix(c.PathPrefix).Handler(c).Name(c.PathPrefix)
}
}
// also maybe add a /favicon.ico handler...
d.router.NotFoundHandler = statusNotFound("invalid API endpoint requested")
}
type connTracker struct {
mu sync.Mutex
conns map[net.Conn]struct{}
}
func (ct *connTracker) CanStandby() bool {
ct.mu.Lock()
defer ct.mu.Unlock()
return len(ct.conns) == 0
}
func (ct *connTracker) trackConn(conn net.Conn, state http.ConnState) {
ct.mu.Lock()
defer ct.mu.Unlock()
// we ignore hijacked connections, if we do things with websockets
// we'll need custom shutdown handling for them
if state == http.StateNew || state == http.StateActive {
ct.conns[conn] = struct{}{}
} else {
delete(ct.conns, conn)
}
}
func (d *Daemon) CanStandby() bool {
return systemd.SocketAvailable()
}
func (d *Daemon) initStandbyHandling() {
d.standbyOpinions = standby.New(d.state)
d.standbyOpinions.AddOpinion(d)
d.standbyOpinions.AddOpinion(d.connTracker)
d.standbyOpinions.AddOpinion(d.overlord)
d.standbyOpinions.Start()
}
func (d *Daemon) Start() {
if d.rebootIsMissing {
// we need to schedule and wait for a system restart
d.tomb.Kill(nil)
// avoid systemd killing us again while we wait
systemdSdNotify("READY=1")
return
}
if d.overlord == nil {
panic("internal error: no Overlord")
}
d.StartTime = time.Now()
d.connTracker = &connTracker{conns: make(map[net.Conn]struct{})}
d.serve = &http.Server{
Handler: logit(d.router),
ConnState: d.connTracker.trackConn,
}
d.initStandbyHandling()
d.overlord.Loop()
d.tomb.Go(func() error {
if d.untrustedListener != nil {
d.tomb.Go(func() error {
if err := d.serve.Serve(d.untrustedListener); err != http.ErrServerClosed && d.tomb.Err() == tomb.ErrStillAlive {
return err
}
return nil
})
}
if err := d.serve.Serve(d.generalListener); err != http.ErrServerClosed && d.tomb.Err() == tomb.ErrStillAlive {
return err
}
return nil
})
// notify systemd that we are ready
systemdSdNotify("READY=1")
}
// HandleRestart implements overlord.RestartBehavior.
func (d *Daemon) HandleRestart(t state.RestartType) |
var (
rebootNoticeWait = 3 * time.Second
rebootWaitTimeout = 10 * time.Minute
rebootRetryWaitTimeout = 5 * time.Minute
rebootMaxTentatives = 3
)
var shutdownTimeout = 25 * time.Second
// Stop shuts down the Daemon.
func (d *Daemon) Stop(sigCh chan<- os.Signal) error {
if d.rebootIsMissing {
// we need to schedule/wait for a system restart again
return d.doReboot(sigCh, rebootRetryWaitTimeout)
}
if d.overlord == nil {
return fmt.Errorf("internal error: no Overlord")
}
d.tomb.Kill(nil)
d.mu.Lock()
restartSystem := d.restartSystem
restartSocket := d.restartSocket
d.mu.Unlock()
d.generalListener.Close()
d.standbyOpinions.Stop()
if d.untrustedListener != nil {
d.untrustedListener.Close()
}
if restartSystem {
// give time to polling clients to notice restart
time.Sleep(rebootNoticeWait)
}
// We're using the background context here because the tomb's
// context will likely already have been cancelled when we are
// called.
ctx, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
d.tomb.Kill(d.serve.Shutdown(ctx))
cancel()
if !restartSystem {
// tell systemd that we are stopping
systemdSdNotify("STOPPING=1")
}
if restartSocket {
// At this point we processed all open requests (and
// stopped accepting new requests) - before going into
// socket activated mode we need to check if any of
// those open requests resulted in something that
// prevents us from going into socket activation mode.
//
// If this is the case we do a "normal" snapd restart
// to process the new changes.
if !d.standbyOpinions.CanStandby() {
d.restartSocket = false
}
}
d.overlord.Stop()
err := d.tomb.Wait()
if err != nil {
// do not stop the shutdown even if the tomb errors
// because we already scheduled a slow shutdown and
// exiting here will just restart snapd (via systemd)
// which will lead to confusing results.
if restartSystem {
logger.Noticef("WARNING: cannot stop daemon: %v", err)
} else {
return err
}
}
if restartSystem {
return d.doReboot(sigCh, rebootWaitTimeout)
}
if d.restartSocket {
return ErrRestartSocket
}
return nil
}
func (d *Daemon) rebootDelay() (time.Duration, error) {
d.state.Lock()
defer d.state.Unlock()
now := time.Now()
// see whether a reboot had already been scheduled
var rebootAt time.Time
err := d.state.Get("daemon-system-restart-at", &rebootAt)
if err != nil && err != state.ErrNoState {
return 0, err
}
rebootDelay := 1 * time.Minute
if err == nil {
rebootDelay = rebootAt.Sub(now)
} else {
ovr := os.Getenv("SNAPD_REBOOT_DELAY") // for tests
if ovr != "" {
d, err := time.ParseDuration(ovr)
if err == nil {
rebootDelay = d
}
}
rebootAt = now.Add(rebootDelay)
d.state.Set("daemon-system-restart-at", rebootAt)
}
return rebootDelay, nil
}
func (d *Daemon) doReboot(sigCh chan<- os.Signal, waitTimeout time.Duration) error {
rebootDelay, err := d.rebootDelay()
if err != nil {
return err
}
// ask for shutdown and wait for it to happen.
// if we exit snapd will be restared by systemd
if err := reboot(rebootDelay); err != nil {
return err
}
// wait for reboot to happen
logger.Noticef("Waiting for system reboot")
if sigCh != nil {
signal.Stop(sigCh)
if len(sigCh) > 0 {
// a signal arrived in between
return nil
}
close(sigCh)
}
time.Sleep(waitTimeout)
return fmt.Errorf("expected reboot did not happen")
}
var shutdownMsg = "reboot scheduled to update the system"
func rebootImpl(rebootDelay time.Duration) error {
if rebootDelay < 0 {
rebootDelay = 0
}
mins := int64(rebootDelay / time.Minute)
cmd := exec.Command("shutdown", "-r", fmt.Sprintf("+%d", mins), shutdownMsg)
if out, err := cmd.CombinedOutput(); err != nil {
return osutil.OutputErr(out, err)
}
return nil
}
var reboot = rebootImpl
func (d *Daemon) Dying() <-chan struct{} {
return d.tomb.Dying()
}
func clearReboot(st *state.State) {
// FIXME See notes in the state package. This logic should be
// centralized in the overlord which is the orchestrator. Right
// now we have the daemon, the overlord, and even the state
// itself all knowing about such details.
st.Set("daemon-system-restart-at", nil)
st.Set("daemon-system-restart-tentative", nil)
}
// RebootIsFine implements part of overlord.RestartBehavior.
func (d *Daemon) RebootIsFine(st *state.State) error {
clearReboot(st)
return nil
}
// RebootDidNotHappen implements part of overlord.RestartBehavior.
func (d *Daemon) RebootIsMissing(st *state.State) error {
var nTentative int
err := st.Get("daemon-system-restart-tentative", &nTentative)
if err != nil && err != state.ErrNoState {
return err
}
nTentative++
if nTentative > rebootMaxTentatives {
// giving up, proceed normally, some in-progress refresh
// might get rolled back!!
st.ClearReboot()
clearReboot(st)
logger.Noticef("snapd was restarted while a system restart was expected, snapd retried to schedule and waited again for a system restart %d times and is giving up", rebootMaxTentatives)
return nil
}
st.Set("daemon-system-restart-tentative", nTentative)
d.state = st
logger.Noticef("snapd was restarted while a system restart was expected, snapd will try to schedule and wait for a system restart again (tenative %d/%d)", nTentative, rebootMaxTentatives)
return state.ErrExpectedReboot
}
func New(opts *Options) (*Daemon, error) {
d := &Daemon{
pebbleDir: opts.Dir,
normalSocketPath: opts.SocketPath,
untrustedSocketPath: opts.SocketPath + ".untrusted",
}
ovld, err := overlord.New(opts.Dir, d)
if err == state.ErrExpectedReboot {
// we proceed without overlord until we reach Stop
// where we will schedule and wait again for a system restart.
// ATM we cannot do that in New because we need to satisfy
// systemd notify mechanisms.
d.rebootIsMissing = true
return d, nil
}
if err != nil {
return nil, err
}
d.overlord = ovld
d.state = ovld.State()
return d, nil
}
// GetListener tries to get a listener for the given socket path from
// the listener map, and if it fails it tries to set it up directly.
func getListener(socketPath string, listenerMap map[string]net.Listener) (net.Listener, error) {
if listener, ok := listenerMap[socketPath]; ok {
return listener, nil
}
if c, err := net.Dial("unix", socketPath); err == nil {
c.Close()
return nil, fmt.Errorf("socket %q already in use", socketPath)
}
if err := os.Remove(socketPath); err != nil && !os.IsNotExist(err) {
return nil, err
}
address, err := net.ResolveUnixAddr("unix", socketPath)
if err != nil {
return nil, err
}
runtime.LockOSThread()
oldmask := syscall.Umask(0111)
listener, err := net.ListenUnix("unix", address)
syscall.Umask(oldmask)
runtime.UnlockOSThread()
if err != nil {
return nil, err
}
logger.Debugf("socket %q was not activated; listening", socketPath)
return listener, nil
}
| {
// die when asked to restart (systemd should get us back up!) etc
switch t {
case state.RestartDaemon:
case state.RestartSystem:
// try to schedule a fallback slow reboot already here
// in case we get stuck shutting down
if err := reboot(rebootWaitTimeout); err != nil {
logger.Noticef("%s", err)
}
d.mu.Lock()
defer d.mu.Unlock()
// remember we need to restart the system
d.restartSystem = true
case state.RestartSocket:
d.mu.Lock()
defer d.mu.Unlock()
d.restartSocket = true
default:
logger.Noticef("internal error: restart handler called with unknown restart type: %v", t)
}
d.tomb.Kill(nil)
} | identifier_body |
daemon.go | // Copyright (c) 2014-2020 Canonical Ltd
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License version 3 as
// published by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package daemon
import (
"context"
"fmt"
"net"
"net/http"
"os"
"os/exec"
"os/signal"
"runtime"
"strings"
"sync"
"syscall"
"time"
"gopkg.in/tomb.v2"
"github.com/gorilla/mux"
"github.com/canonical/pebble/internal/logger"
"github.com/canonical/pebble/internal/osutil"
"github.com/canonical/pebble/internal/osutil/sys"
"github.com/canonical/pebble/internal/overlord"
"github.com/canonical/pebble/internal/overlord/standby"
"github.com/canonical/pebble/internal/overlord/state"
"github.com/canonical/pebble/internal/systemd"
)
var (
ErrRestartSocket = fmt.Errorf("daemon stop requested to wait for socket activation")
systemdSdNotify = systemd.SdNotify
sysGetuid = sys.Getuid
)
// Options holds the daemon setup required for the initialization of a new daemon.
type Options struct {
// Dir is the pebble directory where all setup is found. Defaults to /var/lib/pebble/default.
Dir string
// SocketPath is an optional path for the unix socket used for the client
// to communicate with the daemon. Defaults to a hidden (dotted) name inside
// the pebble directory.
SocketPath string
}
// A Daemon listens for requests and routes them to the right command
type Daemon struct {
Version string
StartTime time.Time
pebbleDir string
normalSocketPath string
untrustedSocketPath string
overlord *overlord.Overlord
state *state.State
generalListener net.Listener
untrustedListener net.Listener
connTracker *connTracker
serve *http.Server
tomb tomb.Tomb
router *mux.Router
standbyOpinions *standby.StandbyOpinions
// set to remember we need to restart the system
restartSystem bool
// set to remember that we need to exit the daemon in a way that
// prevents systemd from restarting it
restartSocket bool
// degradedErr is set when the daemon is in degraded mode
degradedErr error
rebootIsMissing bool
mu sync.Mutex
}
// XXX Placeholder for now.
type userState struct{}
// A ResponseFunc handles one of the individual verbs for a method
type ResponseFunc func(*Command, *http.Request, *userState) Response
// A Command routes a request to an individual per-verb ResponseFUnc
type Command struct {
Path string
PathPrefix string
//
GET ResponseFunc
PUT ResponseFunc
POST ResponseFunc
DELETE ResponseFunc
GuestOK bool
UserOK bool
UntrustedOK bool
AdminOnly bool
d *Daemon
}
type accessResult int
const (
accessOK accessResult = iota
accessUnauthorized
accessForbidden
)
// canAccess checks the following properties:
//
// - if the user is `root` everything is allowed
// - if a user is logged in and the command doesn't have AdminOnly, everything is allowed
// - POST/PUT/DELETE all require the admin, or just login if not AdminOnly
//
// Otherwise for GET requests the following parameters are honored:
// - GuestOK: anyone can access GET
// - UserOK: any uid on the local system can access GET
// - AdminOnly: only the administrator can access this
// - UntrustedOK: can access this via the untrusted socket
func (c *Command) canAccess(r *http.Request, user *userState) accessResult {
if c.AdminOnly && (c.UserOK || c.GuestOK || c.UntrustedOK) {
logger.Panicf("internal error: command cannot have AdminOnly together with any *OK flag")
}
if user != nil && !c.AdminOnly {
// Authenticated users do anything not requiring explicit admin.
return accessOK
}
// isUser means we have a UID for the request
isUser := false
pid, uid, socket, err := ucrednetGet(r.RemoteAddr)
if err == nil {
isUser = true
} else if err != errNoID {
logger.Noticef("unexpected error when attempting to get UID: %s", err)
return accessForbidden
}
isUntrusted := (socket == c.d.untrustedSocketPath)
_ = pid
_ = uid
if isUntrusted {
if c.UntrustedOK {
return accessOK
}
return accessUnauthorized
}
// the !AdminOnly check is redundant, but belt-and-suspenders
if r.Method == "GET" && !c.AdminOnly {
// Guest and user access restricted to GET requests
if c.GuestOK {
return accessOK
}
if isUser && c.UserOK {
return accessOK
}
}
// Remaining admin checks rely on identifying peer uid
if !isUser {
return accessUnauthorized
}
if uid == 0 || sys.UserID(uid) == sysGetuid() {
// Superuser and process owner can do anything.
return accessOK
}
if c.AdminOnly {
return accessUnauthorized
}
return accessUnauthorized
}
func userFromRequest(state interface{}, r *http.Request) (*userState, error) {
return nil, nil
}
func (c *Command) ServeHTTP(w http.ResponseWriter, r *http.Request) {
st := c.d.state
st.Lock()
user, err := userFromRequest(st, r)
if err != nil {
statusForbidden("forbidden").ServeHTTP(w, r)
return
}
st.Unlock()
// check if we are in degradedMode
if c.d.degradedErr != nil && r.Method != "GET" {
statusInternalError(c.d.degradedErr.Error()).ServeHTTP(w, r)
return
}
switch c.canAccess(r, user) {
case accessOK:
// nothing
case accessUnauthorized:
statusUnauthorized("access denied").ServeHTTP(w, r)
return
case accessForbidden:
statusForbidden("forbidden").ServeHTTP(w, r)
return
}
var rspf ResponseFunc
var rsp = statusMethodNotAllowed("method %q not allowed", r.Method)
switch r.Method {
case "GET":
rspf = c.GET
case "PUT":
rspf = c.PUT
case "POST":
rspf = c.POST
case "DELETE":
rspf = c.DELETE
}
if rspf != nil {
rsp = rspf(c, r, user)
}
if rsp, ok := rsp.(*resp); ok {
_, rst := st.Restarting()
switch rst {
case state.RestartSystem:
rsp.transmitMaintenance(errorKindSystemRestart, "system is restarting")
case state.RestartDaemon:
rsp.transmitMaintenance(errorKindDaemonRestart, "daemon is restarting")
case state.RestartSocket:
rsp.transmitMaintenance(errorKindDaemonRestart, "daemon is stopping to wait for socket activation")
}
if rsp.Type != ResponseTypeError {
st.Lock()
count, stamp := st.WarningsSummary()
st.Unlock()
rsp.addWarningsToMeta(count, stamp)
}
}
rsp.ServeHTTP(w, r)
}
type wrappedWriter struct {
w http.ResponseWriter
s int
}
func (w *wrappedWriter) Header() http.Header {
return w.w.Header()
}
func (w *wrappedWriter) Write(bs []byte) (int, error) {
return w.w.Write(bs)
}
func (w *wrappedWriter) WriteHeader(s int) {
w.w.WriteHeader(s)
w.s = s
}
func (w *wrappedWriter) Flush() {
if f, ok := w.w.(http.Flusher); ok {
f.Flush()
}
}
func logit(handler http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ww := &wrappedWriter{w: w}
t0 := time.Now()
handler.ServeHTTP(ww, r)
t := time.Now().Sub(t0)
if !strings.Contains(r.URL.String(), "/v1/changes/") {
if strings.HasSuffix(r.RemoteAddr, ";") {
logger.Debugf("%s %s %s %s %d", r.RemoteAddr, r.Method, r.URL, t, ww.s)
logger.Noticef("%s %s %s %d", r.Method, r.URL, t, ww.s)
} else {
logger.Noticef("%s %s %s %s %d", r.RemoteAddr, r.Method, r.URL, t, ww.s)
}
}
})
}
// Init sets up the Daemon's internal workings.
// Don't call more than once.
func (d *Daemon) Init() error {
listenerMap := make(map[string]net.Listener)
if listener, err := getListener(d.normalSocketPath, listenerMap); err == nil {
d.generalListener = &ucrednetListener{Listener: listener}
} else {
return fmt.Errorf("when trying to listen on %s: %v", d.normalSocketPath, err)
}
if listener, err := getListener(d.untrustedSocketPath, listenerMap); err == nil {
// This listener may also be nil if that socket wasn't among
// the listeners, so check it before using it.
d.untrustedListener = &ucrednetListener{Listener: listener}
} else {
logger.Debugf("cannot get listener for %q: %v", d.untrustedSocketPath, err)
}
d.addRoutes()
logger.Noticef("Started daemon.")
return nil
}
// SetDegradedMode puts the daemon into an degraded mode which will the
// error given in the "err" argument for commands that are not marked
// as readonlyOK.
//
// This is useful to report errors to the client when the daemon
// cannot work because e.g. a sanity check failed or the system is out
// of diskspace.
//
// When the system is fine again calling "DegradedMode(nil)" is enough
// to put the daemon into full operation again.
func (d *Daemon) SetDegradedMode(err error) {
d.degradedErr = err
}
func (d *Daemon) addRoutes() {
d.router = mux.NewRouter()
for _, c := range api {
c.d = d
if c.PathPrefix == "" {
d.router.Handle(c.Path, c).Name(c.Path)
} else {
d.router.PathPrefix(c.PathPrefix).Handler(c).Name(c.PathPrefix)
}
}
// also maybe add a /favicon.ico handler...
d.router.NotFoundHandler = statusNotFound("invalid API endpoint requested")
}
type connTracker struct {
mu sync.Mutex
conns map[net.Conn]struct{}
}
func (ct *connTracker) CanStandby() bool {
ct.mu.Lock()
defer ct.mu.Unlock()
return len(ct.conns) == 0
}
func (ct *connTracker) trackConn(conn net.Conn, state http.ConnState) {
ct.mu.Lock()
defer ct.mu.Unlock()
// we ignore hijacked connections, if we do things with websockets
// we'll need custom shutdown handling for them
if state == http.StateNew || state == http.StateActive {
ct.conns[conn] = struct{}{}
} else {
delete(ct.conns, conn)
}
}
func (d *Daemon) CanStandby() bool {
return systemd.SocketAvailable()
}
func (d *Daemon) initStandbyHandling() {
d.standbyOpinions = standby.New(d.state)
d.standbyOpinions.AddOpinion(d)
d.standbyOpinions.AddOpinion(d.connTracker)
d.standbyOpinions.AddOpinion(d.overlord)
d.standbyOpinions.Start()
}
func (d *Daemon) Start() {
if d.rebootIsMissing {
// we need to schedule and wait for a system restart
d.tomb.Kill(nil)
// avoid systemd killing us again while we wait
systemdSdNotify("READY=1")
return
}
if d.overlord == nil {
panic("internal error: no Overlord")
}
d.StartTime = time.Now()
d.connTracker = &connTracker{conns: make(map[net.Conn]struct{})}
d.serve = &http.Server{
Handler: logit(d.router),
ConnState: d.connTracker.trackConn,
}
d.initStandbyHandling()
d.overlord.Loop()
d.tomb.Go(func() error {
if d.untrustedListener != nil {
d.tomb.Go(func() error {
if err := d.serve.Serve(d.untrustedListener); err != http.ErrServerClosed && d.tomb.Err() == tomb.ErrStillAlive {
return err
}
return nil
})
}
if err := d.serve.Serve(d.generalListener); err != http.ErrServerClosed && d.tomb.Err() == tomb.ErrStillAlive {
return err
}
return nil
})
// notify systemd that we are ready
systemdSdNotify("READY=1")
}
// HandleRestart implements overlord.RestartBehavior.
func (d *Daemon) HandleRestart(t state.RestartType) {
// die when asked to restart (systemd should get us back up!) etc
switch t {
case state.RestartDaemon:
case state.RestartSystem:
// try to schedule a fallback slow reboot already here
// in case we get stuck shutting down
if err := reboot(rebootWaitTimeout); err != nil {
logger.Noticef("%s", err)
}
d.mu.Lock()
defer d.mu.Unlock()
// remember we need to restart the system
d.restartSystem = true
case state.RestartSocket:
d.mu.Lock()
defer d.mu.Unlock()
d.restartSocket = true
default:
logger.Noticef("internal error: restart handler called with unknown restart type: %v", t)
}
d.tomb.Kill(nil)
}
var (
rebootNoticeWait = 3 * time.Second
rebootWaitTimeout = 10 * time.Minute
rebootRetryWaitTimeout = 5 * time.Minute
rebootMaxTentatives = 3
)
var shutdownTimeout = 25 * time.Second
// Stop shuts down the Daemon.
func (d *Daemon) Stop(sigCh chan<- os.Signal) error {
if d.rebootIsMissing {
// we need to schedule/wait for a system restart again
return d.doReboot(sigCh, rebootRetryWaitTimeout)
}
if d.overlord == nil {
return fmt.Errorf("internal error: no Overlord")
}
d.tomb.Kill(nil)
d.mu.Lock()
restartSystem := d.restartSystem
restartSocket := d.restartSocket
d.mu.Unlock()
d.generalListener.Close()
d.standbyOpinions.Stop()
if d.untrustedListener != nil {
d.untrustedListener.Close()
}
if restartSystem {
// give time to polling clients to notice restart
time.Sleep(rebootNoticeWait)
}
// We're using the background context here because the tomb's
// context will likely already have been cancelled when we are
// called.
ctx, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
d.tomb.Kill(d.serve.Shutdown(ctx))
cancel()
if !restartSystem {
// tell systemd that we are stopping
systemdSdNotify("STOPPING=1")
}
if restartSocket {
// At this point we processed all open requests (and
// stopped accepting new requests) - before going into
// socket activated mode we need to check if any of
// those open requests resulted in something that
// prevents us from going into socket activation mode.
//
// If this is the case we do a "normal" snapd restart
// to process the new changes.
if !d.standbyOpinions.CanStandby() {
d.restartSocket = false
}
}
d.overlord.Stop()
err := d.tomb.Wait()
if err != nil {
// do not stop the shutdown even if the tomb errors
// because we already scheduled a slow shutdown and
// exiting here will just restart snapd (via systemd)
// which will lead to confusing results.
if restartSystem {
logger.Noticef("WARNING: cannot stop daemon: %v", err)
} else {
return err
}
}
if restartSystem {
return d.doReboot(sigCh, rebootWaitTimeout)
}
if d.restartSocket {
return ErrRestartSocket
}
return nil
}
func (d *Daemon) rebootDelay() (time.Duration, error) {
d.state.Lock()
defer d.state.Unlock()
now := time.Now()
// see whether a reboot had already been scheduled
var rebootAt time.Time
err := d.state.Get("daemon-system-restart-at", &rebootAt)
if err != nil && err != state.ErrNoState {
return 0, err
}
rebootDelay := 1 * time.Minute
if err == nil {
rebootDelay = rebootAt.Sub(now)
} else {
ovr := os.Getenv("SNAPD_REBOOT_DELAY") // for tests
if ovr != "" {
d, err := time.ParseDuration(ovr)
if err == nil {
rebootDelay = d | }
}
rebootAt = now.Add(rebootDelay)
d.state.Set("daemon-system-restart-at", rebootAt)
}
return rebootDelay, nil
}
func (d *Daemon) doReboot(sigCh chan<- os.Signal, waitTimeout time.Duration) error {
rebootDelay, err := d.rebootDelay()
if err != nil {
return err
}
// ask for shutdown and wait for it to happen.
// if we exit snapd will be restared by systemd
if err := reboot(rebootDelay); err != nil {
return err
}
// wait for reboot to happen
logger.Noticef("Waiting for system reboot")
if sigCh != nil {
signal.Stop(sigCh)
if len(sigCh) > 0 {
// a signal arrived in between
return nil
}
close(sigCh)
}
time.Sleep(waitTimeout)
return fmt.Errorf("expected reboot did not happen")
}
var shutdownMsg = "reboot scheduled to update the system"
func rebootImpl(rebootDelay time.Duration) error {
if rebootDelay < 0 {
rebootDelay = 0
}
mins := int64(rebootDelay / time.Minute)
cmd := exec.Command("shutdown", "-r", fmt.Sprintf("+%d", mins), shutdownMsg)
if out, err := cmd.CombinedOutput(); err != nil {
return osutil.OutputErr(out, err)
}
return nil
}
var reboot = rebootImpl
func (d *Daemon) Dying() <-chan struct{} {
return d.tomb.Dying()
}
func clearReboot(st *state.State) {
// FIXME See notes in the state package. This logic should be
// centralized in the overlord which is the orchestrator. Right
// now we have the daemon, the overlord, and even the state
// itself all knowing about such details.
st.Set("daemon-system-restart-at", nil)
st.Set("daemon-system-restart-tentative", nil)
}
// RebootIsFine implements part of overlord.RestartBehavior.
func (d *Daemon) RebootIsFine(st *state.State) error {
clearReboot(st)
return nil
}
// RebootDidNotHappen implements part of overlord.RestartBehavior.
func (d *Daemon) RebootIsMissing(st *state.State) error {
var nTentative int
err := st.Get("daemon-system-restart-tentative", &nTentative)
if err != nil && err != state.ErrNoState {
return err
}
nTentative++
if nTentative > rebootMaxTentatives {
// giving up, proceed normally, some in-progress refresh
// might get rolled back!!
st.ClearReboot()
clearReboot(st)
logger.Noticef("snapd was restarted while a system restart was expected, snapd retried to schedule and waited again for a system restart %d times and is giving up", rebootMaxTentatives)
return nil
}
st.Set("daemon-system-restart-tentative", nTentative)
d.state = st
logger.Noticef("snapd was restarted while a system restart was expected, snapd will try to schedule and wait for a system restart again (tenative %d/%d)", nTentative, rebootMaxTentatives)
return state.ErrExpectedReboot
}
func New(opts *Options) (*Daemon, error) {
d := &Daemon{
pebbleDir: opts.Dir,
normalSocketPath: opts.SocketPath,
untrustedSocketPath: opts.SocketPath + ".untrusted",
}
ovld, err := overlord.New(opts.Dir, d)
if err == state.ErrExpectedReboot {
// we proceed without overlord until we reach Stop
// where we will schedule and wait again for a system restart.
// ATM we cannot do that in New because we need to satisfy
// systemd notify mechanisms.
d.rebootIsMissing = true
return d, nil
}
if err != nil {
return nil, err
}
d.overlord = ovld
d.state = ovld.State()
return d, nil
}
// GetListener tries to get a listener for the given socket path from
// the listener map, and if it fails it tries to set it up directly.
func getListener(socketPath string, listenerMap map[string]net.Listener) (net.Listener, error) {
if listener, ok := listenerMap[socketPath]; ok {
return listener, nil
}
if c, err := net.Dial("unix", socketPath); err == nil {
c.Close()
return nil, fmt.Errorf("socket %q already in use", socketPath)
}
if err := os.Remove(socketPath); err != nil && !os.IsNotExist(err) {
return nil, err
}
address, err := net.ResolveUnixAddr("unix", socketPath)
if err != nil {
return nil, err
}
runtime.LockOSThread()
oldmask := syscall.Umask(0111)
listener, err := net.ListenUnix("unix", address)
syscall.Umask(oldmask)
runtime.UnlockOSThread()
if err != nil {
return nil, err
}
logger.Debugf("socket %q was not activated; listening", socketPath)
return listener, nil
} | random_line_split | |
daemon.go | // Copyright (c) 2014-2020 Canonical Ltd
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License version 3 as
// published by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package daemon
import (
"context"
"fmt"
"net"
"net/http"
"os"
"os/exec"
"os/signal"
"runtime"
"strings"
"sync"
"syscall"
"time"
"gopkg.in/tomb.v2"
"github.com/gorilla/mux"
"github.com/canonical/pebble/internal/logger"
"github.com/canonical/pebble/internal/osutil"
"github.com/canonical/pebble/internal/osutil/sys"
"github.com/canonical/pebble/internal/overlord"
"github.com/canonical/pebble/internal/overlord/standby"
"github.com/canonical/pebble/internal/overlord/state"
"github.com/canonical/pebble/internal/systemd"
)
var (
ErrRestartSocket = fmt.Errorf("daemon stop requested to wait for socket activation")
systemdSdNotify = systemd.SdNotify
sysGetuid = sys.Getuid
)
// Options holds the daemon setup required for the initialization of a new daemon.
type Options struct {
// Dir is the pebble directory where all setup is found. Defaults to /var/lib/pebble/default.
Dir string
// SocketPath is an optional path for the unix socket used for the client
// to communicate with the daemon. Defaults to a hidden (dotted) name inside
// the pebble directory.
SocketPath string
}
// A Daemon listens for requests and routes them to the right command
type Daemon struct {
Version string
StartTime time.Time
pebbleDir string
normalSocketPath string
untrustedSocketPath string
overlord *overlord.Overlord
state *state.State
generalListener net.Listener
untrustedListener net.Listener
connTracker *connTracker
serve *http.Server
tomb tomb.Tomb
router *mux.Router
standbyOpinions *standby.StandbyOpinions
// set to remember we need to restart the system
restartSystem bool
// set to remember that we need to exit the daemon in a way that
// prevents systemd from restarting it
restartSocket bool
// degradedErr is set when the daemon is in degraded mode
degradedErr error
rebootIsMissing bool
mu sync.Mutex
}
// XXX Placeholder for now.
type userState struct{}
// A ResponseFunc handles one of the individual verbs for a method
type ResponseFunc func(*Command, *http.Request, *userState) Response
// A Command routes a request to an individual per-verb ResponseFUnc
type Command struct {
Path string
PathPrefix string
//
GET ResponseFunc
PUT ResponseFunc
POST ResponseFunc
DELETE ResponseFunc
GuestOK bool
UserOK bool
UntrustedOK bool
AdminOnly bool
d *Daemon
}
type accessResult int
const (
accessOK accessResult = iota
accessUnauthorized
accessForbidden
)
// canAccess checks the following properties:
//
// - if the user is `root` everything is allowed
// - if a user is logged in and the command doesn't have AdminOnly, everything is allowed
// - POST/PUT/DELETE all require the admin, or just login if not AdminOnly
//
// Otherwise for GET requests the following parameters are honored:
// - GuestOK: anyone can access GET
// - UserOK: any uid on the local system can access GET
// - AdminOnly: only the administrator can access this
// - UntrustedOK: can access this via the untrusted socket
func (c *Command) canAccess(r *http.Request, user *userState) accessResult {
	// A command combining AdminOnly with any *OK relaxation is
	// self-contradictory; treat it as a programming error.
	if c.AdminOnly && (c.UserOK || c.GuestOK || c.UntrustedOK) {
		logger.Panicf("internal error: command cannot have AdminOnly together with any *OK flag")
	}
	if user != nil && !c.AdminOnly {
		// Authenticated users do anything not requiring explicit admin.
		return accessOK
	}
	// isUser means we have a UID for the request
	isUser := false
	pid, uid, socket, err := ucrednetGet(r.RemoteAddr)
	if err == nil {
		isUser = true
	} else if err != errNoID {
		logger.Noticef("unexpected error when attempting to get UID: %s", err)
		return accessForbidden
	}
	// Requests arriving over the untrusted socket may only reach
	// commands explicitly marked UntrustedOK.
	isUntrusted := (socket == c.d.untrustedSocketPath)
	_ = pid
	_ = uid
	if isUntrusted {
		if c.UntrustedOK {
			return accessOK
		}
		return accessUnauthorized
	}
	// the !AdminOnly check is redundant, but belt-and-suspenders
	if r.Method == "GET" && !c.AdminOnly {
		// Guest and user access restricted to GET requests
		if c.GuestOK {
			return accessOK
		}
		if isUser && c.UserOK {
			return accessOK
		}
	}
	// Remaining admin checks rely on identifying peer uid
	if !isUser {
		return accessUnauthorized
	}
	if uid == 0 || sys.UserID(uid) == sysGetuid() {
		// Superuser and process owner can do anything.
		return accessOK
	}
	// NOTE(review): both remaining paths return accessUnauthorized, so
	// the AdminOnly test below is effectively redundant.
	if c.AdminOnly {
		return accessUnauthorized
	}
	return accessUnauthorized
}
// userFromRequest is a stub: authentication is not implemented yet, so
// every request is treated as unauthenticated (nil user, nil error).
func userFromRequest(state interface{}, r *http.Request) (*userState, error) {
	return nil, nil
}
// ServeHTTP dispatches the request to the ResponseFunc registered for
// its HTTP method, after applying degraded-mode and access checks, and
// decorates responses with pending-restart maintenance info and warning
// counts.
func (c *Command) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	st := c.d.state
	st.Lock()
	user, err := userFromRequest(st, r)
	if err != nil {
		// Fix: release the state lock before bailing out; the original
		// returned with the lock still held, which would deadlock the
		// daemon on the next state access.
		st.Unlock()
		statusForbidden("forbidden").ServeHTTP(w, r)
		return
	}
	st.Unlock()
	// check if we are in degradedMode
	if c.d.degradedErr != nil && r.Method != "GET" {
		statusInternalError(c.d.degradedErr.Error()).ServeHTTP(w, r)
		return
	}
	switch c.canAccess(r, user) {
	case accessOK:
		// nothing
	case accessUnauthorized:
		statusUnauthorized("access denied").ServeHTTP(w, r)
		return
	case accessForbidden:
		statusForbidden("forbidden").ServeHTTP(w, r)
		return
	}
	var rspf ResponseFunc
	// Default response if no handler is registered for the method.
	var rsp = statusMethodNotAllowed("method %q not allowed", r.Method)
	switch r.Method {
	case "GET":
		rspf = c.GET
	case "PUT":
		rspf = c.PUT
	case "POST":
		rspf = c.POST
	case "DELETE":
		rspf = c.DELETE
	}
	if rspf != nil {
		rsp = rspf(c, r, user)
	}
	if rsp, ok := rsp.(*resp); ok {
		// Let clients know about a pending restart so they can plan to
		// reconnect.
		_, rst := st.Restarting()
		switch rst {
		case state.RestartSystem:
			rsp.transmitMaintenance(errorKindSystemRestart, "system is restarting")
		case state.RestartDaemon:
			rsp.transmitMaintenance(errorKindDaemonRestart, "daemon is restarting")
		case state.RestartSocket:
			rsp.transmitMaintenance(errorKindDaemonRestart, "daemon is stopping to wait for socket activation")
		}
		if rsp.Type != ResponseTypeError {
			st.Lock()
			count, stamp := st.WarningsSummary()
			st.Unlock()
			rsp.addWarningsToMeta(count, stamp)
		}
	}
	rsp.ServeHTTP(w, r)
}
// wrappedWriter wraps an http.ResponseWriter so the access log can
// record the status code that was actually written to the client.
type wrappedWriter struct {
	w http.ResponseWriter
	s int // status code recorded by WriteHeader (0 if never called)
}

func (w *wrappedWriter) Header() http.Header {
	return w.w.Header()
}

func (w *wrappedWriter) Write(bs []byte) (int, error) {
	return w.w.Write(bs)
}

// WriteHeader records the status code before forwarding it.
func (w *wrappedWriter) WriteHeader(s int) {
	w.w.WriteHeader(s)
	w.s = s
}

// Flush forwards to the underlying writer if it supports flushing.
func (w *wrappedWriter) Flush() {
	if f, ok := w.w.(http.Flusher); ok {
		f.Flush()
	}
}
// logit wraps an HTTP handler with access logging. Requests under
// /v1/changes/ (change polling) are not logged to avoid spamming the
// log.
func logit(handler http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		ww := &wrappedWriter{w: w}
		t0 := time.Now()
		handler.ServeHTTP(ww, r)
		// Fix: use the idiomatic time.Since instead of time.Now().Sub.
		t := time.Since(t0)
		if !strings.Contains(r.URL.String(), "/v1/changes/") {
			if strings.HasSuffix(r.RemoteAddr, ";") {
				// NOTE(review): the trailing ';' case appears to be a
				// ucrednet-formatted address; it is logged at debug
				// level with the address and at notice level without
				// it — confirm this duplication is intended.
				logger.Debugf("%s %s %s %s %d", r.RemoteAddr, r.Method, r.URL, t, ww.s)
				logger.Noticef("%s %s %s %d", r.Method, r.URL, t, ww.s)
			} else {
				logger.Noticef("%s %s %s %s %d", r.RemoteAddr, r.Method, r.URL, t, ww.s)
			}
		}
	})
}
// Init sets up the Daemon's internal workings.
// Don't call more than once.
func (d *Daemon) Init() error {
	listenerMap := make(map[string]net.Listener)
	// The general (trusted) socket is mandatory; fail hard without it.
	if listener, err := getListener(d.normalSocketPath, listenerMap); err == nil {
		d.generalListener = &ucrednetListener{Listener: listener}
	} else {
		return fmt.Errorf("when trying to listen on %s: %v", d.normalSocketPath, err)
	}
	// The untrusted socket is optional; log and continue without it.
	if listener, err := getListener(d.untrustedSocketPath, listenerMap); err == nil {
		// This listener may also be nil if that socket wasn't among
		// the listeners, so check it before using it.
		d.untrustedListener = &ucrednetListener{Listener: listener}
	} else {
		logger.Debugf("cannot get listener for %q: %v", d.untrustedSocketPath, err)
	}
	d.addRoutes()
	logger.Noticef("Started daemon.")
	return nil
}
// SetDegradedMode puts the daemon into an degraded mode which will the
// error given in the "err" argument for commands that are not marked
// as readonlyOK.
//
// This is useful to report errors to the client when the daemon
// cannot work because e.g. a sanity check failed or the system is out
// of diskspace.
//
// When the system is fine again calling "DegradedMode(nil)" is enough
// to put the daemon into full operation again.
func (d *Daemon) | (err error) {
d.degradedErr = err
}
// addRoutes builds the mux router from the package-level api command
// list, wiring each Command back to this daemon.
func (d *Daemon) addRoutes() {
	d.router = mux.NewRouter()
	for _, c := range api {
		c.d = d
		if c.PathPrefix == "" {
			d.router.Handle(c.Path, c).Name(c.Path)
		} else {
			d.router.PathPrefix(c.PathPrefix).Handler(c).Name(c.PathPrefix)
		}
	}
	// also maybe add a /favicon.ico handler...
	d.router.NotFoundHandler = statusNotFound("invalid API endpoint requested")
}
// connTracker counts open HTTP connections so the standby machinery
// can tell when the daemon is idle.
type connTracker struct {
	mu    sync.Mutex
	conns map[net.Conn]struct{}
}

// CanStandby reports whether there are currently no open connections.
func (ct *connTracker) CanStandby() bool {
	ct.mu.Lock()
	defer ct.mu.Unlock()
	return len(ct.conns) == 0
}

// trackConn is installed as http.Server.ConnState; it records
// new/active connections and forgets them in any other state.
func (ct *connTracker) trackConn(conn net.Conn, state http.ConnState) {
	ct.mu.Lock()
	defer ct.mu.Unlock()
	// we ignore hijacked connections, if we do things with websockets
	// we'll need custom shutdown handling for them
	if state == http.StateNew || state == http.StateActive {
		ct.conns[conn] = struct{}{}
	} else {
		delete(ct.conns, conn)
	}
}
// CanStandby reports whether the daemon may enter socket-activation
// standby: only when systemd socket activation is available.
func (d *Daemon) CanStandby() bool {
	return systemd.SocketAvailable()
}
// initStandbyHandling starts the standby machinery, registering the
// daemon, the connection tracker and the overlord as opinions that are
// consulted before standing by.
func (d *Daemon) initStandbyHandling() {
	d.standbyOpinions = standby.New(d.state)
	d.standbyOpinions.AddOpinion(d)
	d.standbyOpinions.AddOpinion(d.connTracker)
	d.standbyOpinions.AddOpinion(d.overlord)
	d.standbyOpinions.Start()
}
// Start brings up the HTTP servers on the general (and, if present,
// untrusted) sockets, starts the overlord loop, and notifies systemd
// of readiness. If a required system restart did not happen, it only
// notifies systemd and kills the tomb so Stop can reschedule it.
func (d *Daemon) Start() {
	if d.rebootIsMissing {
		// we need to schedule and wait for a system restart
		d.tomb.Kill(nil)
		// avoid systemd killing us again while we wait
		systemdSdNotify("READY=1")
		return
	}
	if d.overlord == nil {
		panic("internal error: no Overlord")
	}
	d.StartTime = time.Now()
	d.connTracker = &connTracker{conns: make(map[net.Conn]struct{})}
	d.serve = &http.Server{
		Handler:   logit(d.router),
		ConnState: d.connTracker.trackConn,
	}
	d.initStandbyHandling()
	d.overlord.Loop()
	d.tomb.Go(func() error {
		if d.untrustedListener != nil {
			d.tomb.Go(func() error {
				// http.ErrServerClosed signals a normal shutdown; only
				// propagate errors while the tomb is still alive.
				if err := d.serve.Serve(d.untrustedListener); err != http.ErrServerClosed && d.tomb.Err() == tomb.ErrStillAlive {
					return err
				}
				return nil
			})
		}
		if err := d.serve.Serve(d.generalListener); err != http.ErrServerClosed && d.tomb.Err() == tomb.ErrStillAlive {
			return err
		}
		return nil
	})
	// notify systemd that we are ready
	systemdSdNotify("READY=1")
}
// HandleRestart implements overlord.RestartBehavior.
func (d *Daemon) HandleRestart(t state.RestartType) {
	// die when asked to restart (systemd should get us back up!) etc
	switch t {
	case state.RestartDaemon:
		// Nothing extra: killing the tomb below is enough; systemd
		// restarts the daemon.
	case state.RestartSystem:
		// try to schedule a fallback slow reboot already here
		// in case we get stuck shutting down
		if err := reboot(rebootWaitTimeout); err != nil {
			logger.Noticef("%s", err)
		}
		d.mu.Lock()
		defer d.mu.Unlock()
		// remember we need to restart the system
		d.restartSystem = true
	case state.RestartSocket:
		d.mu.Lock()
		defer d.mu.Unlock()
		d.restartSocket = true
	default:
		logger.Noticef("internal error: restart handler called with unknown restart type: %v", t)
	}
	d.tomb.Kill(nil)
}
var (
	// rebootNoticeWait gives polling clients time to observe the
	// pending-restart notice before the daemon actually goes down.
	rebootNoticeWait = 3 * time.Second
	// rebootWaitTimeout bounds how long Stop waits for a scheduled
	// system restart to actually happen.
	rebootWaitTimeout = 10 * time.Minute
	// rebootRetryWaitTimeout is used when re-scheduling a reboot that
	// did not happen the first time.
	rebootRetryWaitTimeout = 5 * time.Minute
	// rebootMaxTentatives bounds how many times a missed system
	// restart is re-attempted before giving up (see RebootIsMissing).
	rebootMaxTentatives = 3
)

// shutdownTimeout bounds the graceful HTTP server shutdown in Stop.
var shutdownTimeout = 25 * time.Second
// Stop shuts down the Daemon: it closes the listeners, drains in-flight
// requests, stops the overlord, and — depending on flags recorded by
// HandleRestart — performs a system reboot, returns ErrRestartSocket to
// request socket-activation restart, or returns normally.
func (d *Daemon) Stop(sigCh chan<- os.Signal) error {
	if d.rebootIsMissing {
		// we need to schedule/wait for a system restart again
		return d.doReboot(sigCh, rebootRetryWaitTimeout)
	}
	if d.overlord == nil {
		return fmt.Errorf("internal error: no Overlord")
	}
	d.tomb.Kill(nil)
	// Snapshot the restart flags under the mutex.
	d.mu.Lock()
	restartSystem := d.restartSystem
	restartSocket := d.restartSocket
	d.mu.Unlock()
	d.generalListener.Close()
	d.standbyOpinions.Stop()
	if d.untrustedListener != nil {
		d.untrustedListener.Close()
	}
	if restartSystem {
		// give time to polling clients to notice restart
		time.Sleep(rebootNoticeWait)
	}
	// We're using the background context here because the tomb's
	// context will likely already have been cancelled when we are
	// called.
	ctx, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
	d.tomb.Kill(d.serve.Shutdown(ctx))
	cancel()
	if !restartSystem {
		// tell systemd that we are stopping
		systemdSdNotify("STOPPING=1")
	}
	if restartSocket {
		// At this point we processed all open requests (and
		// stopped accepting new requests) - before going into
		// socket activated mode we need to check if any of
		// those open requests resulted in something that
		// prevents us from going into socket activation mode.
		//
		// If this is the case we do a "normal" snapd restart
		// to process the new changes.
		if !d.standbyOpinions.CanStandby() {
			d.restartSocket = false
		}
	}
	d.overlord.Stop()
	err := d.tomb.Wait()
	if err != nil {
		// do not stop the shutdown even if the tomb errors
		// because we already scheduled a slow shutdown and
		// exiting here will just restart snapd (via systemd)
		// which will lead to confusing results.
		if restartSystem {
			logger.Noticef("WARNING: cannot stop daemon: %v", err)
		} else {
			return err
		}
	}
	if restartSystem {
		return d.doReboot(sigCh, rebootWaitTimeout)
	}
	if d.restartSocket {
		return ErrRestartSocket
	}
	return nil
}
// rebootDelay returns how long to wait before the scheduled system
// restart. If a restart time was already recorded in state it is
// honored; otherwise a fresh one is computed (default 1 minute,
// overridable via SNAPD_REBOOT_DELAY for tests) and stored in state.
func (d *Daemon) rebootDelay() (time.Duration, error) {
	d.state.Lock()
	defer d.state.Unlock()
	now := time.Now()
	// see whether a reboot had already been scheduled
	var rebootAt time.Time
	err := d.state.Get("daemon-system-restart-at", &rebootAt)
	if err != nil && err != state.ErrNoState {
		return 0, err
	}
	rebootDelay := 1 * time.Minute
	if err == nil {
		rebootDelay = rebootAt.Sub(now)
	} else {
		ovr := os.Getenv("SNAPD_REBOOT_DELAY") // for tests
		if ovr != "" {
			// Fix: the original `d, err := time.ParseDuration(ovr)`
			// shadowed both the receiver d and the outer err; use
			// distinct, scoped names instead.
			if dur, perr := time.ParseDuration(ovr); perr == nil {
				rebootDelay = dur
			}
		}
		rebootAt = now.Add(rebootDelay)
		d.state.Set("daemon-system-restart-at", rebootAt)
	}
	return rebootDelay, nil
}
// doReboot schedules a system reboot and then blocks waiting for it to
// actually happen (up to waitTimeout), returning an error if it never
// does.
func (d *Daemon) doReboot(sigCh chan<- os.Signal, waitTimeout time.Duration) error {
	rebootDelay, err := d.rebootDelay()
	if err != nil {
		return err
	}
	// ask for shutdown and wait for it to happen.
	// if we exit snapd will be restarted by systemd
	if err := reboot(rebootDelay); err != nil {
		return err
	}
	// wait for reboot to happen
	logger.Noticef("Waiting for system reboot")
	if sigCh != nil {
		signal.Stop(sigCh)
		if len(sigCh) > 0 {
			// a signal arrived in between
			return nil
		}
		close(sigCh)
	}
	time.Sleep(waitTimeout)
	return fmt.Errorf("expected reboot did not happen")
}
var shutdownMsg = "reboot scheduled to update the system"
// rebootImpl asks the system to reboot via shutdown(8) after the given
// delay (clamped to a minimum of zero), using shutdownMsg as the wall
// message. Returns the combined command output wrapped in an error on
// failure.
func rebootImpl(rebootDelay time.Duration) error {
	delay := rebootDelay
	if delay < 0 {
		delay = 0
	}
	whenArg := fmt.Sprintf("+%d", int64(delay/time.Minute))
	out, err := exec.Command("shutdown", "-r", whenArg, shutdownMsg).CombinedOutput()
	if err != nil {
		return osutil.OutputErr(out, err)
	}
	return nil
}
var reboot = rebootImpl
// Dying returns a channel that is closed when the daemon begins
// shutting down (i.e. when its tomb is killed).
func (d *Daemon) Dying() <-chan struct{} {
	return d.tomb.Dying()
}
// clearReboot removes the pending-reboot bookkeeping keys from state.
func clearReboot(st *state.State) {
	// FIXME See notes in the state package. This logic should be
	// centralized in the overlord which is the orchestrator. Right
	// now we have the daemon, the overlord, and even the state
	// itself all knowing about such details.
	st.Set("daemon-system-restart-at", nil)
	st.Set("daemon-system-restart-tentative", nil)
}
// RebootIsFine implements part of overlord.RestartBehavior. It clears
// the pending-reboot bookkeeping after a system restart happened as
// expected.
func (d *Daemon) RebootIsFine(st *state.State) error {
	clearReboot(st)
	return nil
}
// RebootIsMissing implements part of overlord.RestartBehavior. It is
// called when the daemon restarted even though a system restart was
// expected: it counts the attempts in state and gives up after
// rebootMaxTentatives, otherwise it requests another reboot attempt by
// returning state.ErrExpectedReboot.
// (Fixes: the doc comment previously named the method
// "RebootDidNotHappen", and the notice message misspelled "tentative".)
func (d *Daemon) RebootIsMissing(st *state.State) error {
	var nTentative int
	err := st.Get("daemon-system-restart-tentative", &nTentative)
	if err != nil && err != state.ErrNoState {
		return err
	}
	nTentative++
	if nTentative > rebootMaxTentatives {
		// giving up, proceed normally, some in-progress refresh
		// might get rolled back!!
		st.ClearReboot()
		clearReboot(st)
		logger.Noticef("snapd was restarted while a system restart was expected, snapd retried to schedule and waited again for a system restart %d times and is giving up", rebootMaxTentatives)
		return nil
	}
	st.Set("daemon-system-restart-tentative", nTentative)
	d.state = st
	logger.Noticef("snapd was restarted while a system restart was expected, snapd will try to schedule and wait for a system restart again (tentative %d/%d)", nTentative, rebootMaxTentatives)
	return state.ErrExpectedReboot
}
// New creates a Daemon for the given options. If the overlord reports
// that an expected system restart did not happen, the daemon is
// returned without an overlord and flagged so that Start/Stop will
// reschedule the reboot.
func New(opts *Options) (*Daemon, error) {
	d := &Daemon{
		pebbleDir:           opts.Dir,
		normalSocketPath:    opts.SocketPath,
		untrustedSocketPath: opts.SocketPath + ".untrusted",
	}
	ovld, err := overlord.New(opts.Dir, d)
	if err == state.ErrExpectedReboot {
		// we proceed without overlord until we reach Stop
		// where we will schedule and wait again for a system restart.
		// ATM we cannot do that in New because we need to satisfy
		// systemd notify mechanisms.
		d.rebootIsMissing = true
		return d, nil
	}
	if err != nil {
		return nil, err
	}
	d.overlord = ovld
	d.state = ovld.State()
	return d, nil
}
// getListener tries to get a listener for the given socket path from
// the listener map, and if it fails it tries to set it up directly.
func getListener(socketPath string, listenerMap map[string]net.Listener) (net.Listener, error) {
	if listener, ok := listenerMap[socketPath]; ok {
		return listener, nil
	}
	// Refuse to steal a socket that another process is actively
	// serving.
	if c, err := net.Dial("unix", socketPath); err == nil {
		c.Close()
		return nil, fmt.Errorf("socket %q already in use", socketPath)
	}
	// Remove a stale socket file left behind by a previous run.
	if err := os.Remove(socketPath); err != nil && !os.IsNotExist(err) {
		return nil, err
	}
	address, err := net.ResolveUnixAddr("unix", socketPath)
	if err != nil {
		return nil, err
	}
	// Temporarily set umask 0111 around socket creation, locking the
	// OS thread so the Umask set/restore pair runs on the same thread.
	runtime.LockOSThread()
	oldmask := syscall.Umask(0111)
	listener, err := net.ListenUnix("unix", address)
	syscall.Umask(oldmask)
	runtime.UnlockOSThread()
	if err != nil {
		return nil, err
	}
	logger.Debugf("socket %q was not activated; listening", socketPath)
	return listener, nil
}
| SetDegradedMode | identifier_name |
base.py | """The base command class. All implemented commands should extend this class."""
from ..AgentPool import AgentPool
import json
import os.path
from subprocess import call
import paramiko
from paramiko import SSHClient
from paramiko.agent import AgentRequestHandler
import socket
import subprocess, os
class Base(object):
temp_filepath = os.path.expanduser("~/.acs/tmp")
def __init__(self, config, options, *args, **kwargs):
self.log = ACSLog("Base")
self.config = config
self.options = options
self.args = args
self.kwargs = kwargs
os.makedirs(self.temp_filepath, exist_ok=True)
self.login()
def login(self):
p = subprocess.Popen(["azure", "account", "show"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, errors = p.communicate()
if errors:
# Not currently logged in
p = subprocess.Popen(["azure", "login"], stderr=subprocess.PIPE)
output, errors = p.communicate()
if errors:
return "Failed to login: " + errors.decode("utf-8")
return "Logged in to Azure"
def _hostnameResolves(self, hostname):
try:
socket.gethostbyname(hostname)
return True
except socket.error:
return False
def getManagementEndpoint(self):
return self.config.get('ACS', 'dnsPrefix') + 'mgmt.' + self.config.get('Group', 'region').replace(" ", "").replace('"', '') + '.cloudapp.azure.com'
def getAgentEndpoint(self):
return self.config.get('ACS', 'dnsPrefix') + 'agents.' + self.config.get('Group', 'region').replace(" ", "").replace('"', '') + '.cloudapp.azure.com'
def createResourceGroup(self):
self.log.debug("Creating Resource Group")
command = "azure group create " + self.config.get('Group', 'name') + " " + self.config.get('Group', 'region')
os.system(command)
def run(self):
raise NotImplementedError("You must implement the run() method in your commands")
def help(self):
raise NotImplementedError("You must implement the help method. In most cases you will simply do 'print(__doc__)'")
def getAgentIPs(self):
# return a list of Agent IPs in this cluster
agentPool = AgentPool(self.config)
nics = agentPool.getNICs()
ips = []
for nic in nics:
try:
ip = nic["ipConfigurations"][0]["privateIPAddress"]
self.log.debug("IP for " + nic["name"] + " is: " + str(ip))
ips.append(ip)
except KeyError:
self.log.warning("NIC doesn't seem to have the information we need")
self.log.debug("Agent IPs: " + str(ips))
return ips
def executeOnAgent(self, cmd, ip):
"""
Execute command on an agent identified by agent_name
"""
sshadd = "ssh-add " + self.config.get("SSH", "privatekey")
self.shell_execute(sshadd)
sshAgentConnection = "ssh -o StrictHostKeyChecking=no " + self.config.get('ACS', 'username') + '@' + ip
self.log.debug("SSH Connection to agent: " + sshAgentConnection)
self.log.debug("Command to run on agent: " + cmd)
sshCmd = sshAgentConnection + ' \'' + cmd + '\''
self.shell_execute("exit")
result = self.executeOnMaster(sshCmd)
return result
def executeOnMaster(self, cmd):
"""
Execute command on the current master leader
"""
if self._hostnameResolves(self.getManagementEndpoint()):
ssh = SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
self.getManagementEndpoint(),
username = self.config.get('ACS', "username"),
port = 2200,
key_filename = os.path.expanduser(self.config.get('SSH', "privatekey")))
session = ssh.get_transport().open_session()
self.log.debug("Session opened on master.")
self.log.debug("Executing on master: " + cmd)
AgentRequestHandler(session)
stdin, stdout, stderr = ssh.exec_command(cmd)
stdin.close()
result = ""
for line in stdout.read().splitlines():
self.log.debug(line.decude("utf-8"))
result = result + line.decode("utf-8") + "\n"
for line in stderr.read().splitlines():
self.log.error(line.decode("utf-8"))
else:
self.log.error("Endpoint " + self.getManagementEndpoint() + " does not exist, cannot SSH into it.")
result = "Exception: No cluster is available at " + self.getManagementEndpoint()
ssh.close()
return result
def getClusterSetup(self):
"""
Get all the data about how this cluster is configured.
"""
data = {}
data["parameters"] = self.config.getACSParams()
fqdn = {}
fqdn["master"] = self.getManagementEndpoint()
fqdn["agent"] = self.getAgentEndpoint()
data["domains"] = fqdn
data["sshTunnel"] = "ssh -o StrictHostKeyChecking=no -L 80:localhost:80 -N " + self.config.get('ACS', 'username') + "@" + self.getManagementEndpoint() + " -p 2200"
azure = {}
azure['resourceGroup'] = self.config.get('Group', 'name')
data["azure"] = azure
return data
def | (self, cmd):
""" Execute a command on the client in a bash shell. """
self.log.debug("Executing command in shell: " + str(cmd))
dcos_config = os.path.expanduser('~/.dcos/dcos.toml')
os.environ['PATH'] = ':'.join([os.getenv('PATH'), '/src/bin'])
os.environ['DCOS_CONFIG'] = dcos_config
os.makedirs(os.path.dirname(dcos_config), exist_ok=True)
try:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
except OSError as e:
self.log.error("Error executing command " + str(cmd) + ". " + e)
raise e
return output.decode("utf-8"), errors.decode("utf-8")
"""The cofiguration for an ACS cluster to work with"""
from acs.ACSLogs import ACSLog
import configparser
import os
class Config(object):
    """Loads (or interactively creates) the cluster's INI config file."""

    def __init__(self, filename):
        # NOTE(review): ACSLog is imported above, but paramiko (used in
        # _generateSSHKey) is not among this module's visible imports —
        # confirm it is available at runtime.
        self.log = ACSLog("Config")
        if not filename:
            filename = "~/.acs/default.ini"
        self.filename = os.path.expanduser(filename)
        self.log.debug("Using config file at " + self.filename)
        if not os.path.isfile(self.filename):
            # No config yet: prompt interactively and render the
            # template with the answers.
            self.log.debug("Config file does not exist. Creating a new one.")
            dns = input("What is the DNS prefix for this cluster?\n")
            group = input("What is the name of the resource group you want to use/create?\n")
            region = input("In which region do you want to deploy the resource group (default: westus)?\n") or 'westus'
            username = input("What is your username (default: azureuser)?\n") or 'azureuser'
            orchestrator = input("Which orchestrator do you want to use (Swarm or DCOS, default: DCOS)?\n") or 'DCOS'
            masterCount = input("How many masters do you want in your cluster (1, 3 or 5, default: 3)?\n") or '3'
            agentCount = input("How many agents do you want in your cluster (default: 3)?\n") or '3'
            agentSize = input("Agent size required (default: Standard_D2_v2)?\n") or 'Standard_D2_v2'
            tmpl = open("config/cluster.ini.tmpl")
            output = open(self.filename, 'w')
            for s in tmpl:
                s = s.replace("MY-DNS-PREFIX", dns)
                s = s.replace("MY-RESOURCE-REGION", region)
                s = s.replace("MY-RESOURCE-GROUP-NAME", group)
                s = s.replace("MY-USERNAME", username)
                s = s.replace("MY-ORCHESTRATOR", orchestrator)
                s = s.replace("MY-MASTER-COUNT", masterCount)
                s = s.replace("MY-AGENT-COUNT", agentCount)
                s = s.replace("MY-AGENT-SIZE", agentSize)
                output.write(s)
                self.log.debug("Writing config line: " + s)
            tmpl.close()
            output.close()
        defaults = {"orchestratorType": "DCOS"}
        config = configparser.ConfigParser(defaults)
        config.read(self.filename)
        # NOTE(review): reads and writes back the same value — appears
        # to be a no-op; confirm before removing.
        config.set('Group', 'name', config.get('Group', 'name'))
        self.config_parser = config

    def get(self, section, name):
        """Return a config value; for SSH key names, ensure the key
        files exist (generating them if needed) and return the file
        contents instead of the configured value."""
        value = self.config_parser.get(section, name)
        if section == "SSH":
            public_filepath = os.path.expanduser(self.config_parser.get('SSH', 'publicKey'))
            private_filepath = os.path.expanduser(self.config_parser.get('SSH', 'privatekey'))
            if name == "privateKey":
                # NOTE(review): callers in this codebase pass
                # "privatekey" (lower-case k), so this branch never runs
                # for them and they receive the configured path instead
                # of the key contents — confirm whether that is intended
                # before changing the comparison.
                self.log.debug("Checking if private SSH key exists: " + private_filepath)
                if not os.path.isfile(private_filepath):
                    self.log.debug("Key does not exist")
                    self._generateSSHKey(private_filepath, public_filepath)
                with open(private_filepath, 'r') as sshfile:
                    # NOTE(review): this duplicated "Key does not exist"
                    # debug message looks like a copy/paste leftover.
                    self.log.debug("Key does not exist")
                    value = sshfile.read().replace('\n', '')
            elif name == "publickey":
                self.log.debug("Checking if public SSH key exists: " + public_filepath)
                if not os.path.isfile(public_filepath):
                    self._generateSSHKey(private_filepath, public_filepath)
                with open(public_filepath, 'r') as sshfile:
                    value = sshfile.read().replace('\n', '')
        return value

    def getint(self, section, name):
        # Thin wrapper over configparser's integer accessor.
        return self.config_parser.getint(section, name)

    def value(self, set_to):
        # Wrap a raw value in the {"value": ...} shape expected by the
        # ACS Resource Provider parameters payload.
        value = {}
        value["value"] = set_to
        return value

    def getACSParams(self):
        """
        Get a dictionary of all ACS parameters. Note that
        this is not all the parameters provided in the config
        file, only the ones needed by the ACS Resource Provider'
        """
        params = {}
        params["dnsNamePrefix"] = self.value(self.get('ACS', 'dnsPrefix'))
        params["orchestratorType"] = self.value(self.get('ACS', 'orchestratorType'))
        params["agentCount"] = self.value(self.getint('ACS', 'agentCount'))
        params["agentVMSize"] = self.value(self.get('ACS', 'agentVMSize'))
        params["masterCount"] = self.value(self.getint('ACS', 'masterCount'))
        params["linuxAdminUsername"] = self.value(self.get('ACS', 'username'))
        params["sshRSAPublicKey"] = self.value(self.get('SSH', 'publickey'))
        return params

    def _generateSSHKey(self, private_filepath, public_filepath):
        """
        Generate public and private keys. The filepath parameters
        are the paths top the respective publoic and private key files.
        """
        self.log.debug("Writing SSH keys to: " + private_filepath + " and " + public_filepath)
        (ssh_dir, filename) = os.path.split(os.path.expanduser(private_filepath))
        if not os.path.exists(ssh_dir):
            self.log.debug("SSH Directory doesn't exist, creating " + ssh_dir)
            os.makedirs(ssh_dir)
        # NOTE(review): paramiko is referenced here but not imported in
        # this module's visible import block — confirm.
        key = paramiko.RSAKey.generate(1024)
        key.write_private_key_file(os.path.expanduser(private_filepath))
        with open(os.path.expanduser(public_filepath),"w") as public:
            public.write("%s %s" % (key.get_name(), key.get_base64()))
            public.close()
| shell_execute | identifier_name |
base.py | """The base command class. All implemented commands should extend this class."""
from ..AgentPool import AgentPool
import json
import os.path
from subprocess import call
import paramiko
from paramiko import SSHClient
from paramiko.agent import AgentRequestHandler
import socket
import subprocess, os
class Base(object):
    """Base class for ACS commands.

    Provides Azure CLI login, SSH execution helpers for the cluster
    master and agents, and convenience accessors for cluster endpoints
    and configuration.
    """

    # Scratch directory for temporary files used by commands.
    temp_filepath = os.path.expanduser("~/.acs/tmp")

    def __init__(self, config, options, *args, **kwargs):
        # NOTE(review): ACSLog is not among this module's visible
        # imports — confirm it is provided elsewhere.
        self.log = ACSLog("Base")
        self.config = config
        self.options = options
        self.args = args
        self.kwargs = kwargs
        os.makedirs(self.temp_filepath, exist_ok=True)
        self.login()

    def login(self):
        """Ensure the Azure CLI session is logged in; return a status
        message string."""
        p = subprocess.Popen(["azure", "account", "show"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output, errors = p.communicate()
        if errors:
            # Not currently logged in
            p = subprocess.Popen(["azure", "login"], stderr=subprocess.PIPE)
            output, errors = p.communicate()
            if errors:
                return "Failed to login: " + errors.decode("utf-8")
        return "Logged in to Azure"

    def _hostnameResolves(self, hostname):
        """Return True if hostname resolves via DNS, False otherwise."""
        try:
            socket.gethostbyname(hostname)
            return True
        except socket.error:
            return False

    def getManagementEndpoint(self):
        """FQDN of the cluster's management (master) endpoint."""
        return self.config.get('ACS', 'dnsPrefix') + 'mgmt.' + self.config.get('Group', 'region').replace(" ", "").replace('"', '') + '.cloudapp.azure.com'

    def getAgentEndpoint(self):
        """FQDN of the cluster's agent endpoint."""
        return self.config.get('ACS', 'dnsPrefix') + 'agents.' + self.config.get('Group', 'region').replace(" ", "").replace('"', '') + '.cloudapp.azure.com'

    def createResourceGroup(self):
        """Create the configured Azure resource group via the CLI."""
        self.log.debug("Creating Resource Group")
        command = "azure group create " + self.config.get('Group', 'name') + " " + self.config.get('Group', 'region')
        os.system(command)

    def run(self):
        raise NotImplementedError("You must implement the run() method in your commands")

    def help(self):
        raise NotImplementedError("You must implement the help method. In most cases you will simply do 'print(__doc__)'")

    def getAgentIPs(self):
        # return a list of Agent IPs in this cluster
        agentPool = AgentPool(self.config)
        nics = agentPool.getNICs()
        ips = []
        for nic in nics:
            try:
                ip = nic["ipConfigurations"][0]["privateIPAddress"]
                self.log.debug("IP for " + nic["name"] + " is: " + str(ip))
                ips.append(ip)
            except KeyError:
                self.log.warning("NIC doesn't seem to have the information we need")
        self.log.debug("Agent IPs: " + str(ips))
        return ips

    def executeOnAgent(self, cmd, ip):
        """
        Execute command on an agent identified by agent_name
        """
        sshadd = "ssh-add " + self.config.get("SSH", "privatekey")
        self.shell_execute(sshadd)
        sshAgentConnection = "ssh -o StrictHostKeyChecking=no " + self.config.get('ACS', 'username') + '@' + ip
        self.log.debug("SSH Connection to agent: " + sshAgentConnection)
        self.log.debug("Command to run on agent: " + cmd)
        sshCmd = sshAgentConnection + ' \'' + cmd + '\''
        self.shell_execute("exit")
        result = self.executeOnMaster(sshCmd)
        return result

    def executeOnMaster(self, cmd):
        """
        Execute command on the current master leader
        """
        if not self._hostnameResolves(self.getManagementEndpoint()):
            # Fix: the original fell through to ssh.close() with `ssh`
            # unbound (NameError) when the endpoint did not resolve;
            # return the error message immediately instead.
            self.log.error("Endpoint " + self.getManagementEndpoint() + " does not exist, cannot SSH into it.")
            return "Exception: No cluster is available at " + self.getManagementEndpoint()
        ssh = SSHClient()
        ssh.load_system_host_keys()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(
            self.getManagementEndpoint(),
            username = self.config.get('ACS', "username"),
            port = 2200,
            key_filename = os.path.expanduser(self.config.get('SSH', "privatekey")))
        session = ssh.get_transport().open_session()
        self.log.debug("Session opened on master.")
        self.log.debug("Executing on master: " + cmd)
        # Enable SSH agent forwarding on the session so the command can
        # hop to agents using the local keys.
        AgentRequestHandler(session)
        stdin, stdout, stderr = ssh.exec_command(cmd)
        stdin.close()
        result = ""
        for line in stdout.read().splitlines():
            # Fix: the original called line.decude("utf-8") (typo),
            # raising AttributeError on the first line of output.
            self.log.debug(line.decode("utf-8"))
            result = result + line.decode("utf-8") + "\n"
        for line in stderr.read().splitlines():
            self.log.error(line.decode("utf-8"))
        ssh.close()
        return result

    def getClusterSetup(self):
        """
        Get all the data about how this cluster is configured.
        """
        data = {}
        data["parameters"] = self.config.getACSParams()
        fqdn = {}
        fqdn["master"] = self.getManagementEndpoint()
        fqdn["agent"] = self.getAgentEndpoint()
        data["domains"] = fqdn
        data["sshTunnel"] = "ssh -o StrictHostKeyChecking=no -L 80:localhost:80 -N " + self.config.get('ACS', 'username') + "@" + self.getManagementEndpoint() + " -p 2200"
        azure = {}
        azure['resourceGroup'] = self.config.get('Group', 'name')
        data["azure"] = azure
        return data

    def shell_execute(self, cmd):
        """ Execute a command on the client in a bash shell. """
        self.log.debug("Executing command in shell: " + str(cmd))
        dcos_config = os.path.expanduser('~/.dcos/dcos.toml')
        os.environ['PATH'] = ':'.join([os.getenv('PATH'), '/src/bin'])
        os.environ['DCOS_CONFIG'] = dcos_config
        os.makedirs(os.path.dirname(dcos_config), exist_ok=True)
        try:
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            output, errors = p.communicate()
        except OSError as e:
            # Fix: the original concatenated str + exception object
            # (TypeError); convert the exception to str first.
            self.log.error("Error executing command " + str(cmd) + ". " + str(e))
            raise
        return output.decode("utf-8"), errors.decode("utf-8")
"""The cofiguration for an ACS cluster to work with"""
from acs.ACSLogs import ACSLog
import configparser
import os
class Config(object):
def __init__(self, filename):
self.log = ACSLog("Config")
if not filename:
filename = "~/.acs/default.ini"
self.filename = os.path.expanduser(filename)
self.log.debug("Using config file at " + self.filename)
if not os.path.isfile(self.filename):
self.log.debug("Config file does not exist. Creating a new one.")
dns = input("What is the DNS prefix for this cluster?\n")
group = input("What is the name of the resource group you want to use/create?\n")
region = input("In which region do you want to deploy the resource group (default: westus)?\n") or 'westus'
username = input("What is your username (default: azureuser)?\n") or 'azureuser'
orchestrator = input("Which orchestrator do you want to use (Swarm or DCOS, default: DCOS)?\n") or 'DCOS'
masterCount = input("How many masters do you want in your cluster (1, 3 or 5, default: 3)?\n") or '3'
agentCount = input("How many agents do you want in your cluster (default: 3)?\n") or '3'
agentSize = input("Agent size required (default: Standard_D2_v2)?\n") or 'Standard_D2_v2'
tmpl = open("config/cluster.ini.tmpl")
output = open(self.filename, 'w')
for s in tmpl:
s = s.replace("MY-DNS-PREFIX", dns)
s = s.replace("MY-RESOURCE-REGION", region)
s = s.replace("MY-RESOURCE-GROUP-NAME", group)
s = s.replace("MY-USERNAME", username)
s = s.replace("MY-ORCHESTRATOR", orchestrator)
s = s.replace("MY-MASTER-COUNT", masterCount)
s = s.replace("MY-AGENT-COUNT", agentCount)
s = s.replace("MY-AGENT-SIZE", agentSize)
output.write(s)
self.log.debug("Writing config line: " + s)
tmpl.close()
output.close()
defaults = {"orchestratorType": "DCOS"}
config = configparser.ConfigParser(defaults)
config.read(self.filename)
config.set('Group', 'name', config.get('Group', 'name'))
self.config_parser = config
def get(self, section, name):
value = self.config_parser.get(section, name)
if section == "SSH":
public_filepath = os.path.expanduser(self.config_parser.get('SSH', 'publicKey'))
private_filepath = os.path.expanduser(self.config_parser.get('SSH', 'privatekey'))
if name == "privateKey":
|
elif name == "publickey":
self.log.debug("Checking if public SSH key exists: " + public_filepath)
if not os.path.isfile(public_filepath):
self._generateSSHKey(private_filepath, public_filepath)
with open(public_filepath, 'r') as sshfile:
value = sshfile.read().replace('\n', '')
return value
def getint(self, section, name):
return self.config_parser.getint(section, name)
def value(self, set_to):
value = {}
value["value"] = set_to
return value
def getACSParams(self):
"""
Get a dictionary of all ACS parameters. Note that
this is not all the parameters provided in the config
file, only the ones needed by the ACS Resource Provider'
"""
params = {}
params["dnsNamePrefix"] = self.value(self.get('ACS', 'dnsPrefix'))
params["orchestratorType"] = self.value(self.get('ACS', 'orchestratorType'))
params["agentCount"] = self.value(self.getint('ACS', 'agentCount'))
params["agentVMSize"] = self.value(self.get('ACS', 'agentVMSize'))
params["masterCount"] = self.value(self.getint('ACS', 'masterCount'))
params["linuxAdminUsername"] = self.value(self.get('ACS', 'username'))
params["sshRSAPublicKey"] = self.value(self.get('SSH', 'publickey'))
return params
def _generateSSHKey(self, private_filepath, public_filepath):
"""
Generate public and private keys. The filepath parameters
are the paths top the respective publoic and private key files.
"""
self.log.debug("Writing SSH keys to: " + private_filepath + " and " + public_filepath)
(ssh_dir, filename) = os.path.split(os.path.expanduser(private_filepath))
if not os.path.exists(ssh_dir):
self.log.debug("SSH Directory doesn't exist, creating " + ssh_dir)
os.makedirs(ssh_dir)
key = paramiko.RSAKey.generate(1024)
key.write_private_key_file(os.path.expanduser(private_filepath))
with open(os.path.expanduser(public_filepath),"w") as public:
public.write("%s %s" % (key.get_name(), key.get_base64()))
public.close()
| self.log.debug("Checking if private SSH key exists: " + private_filepath)
if not os.path.isfile(private_filepath):
self.log.debug("Key does not exist")
self._generateSSHKey(private_filepath, public_filepath)
with open(private_filepath, 'r') as sshfile:
self.log.debug("Key does not exist")
value = sshfile.read().replace('\n', '') | conditional_block |
base.py | """The base command class. All implemented commands should extend this class."""
from ..AgentPool import AgentPool
import json
import os.path
from subprocess import call
import paramiko
from paramiko import SSHClient
from paramiko.agent import AgentRequestHandler
import socket
import subprocess, os
class Base(object):
temp_filepath = os.path.expanduser("~/.acs/tmp")
def __init__(self, config, options, *args, **kwargs):
self.log = ACSLog("Base")
self.config = config
self.options = options
self.args = args
self.kwargs = kwargs
os.makedirs(self.temp_filepath, exist_ok=True)
self.login()
def login(self):
p = subprocess.Popen(["azure", "account", "show"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, errors = p.communicate()
if errors:
# Not currently logged in
p = subprocess.Popen(["azure", "login"], stderr=subprocess.PIPE)
output, errors = p.communicate()
if errors:
return "Failed to login: " + errors.decode("utf-8")
return "Logged in to Azure"
def _hostnameResolves(self, hostname):
try:
socket.gethostbyname(hostname)
return True
except socket.error:
return False
def getManagementEndpoint(self):
return self.config.get('ACS', 'dnsPrefix') + 'mgmt.' + self.config.get('Group', 'region').replace(" ", "").replace('"', '') + '.cloudapp.azure.com'
def getAgentEndpoint(self):
return self.config.get('ACS', 'dnsPrefix') + 'agents.' + self.config.get('Group', 'region').replace(" ", "").replace('"', '') + '.cloudapp.azure.com'
def createResourceGroup(self):
self.log.debug("Creating Resource Group")
command = "azure group create " + self.config.get('Group', 'name') + " " + self.config.get('Group', 'region')
os.system(command)
def run(self):
raise NotImplementedError("You must implement the run() method in your commands")
def help(self):
raise NotImplementedError("You must implement the help method. In most cases you will simply do 'print(__doc__)'")
def getAgentIPs(self):
# return a list of Agent IPs in this cluster
agentPool = AgentPool(self.config)
nics = agentPool.getNICs()
ips = []
for nic in nics:
try:
ip = nic["ipConfigurations"][0]["privateIPAddress"]
self.log.debug("IP for " + nic["name"] + " is: " + str(ip))
ips.append(ip)
except KeyError:
self.log.warning("NIC doesn't seem to have the information we need")
self.log.debug("Agent IPs: " + str(ips))
return ips
def executeOnAgent(self, cmd, ip):
"""
Execute command on an agent identified by agent_name
"""
sshadd = "ssh-add " + self.config.get("SSH", "privatekey")
self.shell_execute(sshadd)
sshAgentConnection = "ssh -o StrictHostKeyChecking=no " + self.config.get('ACS', 'username') + '@' + ip
self.log.debug("SSH Connection to agent: " + sshAgentConnection)
self.log.debug("Command to run on agent: " + cmd)
sshCmd = sshAgentConnection + ' \'' + cmd + '\''
self.shell_execute("exit")
result = self.executeOnMaster(sshCmd)
return result
def executeOnMaster(self, cmd):
"""
Execute command on the current master leader
"""
if self._hostnameResolves(self.getManagementEndpoint()):
ssh = SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
self.getManagementEndpoint(),
username = self.config.get('ACS', "username"),
port = 2200,
key_filename = os.path.expanduser(self.config.get('SSH', "privatekey")))
session = ssh.get_transport().open_session()
self.log.debug("Session opened on master.")
self.log.debug("Executing on master: " + cmd)
AgentRequestHandler(session)
stdin, stdout, stderr = ssh.exec_command(cmd)
stdin.close()
result = ""
for line in stdout.read().splitlines():
self.log.debug(line.decude("utf-8"))
result = result + line.decode("utf-8") + "\n"
for line in stderr.read().splitlines():
self.log.error(line.decode("utf-8"))
else:
self.log.error("Endpoint " + self.getManagementEndpoint() + " does not exist, cannot SSH into it.")
result = "Exception: No cluster is available at " + self.getManagementEndpoint()
ssh.close()
return result
def getClusterSetup(self):
"""
Get all the data about how this cluster is configured.
"""
data = {}
data["parameters"] = self.config.getACSParams()
fqdn = {}
fqdn["master"] = self.getManagementEndpoint()
fqdn["agent"] = self.getAgentEndpoint()
data["domains"] = fqdn
data["sshTunnel"] = "ssh -o StrictHostKeyChecking=no -L 80:localhost:80 -N " + self.config.get('ACS', 'username') + "@" + self.getManagementEndpoint() + " -p 2200"
azure = {}
azure['resourceGroup'] = self.config.get('Group', 'name')
data["azure"] = azure
return data
| def shell_execute(self, cmd):
""" Execute a command on the client in a bash shell. """
self.log.debug("Executing command in shell: " + str(cmd))
dcos_config = os.path.expanduser('~/.dcos/dcos.toml')
os.environ['PATH'] = ':'.join([os.getenv('PATH'), '/src/bin'])
os.environ['DCOS_CONFIG'] = dcos_config
os.makedirs(os.path.dirname(dcos_config), exist_ok=True)
try:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
except OSError as e:
self.log.error("Error executing command " + str(cmd) + ". " + e)
raise e
return output.decode("utf-8"), errors.decode("utf-8")
"""The cofiguration for an ACS cluster to work with"""
from acs.ACSLogs import ACSLog
import configparser
import os
class Config(object):
def __init__(self, filename):
self.log = ACSLog("Config")
if not filename:
filename = "~/.acs/default.ini"
self.filename = os.path.expanduser(filename)
self.log.debug("Using config file at " + self.filename)
if not os.path.isfile(self.filename):
self.log.debug("Config file does not exist. Creating a new one.")
dns = input("What is the DNS prefix for this cluster?\n")
group = input("What is the name of the resource group you want to use/create?\n")
region = input("In which region do you want to deploy the resource group (default: westus)?\n") or 'westus'
username = input("What is your username (default: azureuser)?\n") or 'azureuser'
orchestrator = input("Which orchestrator do you want to use (Swarm or DCOS, default: DCOS)?\n") or 'DCOS'
masterCount = input("How many masters do you want in your cluster (1, 3 or 5, default: 3)?\n") or '3'
agentCount = input("How many agents do you want in your cluster (default: 3)?\n") or '3'
agentSize = input("Agent size required (default: Standard_D2_v2)?\n") or 'Standard_D2_v2'
tmpl = open("config/cluster.ini.tmpl")
output = open(self.filename, 'w')
for s in tmpl:
s = s.replace("MY-DNS-PREFIX", dns)
s = s.replace("MY-RESOURCE-REGION", region)
s = s.replace("MY-RESOURCE-GROUP-NAME", group)
s = s.replace("MY-USERNAME", username)
s = s.replace("MY-ORCHESTRATOR", orchestrator)
s = s.replace("MY-MASTER-COUNT", masterCount)
s = s.replace("MY-AGENT-COUNT", agentCount)
s = s.replace("MY-AGENT-SIZE", agentSize)
output.write(s)
self.log.debug("Writing config line: " + s)
tmpl.close()
output.close()
defaults = {"orchestratorType": "DCOS"}
config = configparser.ConfigParser(defaults)
config.read(self.filename)
config.set('Group', 'name', config.get('Group', 'name'))
self.config_parser = config
def get(self, section, name):
value = self.config_parser.get(section, name)
if section == "SSH":
public_filepath = os.path.expanduser(self.config_parser.get('SSH', 'publicKey'))
private_filepath = os.path.expanduser(self.config_parser.get('SSH', 'privatekey'))
if name == "privateKey":
self.log.debug("Checking if private SSH key exists: " + private_filepath)
if not os.path.isfile(private_filepath):
self.log.debug("Key does not exist")
self._generateSSHKey(private_filepath, public_filepath)
with open(private_filepath, 'r') as sshfile:
self.log.debug("Key does not exist")
value = sshfile.read().replace('\n', '')
elif name == "publickey":
self.log.debug("Checking if public SSH key exists: " + public_filepath)
if not os.path.isfile(public_filepath):
self._generateSSHKey(private_filepath, public_filepath)
with open(public_filepath, 'r') as sshfile:
value = sshfile.read().replace('\n', '')
return value
def getint(self, section, name):
return self.config_parser.getint(section, name)
def value(self, set_to):
value = {}
value["value"] = set_to
return value
def getACSParams(self):
"""
Get a dictionary of all ACS parameters. Note that
this is not all the parameters provided in the config
file, only the ones needed by the ACS Resource Provider'
"""
params = {}
params["dnsNamePrefix"] = self.value(self.get('ACS', 'dnsPrefix'))
params["orchestratorType"] = self.value(self.get('ACS', 'orchestratorType'))
params["agentCount"] = self.value(self.getint('ACS', 'agentCount'))
params["agentVMSize"] = self.value(self.get('ACS', 'agentVMSize'))
params["masterCount"] = self.value(self.getint('ACS', 'masterCount'))
params["linuxAdminUsername"] = self.value(self.get('ACS', 'username'))
params["sshRSAPublicKey"] = self.value(self.get('SSH', 'publickey'))
return params
def _generateSSHKey(self, private_filepath, public_filepath):
"""
Generate public and private keys. The filepath parameters
are the paths top the respective publoic and private key files.
"""
self.log.debug("Writing SSH keys to: " + private_filepath + " and " + public_filepath)
(ssh_dir, filename) = os.path.split(os.path.expanduser(private_filepath))
if not os.path.exists(ssh_dir):
self.log.debug("SSH Directory doesn't exist, creating " + ssh_dir)
os.makedirs(ssh_dir)
key = paramiko.RSAKey.generate(1024)
key.write_private_key_file(os.path.expanduser(private_filepath))
with open(os.path.expanduser(public_filepath),"w") as public:
public.write("%s %s" % (key.get_name(), key.get_base64()))
public.close() | random_line_split | |
base.py | """The base command class. All implemented commands should extend this class."""
from ..AgentPool import AgentPool
import json
import os.path
from subprocess import call
import paramiko
from paramiko import SSHClient
from paramiko.agent import AgentRequestHandler
import socket
import subprocess, os
class Base(object):
temp_filepath = os.path.expanduser("~/.acs/tmp")
def __init__(self, config, options, *args, **kwargs):
self.log = ACSLog("Base")
self.config = config
self.options = options
self.args = args
self.kwargs = kwargs
os.makedirs(self.temp_filepath, exist_ok=True)
self.login()
def login(self):
p = subprocess.Popen(["azure", "account", "show"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, errors = p.communicate()
if errors:
# Not currently logged in
p = subprocess.Popen(["azure", "login"], stderr=subprocess.PIPE)
output, errors = p.communicate()
if errors:
return "Failed to login: " + errors.decode("utf-8")
return "Logged in to Azure"
def _hostnameResolves(self, hostname):
try:
socket.gethostbyname(hostname)
return True
except socket.error:
return False
def getManagementEndpoint(self):
return self.config.get('ACS', 'dnsPrefix') + 'mgmt.' + self.config.get('Group', 'region').replace(" ", "").replace('"', '') + '.cloudapp.azure.com'
def getAgentEndpoint(self):
return self.config.get('ACS', 'dnsPrefix') + 'agents.' + self.config.get('Group', 'region').replace(" ", "").replace('"', '') + '.cloudapp.azure.com'
def createResourceGroup(self):
|
def run(self):
raise NotImplementedError("You must implement the run() method in your commands")
def help(self):
raise NotImplementedError("You must implement the help method. In most cases you will simply do 'print(__doc__)'")
def getAgentIPs(self):
# return a list of Agent IPs in this cluster
agentPool = AgentPool(self.config)
nics = agentPool.getNICs()
ips = []
for nic in nics:
try:
ip = nic["ipConfigurations"][0]["privateIPAddress"]
self.log.debug("IP for " + nic["name"] + " is: " + str(ip))
ips.append(ip)
except KeyError:
self.log.warning("NIC doesn't seem to have the information we need")
self.log.debug("Agent IPs: " + str(ips))
return ips
def executeOnAgent(self, cmd, ip):
"""
Execute command on an agent identified by agent_name
"""
sshadd = "ssh-add " + self.config.get("SSH", "privatekey")
self.shell_execute(sshadd)
sshAgentConnection = "ssh -o StrictHostKeyChecking=no " + self.config.get('ACS', 'username') + '@' + ip
self.log.debug("SSH Connection to agent: " + sshAgentConnection)
self.log.debug("Command to run on agent: " + cmd)
sshCmd = sshAgentConnection + ' \'' + cmd + '\''
self.shell_execute("exit")
result = self.executeOnMaster(sshCmd)
return result
def executeOnMaster(self, cmd):
"""
Execute command on the current master leader
"""
if self._hostnameResolves(self.getManagementEndpoint()):
ssh = SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(
self.getManagementEndpoint(),
username = self.config.get('ACS', "username"),
port = 2200,
key_filename = os.path.expanduser(self.config.get('SSH', "privatekey")))
session = ssh.get_transport().open_session()
self.log.debug("Session opened on master.")
self.log.debug("Executing on master: " + cmd)
AgentRequestHandler(session)
stdin, stdout, stderr = ssh.exec_command(cmd)
stdin.close()
result = ""
for line in stdout.read().splitlines():
self.log.debug(line.decude("utf-8"))
result = result + line.decode("utf-8") + "\n"
for line in stderr.read().splitlines():
self.log.error(line.decode("utf-8"))
else:
self.log.error("Endpoint " + self.getManagementEndpoint() + " does not exist, cannot SSH into it.")
result = "Exception: No cluster is available at " + self.getManagementEndpoint()
ssh.close()
return result
def getClusterSetup(self):
"""
Get all the data about how this cluster is configured.
"""
data = {}
data["parameters"] = self.config.getACSParams()
fqdn = {}
fqdn["master"] = self.getManagementEndpoint()
fqdn["agent"] = self.getAgentEndpoint()
data["domains"] = fqdn
data["sshTunnel"] = "ssh -o StrictHostKeyChecking=no -L 80:localhost:80 -N " + self.config.get('ACS', 'username') + "@" + self.getManagementEndpoint() + " -p 2200"
azure = {}
azure['resourceGroup'] = self.config.get('Group', 'name')
data["azure"] = azure
return data
def shell_execute(self, cmd):
""" Execute a command on the client in a bash shell. """
self.log.debug("Executing command in shell: " + str(cmd))
dcos_config = os.path.expanduser('~/.dcos/dcos.toml')
os.environ['PATH'] = ':'.join([os.getenv('PATH'), '/src/bin'])
os.environ['DCOS_CONFIG'] = dcos_config
os.makedirs(os.path.dirname(dcos_config), exist_ok=True)
try:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, errors = p.communicate()
except OSError as e:
self.log.error("Error executing command " + str(cmd) + ". " + e)
raise e
return output.decode("utf-8"), errors.decode("utf-8")
"""The cofiguration for an ACS cluster to work with"""
from acs.ACSLogs import ACSLog
import configparser
import os
class Config(object):
def __init__(self, filename):
self.log = ACSLog("Config")
if not filename:
filename = "~/.acs/default.ini"
self.filename = os.path.expanduser(filename)
self.log.debug("Using config file at " + self.filename)
if not os.path.isfile(self.filename):
self.log.debug("Config file does not exist. Creating a new one.")
dns = input("What is the DNS prefix for this cluster?\n")
group = input("What is the name of the resource group you want to use/create?\n")
region = input("In which region do you want to deploy the resource group (default: westus)?\n") or 'westus'
username = input("What is your username (default: azureuser)?\n") or 'azureuser'
orchestrator = input("Which orchestrator do you want to use (Swarm or DCOS, default: DCOS)?\n") or 'DCOS'
masterCount = input("How many masters do you want in your cluster (1, 3 or 5, default: 3)?\n") or '3'
agentCount = input("How many agents do you want in your cluster (default: 3)?\n") or '3'
agentSize = input("Agent size required (default: Standard_D2_v2)?\n") or 'Standard_D2_v2'
tmpl = open("config/cluster.ini.tmpl")
output = open(self.filename, 'w')
for s in tmpl:
s = s.replace("MY-DNS-PREFIX", dns)
s = s.replace("MY-RESOURCE-REGION", region)
s = s.replace("MY-RESOURCE-GROUP-NAME", group)
s = s.replace("MY-USERNAME", username)
s = s.replace("MY-ORCHESTRATOR", orchestrator)
s = s.replace("MY-MASTER-COUNT", masterCount)
s = s.replace("MY-AGENT-COUNT", agentCount)
s = s.replace("MY-AGENT-SIZE", agentSize)
output.write(s)
self.log.debug("Writing config line: " + s)
tmpl.close()
output.close()
defaults = {"orchestratorType": "DCOS"}
config = configparser.ConfigParser(defaults)
config.read(self.filename)
config.set('Group', 'name', config.get('Group', 'name'))
self.config_parser = config
def get(self, section, name):
value = self.config_parser.get(section, name)
if section == "SSH":
public_filepath = os.path.expanduser(self.config_parser.get('SSH', 'publicKey'))
private_filepath = os.path.expanduser(self.config_parser.get('SSH', 'privatekey'))
if name == "privateKey":
self.log.debug("Checking if private SSH key exists: " + private_filepath)
if not os.path.isfile(private_filepath):
self.log.debug("Key does not exist")
self._generateSSHKey(private_filepath, public_filepath)
with open(private_filepath, 'r') as sshfile:
self.log.debug("Key does not exist")
value = sshfile.read().replace('\n', '')
elif name == "publickey":
self.log.debug("Checking if public SSH key exists: " + public_filepath)
if not os.path.isfile(public_filepath):
self._generateSSHKey(private_filepath, public_filepath)
with open(public_filepath, 'r') as sshfile:
value = sshfile.read().replace('\n', '')
return value
def getint(self, section, name):
return self.config_parser.getint(section, name)
def value(self, set_to):
value = {}
value["value"] = set_to
return value
def getACSParams(self):
"""
Get a dictionary of all ACS parameters. Note that
this is not all the parameters provided in the config
file, only the ones needed by the ACS Resource Provider'
"""
params = {}
params["dnsNamePrefix"] = self.value(self.get('ACS', 'dnsPrefix'))
params["orchestratorType"] = self.value(self.get('ACS', 'orchestratorType'))
params["agentCount"] = self.value(self.getint('ACS', 'agentCount'))
params["agentVMSize"] = self.value(self.get('ACS', 'agentVMSize'))
params["masterCount"] = self.value(self.getint('ACS', 'masterCount'))
params["linuxAdminUsername"] = self.value(self.get('ACS', 'username'))
params["sshRSAPublicKey"] = self.value(self.get('SSH', 'publickey'))
return params
def _generateSSHKey(self, private_filepath, public_filepath):
"""
Generate public and private keys. The filepath parameters
are the paths top the respective publoic and private key files.
"""
self.log.debug("Writing SSH keys to: " + private_filepath + " and " + public_filepath)
(ssh_dir, filename) = os.path.split(os.path.expanduser(private_filepath))
if not os.path.exists(ssh_dir):
self.log.debug("SSH Directory doesn't exist, creating " + ssh_dir)
os.makedirs(ssh_dir)
key = paramiko.RSAKey.generate(1024)
key.write_private_key_file(os.path.expanduser(private_filepath))
with open(os.path.expanduser(public_filepath),"w") as public:
public.write("%s %s" % (key.get_name(), key.get_base64()))
public.close()
| self.log.debug("Creating Resource Group")
command = "azure group create " + self.config.get('Group', 'name') + " " + self.config.get('Group', 'region')
os.system(command) | identifier_body |
fetch.rs | use std::io::{self, Write};
use std::cmp::min;
use std::collections::{HashMap, HashSet, VecDeque};
use std::net::SocketAddr;
use std::str::from_utf8;
use std::sync::{Arc, Mutex};
use std::time::{Instant, Duration};
use std::u64;
use abstract_ns::Address;
use futures::{Sink, Async, Stream};
use futures::future::{Future, join_all, ok, FutureResult};
use tokio_core::net::TcpStream;
use tokio_core::reactor::Timeout;
use tk_easyloop::{handle, timeout_at};
use tk_http::{Version, Status};
use tk_http::client::{Proto, Config, Error, Codec};
use tk_http::client::{Encoder, EncoderDone, Head, RecvMode};
use url::Url;
use ns_router::Router;
#[cfg(feature="tls_native")] use native_tls::TlsConnector;
#[cfg(feature="tls_native")] use tokio_tls::TlsConnectorExt;
#[cfg(feature="tls_rustls")] use rustls::ClientConfig;
#[cfg(feature="tls_rustls")] use tokio_rustls::ClientConfigExt; | #[derive(Debug)]
struct State {
offset: u64,
eof: u32,
last_line: Vec<u8>,
last_request: Instant,
}
#[derive(Debug)]
struct Cursor {
url: Arc<Url>,
state: Option<State>,
}
struct Requests {
cursors: VecDeque<Arc<Mutex<Cursor>>>,
timeout: Timeout,
}
#[derive(Debug)]
pub struct Request {
cursor: Arc<Mutex<Cursor>>,
range: Option<(u64, u64, u64)>,
}
pub fn group_addrs(vec: Vec<(Address, Vec<Arc<Url>>)>)
-> HashMap<SocketAddr, Vec<Arc<Url>>>
{
let mut urls_by_ip = HashMap::new();
for (addr, urls) in vec {
for sa in addr.addresses_at(0) {
let set = urls_by_ip.entry(sa)
.or_insert_with(HashSet::new);
for url in &urls {
set.insert(url.clone());
}
}
}
let mut ordered = urls_by_ip.iter().collect::<Vec<_>>();
ordered.sort_by_key(|&(_, y)| y.len());
let mut active_ips = HashMap::new();
let mut visited_urls = HashSet::new();
for (ip, urls) in ordered {
let urls = urls.difference(&visited_urls).cloned().collect::<Vec<_>>();
if urls.len() == 0 {
continue;
}
visited_urls.extend(urls.iter().cloned());
active_ips.insert(*ip, urls);
}
return active_ips;
}
#[allow(dead_code)]
pub fn tls_host(host: &str) -> &str {
match host.find(':') {
Some(x) => &host[..x],
None => host,
}
}
pub fn http(resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
let resolver = resolver.clone();
let cfg = Config::new()
.keep_alive_timeout(Duration::new(25, 0))
.done();
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
let h1 = host.clone();
resolver.resolve_auto(&host, 80).map(|ips| (ips, list))
.map_err(move |e| error!("Error resolving {:?}: {}", h1, e))
}))
.map(group_addrs)
.and_then(move |map| {
join_all(map.into_iter().map(move |(ip, urls)| {
let cfg = cfg.clone();
TcpStream::connect(&ip, &handle())
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
}
#[cfg(not(any(feature="tls_native", feature="tls_rustls")))]
pub fn https(_resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
use futures::future::err;
if urls_by_host.len() > 0 {
eprintln!("Compiled without TLS support");
return Box::new(err(()));
}
return Box::new(ok(()));
}
#[cfg(feature="tls_native")]
pub fn https(resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
use std::io;
if urls_by_host.len() == 0 {
return Box::new(ok(()));
}
let resolver = resolver.clone();
let cfg = Config::new().done();
let cx = TlsConnector::builder().expect("tls builder can be created works")
.build().expect("tls builder works");
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
let h1 = host.clone();
resolver.resolve_auto(&host, 80).map(|addr| (host, addr, list))
.map_err(move |e| error!("Error resolving {:?}: {}", h1, e))
}))
.and_then(move |map| {
join_all(map.into_iter().map(move |(host, addr, urls)| {
let ip = addr.pick_one().expect("no IPs");
let cfg = cfg.clone();
let cx = cx.clone();
TcpStream::connect(&ip, &handle())
.and_then(move |sock| {
cx.connect_async(tls_host(&host), sock).map_err(|e| {
io::Error::new(io::ErrorKind::Other, e)
})
})
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
}
#[cfg(feature="tls_rustls")]
pub fn https(resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
use std::io::BufReader;
use std::fs::File;
if urls_by_host.len() == 0 {
return Box::new(ok(()));
}
let resolver = resolver.clone();
let tls = Arc::new({
let mut cfg = ClientConfig::new();
let read_root = File::open("/etc/ssl/certs/ca-certificates.crt")
.map_err(|e| format!("{}", e))
.and_then(|f|
cfg.root_store.add_pem_file(&mut BufReader::new(f))
.map_err(|()| format!("unrecognized format")));
match read_root {
Ok((_, _)) => {} // TODO(tailhook) log numbers
Err(e) => {
warn!("Can find root certificates at {:?}: {}. \
Using embedded ones.",
"/etc/ssl/certs/ca-certificates.crt", e);
}
}
cfg.root_store.add_server_trust_anchors(
&webpki_roots::TLS_SERVER_ROOTS);
cfg
});
let cfg = Config::new().done();
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
resolver.resolve_auto(&host, 80).map(|addr| (host, addr, list))
}))
.map_err(|e| error!("Error resolving: {}", e))
.and_then(move |map| {
join_all(map.into_iter().map(move |(host, addr, urls)| {
let ip = addr.pick_one().expect("no ips");
let cfg = cfg.clone();
let tls = tls.clone();
TcpStream::connect(&ip, &handle())
.and_then(move |sock| {
tls.connect_async(tls_host(&host), sock)
})
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
}
fn request(cur: &Arc<Mutex<Cursor>>) -> Result<Request, Instant> {
let intr = cur.lock().unwrap();
match intr.state {
None => return Ok(Request {
cursor: cur.clone(),
range: None,
}),
Some(ref state) => {
if state.eof == 0 {
return Ok(Request {
cursor: cur.clone(),
range: None,
});
}
let next = state.last_request +
Duration::from_millis(100 * min(state.eof as u64, 70));
if next < Instant::now() {
return Ok(Request {
cursor: cur.clone(),
range: None,
});
}
return Err(next);
}
}
}
impl Requests {
fn new(urls: Vec<Arc<Url>>) -> Requests {
Requests {
cursors: urls.into_iter().map(|u| Arc::new(Mutex::new(Cursor {
url: u,
state: None,
}))).collect(),
timeout: timeout_at(Instant::now()),
}
}
}
impl Stream for Requests {
type Item = Request;
type Error = Error;
fn poll(&mut self) -> Result<Async<Option<Request>>, Error> {
loop {
match self.timeout.poll().unwrap() {
Async::Ready(()) => {}
Async::NotReady => return Ok(Async::NotReady),
}
let mut min_time = Instant::now() + Duration::new(7, 0);
for _ in 0..self.cursors.len() {
let cur = self.cursors.pop_front().unwrap();
let req = request(&cur);
self.cursors.push_back(cur);
match req {
Ok(req) => return Ok(Async::Ready(Some(req))),
Err(time) if min_time > time => min_time = time,
Err(_) => {}
}
}
self.timeout = timeout_at(min_time);
}
}
}
impl<S> Codec<S> for Request {
type Future = FutureResult<EncoderDone<S>, Error>;
fn start_write(&mut self, mut e: Encoder<S>) -> Self::Future {
let cur = self.cursor.lock().unwrap();
e.request_line("GET", cur.url.path(), Version::Http11);
cur.url.host_str().map(|x| {
e.add_header("Host", x).unwrap();
});
match cur.state {
Some(State { offset, .. }) => {
e.format_header("Range",
format_args!("bytes={}-{}",
offset-1, offset+65535)).unwrap();
}
None => {
e.add_header("Range", "bytes=-4096").unwrap();
}
}
e.done_headers().unwrap();
ok(e.done())
}
fn headers_received(&mut self, headers: &Head) -> Result<RecvMode, Error> {
let status = headers.status();
// TODO(tailhook) better error
if status != Some(Status::PartialContent) {
return Err(Error::custom(
format!("Server returned invalid status: {:?}", status)));
}
for (name, value) in headers.headers() {
if name == "Content-Range" {
let str_value = from_utf8(value)
.expect("valid content-range header");
if !str_value.starts_with("bytes ") {
panic!("invalid content-range header");
}
let slash = str_value.find("/")
.expect("valid content-range header");
let dash = str_value[..slash].find("-")
.expect("valid content-range header");
let from = str_value[6..dash].parse::<u64>()
.expect("valid content-range header");
let mut to = str_value[dash+1..slash].parse::<u64>()
.expect("valid content-range header");
let total = str_value[slash+1..].parse::<u64>()
.expect("valid content-range header");
// bug in cantal :(
if to == u64::MAX {
to = 0;
}
self.range = Some((from, to, total));
}
}
Ok(RecvMode::buffered(65536))
}
fn data_received(&mut self, data: &[u8], end: bool)
-> Result<Async<usize>, Error>
{
assert!(end);
let consumed = data.len();
let (from, to, total) = self.range.unwrap();
let mut cur = self.cursor.lock().unwrap();
let (pos, eof, mut last_line) = match cur.state.take() {
Some(state) => (Some(state.offset), state.eof, state.last_line),
None => (None, 0, b"".to_vec()),
};
let data = if pos.is_some() {
if pos != Some(from+1) {
last_line.clear();
println!("[.. skipped ..]");
&data
} else if data.len() > 0 {
&data[1..]
} else {
&data
}
} else {
&data
};
let (last_line, end) = match data.iter().rposition(|&x| x == b'\n') {
Some(end) => (data[end+1..].to_vec(), end+1),
None => ({last_line.extend(data); last_line}, 0)
};
cur.state = Some(State {
eof: if to+1 == total {
if data.len() > 0 { 1 } else { eof.saturating_add(1) }
} else { 0 },
offset: to+1,
last_line: last_line,
last_request: Instant::now(),
});
io::stdout().write_all(&data[..end]).unwrap();
io::stdout().flush().unwrap();
Ok(Async::Ready(consumed))
}
} | #[cfg(feature="tls_rustls")] use webpki_roots;
| random_line_split |
fetch.rs | use std::io::{self, Write};
use std::cmp::min;
use std::collections::{HashMap, HashSet, VecDeque};
use std::net::SocketAddr;
use std::str::from_utf8;
use std::sync::{Arc, Mutex};
use std::time::{Instant, Duration};
use std::u64;
use abstract_ns::Address;
use futures::{Sink, Async, Stream};
use futures::future::{Future, join_all, ok, FutureResult};
use tokio_core::net::TcpStream;
use tokio_core::reactor::Timeout;
use tk_easyloop::{handle, timeout_at};
use tk_http::{Version, Status};
use tk_http::client::{Proto, Config, Error, Codec};
use tk_http::client::{Encoder, EncoderDone, Head, RecvMode};
use url::Url;
use ns_router::Router;
#[cfg(feature="tls_native")] use native_tls::TlsConnector;
#[cfg(feature="tls_native")] use tokio_tls::TlsConnectorExt;
#[cfg(feature="tls_rustls")] use rustls::ClientConfig;
#[cfg(feature="tls_rustls")] use tokio_rustls::ClientConfigExt;
#[cfg(feature="tls_rustls")] use webpki_roots;
#[derive(Debug)]
struct State {
offset: u64,
eof: u32,
last_line: Vec<u8>,
last_request: Instant,
}
#[derive(Debug)]
struct Cursor {
url: Arc<Url>,
state: Option<State>,
}
struct Requests {
cursors: VecDeque<Arc<Mutex<Cursor>>>,
timeout: Timeout,
}
#[derive(Debug)]
pub struct Request {
cursor: Arc<Mutex<Cursor>>,
range: Option<(u64, u64, u64)>,
}
pub fn group_addrs(vec: Vec<(Address, Vec<Arc<Url>>)>)
-> HashMap<SocketAddr, Vec<Arc<Url>>>
{
let mut urls_by_ip = HashMap::new();
for (addr, urls) in vec {
for sa in addr.addresses_at(0) {
let set = urls_by_ip.entry(sa)
.or_insert_with(HashSet::new);
for url in &urls {
set.insert(url.clone());
}
}
}
let mut ordered = urls_by_ip.iter().collect::<Vec<_>>();
ordered.sort_by_key(|&(_, y)| y.len());
let mut active_ips = HashMap::new();
let mut visited_urls = HashSet::new();
for (ip, urls) in ordered {
let urls = urls.difference(&visited_urls).cloned().collect::<Vec<_>>();
if urls.len() == 0 {
continue;
}
visited_urls.extend(urls.iter().cloned());
active_ips.insert(*ip, urls);
}
return active_ips;
}
#[allow(dead_code)]
pub fn tls_host(host: &str) -> &str {
match host.find(':') {
Some(x) => &host[..x],
None => host,
}
}
pub fn http(resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
let resolver = resolver.clone();
let cfg = Config::new()
.keep_alive_timeout(Duration::new(25, 0))
.done();
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
let h1 = host.clone();
resolver.resolve_auto(&host, 80).map(|ips| (ips, list))
.map_err(move |e| error!("Error resolving {:?}: {}", h1, e))
}))
.map(group_addrs)
.and_then(move |map| {
join_all(map.into_iter().map(move |(ip, urls)| {
let cfg = cfg.clone();
TcpStream::connect(&ip, &handle())
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
}
#[cfg(not(any(feature="tls_native", feature="tls_rustls")))]
pub fn https(_resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
use futures::future::err;
if urls_by_host.len() > 0 {
eprintln!("Compiled without TLS support");
return Box::new(err(()));
}
return Box::new(ok(()));
}
#[cfg(feature="tls_native")]
pub fn https(resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
use std::io;
if urls_by_host.len() == 0 {
return Box::new(ok(()));
}
let resolver = resolver.clone();
let cfg = Config::new().done();
let cx = TlsConnector::builder().expect("tls builder can be created works")
.build().expect("tls builder works");
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
let h1 = host.clone();
resolver.resolve_auto(&host, 80).map(|addr| (host, addr, list))
.map_err(move |e| error!("Error resolving {:?}: {}", h1, e))
}))
.and_then(move |map| {
join_all(map.into_iter().map(move |(host, addr, urls)| {
let ip = addr.pick_one().expect("no IPs");
let cfg = cfg.clone();
let cx = cx.clone();
TcpStream::connect(&ip, &handle())
.and_then(move |sock| {
cx.connect_async(tls_host(&host), sock).map_err(|e| {
io::Error::new(io::ErrorKind::Other, e)
})
})
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
}
#[cfg(feature="tls_rustls")]
pub fn https(resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
|
fn request(cur: &Arc<Mutex<Cursor>>) -> Result<Request, Instant> {
let intr = cur.lock().unwrap();
match intr.state {
None => return Ok(Request {
cursor: cur.clone(),
range: None,
}),
Some(ref state) => {
if state.eof == 0 {
return Ok(Request {
cursor: cur.clone(),
range: None,
});
}
let next = state.last_request +
Duration::from_millis(100 * min(state.eof as u64, 70));
if next < Instant::now() {
return Ok(Request {
cursor: cur.clone(),
range: None,
});
}
return Err(next);
}
}
}
impl Requests {
fn new(urls: Vec<Arc<Url>>) -> Requests {
Requests {
cursors: urls.into_iter().map(|u| Arc::new(Mutex::new(Cursor {
url: u,
state: None,
}))).collect(),
timeout: timeout_at(Instant::now()),
}
}
}
impl Stream for Requests {
type Item = Request;
type Error = Error;
fn poll(&mut self) -> Result<Async<Option<Request>>, Error> {
loop {
match self.timeout.poll().unwrap() {
Async::Ready(()) => {}
Async::NotReady => return Ok(Async::NotReady),
}
let mut min_time = Instant::now() + Duration::new(7, 0);
for _ in 0..self.cursors.len() {
let cur = self.cursors.pop_front().unwrap();
let req = request(&cur);
self.cursors.push_back(cur);
match req {
Ok(req) => return Ok(Async::Ready(Some(req))),
Err(time) if min_time > time => min_time = time,
Err(_) => {}
}
}
self.timeout = timeout_at(min_time);
}
}
}
impl<S> Codec<S> for Request {
type Future = FutureResult<EncoderDone<S>, Error>;
fn start_write(&mut self, mut e: Encoder<S>) -> Self::Future {
let cur = self.cursor.lock().unwrap();
e.request_line("GET", cur.url.path(), Version::Http11);
cur.url.host_str().map(|x| {
e.add_header("Host", x).unwrap();
});
match cur.state {
Some(State { offset, .. }) => {
e.format_header("Range",
format_args!("bytes={}-{}",
offset-1, offset+65535)).unwrap();
}
None => {
e.add_header("Range", "bytes=-4096").unwrap();
}
}
e.done_headers().unwrap();
ok(e.done())
}
fn headers_received(&mut self, headers: &Head) -> Result<RecvMode, Error> {
let status = headers.status();
// TODO(tailhook) better error
if status != Some(Status::PartialContent) {
return Err(Error::custom(
format!("Server returned invalid status: {:?}", status)));
}
for (name, value) in headers.headers() {
if name == "Content-Range" {
let str_value = from_utf8(value)
.expect("valid content-range header");
if !str_value.starts_with("bytes ") {
panic!("invalid content-range header");
}
let slash = str_value.find("/")
.expect("valid content-range header");
let dash = str_value[..slash].find("-")
.expect("valid content-range header");
let from = str_value[6..dash].parse::<u64>()
.expect("valid content-range header");
let mut to = str_value[dash+1..slash].parse::<u64>()
.expect("valid content-range header");
let total = str_value[slash+1..].parse::<u64>()
.expect("valid content-range header");
// bug in cantal :(
if to == u64::MAX {
to = 0;
}
self.range = Some((from, to, total));
}
}
Ok(RecvMode::buffered(65536))
}
fn data_received(&mut self, data: &[u8], end: bool)
-> Result<Async<usize>, Error>
{
assert!(end);
let consumed = data.len();
let (from, to, total) = self.range.unwrap();
let mut cur = self.cursor.lock().unwrap();
let (pos, eof, mut last_line) = match cur.state.take() {
Some(state) => (Some(state.offset), state.eof, state.last_line),
None => (None, 0, b"".to_vec()),
};
let data = if pos.is_some() {
if pos != Some(from+1) {
last_line.clear();
println!("[.. skipped ..]");
&data
} else if data.len() > 0 {
&data[1..]
} else {
&data
}
} else {
&data
};
let (last_line, end) = match data.iter().rposition(|&x| x == b'\n') {
Some(end) => (data[end+1..].to_vec(), end+1),
None => ({last_line.extend(data); last_line}, 0)
};
cur.state = Some(State {
eof: if to+1 == total {
if data.len() > 0 { 1 } else { eof.saturating_add(1) }
} else { 0 },
offset: to+1,
last_line: last_line,
last_request: Instant::now(),
});
io::stdout().write_all(&data[..end]).unwrap();
io::stdout().flush().unwrap();
Ok(Async::Ready(consumed))
}
}
| {
use std::io::BufReader;
use std::fs::File;
if urls_by_host.len() == 0 {
return Box::new(ok(()));
}
let resolver = resolver.clone();
let tls = Arc::new({
let mut cfg = ClientConfig::new();
let read_root = File::open("/etc/ssl/certs/ca-certificates.crt")
.map_err(|e| format!("{}", e))
.and_then(|f|
cfg.root_store.add_pem_file(&mut BufReader::new(f))
.map_err(|()| format!("unrecognized format")));
match read_root {
Ok((_, _)) => {} // TODO(tailhook) log numbers
Err(e) => {
warn!("Can find root certificates at {:?}: {}. \
Using embedded ones.",
"/etc/ssl/certs/ca-certificates.crt", e);
}
}
cfg.root_store.add_server_trust_anchors(
&webpki_roots::TLS_SERVER_ROOTS);
cfg
});
let cfg = Config::new().done();
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
resolver.resolve_auto(&host, 80).map(|addr| (host, addr, list))
}))
.map_err(|e| error!("Error resolving: {}", e))
.and_then(move |map| {
join_all(map.into_iter().map(move |(host, addr, urls)| {
let ip = addr.pick_one().expect("no ips");
let cfg = cfg.clone();
let tls = tls.clone();
TcpStream::connect(&ip, &handle())
.and_then(move |sock| {
tls.connect_async(tls_host(&host), sock)
})
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
} | identifier_body |
fetch.rs | use std::io::{self, Write};
use std::cmp::min;
use std::collections::{HashMap, HashSet, VecDeque};
use std::net::SocketAddr;
use std::str::from_utf8;
use std::sync::{Arc, Mutex};
use std::time::{Instant, Duration};
use std::u64;
use abstract_ns::Address;
use futures::{Sink, Async, Stream};
use futures::future::{Future, join_all, ok, FutureResult};
use tokio_core::net::TcpStream;
use tokio_core::reactor::Timeout;
use tk_easyloop::{handle, timeout_at};
use tk_http::{Version, Status};
use tk_http::client::{Proto, Config, Error, Codec};
use tk_http::client::{Encoder, EncoderDone, Head, RecvMode};
use url::Url;
use ns_router::Router;
#[cfg(feature="tls_native")] use native_tls::TlsConnector;
#[cfg(feature="tls_native")] use tokio_tls::TlsConnectorExt;
#[cfg(feature="tls_rustls")] use rustls::ClientConfig;
#[cfg(feature="tls_rustls")] use tokio_rustls::ClientConfigExt;
#[cfg(feature="tls_rustls")] use webpki_roots;
#[derive(Debug)]
struct State {
offset: u64,
eof: u32,
last_line: Vec<u8>,
last_request: Instant,
}
#[derive(Debug)]
struct Cursor {
url: Arc<Url>,
state: Option<State>,
}
struct Requests {
cursors: VecDeque<Arc<Mutex<Cursor>>>,
timeout: Timeout,
}
#[derive(Debug)]
pub struct Request {
cursor: Arc<Mutex<Cursor>>,
range: Option<(u64, u64, u64)>,
}
pub fn group_addrs(vec: Vec<(Address, Vec<Arc<Url>>)>)
-> HashMap<SocketAddr, Vec<Arc<Url>>>
{
let mut urls_by_ip = HashMap::new();
for (addr, urls) in vec {
for sa in addr.addresses_at(0) {
let set = urls_by_ip.entry(sa)
.or_insert_with(HashSet::new);
for url in &urls {
set.insert(url.clone());
}
}
}
let mut ordered = urls_by_ip.iter().collect::<Vec<_>>();
ordered.sort_by_key(|&(_, y)| y.len());
let mut active_ips = HashMap::new();
let mut visited_urls = HashSet::new();
for (ip, urls) in ordered {
let urls = urls.difference(&visited_urls).cloned().collect::<Vec<_>>();
if urls.len() == 0 {
continue;
}
visited_urls.extend(urls.iter().cloned());
active_ips.insert(*ip, urls);
}
return active_ips;
}
#[allow(dead_code)]
pub fn tls_host(host: &str) -> &str {
match host.find(':') {
Some(x) => &host[..x],
None => host,
}
}
pub fn | (resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
let resolver = resolver.clone();
let cfg = Config::new()
.keep_alive_timeout(Duration::new(25, 0))
.done();
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
let h1 = host.clone();
resolver.resolve_auto(&host, 80).map(|ips| (ips, list))
.map_err(move |e| error!("Error resolving {:?}: {}", h1, e))
}))
.map(group_addrs)
.and_then(move |map| {
join_all(map.into_iter().map(move |(ip, urls)| {
let cfg = cfg.clone();
TcpStream::connect(&ip, &handle())
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
}
#[cfg(not(any(feature="tls_native", feature="tls_rustls")))]
pub fn https(_resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
use futures::future::err;
if urls_by_host.len() > 0 {
eprintln!("Compiled without TLS support");
return Box::new(err(()));
}
return Box::new(ok(()));
}
#[cfg(feature="tls_native")]
pub fn https(resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
use std::io;
if urls_by_host.len() == 0 {
return Box::new(ok(()));
}
let resolver = resolver.clone();
let cfg = Config::new().done();
let cx = TlsConnector::builder().expect("tls builder can be created works")
.build().expect("tls builder works");
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
let h1 = host.clone();
resolver.resolve_auto(&host, 80).map(|addr| (host, addr, list))
.map_err(move |e| error!("Error resolving {:?}: {}", h1, e))
}))
.and_then(move |map| {
join_all(map.into_iter().map(move |(host, addr, urls)| {
let ip = addr.pick_one().expect("no IPs");
let cfg = cfg.clone();
let cx = cx.clone();
TcpStream::connect(&ip, &handle())
.and_then(move |sock| {
cx.connect_async(tls_host(&host), sock).map_err(|e| {
io::Error::new(io::ErrorKind::Other, e)
})
})
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
}
#[cfg(feature="tls_rustls")]
pub fn https(resolver: &Router, urls_by_host: HashMap<String, Vec<Arc<Url>>>)
-> Box<Future<Item=(), Error=()>>
{
use std::io::BufReader;
use std::fs::File;
if urls_by_host.len() == 0 {
return Box::new(ok(()));
}
let resolver = resolver.clone();
let tls = Arc::new({
let mut cfg = ClientConfig::new();
let read_root = File::open("/etc/ssl/certs/ca-certificates.crt")
.map_err(|e| format!("{}", e))
.and_then(|f|
cfg.root_store.add_pem_file(&mut BufReader::new(f))
.map_err(|()| format!("unrecognized format")));
match read_root {
Ok((_, _)) => {} // TODO(tailhook) log numbers
Err(e) => {
warn!("Can find root certificates at {:?}: {}. \
Using embedded ones.",
"/etc/ssl/certs/ca-certificates.crt", e);
}
}
cfg.root_store.add_server_trust_anchors(
&webpki_roots::TLS_SERVER_ROOTS);
cfg
});
let cfg = Config::new().done();
return Box::new(
join_all(urls_by_host.into_iter().map(move |(host, list)| {
resolver.resolve_auto(&host, 80).map(|addr| (host, addr, list))
}))
.map_err(|e| error!("Error resolving: {}", e))
.and_then(move |map| {
join_all(map.into_iter().map(move |(host, addr, urls)| {
let ip = addr.pick_one().expect("no ips");
let cfg = cfg.clone();
let tls = tls.clone();
TcpStream::connect(&ip, &handle())
.and_then(move |sock| {
tls.connect_async(tls_host(&host), sock)
})
.map_err(move |e| {
error!("Error connecting to {}: {}", ip, e);
})
.and_then(move |sock| {
Proto::new(sock, &handle(), &cfg)
.send_all(Requests::new(urls))
.map(|_| unreachable!())
.map_err(move |e| {
error!("Error (ip: {}): {}", ip, e);
})
})
}))
.map(|_| ())
}));
}
fn request(cur: &Arc<Mutex<Cursor>>) -> Result<Request, Instant> {
let intr = cur.lock().unwrap();
match intr.state {
None => return Ok(Request {
cursor: cur.clone(),
range: None,
}),
Some(ref state) => {
if state.eof == 0 {
return Ok(Request {
cursor: cur.clone(),
range: None,
});
}
let next = state.last_request +
Duration::from_millis(100 * min(state.eof as u64, 70));
if next < Instant::now() {
return Ok(Request {
cursor: cur.clone(),
range: None,
});
}
return Err(next);
}
}
}
impl Requests {
fn new(urls: Vec<Arc<Url>>) -> Requests {
Requests {
cursors: urls.into_iter().map(|u| Arc::new(Mutex::new(Cursor {
url: u,
state: None,
}))).collect(),
timeout: timeout_at(Instant::now()),
}
}
}
impl Stream for Requests {
type Item = Request;
type Error = Error;
fn poll(&mut self) -> Result<Async<Option<Request>>, Error> {
loop {
match self.timeout.poll().unwrap() {
Async::Ready(()) => {}
Async::NotReady => return Ok(Async::NotReady),
}
let mut min_time = Instant::now() + Duration::new(7, 0);
for _ in 0..self.cursors.len() {
let cur = self.cursors.pop_front().unwrap();
let req = request(&cur);
self.cursors.push_back(cur);
match req {
Ok(req) => return Ok(Async::Ready(Some(req))),
Err(time) if min_time > time => min_time = time,
Err(_) => {}
}
}
self.timeout = timeout_at(min_time);
}
}
}
impl<S> Codec<S> for Request {
type Future = FutureResult<EncoderDone<S>, Error>;
fn start_write(&mut self, mut e: Encoder<S>) -> Self::Future {
let cur = self.cursor.lock().unwrap();
e.request_line("GET", cur.url.path(), Version::Http11);
cur.url.host_str().map(|x| {
e.add_header("Host", x).unwrap();
});
match cur.state {
Some(State { offset, .. }) => {
e.format_header("Range",
format_args!("bytes={}-{}",
offset-1, offset+65535)).unwrap();
}
None => {
e.add_header("Range", "bytes=-4096").unwrap();
}
}
e.done_headers().unwrap();
ok(e.done())
}
fn headers_received(&mut self, headers: &Head) -> Result<RecvMode, Error> {
let status = headers.status();
// TODO(tailhook) better error
if status != Some(Status::PartialContent) {
return Err(Error::custom(
format!("Server returned invalid status: {:?}", status)));
}
for (name, value) in headers.headers() {
if name == "Content-Range" {
let str_value = from_utf8(value)
.expect("valid content-range header");
if !str_value.starts_with("bytes ") {
panic!("invalid content-range header");
}
let slash = str_value.find("/")
.expect("valid content-range header");
let dash = str_value[..slash].find("-")
.expect("valid content-range header");
let from = str_value[6..dash].parse::<u64>()
.expect("valid content-range header");
let mut to = str_value[dash+1..slash].parse::<u64>()
.expect("valid content-range header");
let total = str_value[slash+1..].parse::<u64>()
.expect("valid content-range header");
// bug in cantal :(
if to == u64::MAX {
to = 0;
}
self.range = Some((from, to, total));
}
}
Ok(RecvMode::buffered(65536))
}
fn data_received(&mut self, data: &[u8], end: bool)
-> Result<Async<usize>, Error>
{
assert!(end);
let consumed = data.len();
let (from, to, total) = self.range.unwrap();
let mut cur = self.cursor.lock().unwrap();
let (pos, eof, mut last_line) = match cur.state.take() {
Some(state) => (Some(state.offset), state.eof, state.last_line),
None => (None, 0, b"".to_vec()),
};
let data = if pos.is_some() {
if pos != Some(from+1) {
last_line.clear();
println!("[.. skipped ..]");
&data
} else if data.len() > 0 {
&data[1..]
} else {
&data
}
} else {
&data
};
let (last_line, end) = match data.iter().rposition(|&x| x == b'\n') {
Some(end) => (data[end+1..].to_vec(), end+1),
None => ({last_line.extend(data); last_line}, 0)
};
cur.state = Some(State {
eof: if to+1 == total {
if data.len() > 0 { 1 } else { eof.saturating_add(1) }
} else { 0 },
offset: to+1,
last_line: last_line,
last_request: Instant::now(),
});
io::stdout().write_all(&data[..end]).unwrap();
io::stdout().flush().unwrap();
Ok(Async::Ready(consumed))
}
}
| http | identifier_name |
dyn_cor_rel.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 3 15:56:13 2020
@author: dean
"""
#functions to generate stimuli given parameter set
import os, sys
from tqdm import tqdm
import math
import numpy as np
import matplotlib.pyplot as plt
import torch
import xarray as xr
from itertools import product
from torchvision import models
import torch.nn as nn
import pandas as pd
def norm(x, dim):
x = x - x.mean(dim)
x = x/(x**2).sum(dim)**0.5
return x
def cor(x, y, dim):
y = norm(y, dim)
x = norm(x, dim)
r = x.dot(y, dim)
return r
def rc(x):
reverse = (slice(None, None, -1),) * x.ndim
return x[reverse].conj()
def auto_corr(x, y, dim, pad=None):
xn = norm(x, dim);
yn = norm(y, dim);
if pad is None:
cor = np.fft.ifftn(np.fft.fftn(xn)*(np.fft.fftn(rc(yn))))
else:
xn = np.pad(xn, pad_width=pad)
yn = np.pad(yn, pad_width=pad)
cor = np.fft.ifftn(np.fft.fftn(xn)*(np.fft.fftn(rc(yn))))
return cor
def pol2cart(rho, phi):
x = rho * np.cos(phi)
y = rho * np.sin(phi)
return(x, y)
def sinusoid_2d(nx, ny, x_0, y_0, sf, ori, phase, bg=0):
x_coords = np.arange(0, nx, dtype=np.float64) - x_0
y_coords = np.arange(0, ny, dtype=np.float64) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
mu_0, nu_0 = pol2cart(sf, np.deg2rad(ori + 90))
s = np.sin(2*np.pi*(mu_0*xx + nu_0*yy) + np.deg2rad(phase + 90))
s = s + bg
return s
def window(radius, x_0, y_0, nx, ny):
x_coords = np.arange(0, nx, dtype=np.float128) - x_0
y_coords = np.arange(0, ny, dtype=np.float128) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
d = (xx**2 + yy**2)**0.5
w = np.zeros((int(nx), int(ny)))
w[d<=radius] = 1
return w
def cos_window(radius, x_0, y_0, nx, ny):
x_coords = np.arange(0, nx, dtype=np.float128) - x_0
y_coords = np.arange(0, ny, dtype=np.float128) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
d = (xx**2 + yy**2)**0.5
w = np.cos(d*np.pi*(1/radius)) + 1
w[d>radius] = 0
return w
def colorize(s, lum, by, rg):
s_c = s[..., np.newaxis]
l_by_rg = np.array([[1, 1, 1],
[1/3, 1/3, -2/3],
[1, -1, 0]]
)
l_by_rg = (l_by_rg/((l_by_rg**2).sum(1, keepdims=True)**0.5))
rgb = l_by_rg[0]*lum + l_by_rg[1]*by + l_by_rg[2]*rg
s_c = s_c*rgb[np.newaxis,np.newaxis]
return s_c
def sine_chrom(nx, ny, x_0, y_0, sf, ori, phase, lum, by, rg, bg=0):
s = sinusoid_2d(nx, ny, x_0, y_0, sf, ori, phase, bg=0)
s_c = colorize(s, lum, by, rg)
return scale_im(s_c)
def sine_chrom_dual(nx, ny, x_0, y_0, sf, ori, phase,
lum, by, rg, bg=0,
sf2=None, rel_ori=None, phase2=None,
lum2=None, by2=None, rg2=None, make_window=False, radius=None):
if sf2 is None:
sf2 = sf
if rel_ori is None:
rel_ori = 0
if phase2 is None:
phase2 = phase
if lum2 is None:
lum2 = lum
if by2 is None:
by2 = by
if rg2 is None:
rg2 = rg
s1 = sine_chrom(nx, ny, x_0, y_0, sf, ori, phase, lum, by, rg, bg=0)
s2 = sine_chrom(nx, ny, x_0, y_0, sf2, ori+rel_ori, phase2,
lum2, by2, rg2)
s = s1+s2
if make_window:
w = window(radius, x_0, y_0, nx, ny)
s = w[..., np.newaxis]*s
return s
def norm_im(im):
im = im - im.min()
im = im/im.max()
return im
def scale_im(im):
im = im - im.min()
im = 2*im/im.max()
im = im-1
return im
mod = models.alexnet(pretrained=True).features[:1]
w = list(mod.parameters())[0].detach().numpy()
w_da = xr.DataArray(w, dims=('unit', 'channel', 'row', 'col'))
n_units = w.shape[0]
w_da_noise = w_da.copy(deep=True)
w_da_noise[...] = np.random.normal(size=w_da.shape, scale=0.1)
#%%
nx = ny = 11
stims = []
ori = list(np.linspace(0, 180-180/64, 64))
phase = list(np.linspace(0, 360-360/8, 8))
sf = list(np.logspace(np.log10(0.1), np.log10(.25), 8))
contrast = [1,]
lbr = [1,0,0]
make_window = False
param_nms = ['ori', 'sf', 'phase']
params = [ori, sf, phase]
for i, p in enumerate(params):
if not type(p) is list:
params[i] = [p,]
cart_prod_params = np.array(list(product(*params)))
da = xr.DataArray(np.zeros(tuple(len(p) for p in params )),
dims=param_nms,
coords=params )
da_stims = da.squeeze(drop=True).expand_dims({'row':range(11),
'col':range(11),
'channel':range(3)})
da_stims = da_stims.transpose('ori', 'sf', 'phase', 'row', 'col', 'channel').copy()
x_0 = y_0 = 5
stim =[]
for p in (cart_prod_params):
#plt.figure()
ori, sf, phase = p
im = sine_chrom_dual(nx, ny, x_0, y_0, sf, ori, phase,
lbr[0], lbr[1], lbr[2], bg=0,
sf2=sf, rel_ori=0, phase2=phase,
lum2=lbr[0], by2=lbr[1], rg2=lbr[2],
make_window=make_window)
#plt.imshow(norm_im(w[...,np.newaxis]*im))
stim.append(im)
stims.append(stim)
stims = np.array(stims).squeeze()
rs = []
for stim, param in zip(stims, cart_prod_params):
ori, sf, phase = param
da_stims.loc[ori, sf, phase] = stim.copy()
#%%
w_da = w_da/(w_da**2).sum(('channel', 'row', 'col'))**0.5
#da_stims = da_stims/(da_stims**2).sum(('channel', 'row', 'col'))**0.5
da_sig = da_stims.dot(w_da)
da_noise = da_stims.dot(w_da_noise)
#%%
n_units = len(da_sig.coords['unit'].values)
unit_coords = list(product(range(n_units),range(n_units),))
mod_cor = xr.DataArray(np.zeros((n_units, n_units, 4, 2)),
dims=['unit_r', 'unit_c', 'vars', 'sn'],
coords=[range(n_units), range(n_units),
['cor', 'dyn', 'ori_ind', 'phase_ind'],
['s','n']])
sf_ind = 0
dim=('phase', 'ori')
for ind1, ind2 in tqdm((unit_coords)):
for i, da in enumerate([da_sig, da_noise]):
u1 = (da.isel(unit=ind1, sf=sf_ind).squeeze()**2).sum('phase')**0.5
u2 = (da.isel(unit=ind2, sf=sf_ind).squeeze()**2).sum('phase')**0.5
corr = auto_corr(u1, u2, dim=('ori'), pad=None)
r = np.max(np.real(corr))
mod_cor[ind1, ind2, 0, i] = r
mod_cor[ind1, ind2, 1, i] = u1.std()*u2.std()
mod_cor[ind1, ind2, 2:, i] = np.array(np.unravel_index(np.argmax(corr), corr.shape))
#%%
dfs = [mod_cor[...,i,:].to_dataframe(name=str(mod_cor.coords['vars'][i].values)).drop('vars', axis=1)
for i in range(len(mod_cor.coords['vars']))]
df = pd.concat(dfs, 1)
m_inds = np.array([np.array(a) for a in df.index.values])
drop_inds = m_inds[:,0]<m_inds[:,1]
df_d = df[drop_inds]
df_d = df_d.reorder_levels([2,0,1])
#%%
def fz(r):
return 0.5*(np.log((1+r)/(1-r)))
from scipy import stats
plt.figure(figsize=(4,3))
df = df_d.loc['s']
rs = []
for df in [df_d.loc['s'], df_d.loc['n']]:
r,p = (stats.spearmanr(df['dyn'], df['cor']**2))
rs.append(r)
plt.scatter(df['dyn'], df['cor']**2, s=1);plt.semilogx();
print(p)
plt.xlim(0.01, 1000)
plt.ylim(0,1.1)
plt.xlabel('Dynamic range')
plt.ylabel('$r^2_{ER}$')
plt.title('Trained $r=$' + str(np.round(rs[0],2)) +
', untrained $r=$' + str(np.round(rs[1],2)))
inds = []
plt.legend(['Trained', 'Untrained'])
df = df_d.loc['s']
for i, ind in enumerate([0,-12,-100, 0,-12, -100]):
if i<=2:
ranks = (df['cor'].rank() + df['dyn'].rank()).sort_values()[::-1]
else:
ranks = (df['cor'].rank() - df['dyn'].rank()).sort_values()[::-1] |
u1, u2 = ranks.index.values[ind]
inds.append([u1,u2])
plt.scatter(df['dyn'][u1,u2], df['cor'][u1,u2]**2, s=10, c='r');plt.semilogx();
#%%
j=0
plt.figure(figsize=(3,8))
for ind in inds:
u1, u2 = ind
u1r = (da_sig.isel(unit=u1, sf=sf_ind).squeeze()**2).sum('phase')**0.5
u2r = (da_sig.isel(unit=u2, sf=sf_ind).squeeze()**2).sum('phase')**0.5
j+=1
plt.subplot(6, 2, j)
f = norm_im(w[u1])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str((u1r.var().values).round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
#plt.imshow(f.mean(0), cmap='gray')
j+=1
plt.subplot(6, 2, j)
f = norm_im(w[u2])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str((u2r.var().values).round(2)))
#plt.imshow(f.mean(0), cmap='gray')
plt.tight_layout()
plt.savefig('example_filters.pdf')
#%%
j=0
plt.figure(figsize=(3,8))
for ind in inds:
u1, u2 = ind
j+=1
plt.subplot(6, 2, j)
a = da_sig.isel(unit=u1, sf=sf_ind)
sta = (da_stims.isel(sf=sf_ind)*a).mean(('ori', 'phase'))
plt.imshow(norm_im(sta));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str(df['dyn'][u1,u2].round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
j+=1
plt.subplot(6, 2, j)
a = da_sig.isel(unit=u2, sf=sf_ind)
sta = (da_stims.isel(sf=sf_ind)*a).mean(('ori', 'phase'))
plt.imshow(norm_im(sta));plt.xticks([]);plt.yticks([])
plt.tight_layout()
#plt.savefig('example_filters.pdf')
#%%
j=0
plt.figure(figsize=(4,8))
xticks = np.linspace(0, 180, 5)
for ind in inds:
u1, u2 = ind
j+=1
plt.subplot(6, 2, j)
u1r = da_sig.isel(unit=u1, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u1r = (u1r**2).sum('phase')**0.5
u1r.plot.line(x='ori', add_legend=False);plt.title('');plt.gca().set_xticklabels([]);plt.xlabel('')
if j==1:
''#plt.legend(['0','90','180','270'], loc='lower right', title='Phase (deg)')
plt.gca().set_xticks(xticks)
if j==11:
plt.gca().set_xticklabels(np.round(xticks).astype(int))
plt.xlabel('Orientation (deg)')
plt.ylabel('Response')
#plt.imshow(f.mean(0), cmap='gray')
j+=1
plt.subplot(6, 2, j)
u2r = da_sig.isel(unit=u2, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u2r = (u2r**2).sum('phase')**0.5
u2r.plot.line(x='ori', add_legend=False);plt.title('');plt.gca().set_xticks(xticks)
plt.gca().set_xticklabels([]);plt.xlabel('');
plt.title('Dyn='+str(df['dyn'][u1,u2].round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
plt.tight_layout()
plt.savefig('example_filters_resp.pdf')
#%% matching stim for a given unit
for ind in inds:
plt.figure()
shifts = df.loc[u1,u2]
b_inds = np.unravel_index(np.argmax(u1r.values), np.shape(u1r))
u1_stim = da_stims.isel(sf=sf_ind, ori=b_inds[0], phase=b_inds[1])
s_stim = da_stims.roll({'phase':int(shifts['phase_ind']), 'ori':int(shifts['ori_ind'])})
u2_match_stim = s_stim.isel(sf=sf_ind, ori=b_inds[0], phase=b_inds[1])
plt.imshow(norm_im(u1_stim))
plt.figure()
plt.imshow(norm_im(u2_match_stim))
#%% now plot stim
from mpl_toolkits.axes_grid1 import ImageGrid
fig = plt.figure(figsize=(6., 6.))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols=(4, 8), # creates 2x2 grid of axes
axes_pad=0.05, # pad between axes in inch.
)
ims = da_stims.isel(sf=0).sel(ori=np.linspace(0, 180-180/8, 8),
phase=[0, 90, 180, 270], method='nearest').stack(c = ('phase', 'ori')).transpose('c', 'row', 'col', 'channel')
for i, ax, im in zip(range(len(grid)), grid, ims):
# Iterating over the grid returns the Axes.
ax.imshow(norm_im(im));ax.set_xticks([]);ax.set_yticks([]);
i,j = np.unravel_index(i, (4, 8))
if i==0:
ax.set_title(int(im.coords['c'].values.item()[1]))
if j==0:
ax.set_ylabel(int(im.coords['c'].values.item()[0]))
if i==0 and j==0:
ax.set_ylabel('phase (deg) \n' + str(int(im.coords['c'].values.item()[0])))
ax.set_title( 'ori (deg) \n' + str(int(im.coords['c'].values.item()[1])))
plt.savefig('example_stim_phase_ori.pdf')
#%%
from mpl_toolkits.axes_grid1 import ImageGrid
fig = plt.figure(figsize=(8., 3.))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols=(1, 8), # creates 2x2 grid of axes
axes_pad=0.05, # pad between axes in inch.
)
ims = da_stims.sel(ori=0,phase=0, method='nearest').transpose('sf', 'row', 'col', 'channel')
for i, ax, im in zip(range(len(grid)), grid, ims):
# Iterating over the grid returns the Axes.
ax.imshow(norm_im(im));ax.set_xticks([]);ax.set_yticks([]);
sf = im.coords['sf'].values.item()
ax.set_title(np.round(1/sf,1))
if i==0:
ax.set_title('Spatial Period\n ' + str(np.round(1/sf,1)) + str(' (pix)'))
plt.savefig('period.pdf')
#%%
plt.figure(figsize=(3,9))
plt.subplot(311)
f = norm_im(w[55])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Filter')
plt.subplot(312)
u1 = da_sig.isel(unit=55, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u1.plot.line(x='ori')
plt.title('Response')
plt.legend([0, 90, 180, 270], title='phase', loc='lower left')
plt.subplot(313)
(((u1**2)**0.5).mean('phase')).plot.line(x='ori')
plt.title('Avg. Response magnitude across phase')
plt.tight_layout() | random_line_split | |
dyn_cor_rel.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 3 15:56:13 2020
@author: dean
"""
#functions to generate stimuli given parameter set
import os, sys
from tqdm import tqdm
import math
import numpy as np
import matplotlib.pyplot as plt
import torch
import xarray as xr
from itertools import product
from torchvision import models
import torch.nn as nn
import pandas as pd
def norm(x, dim):
x = x - x.mean(dim)
x = x/(x**2).sum(dim)**0.5
return x
def cor(x, y, dim):
y = norm(y, dim)
x = norm(x, dim)
r = x.dot(y, dim)
return r
def rc(x):
reverse = (slice(None, None, -1),) * x.ndim
return x[reverse].conj()
def auto_corr(x, y, dim, pad=None):
xn = norm(x, dim);
yn = norm(y, dim);
if pad is None:
cor = np.fft.ifftn(np.fft.fftn(xn)*(np.fft.fftn(rc(yn))))
else:
xn = np.pad(xn, pad_width=pad)
yn = np.pad(yn, pad_width=pad)
cor = np.fft.ifftn(np.fft.fftn(xn)*(np.fft.fftn(rc(yn))))
return cor
def pol2cart(rho, phi):
x = rho * np.cos(phi)
y = rho * np.sin(phi)
return(x, y)
def sinusoid_2d(nx, ny, x_0, y_0, sf, ori, phase, bg=0):
x_coords = np.arange(0, nx, dtype=np.float64) - x_0
y_coords = np.arange(0, ny, dtype=np.float64) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
mu_0, nu_0 = pol2cart(sf, np.deg2rad(ori + 90))
s = np.sin(2*np.pi*(mu_0*xx + nu_0*yy) + np.deg2rad(phase + 90))
s = s + bg
return s
def window(radius, x_0, y_0, nx, ny):
x_coords = np.arange(0, nx, dtype=np.float128) - x_0
y_coords = np.arange(0, ny, dtype=np.float128) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
d = (xx**2 + yy**2)**0.5
w = np.zeros((int(nx), int(ny)))
w[d<=radius] = 1
return w
def cos_window(radius, x_0, y_0, nx, ny):
x_coords = np.arange(0, nx, dtype=np.float128) - x_0
y_coords = np.arange(0, ny, dtype=np.float128) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
d = (xx**2 + yy**2)**0.5
w = np.cos(d*np.pi*(1/radius)) + 1
w[d>radius] = 0
return w
def colorize(s, lum, by, rg):
s_c = s[..., np.newaxis]
l_by_rg = np.array([[1, 1, 1],
[1/3, 1/3, -2/3],
[1, -1, 0]]
)
l_by_rg = (l_by_rg/((l_by_rg**2).sum(1, keepdims=True)**0.5))
rgb = l_by_rg[0]*lum + l_by_rg[1]*by + l_by_rg[2]*rg
s_c = s_c*rgb[np.newaxis,np.newaxis]
return s_c
def sine_chrom(nx, ny, x_0, y_0, sf, ori, phase, lum, by, rg, bg=0):
s = sinusoid_2d(nx, ny, x_0, y_0, sf, ori, phase, bg=0)
s_c = colorize(s, lum, by, rg)
return scale_im(s_c)
def sine_chrom_dual(nx, ny, x_0, y_0, sf, ori, phase,
lum, by, rg, bg=0,
sf2=None, rel_ori=None, phase2=None,
lum2=None, by2=None, rg2=None, make_window=False, radius=None):
if sf2 is None:
sf2 = sf
if rel_ori is None:
rel_ori = 0
if phase2 is None:
phase2 = phase
if lum2 is None:
|
if by2 is None:
by2 = by
if rg2 is None:
rg2 = rg
s1 = sine_chrom(nx, ny, x_0, y_0, sf, ori, phase, lum, by, rg, bg=0)
s2 = sine_chrom(nx, ny, x_0, y_0, sf2, ori+rel_ori, phase2,
lum2, by2, rg2)
s = s1+s2
if make_window:
w = window(radius, x_0, y_0, nx, ny)
s = w[..., np.newaxis]*s
return s
def norm_im(im):
im = im - im.min()
im = im/im.max()
return im
def scale_im(im):
im = im - im.min()
im = 2*im/im.max()
im = im-1
return im
mod = models.alexnet(pretrained=True).features[:1]
w = list(mod.parameters())[0].detach().numpy()
w_da = xr.DataArray(w, dims=('unit', 'channel', 'row', 'col'))
n_units = w.shape[0]
w_da_noise = w_da.copy(deep=True)
w_da_noise[...] = np.random.normal(size=w_da.shape, scale=0.1)
#%%
nx = ny = 11
stims = []
ori = list(np.linspace(0, 180-180/64, 64))
phase = list(np.linspace(0, 360-360/8, 8))
sf = list(np.logspace(np.log10(0.1), np.log10(.25), 8))
contrast = [1,]
lbr = [1,0,0]
make_window = False
param_nms = ['ori', 'sf', 'phase']
params = [ori, sf, phase]
for i, p in enumerate(params):
if not type(p) is list:
params[i] = [p,]
cart_prod_params = np.array(list(product(*params)))
da = xr.DataArray(np.zeros(tuple(len(p) for p in params )),
dims=param_nms,
coords=params )
da_stims = da.squeeze(drop=True).expand_dims({'row':range(11),
'col':range(11),
'channel':range(3)})
da_stims = da_stims.transpose('ori', 'sf', 'phase', 'row', 'col', 'channel').copy()
x_0 = y_0 = 5
stim =[]
for p in (cart_prod_params):
#plt.figure()
ori, sf, phase = p
im = sine_chrom_dual(nx, ny, x_0, y_0, sf, ori, phase,
lbr[0], lbr[1], lbr[2], bg=0,
sf2=sf, rel_ori=0, phase2=phase,
lum2=lbr[0], by2=lbr[1], rg2=lbr[2],
make_window=make_window)
#plt.imshow(norm_im(w[...,np.newaxis]*im))
stim.append(im)
stims.append(stim)
stims = np.array(stims).squeeze()
rs = []
for stim, param in zip(stims, cart_prod_params):
ori, sf, phase = param
da_stims.loc[ori, sf, phase] = stim.copy()
#%%
w_da = w_da/(w_da**2).sum(('channel', 'row', 'col'))**0.5
#da_stims = da_stims/(da_stims**2).sum(('channel', 'row', 'col'))**0.5
da_sig = da_stims.dot(w_da)
da_noise = da_stims.dot(w_da_noise)
#%%
n_units = len(da_sig.coords['unit'].values)
unit_coords = list(product(range(n_units),range(n_units),))
mod_cor = xr.DataArray(np.zeros((n_units, n_units, 4, 2)),
dims=['unit_r', 'unit_c', 'vars', 'sn'],
coords=[range(n_units), range(n_units),
['cor', 'dyn', 'ori_ind', 'phase_ind'],
['s','n']])
sf_ind = 0
dim=('phase', 'ori')
for ind1, ind2 in tqdm((unit_coords)):
for i, da in enumerate([da_sig, da_noise]):
u1 = (da.isel(unit=ind1, sf=sf_ind).squeeze()**2).sum('phase')**0.5
u2 = (da.isel(unit=ind2, sf=sf_ind).squeeze()**2).sum('phase')**0.5
corr = auto_corr(u1, u2, dim=('ori'), pad=None)
r = np.max(np.real(corr))
mod_cor[ind1, ind2, 0, i] = r
mod_cor[ind1, ind2, 1, i] = u1.std()*u2.std()
mod_cor[ind1, ind2, 2:, i] = np.array(np.unravel_index(np.argmax(corr), corr.shape))
#%%
dfs = [mod_cor[...,i,:].to_dataframe(name=str(mod_cor.coords['vars'][i].values)).drop('vars', axis=1)
for i in range(len(mod_cor.coords['vars']))]
df = pd.concat(dfs, 1)
m_inds = np.array([np.array(a) for a in df.index.values])
drop_inds = m_inds[:,0]<m_inds[:,1]
df_d = df[drop_inds]
df_d = df_d.reorder_levels([2,0,1])
#%%
def fz(r):
return 0.5*(np.log((1+r)/(1-r)))
from scipy import stats
plt.figure(figsize=(4,3))
df = df_d.loc['s']
rs = []
for df in [df_d.loc['s'], df_d.loc['n']]:
r,p = (stats.spearmanr(df['dyn'], df['cor']**2))
rs.append(r)
plt.scatter(df['dyn'], df['cor']**2, s=1);plt.semilogx();
print(p)
plt.xlim(0.01, 1000)
plt.ylim(0,1.1)
plt.xlabel('Dynamic range')
plt.ylabel('$r^2_{ER}$')
plt.title('Trained $r=$' + str(np.round(rs[0],2)) +
', untrained $r=$' + str(np.round(rs[1],2)))
inds = []
plt.legend(['Trained', 'Untrained'])
df = df_d.loc['s']
for i, ind in enumerate([0,-12,-100, 0,-12, -100]):
if i<=2:
ranks = (df['cor'].rank() + df['dyn'].rank()).sort_values()[::-1]
else:
ranks = (df['cor'].rank() - df['dyn'].rank()).sort_values()[::-1]
u1, u2 = ranks.index.values[ind]
inds.append([u1,u2])
plt.scatter(df['dyn'][u1,u2], df['cor'][u1,u2]**2, s=10, c='r');plt.semilogx();
#%%
j=0
plt.figure(figsize=(3,8))
for ind in inds:
u1, u2 = ind
u1r = (da_sig.isel(unit=u1, sf=sf_ind).squeeze()**2).sum('phase')**0.5
u2r = (da_sig.isel(unit=u2, sf=sf_ind).squeeze()**2).sum('phase')**0.5
j+=1
plt.subplot(6, 2, j)
f = norm_im(w[u1])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str((u1r.var().values).round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
#plt.imshow(f.mean(0), cmap='gray')
j+=1
plt.subplot(6, 2, j)
f = norm_im(w[u2])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str((u2r.var().values).round(2)))
#plt.imshow(f.mean(0), cmap='gray')
plt.tight_layout()
plt.savefig('example_filters.pdf')
#%%
j=0
plt.figure(figsize=(3,8))
for ind in inds:
u1, u2 = ind
j+=1
plt.subplot(6, 2, j)
a = da_sig.isel(unit=u1, sf=sf_ind)
sta = (da_stims.isel(sf=sf_ind)*a).mean(('ori', 'phase'))
plt.imshow(norm_im(sta));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str(df['dyn'][u1,u2].round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
j+=1
plt.subplot(6, 2, j)
a = da_sig.isel(unit=u2, sf=sf_ind)
sta = (da_stims.isel(sf=sf_ind)*a).mean(('ori', 'phase'))
plt.imshow(norm_im(sta));plt.xticks([]);plt.yticks([])
plt.tight_layout()
#plt.savefig('example_filters.pdf')
#%%
j=0
plt.figure(figsize=(4,8))
xticks = np.linspace(0, 180, 5)
for ind in inds:
u1, u2 = ind
j+=1
plt.subplot(6, 2, j)
u1r = da_sig.isel(unit=u1, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u1r = (u1r**2).sum('phase')**0.5
u1r.plot.line(x='ori', add_legend=False);plt.title('');plt.gca().set_xticklabels([]);plt.xlabel('')
if j==1:
''#plt.legend(['0','90','180','270'], loc='lower right', title='Phase (deg)')
plt.gca().set_xticks(xticks)
if j==11:
plt.gca().set_xticklabels(np.round(xticks).astype(int))
plt.xlabel('Orientation (deg)')
plt.ylabel('Response')
#plt.imshow(f.mean(0), cmap='gray')
j+=1
plt.subplot(6, 2, j)
u2r = da_sig.isel(unit=u2, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u2r = (u2r**2).sum('phase')**0.5
u2r.plot.line(x='ori', add_legend=False);plt.title('');plt.gca().set_xticks(xticks)
plt.gca().set_xticklabels([]);plt.xlabel('');
plt.title('Dyn='+str(df['dyn'][u1,u2].round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
plt.tight_layout()
plt.savefig('example_filters_resp.pdf')
#%% matching stim for a given unit
for ind in inds:
plt.figure()
shifts = df.loc[u1,u2]
b_inds = np.unravel_index(np.argmax(u1r.values), np.shape(u1r))
u1_stim = da_stims.isel(sf=sf_ind, ori=b_inds[0], phase=b_inds[1])
s_stim = da_stims.roll({'phase':int(shifts['phase_ind']), 'ori':int(shifts['ori_ind'])})
u2_match_stim = s_stim.isel(sf=sf_ind, ori=b_inds[0], phase=b_inds[1])
plt.imshow(norm_im(u1_stim))
plt.figure()
plt.imshow(norm_im(u2_match_stim))
#%% now plot stim
from mpl_toolkits.axes_grid1 import ImageGrid
fig = plt.figure(figsize=(6., 6.))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols=(4, 8), # creates 2x2 grid of axes
axes_pad=0.05, # pad between axes in inch.
)
ims = da_stims.isel(sf=0).sel(ori=np.linspace(0, 180-180/8, 8),
phase=[0, 90, 180, 270], method='nearest').stack(c = ('phase', 'ori')).transpose('c', 'row', 'col', 'channel')
for i, ax, im in zip(range(len(grid)), grid, ims):
# Iterating over the grid returns the Axes.
ax.imshow(norm_im(im));ax.set_xticks([]);ax.set_yticks([]);
i,j = np.unravel_index(i, (4, 8))
if i==0:
ax.set_title(int(im.coords['c'].values.item()[1]))
if j==0:
ax.set_ylabel(int(im.coords['c'].values.item()[0]))
if i==0 and j==0:
ax.set_ylabel('phase (deg) \n' + str(int(im.coords['c'].values.item()[0])))
ax.set_title( 'ori (deg) \n' + str(int(im.coords['c'].values.item()[1])))
plt.savefig('example_stim_phase_ori.pdf')
#%%
from mpl_toolkits.axes_grid1 import ImageGrid
fig = plt.figure(figsize=(8., 3.))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols=(1, 8), # creates 2x2 grid of axes
axes_pad=0.05, # pad between axes in inch.
)
ims = da_stims.sel(ori=0,phase=0, method='nearest').transpose('sf', 'row', 'col', 'channel')
for i, ax, im in zip(range(len(grid)), grid, ims):
# Iterating over the grid returns the Axes.
ax.imshow(norm_im(im));ax.set_xticks([]);ax.set_yticks([]);
sf = im.coords['sf'].values.item()
ax.set_title(np.round(1/sf,1))
if i==0:
ax.set_title('Spatial Period\n ' + str(np.round(1/sf,1)) + str(' (pix)'))
plt.savefig('period.pdf')
#%%
plt.figure(figsize=(3,9))
plt.subplot(311)
f = norm_im(w[55])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Filter')
plt.subplot(312)
u1 = da_sig.isel(unit=55, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u1.plot.line(x='ori')
plt.title('Response')
plt.legend([0, 90, 180, 270], title='phase', loc='lower left')
plt.subplot(313)
(((u1**2)**0.5).mean('phase')).plot.line(x='ori')
plt.title('Avg. Response magnitude across phase')
plt.tight_layout()
| lum2 = lum | conditional_block |
dyn_cor_rel.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 3 15:56:13 2020
@author: dean
"""
#functions to generate stimuli given parameter set
import os, sys
from tqdm import tqdm
import math
import numpy as np
import matplotlib.pyplot as plt
import torch
import xarray as xr
from itertools import product
from torchvision import models
import torch.nn as nn
import pandas as pd
def norm(x, dim):
x = x - x.mean(dim)
x = x/(x**2).sum(dim)**0.5
return x
def cor(x, y, dim):
y = norm(y, dim)
x = norm(x, dim)
r = x.dot(y, dim)
return r
def rc(x):
reverse = (slice(None, None, -1),) * x.ndim
return x[reverse].conj()
def auto_corr(x, y, dim, pad=None):
xn = norm(x, dim);
yn = norm(y, dim);
if pad is None:
cor = np.fft.ifftn(np.fft.fftn(xn)*(np.fft.fftn(rc(yn))))
else:
xn = np.pad(xn, pad_width=pad)
yn = np.pad(yn, pad_width=pad)
cor = np.fft.ifftn(np.fft.fftn(xn)*(np.fft.fftn(rc(yn))))
return cor
def pol2cart(rho, phi):
x = rho * np.cos(phi)
y = rho * np.sin(phi)
return(x, y)
def sinusoid_2d(nx, ny, x_0, y_0, sf, ori, phase, bg=0):
x_coords = np.arange(0, nx, dtype=np.float64) - x_0
y_coords = np.arange(0, ny, dtype=np.float64) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
mu_0, nu_0 = pol2cart(sf, np.deg2rad(ori + 90))
s = np.sin(2*np.pi*(mu_0*xx + nu_0*yy) + np.deg2rad(phase + 90))
s = s + bg
return s
def | (radius, x_0, y_0, nx, ny):
x_coords = np.arange(0, nx, dtype=np.float128) - x_0
y_coords = np.arange(0, ny, dtype=np.float128) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
d = (xx**2 + yy**2)**0.5
w = np.zeros((int(nx), int(ny)))
w[d<=radius] = 1
return w
def cos_window(radius, x_0, y_0, nx, ny):
x_coords = np.arange(0, nx, dtype=np.float128) - x_0
y_coords = np.arange(0, ny, dtype=np.float128) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
d = (xx**2 + yy**2)**0.5
w = np.cos(d*np.pi*(1/radius)) + 1
w[d>radius] = 0
return w
def colorize(s, lum, by, rg):
s_c = s[..., np.newaxis]
l_by_rg = np.array([[1, 1, 1],
[1/3, 1/3, -2/3],
[1, -1, 0]]
)
l_by_rg = (l_by_rg/((l_by_rg**2).sum(1, keepdims=True)**0.5))
rgb = l_by_rg[0]*lum + l_by_rg[1]*by + l_by_rg[2]*rg
s_c = s_c*rgb[np.newaxis,np.newaxis]
return s_c
def sine_chrom(nx, ny, x_0, y_0, sf, ori, phase, lum, by, rg, bg=0):
s = sinusoid_2d(nx, ny, x_0, y_0, sf, ori, phase, bg=0)
s_c = colorize(s, lum, by, rg)
return scale_im(s_c)
def sine_chrom_dual(nx, ny, x_0, y_0, sf, ori, phase,
lum, by, rg, bg=0,
sf2=None, rel_ori=None, phase2=None,
lum2=None, by2=None, rg2=None, make_window=False, radius=None):
if sf2 is None:
sf2 = sf
if rel_ori is None:
rel_ori = 0
if phase2 is None:
phase2 = phase
if lum2 is None:
lum2 = lum
if by2 is None:
by2 = by
if rg2 is None:
rg2 = rg
s1 = sine_chrom(nx, ny, x_0, y_0, sf, ori, phase, lum, by, rg, bg=0)
s2 = sine_chrom(nx, ny, x_0, y_0, sf2, ori+rel_ori, phase2,
lum2, by2, rg2)
s = s1+s2
if make_window:
w = window(radius, x_0, y_0, nx, ny)
s = w[..., np.newaxis]*s
return s
def norm_im(im):
im = im - im.min()
im = im/im.max()
return im
def scale_im(im):
im = im - im.min()
im = 2*im/im.max()
im = im-1
return im
mod = models.alexnet(pretrained=True).features[:1]
w = list(mod.parameters())[0].detach().numpy()
w_da = xr.DataArray(w, dims=('unit', 'channel', 'row', 'col'))
n_units = w.shape[0]
w_da_noise = w_da.copy(deep=True)
w_da_noise[...] = np.random.normal(size=w_da.shape, scale=0.1)
#%%
nx = ny = 11
stims = []
ori = list(np.linspace(0, 180-180/64, 64))
phase = list(np.linspace(0, 360-360/8, 8))
sf = list(np.logspace(np.log10(0.1), np.log10(.25), 8))
contrast = [1,]
lbr = [1,0,0]
make_window = False
param_nms = ['ori', 'sf', 'phase']
params = [ori, sf, phase]
for i, p in enumerate(params):
if not type(p) is list:
params[i] = [p,]
cart_prod_params = np.array(list(product(*params)))
da = xr.DataArray(np.zeros(tuple(len(p) for p in params )),
dims=param_nms,
coords=params )
da_stims = da.squeeze(drop=True).expand_dims({'row':range(11),
'col':range(11),
'channel':range(3)})
da_stims = da_stims.transpose('ori', 'sf', 'phase', 'row', 'col', 'channel').copy()
x_0 = y_0 = 5
stim =[]
for p in (cart_prod_params):
#plt.figure()
ori, sf, phase = p
im = sine_chrom_dual(nx, ny, x_0, y_0, sf, ori, phase,
lbr[0], lbr[1], lbr[2], bg=0,
sf2=sf, rel_ori=0, phase2=phase,
lum2=lbr[0], by2=lbr[1], rg2=lbr[2],
make_window=make_window)
#plt.imshow(norm_im(w[...,np.newaxis]*im))
stim.append(im)
stims.append(stim)
stims = np.array(stims).squeeze()
rs = []
for stim, param in zip(stims, cart_prod_params):
ori, sf, phase = param
da_stims.loc[ori, sf, phase] = stim.copy()
#%%
w_da = w_da/(w_da**2).sum(('channel', 'row', 'col'))**0.5
#da_stims = da_stims/(da_stims**2).sum(('channel', 'row', 'col'))**0.5
da_sig = da_stims.dot(w_da)
da_noise = da_stims.dot(w_da_noise)
#%%
n_units = len(da_sig.coords['unit'].values)
unit_coords = list(product(range(n_units),range(n_units),))
mod_cor = xr.DataArray(np.zeros((n_units, n_units, 4, 2)),
dims=['unit_r', 'unit_c', 'vars', 'sn'],
coords=[range(n_units), range(n_units),
['cor', 'dyn', 'ori_ind', 'phase_ind'],
['s','n']])
sf_ind = 0
dim=('phase', 'ori')
for ind1, ind2 in tqdm((unit_coords)):
for i, da in enumerate([da_sig, da_noise]):
u1 = (da.isel(unit=ind1, sf=sf_ind).squeeze()**2).sum('phase')**0.5
u2 = (da.isel(unit=ind2, sf=sf_ind).squeeze()**2).sum('phase')**0.5
corr = auto_corr(u1, u2, dim=('ori'), pad=None)
r = np.max(np.real(corr))
mod_cor[ind1, ind2, 0, i] = r
mod_cor[ind1, ind2, 1, i] = u1.std()*u2.std()
mod_cor[ind1, ind2, 2:, i] = np.array(np.unravel_index(np.argmax(corr), corr.shape))
#%%
dfs = [mod_cor[...,i,:].to_dataframe(name=str(mod_cor.coords['vars'][i].values)).drop('vars', axis=1)
for i in range(len(mod_cor.coords['vars']))]
df = pd.concat(dfs, 1)
m_inds = np.array([np.array(a) for a in df.index.values])
drop_inds = m_inds[:,0]<m_inds[:,1]
df_d = df[drop_inds]
df_d = df_d.reorder_levels([2,0,1])
#%%
def fz(r):
return 0.5*(np.log((1+r)/(1-r)))
from scipy import stats
plt.figure(figsize=(4,3))
df = df_d.loc['s']
rs = []
for df in [df_d.loc['s'], df_d.loc['n']]:
r,p = (stats.spearmanr(df['dyn'], df['cor']**2))
rs.append(r)
plt.scatter(df['dyn'], df['cor']**2, s=1);plt.semilogx();
print(p)
plt.xlim(0.01, 1000)
plt.ylim(0,1.1)
plt.xlabel('Dynamic range')
plt.ylabel('$r^2_{ER}$')
plt.title('Trained $r=$' + str(np.round(rs[0],2)) +
', untrained $r=$' + str(np.round(rs[1],2)))
inds = []
plt.legend(['Trained', 'Untrained'])
df = df_d.loc['s']
for i, ind in enumerate([0,-12,-100, 0,-12, -100]):
if i<=2:
ranks = (df['cor'].rank() + df['dyn'].rank()).sort_values()[::-1]
else:
ranks = (df['cor'].rank() - df['dyn'].rank()).sort_values()[::-1]
u1, u2 = ranks.index.values[ind]
inds.append([u1,u2])
plt.scatter(df['dyn'][u1,u2], df['cor'][u1,u2]**2, s=10, c='r');plt.semilogx();
#%%
j=0
plt.figure(figsize=(3,8))
for ind in inds:
u1, u2 = ind
u1r = (da_sig.isel(unit=u1, sf=sf_ind).squeeze()**2).sum('phase')**0.5
u2r = (da_sig.isel(unit=u2, sf=sf_ind).squeeze()**2).sum('phase')**0.5
j+=1
plt.subplot(6, 2, j)
f = norm_im(w[u1])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str((u1r.var().values).round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
#plt.imshow(f.mean(0), cmap='gray')
j+=1
plt.subplot(6, 2, j)
f = norm_im(w[u2])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str((u2r.var().values).round(2)))
#plt.imshow(f.mean(0), cmap='gray')
plt.tight_layout()
plt.savefig('example_filters.pdf')
#%%
j=0
plt.figure(figsize=(3,8))
for ind in inds:
u1, u2 = ind
j+=1
plt.subplot(6, 2, j)
a = da_sig.isel(unit=u1, sf=sf_ind)
sta = (da_stims.isel(sf=sf_ind)*a).mean(('ori', 'phase'))
plt.imshow(norm_im(sta));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str(df['dyn'][u1,u2].round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
j+=1
plt.subplot(6, 2, j)
a = da_sig.isel(unit=u2, sf=sf_ind)
sta = (da_stims.isel(sf=sf_ind)*a).mean(('ori', 'phase'))
plt.imshow(norm_im(sta));plt.xticks([]);plt.yticks([])
plt.tight_layout()
#plt.savefig('example_filters.pdf')
#%%
j=0
plt.figure(figsize=(4,8))
xticks = np.linspace(0, 180, 5)
for ind in inds:
u1, u2 = ind
j+=1
plt.subplot(6, 2, j)
u1r = da_sig.isel(unit=u1, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u1r = (u1r**2).sum('phase')**0.5
u1r.plot.line(x='ori', add_legend=False);plt.title('');plt.gca().set_xticklabels([]);plt.xlabel('')
if j==1:
''#plt.legend(['0','90','180','270'], loc='lower right', title='Phase (deg)')
plt.gca().set_xticks(xticks)
if j==11:
plt.gca().set_xticklabels(np.round(xticks).astype(int))
plt.xlabel('Orientation (deg)')
plt.ylabel('Response')
#plt.imshow(f.mean(0), cmap='gray')
j+=1
plt.subplot(6, 2, j)
u2r = da_sig.isel(unit=u2, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u2r = (u2r**2).sum('phase')**0.5
u2r.plot.line(x='ori', add_legend=False);plt.title('');plt.gca().set_xticks(xticks)
plt.gca().set_xticklabels([]);plt.xlabel('');
plt.title('Dyn='+str(df['dyn'][u1,u2].round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
plt.tight_layout()
plt.savefig('example_filters_resp.pdf')
#%% matching stim for a given unit
for ind in inds:
plt.figure()
shifts = df.loc[u1,u2]
b_inds = np.unravel_index(np.argmax(u1r.values), np.shape(u1r))
u1_stim = da_stims.isel(sf=sf_ind, ori=b_inds[0], phase=b_inds[1])
s_stim = da_stims.roll({'phase':int(shifts['phase_ind']), 'ori':int(shifts['ori_ind'])})
u2_match_stim = s_stim.isel(sf=sf_ind, ori=b_inds[0], phase=b_inds[1])
plt.imshow(norm_im(u1_stim))
plt.figure()
plt.imshow(norm_im(u2_match_stim))
#%% now plot stim
from mpl_toolkits.axes_grid1 import ImageGrid
fig = plt.figure(figsize=(6., 6.))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols=(4, 8), # creates 2x2 grid of axes
axes_pad=0.05, # pad between axes in inch.
)
ims = da_stims.isel(sf=0).sel(ori=np.linspace(0, 180-180/8, 8),
phase=[0, 90, 180, 270], method='nearest').stack(c = ('phase', 'ori')).transpose('c', 'row', 'col', 'channel')
for i, ax, im in zip(range(len(grid)), grid, ims):
# Iterating over the grid returns the Axes.
ax.imshow(norm_im(im));ax.set_xticks([]);ax.set_yticks([]);
i,j = np.unravel_index(i, (4, 8))
if i==0:
ax.set_title(int(im.coords['c'].values.item()[1]))
if j==0:
ax.set_ylabel(int(im.coords['c'].values.item()[0]))
if i==0 and j==0:
ax.set_ylabel('phase (deg) \n' + str(int(im.coords['c'].values.item()[0])))
ax.set_title( 'ori (deg) \n' + str(int(im.coords['c'].values.item()[1])))
plt.savefig('example_stim_phase_ori.pdf')
#%%
from mpl_toolkits.axes_grid1 import ImageGrid
fig = plt.figure(figsize=(8., 3.))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols=(1, 8), # creates 2x2 grid of axes
axes_pad=0.05, # pad between axes in inch.
)
ims = da_stims.sel(ori=0,phase=0, method='nearest').transpose('sf', 'row', 'col', 'channel')
for i, ax, im in zip(range(len(grid)), grid, ims):
# Iterating over the grid returns the Axes.
ax.imshow(norm_im(im));ax.set_xticks([]);ax.set_yticks([]);
sf = im.coords['sf'].values.item()
ax.set_title(np.round(1/sf,1))
if i==0:
ax.set_title('Spatial Period\n ' + str(np.round(1/sf,1)) + str(' (pix)'))
plt.savefig('period.pdf')
#%%
plt.figure(figsize=(3,9))
plt.subplot(311)
f = norm_im(w[55])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Filter')
plt.subplot(312)
u1 = da_sig.isel(unit=55, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u1.plot.line(x='ori')
plt.title('Response')
plt.legend([0, 90, 180, 270], title='phase', loc='lower left')
plt.subplot(313)
(((u1**2)**0.5).mean('phase')).plot.line(x='ori')
plt.title('Avg. Response magnitude across phase')
plt.tight_layout()
| window | identifier_name |
dyn_cor_rel.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 3 15:56:13 2020
@author: dean
"""
#functions to generate stimuli given parameter set
import os, sys
from tqdm import tqdm
import math
import numpy as np
import matplotlib.pyplot as plt
import torch
import xarray as xr
from itertools import product
from torchvision import models
import torch.nn as nn
import pandas as pd
def norm(x, dim):
x = x - x.mean(dim)
x = x/(x**2).sum(dim)**0.5
return x
def cor(x, y, dim):
y = norm(y, dim)
x = norm(x, dim)
r = x.dot(y, dim)
return r
def rc(x):
reverse = (slice(None, None, -1),) * x.ndim
return x[reverse].conj()
def auto_corr(x, y, dim, pad=None):
xn = norm(x, dim);
yn = norm(y, dim);
if pad is None:
cor = np.fft.ifftn(np.fft.fftn(xn)*(np.fft.fftn(rc(yn))))
else:
xn = np.pad(xn, pad_width=pad)
yn = np.pad(yn, pad_width=pad)
cor = np.fft.ifftn(np.fft.fftn(xn)*(np.fft.fftn(rc(yn))))
return cor
def pol2cart(rho, phi):
x = rho * np.cos(phi)
y = rho * np.sin(phi)
return(x, y)
def sinusoid_2d(nx, ny, x_0, y_0, sf, ori, phase, bg=0):
|
def window(radius, x_0, y_0, nx, ny):
x_coords = np.arange(0, nx, dtype=np.float128) - x_0
y_coords = np.arange(0, ny, dtype=np.float128) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
d = (xx**2 + yy**2)**0.5
w = np.zeros((int(nx), int(ny)))
w[d<=radius] = 1
return w
def cos_window(radius, x_0, y_0, nx, ny):
x_coords = np.arange(0, nx, dtype=np.float128) - x_0
y_coords = np.arange(0, ny, dtype=np.float128) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
d = (xx**2 + yy**2)**0.5
w = np.cos(d*np.pi*(1/radius)) + 1
w[d>radius] = 0
return w
def colorize(s, lum, by, rg):
s_c = s[..., np.newaxis]
l_by_rg = np.array([[1, 1, 1],
[1/3, 1/3, -2/3],
[1, -1, 0]]
)
l_by_rg = (l_by_rg/((l_by_rg**2).sum(1, keepdims=True)**0.5))
rgb = l_by_rg[0]*lum + l_by_rg[1]*by + l_by_rg[2]*rg
s_c = s_c*rgb[np.newaxis,np.newaxis]
return s_c
def sine_chrom(nx, ny, x_0, y_0, sf, ori, phase, lum, by, rg, bg=0):
s = sinusoid_2d(nx, ny, x_0, y_0, sf, ori, phase, bg=0)
s_c = colorize(s, lum, by, rg)
return scale_im(s_c)
def sine_chrom_dual(nx, ny, x_0, y_0, sf, ori, phase,
lum, by, rg, bg=0,
sf2=None, rel_ori=None, phase2=None,
lum2=None, by2=None, rg2=None, make_window=False, radius=None):
if sf2 is None:
sf2 = sf
if rel_ori is None:
rel_ori = 0
if phase2 is None:
phase2 = phase
if lum2 is None:
lum2 = lum
if by2 is None:
by2 = by
if rg2 is None:
rg2 = rg
s1 = sine_chrom(nx, ny, x_0, y_0, sf, ori, phase, lum, by, rg, bg=0)
s2 = sine_chrom(nx, ny, x_0, y_0, sf2, ori+rel_ori, phase2,
lum2, by2, rg2)
s = s1+s2
if make_window:
w = window(radius, x_0, y_0, nx, ny)
s = w[..., np.newaxis]*s
return s
def norm_im(im):
im = im - im.min()
im = im/im.max()
return im
def scale_im(im):
im = im - im.min()
im = 2*im/im.max()
im = im-1
return im
mod = models.alexnet(pretrained=True).features[:1]
w = list(mod.parameters())[0].detach().numpy()
w_da = xr.DataArray(w, dims=('unit', 'channel', 'row', 'col'))
n_units = w.shape[0]
w_da_noise = w_da.copy(deep=True)
w_da_noise[...] = np.random.normal(size=w_da.shape, scale=0.1)
#%%
nx = ny = 11
stims = []
ori = list(np.linspace(0, 180-180/64, 64))
phase = list(np.linspace(0, 360-360/8, 8))
sf = list(np.logspace(np.log10(0.1), np.log10(.25), 8))
contrast = [1,]
lbr = [1,0,0]
make_window = False
param_nms = ['ori', 'sf', 'phase']
params = [ori, sf, phase]
for i, p in enumerate(params):
if not type(p) is list:
params[i] = [p,]
cart_prod_params = np.array(list(product(*params)))
da = xr.DataArray(np.zeros(tuple(len(p) for p in params )),
dims=param_nms,
coords=params )
da_stims = da.squeeze(drop=True).expand_dims({'row':range(11),
'col':range(11),
'channel':range(3)})
da_stims = da_stims.transpose('ori', 'sf', 'phase', 'row', 'col', 'channel').copy()
x_0 = y_0 = 5
stim =[]
for p in (cart_prod_params):
#plt.figure()
ori, sf, phase = p
im = sine_chrom_dual(nx, ny, x_0, y_0, sf, ori, phase,
lbr[0], lbr[1], lbr[2], bg=0,
sf2=sf, rel_ori=0, phase2=phase,
lum2=lbr[0], by2=lbr[1], rg2=lbr[2],
make_window=make_window)
#plt.imshow(norm_im(w[...,np.newaxis]*im))
stim.append(im)
stims.append(stim)
stims = np.array(stims).squeeze()
rs = []
for stim, param in zip(stims, cart_prod_params):
ori, sf, phase = param
da_stims.loc[ori, sf, phase] = stim.copy()
#%%
w_da = w_da/(w_da**2).sum(('channel', 'row', 'col'))**0.5
#da_stims = da_stims/(da_stims**2).sum(('channel', 'row', 'col'))**0.5
da_sig = da_stims.dot(w_da)
da_noise = da_stims.dot(w_da_noise)
#%%
n_units = len(da_sig.coords['unit'].values)
unit_coords = list(product(range(n_units),range(n_units),))
mod_cor = xr.DataArray(np.zeros((n_units, n_units, 4, 2)),
dims=['unit_r', 'unit_c', 'vars', 'sn'],
coords=[range(n_units), range(n_units),
['cor', 'dyn', 'ori_ind', 'phase_ind'],
['s','n']])
sf_ind = 0
dim=('phase', 'ori')
for ind1, ind2 in tqdm((unit_coords)):
for i, da in enumerate([da_sig, da_noise]):
u1 = (da.isel(unit=ind1, sf=sf_ind).squeeze()**2).sum('phase')**0.5
u2 = (da.isel(unit=ind2, sf=sf_ind).squeeze()**2).sum('phase')**0.5
corr = auto_corr(u1, u2, dim=('ori'), pad=None)
r = np.max(np.real(corr))
mod_cor[ind1, ind2, 0, i] = r
mod_cor[ind1, ind2, 1, i] = u1.std()*u2.std()
mod_cor[ind1, ind2, 2:, i] = np.array(np.unravel_index(np.argmax(corr), corr.shape))
#%%
dfs = [mod_cor[...,i,:].to_dataframe(name=str(mod_cor.coords['vars'][i].values)).drop('vars', axis=1)
for i in range(len(mod_cor.coords['vars']))]
df = pd.concat(dfs, 1)
m_inds = np.array([np.array(a) for a in df.index.values])
drop_inds = m_inds[:,0]<m_inds[:,1]
df_d = df[drop_inds]
df_d = df_d.reorder_levels([2,0,1])
#%%
def fz(r):
return 0.5*(np.log((1+r)/(1-r)))
from scipy import stats
plt.figure(figsize=(4,3))
df = df_d.loc['s']
rs = []
for df in [df_d.loc['s'], df_d.loc['n']]:
r,p = (stats.spearmanr(df['dyn'], df['cor']**2))
rs.append(r)
plt.scatter(df['dyn'], df['cor']**2, s=1);plt.semilogx();
print(p)
plt.xlim(0.01, 1000)
plt.ylim(0,1.1)
plt.xlabel('Dynamic range')
plt.ylabel('$r^2_{ER}$')
plt.title('Trained $r=$' + str(np.round(rs[0],2)) +
', untrained $r=$' + str(np.round(rs[1],2)))
inds = []
plt.legend(['Trained', 'Untrained'])
df = df_d.loc['s']
for i, ind in enumerate([0,-12,-100, 0,-12, -100]):
if i<=2:
ranks = (df['cor'].rank() + df['dyn'].rank()).sort_values()[::-1]
else:
ranks = (df['cor'].rank() - df['dyn'].rank()).sort_values()[::-1]
u1, u2 = ranks.index.values[ind]
inds.append([u1,u2])
plt.scatter(df['dyn'][u1,u2], df['cor'][u1,u2]**2, s=10, c='r');plt.semilogx();
#%%
j=0
plt.figure(figsize=(3,8))
for ind in inds:
u1, u2 = ind
u1r = (da_sig.isel(unit=u1, sf=sf_ind).squeeze()**2).sum('phase')**0.5
u2r = (da_sig.isel(unit=u2, sf=sf_ind).squeeze()**2).sum('phase')**0.5
j+=1
plt.subplot(6, 2, j)
f = norm_im(w[u1])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str((u1r.var().values).round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
#plt.imshow(f.mean(0), cmap='gray')
j+=1
plt.subplot(6, 2, j)
f = norm_im(w[u2])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str((u2r.var().values).round(2)))
#plt.imshow(f.mean(0), cmap='gray')
plt.tight_layout()
plt.savefig('example_filters.pdf')
#%%
j=0
plt.figure(figsize=(3,8))
for ind in inds:
u1, u2 = ind
j+=1
plt.subplot(6, 2, j)
a = da_sig.isel(unit=u1, sf=sf_ind)
sta = (da_stims.isel(sf=sf_ind)*a).mean(('ori', 'phase'))
plt.imshow(norm_im(sta));plt.xticks([]);plt.yticks([])
plt.title('Dyn='+str(df['dyn'][u1,u2].round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
j+=1
plt.subplot(6, 2, j)
a = da_sig.isel(unit=u2, sf=sf_ind)
sta = (da_stims.isel(sf=sf_ind)*a).mean(('ori', 'phase'))
plt.imshow(norm_im(sta));plt.xticks([]);plt.yticks([])
plt.tight_layout()
#plt.savefig('example_filters.pdf')
#%%
j=0
plt.figure(figsize=(4,8))
xticks = np.linspace(0, 180, 5)
for ind in inds:
u1, u2 = ind
j+=1
plt.subplot(6, 2, j)
u1r = da_sig.isel(unit=u1, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u1r = (u1r**2).sum('phase')**0.5
u1r.plot.line(x='ori', add_legend=False);plt.title('');plt.gca().set_xticklabels([]);plt.xlabel('')
if j==1:
''#plt.legend(['0','90','180','270'], loc='lower right', title='Phase (deg)')
plt.gca().set_xticks(xticks)
if j==11:
plt.gca().set_xticklabels(np.round(xticks).astype(int))
plt.xlabel('Orientation (deg)')
plt.ylabel('Response')
#plt.imshow(f.mean(0), cmap='gray')
j+=1
plt.subplot(6, 2, j)
u2r = da_sig.isel(unit=u2, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u2r = (u2r**2).sum('phase')**0.5
u2r.plot.line(x='ori', add_legend=False);plt.title('');plt.gca().set_xticks(xticks)
plt.gca().set_xticklabels([]);plt.xlabel('');
plt.title('Dyn='+str(df['dyn'][u1,u2].round(2)) +
', $r_{ER}^2$=' + str((df['cor'][u1,u2]**2).round(2)))
plt.tight_layout()
plt.savefig('example_filters_resp.pdf')
#%% matching stim for a given unit
for ind in inds:
plt.figure()
shifts = df.loc[u1,u2]
b_inds = np.unravel_index(np.argmax(u1r.values), np.shape(u1r))
u1_stim = da_stims.isel(sf=sf_ind, ori=b_inds[0], phase=b_inds[1])
s_stim = da_stims.roll({'phase':int(shifts['phase_ind']), 'ori':int(shifts['ori_ind'])})
u2_match_stim = s_stim.isel(sf=sf_ind, ori=b_inds[0], phase=b_inds[1])
plt.imshow(norm_im(u1_stim))
plt.figure()
plt.imshow(norm_im(u2_match_stim))
#%% now plot stim
from mpl_toolkits.axes_grid1 import ImageGrid
fig = plt.figure(figsize=(6., 6.))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols=(4, 8), # creates 2x2 grid of axes
axes_pad=0.05, # pad between axes in inch.
)
ims = da_stims.isel(sf=0).sel(ori=np.linspace(0, 180-180/8, 8),
phase=[0, 90, 180, 270], method='nearest').stack(c = ('phase', 'ori')).transpose('c', 'row', 'col', 'channel')
for i, ax, im in zip(range(len(grid)), grid, ims):
# Iterating over the grid returns the Axes.
ax.imshow(norm_im(im));ax.set_xticks([]);ax.set_yticks([]);
i,j = np.unravel_index(i, (4, 8))
if i==0:
ax.set_title(int(im.coords['c'].values.item()[1]))
if j==0:
ax.set_ylabel(int(im.coords['c'].values.item()[0]))
if i==0 and j==0:
ax.set_ylabel('phase (deg) \n' + str(int(im.coords['c'].values.item()[0])))
ax.set_title( 'ori (deg) \n' + str(int(im.coords['c'].values.item()[1])))
plt.savefig('example_stim_phase_ori.pdf')
#%%
from mpl_toolkits.axes_grid1 import ImageGrid
fig = plt.figure(figsize=(8., 3.))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols=(1, 8), # creates 2x2 grid of axes
axes_pad=0.05, # pad between axes in inch.
)
ims = da_stims.sel(ori=0,phase=0, method='nearest').transpose('sf', 'row', 'col', 'channel')
for i, ax, im in zip(range(len(grid)), grid, ims):
# Iterating over the grid returns the Axes.
ax.imshow(norm_im(im));ax.set_xticks([]);ax.set_yticks([]);
sf = im.coords['sf'].values.item()
ax.set_title(np.round(1/sf,1))
if i==0:
ax.set_title('Spatial Period\n ' + str(np.round(1/sf,1)) + str(' (pix)'))
plt.savefig('period.pdf')
#%%
plt.figure(figsize=(3,9))
plt.subplot(311)
f = norm_im(w[55])
plt.imshow(np.transpose(f, (1,2,0)));plt.xticks([]);plt.yticks([])
plt.title('Filter')
plt.subplot(312)
u1 = da_sig.isel(unit=55, sf=sf_ind).sel( phase=[0, 90, 180, 270], method='nearest')
u1.plot.line(x='ori')
plt.title('Response')
plt.legend([0, 90, 180, 270], title='phase', loc='lower left')
plt.subplot(313)
(((u1**2)**0.5).mean('phase')).plot.line(x='ori')
plt.title('Avg. Response magnitude across phase')
plt.tight_layout()
| x_coords = np.arange(0, nx, dtype=np.float64) - x_0
y_coords = np.arange(0, ny, dtype=np.float64) - y_0
xx, yy = np.meshgrid(x_coords, y_coords)
mu_0, nu_0 = pol2cart(sf, np.deg2rad(ori + 90))
s = np.sin(2*np.pi*(mu_0*xx + nu_0*yy) + np.deg2rad(phase + 90))
s = s + bg
return s | identifier_body |
Run_all_models_modified.py | from __future__ import print_function
import os
import sys
import logging
import numpy as np
import matplotlib.pyplot as plt
from time import time
from optparse import OptionParser
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_selection import SelectFromModel
from sklearn.feature_selection import SelectKBest
from sklearn.linear_model import RidgeClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.linear_model import Perceptron
from sklearn.linear_model import PassiveAggressiveClassifier
from sklearn.naive_bayes import BernoulliNB
from sklearn.naive_bayes import ComplementNB
from sklearn.naive_bayes import MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestCentroid
from sklearn.pipeline import Pipeline
from sklearn.svm import LinearSVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn import metrics
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
# output logs to stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
# show how to call hashing function
op = OptionParser()
op.add_option("--use_hashing",
action="store_true",
help="Use a hashing vectorizer.")
op.add_option("--n_features",
action="store", type=int, default=2 ** 16,
help="n_features when using the hashing vectorizer.")
def is_interactive():
return not hasattr(sys.modules['__main__'], '__file__')
argv = [] if is_interactive() else sys.argv[1:]
(opts, args) = op.parse_args(argv)
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
print(__doc__)
op.print_help()
print()
#============================================================================
# Read custom data
#============================================================================
polarity = []
user_tweets = []
balance_classes = 1 # if 0, it only trims class 4, else trims all classes to size cls_num_ext
cls_0_num = 0
cls_1_num = 0
cls_2_num = 0
cls_3_num = 0
cls_4_num = 0
cls_num_ext = 500
num_ext = 0
with open("Categorized_User_Polarity.txt") as inp:
for line in inp:
values = line.split("\t") # id, polarity
user_id = values[0]
user_file = "tokens_lines_test/" + user_id
if os.path.isfile(user_file): # not all user IDs had tweets in the master tweet file
if not balance_classes:
if int(values[1]) == 4:
if num_ext < 2000:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
num_ext += 1
else:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
else:
if int(values[1]) == 0:
if cls_0_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_0_num += 1
if int(values[1]) == 0:
if cls_1_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_1_num += 1
if int(values[1]) == 0:
if cls_2_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_2_num += 1
if int(values[1]) == 0:
if cls_3_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_3_num += 1
else: # class 4
if cls_4_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_4_num += 1
#============================================================================
# Split into training and testing sets
#============================================================================
# X y % data used for testing
raw_X_train, raw_X_test, raw_y_train, raw_y_test = train_test_split(user_tweets, polarity, test_size=0.2)
categories = ['far_left','mid_left','neutral','mid_right','far_right'] # for 5 classes
data_train = Bunch()
data_train.data = raw_X_train
data_train.target_names = ['far_left','mid_left','neutral','mid_right','far_right'] # for 5 classes
data_train.target = raw_y_train
data_test = Bunch()
data_test.data = raw_X_test
data_test.target = raw_y_test
target_names = data_train.target_names # Note: order of labels in `target_names` can be different from `categories`
print('data loaded')
#============================================================================
# Create target vectors
#============================================================================
y_train, y_test = data_train.target, data_test.target
init_time = time()
if opts.use_hashing:
print("Using hashing vectorizer")
vectorizer = HashingVectorizer(stop_words='english', alternate_sign=False, n_features=opts.n_features)
X_train = vectorizer.transform(data_train.data)
else:
print("Using tfidf vectorizer")
vectorizer = TfidfVectorizer(sublinear_tf=True, max_df=0.5, stop_words='english')
X_train = vectorizer.fit_transform(data_train.data)
print("n_samples: %d, n_features: %d" % X_train.shape)
print()
print("Extracting features from the test set")
init_time = time()
X_test = vectorizer.transform(data_test.data)
print("n_samples: %d, n_features: %d" % X_test.shape)
print()
# mapping from word to a string of tokens
if opts.use_hashing:
names_of_features = None
else:
names_of_features = vectorizer.get_feature_names()
if names_of_features:
names_of_features = np.asarray(names_of_features)
def trim(s):
|
#============================================================================
# Benchmark classifiers
#============================================================================
def benchmark(clf):
print('_' * 80)
print("Starting training: ")
print(clf)
init_time = time()
clf.fit(X_train, y_train)
time_to_train = time() - init_time
print("train time: %0.3fs" % time_to_train)
print("Starting testing: ")
init_time = time()
prediction = clf.predict(X_test)
time_to_test = time() - init_time
print("test time: %0.3fs" % time_to_test)
print("Scoring the model: ")
accuracy = metrics.accuracy_score(y_test, prediction)
print("accuracy: %0.3f" % accuracy)
clf_descr = str(clf).split('(')[0]
return clf_descr, accuracy, time_to_train, time_to_test
all_model_results = []
for clf, name in (
(RidgeClassifier(tol=1e-2, solver="sag"), "Ridge Classifier"),
(Perceptron(max_iter=50, tol=1e-3), "Perceptron"),
(PassiveAggressiveClassifier(max_iter=50, tol=1e-3), "Passive-Aggressive"),
(KNeighborsClassifier(n_neighbors=10), "kNN"),
(RandomForestClassifier(n_estimators=100), "Random forest")):
print('=' * 80)
print(name)
all_model_results.append(benchmark(clf))
for penalty in ["l2", "l1"]:
print('=' * 80)
print("%s penalty" % penalty.upper())
all_model_results.append(benchmark(LinearSVC(penalty=penalty, dual=False, tol=1e-3))) # Create and train lib-linear models
all_model_results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50, penalty=penalty))) # Create and train stochastic gradient models
print('=' * 80)
print("SGD with Elastic-Net")
all_model_results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50, penalty="elasticnet"))) # Create and train SGD w/ elastic penalty
print('=' * 80)
print("Nearest Centroid")
all_model_results.append(benchmark(NearestCentroid())) # Train NearestCentroid without threshold
print('=' * 80)
print("Naive Bayes (multinomial, bernoulli, and complement)")
all_model_results.append(benchmark(MultinomialNB(alpha=.01))) # Train sparse Naive Bayes classifiers
all_model_results.append(benchmark(BernoulliNB(alpha=.01)))
all_model_results.append(benchmark(ComplementNB(alpha=.1)))
print('=' * 80)
print("Linear SVC with l1")
all_model_results.append(benchmark(Pipeline([('feature_selection', SelectFromModel(LinearSVC(penalty="l1", dual=False, tol=1e-3))),
('classification', LinearSVC(penalty="l2"))])))
# ==============================
# Plotting Accuracies Of All Models
# ==============================
idx = np.arange(len(all_model_results))
all_model_results = [[x[i] for x in all_model_results] for i in range(4)]
all_clfs, accuracy, time_for_training, time_to_test = all_model_results
time_for_training = np.array(time_for_training) / np.max(time_for_training)
time_to_test = np.array(time_to_test) / np.max(time_to_test)
plt.figure(figsize=(10, 10))
plt.title("Classifier Accuracy")
plt.barh(idx, accuracy, .2, label="Accuracy", color='navy')
plt.barh(idx + .3, time_for_training, .2, label="Training Time", color='red')
plt.barh(idx + .6, time_to_test, .2, label="Test Time", color='darkorange')
plt.yticks(())
plt.legend(loc='best')
plt.subplots_adjust(left=.25)
plt.subplots_adjust(top=.95)
plt.subplots_adjust(bottom=.05)
for i, c in zip(idx, all_clfs):
plt.text(-.3, i, c)
plt.show()
| return s if len(s) <= 80 else s[:77] + "..." | identifier_body |
Run_all_models_modified.py | from __future__ import print_function
import os
import sys
import logging
import numpy as np
import matplotlib.pyplot as plt
from time import time
from optparse import OptionParser
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_selection import SelectFromModel
from sklearn.feature_selection import SelectKBest
from sklearn.linear_model import RidgeClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.linear_model import Perceptron
from sklearn.linear_model import PassiveAggressiveClassifier
from sklearn.naive_bayes import BernoulliNB
from sklearn.naive_bayes import ComplementNB
from sklearn.naive_bayes import MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestCentroid
from sklearn.pipeline import Pipeline
from sklearn.svm import LinearSVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn import metrics
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
# output logs to stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
# show how to call hashing function
op = OptionParser()
op.add_option("--use_hashing",
action="store_true",
help="Use a hashing vectorizer.")
op.add_option("--n_features",
action="store", type=int, default=2 ** 16,
help="n_features when using the hashing vectorizer.")
def is_interactive():
return not hasattr(sys.modules['__main__'], '__file__')
argv = [] if is_interactive() else sys.argv[1:]
(opts, args) = op.parse_args(argv)
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
print(__doc__)
op.print_help()
print()
#============================================================================
# Read custom data
#============================================================================
polarity = []
user_tweets = []
balance_classes = 1 # if 0, it only trims class 4, else trims all classes to size cls_num_ext
cls_0_num = 0
cls_1_num = 0
cls_2_num = 0
cls_3_num = 0
cls_4_num = 0
cls_num_ext = 500
num_ext = 0
with open("Categorized_User_Polarity.txt") as inp:
for line in inp:
values = line.split("\t") # id, polarity
user_id = values[0]
user_file = "tokens_lines_test/" + user_id
if os.path.isfile(user_file): # not all user IDs had tweets in the master tweet file
if not balance_classes:
if int(values[1]) == 4:
if num_ext < 2000:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
num_ext += 1
else:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
else:
if int(values[1]) == 0:
if cls_0_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_0_num += 1
if int(values[1]) == 0:
if cls_1_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_1_num += 1
if int(values[1]) == 0:
if cls_2_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_2_num += 1
if int(values[1]) == 0:
if cls_3_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_3_num += 1
else: # class 4
if cls_4_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_4_num += 1
#============================================================================
# Split into training and testing sets
#============================================================================
# X y % data used for testing
raw_X_train, raw_X_test, raw_y_train, raw_y_test = train_test_split(user_tweets, polarity, test_size=0.2)
categories = ['far_left','mid_left','neutral','mid_right','far_right'] # for 5 classes
data_train = Bunch()
data_train.data = raw_X_train
data_train.target_names = ['far_left','mid_left','neutral','mid_right','far_right'] # for 5 classes
data_train.target = raw_y_train
data_test = Bunch()
data_test.data = raw_X_test
data_test.target = raw_y_test
target_names = data_train.target_names # Note: order of labels in `target_names` can be different from `categories`
print('data loaded')
#============================================================================
# Create target vectors
#============================================================================
y_train, y_test = data_train.target, data_test.target
init_time = time()
if opts.use_hashing:
print("Using hashing vectorizer")
vectorizer = HashingVectorizer(stop_words='english', alternate_sign=False, n_features=opts.n_features)
X_train = vectorizer.transform(data_train.data)
else:
print("Using tfidf vectorizer")
vectorizer = TfidfVectorizer(sublinear_tf=True, max_df=0.5, stop_words='english')
X_train = vectorizer.fit_transform(data_train.data)
print("n_samples: %d, n_features: %d" % X_train.shape)
print()
print("Extracting features from the test set")
init_time = time()
X_test = vectorizer.transform(data_test.data)
print("n_samples: %d, n_features: %d" % X_test.shape)
print()
# mapping from word to a string of tokens
if opts.use_hashing:
names_of_features = None
else:
|
if names_of_features:
names_of_features = np.asarray(names_of_features)
def trim(s):
return s if len(s) <= 80 else s[:77] + "..."
#============================================================================
# Benchmark classifiers
#============================================================================
def benchmark(clf):
print('_' * 80)
print("Starting training: ")
print(clf)
init_time = time()
clf.fit(X_train, y_train)
time_to_train = time() - init_time
print("train time: %0.3fs" % time_to_train)
print("Starting testing: ")
init_time = time()
prediction = clf.predict(X_test)
time_to_test = time() - init_time
print("test time: %0.3fs" % time_to_test)
print("Scoring the model: ")
accuracy = metrics.accuracy_score(y_test, prediction)
print("accuracy: %0.3f" % accuracy)
clf_descr = str(clf).split('(')[0]
return clf_descr, accuracy, time_to_train, time_to_test
all_model_results = []
for clf, name in (
(RidgeClassifier(tol=1e-2, solver="sag"), "Ridge Classifier"),
(Perceptron(max_iter=50, tol=1e-3), "Perceptron"),
(PassiveAggressiveClassifier(max_iter=50, tol=1e-3), "Passive-Aggressive"),
(KNeighborsClassifier(n_neighbors=10), "kNN"),
(RandomForestClassifier(n_estimators=100), "Random forest")):
print('=' * 80)
print(name)
all_model_results.append(benchmark(clf))
for penalty in ["l2", "l1"]:
print('=' * 80)
print("%s penalty" % penalty.upper())
all_model_results.append(benchmark(LinearSVC(penalty=penalty, dual=False, tol=1e-3))) # Create and train lib-linear models
all_model_results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50, penalty=penalty))) # Create and train stochastic gradient models
print('=' * 80)
print("SGD with Elastic-Net")
all_model_results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50, penalty="elasticnet"))) # Create and train SGD w/ elastic penalty
print('=' * 80)
print("Nearest Centroid")
all_model_results.append(benchmark(NearestCentroid())) # Train NearestCentroid without threshold
print('=' * 80)
print("Naive Bayes (multinomial, bernoulli, and complement)")
all_model_results.append(benchmark(MultinomialNB(alpha=.01))) # Train sparse Naive Bayes classifiers
all_model_results.append(benchmark(BernoulliNB(alpha=.01)))
all_model_results.append(benchmark(ComplementNB(alpha=.1)))
print('=' * 80)
print("Linear SVC with l1")
all_model_results.append(benchmark(Pipeline([('feature_selection', SelectFromModel(LinearSVC(penalty="l1", dual=False, tol=1e-3))),
('classification', LinearSVC(penalty="l2"))])))
# ==============================
# Plotting Accuracies Of All Models
# ==============================
idx = np.arange(len(all_model_results))
all_model_results = [[x[i] for x in all_model_results] for i in range(4)]
all_clfs, accuracy, time_for_training, time_to_test = all_model_results
time_for_training = np.array(time_for_training) / np.max(time_for_training)
time_to_test = np.array(time_to_test) / np.max(time_to_test)
plt.figure(figsize=(10, 10))
plt.title("Classifier Accuracy")
plt.barh(idx, accuracy, .2, label="Accuracy", color='navy')
plt.barh(idx + .3, time_for_training, .2, label="Training Time", color='red')
plt.barh(idx + .6, time_to_test, .2, label="Test Time", color='darkorange')
plt.yticks(())
plt.legend(loc='best')
plt.subplots_adjust(left=.25)
plt.subplots_adjust(top=.95)
plt.subplots_adjust(bottom=.05)
for i, c in zip(idx, all_clfs):
plt.text(-.3, i, c)
plt.show()
| names_of_features = vectorizer.get_feature_names() | conditional_block |
Run_all_models_modified.py | from __future__ import print_function
import os
import sys
import logging
import numpy as np
import matplotlib.pyplot as plt
from time import time
from optparse import OptionParser
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_selection import SelectFromModel
from sklearn.feature_selection import SelectKBest
from sklearn.linear_model import RidgeClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.linear_model import Perceptron
from sklearn.linear_model import PassiveAggressiveClassifier
from sklearn.naive_bayes import BernoulliNB
from sklearn.naive_bayes import ComplementNB
from sklearn.naive_bayes import MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestCentroid
from sklearn.pipeline import Pipeline
from sklearn.svm import LinearSVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn import metrics
class | (object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
# output logs to stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
# show how to call hashing function
op = OptionParser()
op.add_option("--use_hashing",
action="store_true",
help="Use a hashing vectorizer.")
op.add_option("--n_features",
action="store", type=int, default=2 ** 16,
help="n_features when using the hashing vectorizer.")
def is_interactive():
return not hasattr(sys.modules['__main__'], '__file__')
argv = [] if is_interactive() else sys.argv[1:]
(opts, args) = op.parse_args(argv)
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
print(__doc__)
op.print_help()
print()
#============================================================================
# Read custom data
#============================================================================
polarity = []
user_tweets = []
balance_classes = 1 # if 0, it only trims class 4, else trims all classes to size cls_num_ext
cls_0_num = 0
cls_1_num = 0
cls_2_num = 0
cls_3_num = 0
cls_4_num = 0
cls_num_ext = 500
num_ext = 0
with open("Categorized_User_Polarity.txt") as inp:
for line in inp:
values = line.split("\t") # id, polarity
user_id = values[0]
user_file = "tokens_lines_test/" + user_id
if os.path.isfile(user_file): # not all user IDs had tweets in the master tweet file
if not balance_classes:
if int(values[1]) == 4:
if num_ext < 2000:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
num_ext += 1
else:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
else:
if int(values[1]) == 0:
if cls_0_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_0_num += 1
if int(values[1]) == 0:
if cls_1_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_1_num += 1
if int(values[1]) == 0:
if cls_2_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_2_num += 1
if int(values[1]) == 0:
if cls_3_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_3_num += 1
else: # class 4
if cls_4_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_4_num += 1
#============================================================================
# Split into training and testing sets
#============================================================================
# X y % data used for testing
raw_X_train, raw_X_test, raw_y_train, raw_y_test = train_test_split(user_tweets, polarity, test_size=0.2)
categories = ['far_left','mid_left','neutral','mid_right','far_right'] # for 5 classes
data_train = Bunch()
data_train.data = raw_X_train
data_train.target_names = ['far_left','mid_left','neutral','mid_right','far_right'] # for 5 classes
data_train.target = raw_y_train
data_test = Bunch()
data_test.data = raw_X_test
data_test.target = raw_y_test
target_names = data_train.target_names # Note: order of labels in `target_names` can be different from `categories`
print('data loaded')
#============================================================================
# Create target vectors
#============================================================================
y_train, y_test = data_train.target, data_test.target
init_time = time()
if opts.use_hashing:
print("Using hashing vectorizer")
vectorizer = HashingVectorizer(stop_words='english', alternate_sign=False, n_features=opts.n_features)
X_train = vectorizer.transform(data_train.data)
else:
print("Using tfidf vectorizer")
vectorizer = TfidfVectorizer(sublinear_tf=True, max_df=0.5, stop_words='english')
X_train = vectorizer.fit_transform(data_train.data)
print("n_samples: %d, n_features: %d" % X_train.shape)
print()
print("Extracting features from the test set")
init_time = time()
X_test = vectorizer.transform(data_test.data)
print("n_samples: %d, n_features: %d" % X_test.shape)
print()
# mapping from word to a string of tokens
if opts.use_hashing:
names_of_features = None
else:
names_of_features = vectorizer.get_feature_names()
if names_of_features:
names_of_features = np.asarray(names_of_features)
def trim(s):
return s if len(s) <= 80 else s[:77] + "..."
#============================================================================
# Benchmark classifiers
#============================================================================
def benchmark(clf):
print('_' * 80)
print("Starting training: ")
print(clf)
init_time = time()
clf.fit(X_train, y_train)
time_to_train = time() - init_time
print("train time: %0.3fs" % time_to_train)
print("Starting testing: ")
init_time = time()
prediction = clf.predict(X_test)
time_to_test = time() - init_time
print("test time: %0.3fs" % time_to_test)
print("Scoring the model: ")
accuracy = metrics.accuracy_score(y_test, prediction)
print("accuracy: %0.3f" % accuracy)
clf_descr = str(clf).split('(')[0]
return clf_descr, accuracy, time_to_train, time_to_test
all_model_results = []
for clf, name in (
(RidgeClassifier(tol=1e-2, solver="sag"), "Ridge Classifier"),
(Perceptron(max_iter=50, tol=1e-3), "Perceptron"),
(PassiveAggressiveClassifier(max_iter=50, tol=1e-3), "Passive-Aggressive"),
(KNeighborsClassifier(n_neighbors=10), "kNN"),
(RandomForestClassifier(n_estimators=100), "Random forest")):
print('=' * 80)
print(name)
all_model_results.append(benchmark(clf))
for penalty in ["l2", "l1"]:
print('=' * 80)
print("%s penalty" % penalty.upper())
all_model_results.append(benchmark(LinearSVC(penalty=penalty, dual=False, tol=1e-3))) # Create and train lib-linear models
all_model_results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50, penalty=penalty))) # Create and train stochastic gradient models
print('=' * 80)
print("SGD with Elastic-Net")
all_model_results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50, penalty="elasticnet"))) # Create and train SGD w/ elastic penalty
print('=' * 80)
print("Nearest Centroid")
all_model_results.append(benchmark(NearestCentroid())) # Train NearestCentroid without threshold
print('=' * 80)
print("Naive Bayes (multinomial, bernoulli, and complement)")
all_model_results.append(benchmark(MultinomialNB(alpha=.01))) # Train sparse Naive Bayes classifiers
all_model_results.append(benchmark(BernoulliNB(alpha=.01)))
all_model_results.append(benchmark(ComplementNB(alpha=.1)))
print('=' * 80)
print("Linear SVC with l1")
all_model_results.append(benchmark(Pipeline([('feature_selection', SelectFromModel(LinearSVC(penalty="l1", dual=False, tol=1e-3))),
('classification', LinearSVC(penalty="l2"))])))
# ==============================
# Plotting Accuracies Of All Models
# ==============================
idx = np.arange(len(all_model_results))
all_model_results = [[x[i] for x in all_model_results] for i in range(4)]
all_clfs, accuracy, time_for_training, time_to_test = all_model_results
time_for_training = np.array(time_for_training) / np.max(time_for_training)
time_to_test = np.array(time_to_test) / np.max(time_to_test)
plt.figure(figsize=(10, 10))
plt.title("Classifier Accuracy")
plt.barh(idx, accuracy, .2, label="Accuracy", color='navy')
plt.barh(idx + .3, time_for_training, .2, label="Training Time", color='red')
plt.barh(idx + .6, time_to_test, .2, label="Test Time", color='darkorange')
plt.yticks(())
plt.legend(loc='best')
plt.subplots_adjust(left=.25)
plt.subplots_adjust(top=.95)
plt.subplots_adjust(bottom=.05)
for i, c in zip(idx, all_clfs):
plt.text(-.3, i, c)
plt.show()
| Bunch | identifier_name |
Run_all_models_modified.py | from __future__ import print_function
import os
import sys
import logging
import numpy as np
import matplotlib.pyplot as plt
from time import time
from optparse import OptionParser
from sklearn.feature_extraction.text import TfidfVectorizer | from sklearn.feature_selection import SelectFromModel
from sklearn.feature_selection import SelectKBest
from sklearn.linear_model import RidgeClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.linear_model import Perceptron
from sklearn.linear_model import PassiveAggressiveClassifier
from sklearn.naive_bayes import BernoulliNB
from sklearn.naive_bayes import ComplementNB
from sklearn.naive_bayes import MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestCentroid
from sklearn.pipeline import Pipeline
from sklearn.svm import LinearSVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn import metrics
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
# output logs to stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
# show how to call hashing function
op = OptionParser()
op.add_option("--use_hashing",
action="store_true",
help="Use a hashing vectorizer.")
op.add_option("--n_features",
action="store", type=int, default=2 ** 16,
help="n_features when using the hashing vectorizer.")
def is_interactive():
return not hasattr(sys.modules['__main__'], '__file__')
argv = [] if is_interactive() else sys.argv[1:]
(opts, args) = op.parse_args(argv)
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
print(__doc__)
op.print_help()
print()
#============================================================================
# Read custom data
#============================================================================
polarity = []
user_tweets = []
balance_classes = 1 # if 0, it only trims class 4, else trims all classes to size cls_num_ext
cls_0_num = 0
cls_1_num = 0
cls_2_num = 0
cls_3_num = 0
cls_4_num = 0
cls_num_ext = 500
num_ext = 0
with open("Categorized_User_Polarity.txt") as inp:
for line in inp:
values = line.split("\t") # id, polarity
user_id = values[0]
user_file = "tokens_lines_test/" + user_id
if os.path.isfile(user_file): # not all user IDs had tweets in the master tweet file
if not balance_classes:
if int(values[1]) == 4:
if num_ext < 2000:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
num_ext += 1
else:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
else:
if int(values[1]) == 0:
if cls_0_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_0_num += 1
if int(values[1]) == 0:
if cls_1_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_1_num += 1
if int(values[1]) == 0:
if cls_2_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_2_num += 1
if int(values[1]) == 0:
if cls_3_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_3_num += 1
else: # class 4
if cls_4_num < cls_num_ext:
polarity.append(int(values[1])) # save the polarity
with open(user_file, 'r') as inp: # save the
user_tweets.append(inp.read())
cls_4_num += 1
#============================================================================
# Split into training and testing sets
#============================================================================
# X y % data used for testing
raw_X_train, raw_X_test, raw_y_train, raw_y_test = train_test_split(user_tweets, polarity, test_size=0.2)
categories = ['far_left','mid_left','neutral','mid_right','far_right'] # for 5 classes
data_train = Bunch()
data_train.data = raw_X_train
data_train.target_names = ['far_left','mid_left','neutral','mid_right','far_right'] # for 5 classes
data_train.target = raw_y_train
data_test = Bunch()
data_test.data = raw_X_test
data_test.target = raw_y_test
target_names = data_train.target_names # Note: order of labels in `target_names` can be different from `categories`
print('data loaded')
#============================================================================
# Create target vectors
#============================================================================
y_train, y_test = data_train.target, data_test.target
init_time = time()
if opts.use_hashing:
print("Using hashing vectorizer")
vectorizer = HashingVectorizer(stop_words='english', alternate_sign=False, n_features=opts.n_features)
X_train = vectorizer.transform(data_train.data)
else:
print("Using tfidf vectorizer")
vectorizer = TfidfVectorizer(sublinear_tf=True, max_df=0.5, stop_words='english')
X_train = vectorizer.fit_transform(data_train.data)
print("n_samples: %d, n_features: %d" % X_train.shape)
print()
print("Extracting features from the test set")
init_time = time()
X_test = vectorizer.transform(data_test.data)
print("n_samples: %d, n_features: %d" % X_test.shape)
print()
# mapping from word to a string of tokens
if opts.use_hashing:
names_of_features = None
else:
names_of_features = vectorizer.get_feature_names()
if names_of_features:
names_of_features = np.asarray(names_of_features)
def trim(s):
return s if len(s) <= 80 else s[:77] + "..."
#============================================================================
# Benchmark classifiers
#============================================================================
def benchmark(clf):
print('_' * 80)
print("Starting training: ")
print(clf)
init_time = time()
clf.fit(X_train, y_train)
time_to_train = time() - init_time
print("train time: %0.3fs" % time_to_train)
print("Starting testing: ")
init_time = time()
prediction = clf.predict(X_test)
time_to_test = time() - init_time
print("test time: %0.3fs" % time_to_test)
print("Scoring the model: ")
accuracy = metrics.accuracy_score(y_test, prediction)
print("accuracy: %0.3f" % accuracy)
clf_descr = str(clf).split('(')[0]
return clf_descr, accuracy, time_to_train, time_to_test
all_model_results = []
for clf, name in (
(RidgeClassifier(tol=1e-2, solver="sag"), "Ridge Classifier"),
(Perceptron(max_iter=50, tol=1e-3), "Perceptron"),
(PassiveAggressiveClassifier(max_iter=50, tol=1e-3), "Passive-Aggressive"),
(KNeighborsClassifier(n_neighbors=10), "kNN"),
(RandomForestClassifier(n_estimators=100), "Random forest")):
print('=' * 80)
print(name)
all_model_results.append(benchmark(clf))
for penalty in ["l2", "l1"]:
print('=' * 80)
print("%s penalty" % penalty.upper())
all_model_results.append(benchmark(LinearSVC(penalty=penalty, dual=False, tol=1e-3))) # Create and train lib-linear models
all_model_results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50, penalty=penalty))) # Create and train stochastic gradient models
print('=' * 80)
print("SGD with Elastic-Net")
all_model_results.append(benchmark(SGDClassifier(alpha=.0001, max_iter=50, penalty="elasticnet"))) # Create and train SGD w/ elastic penalty
print('=' * 80)
print("Nearest Centroid")
all_model_results.append(benchmark(NearestCentroid())) # Train NearestCentroid without threshold
print('=' * 80)
print("Naive Bayes (multinomial, bernoulli, and complement)")
all_model_results.append(benchmark(MultinomialNB(alpha=.01))) # Train sparse Naive Bayes classifiers
all_model_results.append(benchmark(BernoulliNB(alpha=.01)))
all_model_results.append(benchmark(ComplementNB(alpha=.1)))
print('=' * 80)
print("Linear SVC with l1")
all_model_results.append(benchmark(Pipeline([('feature_selection', SelectFromModel(LinearSVC(penalty="l1", dual=False, tol=1e-3))),
('classification', LinearSVC(penalty="l2"))])))
# ==============================
# Plotting Accuracies Of All Models
# ==============================
idx = np.arange(len(all_model_results))
all_model_results = [[x[i] for x in all_model_results] for i in range(4)]
all_clfs, accuracy, time_for_training, time_to_test = all_model_results
time_for_training = np.array(time_for_training) / np.max(time_for_training)
time_to_test = np.array(time_to_test) / np.max(time_to_test)
plt.figure(figsize=(10, 10))
plt.title("Classifier Accuracy")
plt.barh(idx, accuracy, .2, label="Accuracy", color='navy')
plt.barh(idx + .3, time_for_training, .2, label="Training Time", color='red')
plt.barh(idx + .6, time_to_test, .2, label="Test Time", color='darkorange')
plt.yticks(())
plt.legend(loc='best')
plt.subplots_adjust(left=.25)
plt.subplots_adjust(top=.95)
plt.subplots_adjust(bottom=.05)
for i, c in zip(idx, all_clfs):
plt.text(-.3, i, c)
plt.show() | from sklearn.feature_extraction.text import HashingVectorizer | random_line_split |
pod_helper.go | package flytek8s
import (
"context"
"fmt"
"strings"
"time"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/core/template"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/utils"
"github.com/flyteorg/flytestdlib/logger"
v1 "k8s.io/api/core/v1"
v12 "k8s.io/apimachinery/pkg/apis/meta/v1"
pluginsCore "github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/core"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/flytek8s/config"
)
const PodKind = "pod"
const OOMKilled = "OOMKilled"
const Interrupted = "Interrupted"
const SIGKILL = 137
// Updates the base pod spec used to execute tasks. This is configured with plugins and task metadata-specific options
func UpdatePod(taskExecutionMetadata pluginsCore.TaskExecutionMetadata,
resourceRequirements []v1.ResourceRequirements, podSpec *v1.PodSpec) {
if len(podSpec.RestartPolicy) == 0 {
podSpec.RestartPolicy = v1.RestartPolicyNever
}
podSpec.Tolerations = append(
GetPodTolerations(taskExecutionMetadata.IsInterruptible(), resourceRequirements...), podSpec.Tolerations...)
if len(podSpec.ServiceAccountName) == 0 {
podSpec.ServiceAccountName = taskExecutionMetadata.GetK8sServiceAccount()
}
if len(podSpec.SchedulerName) == 0 {
podSpec.SchedulerName = config.GetK8sPluginConfig().SchedulerName
}
podSpec.NodeSelector = utils.UnionMaps(podSpec.NodeSelector, config.GetK8sPluginConfig().DefaultNodeSelector)
if taskExecutionMetadata.IsInterruptible() {
podSpec.NodeSelector = utils.UnionMaps(podSpec.NodeSelector, config.GetK8sPluginConfig().InterruptibleNodeSelector)
}
if podSpec.Affinity == nil {
podSpec.Affinity = config.GetK8sPluginConfig().DefaultAffinity
}
}
func ToK8sPodSpec(ctx context.Context, tCtx pluginsCore.TaskExecutionContext) (*v1.PodSpec, error) {
task, err := tCtx.TaskReader().Read(ctx)
if err != nil {
logger.Warnf(ctx, "failed to read task information when trying to construct Pod, err: %s", err.Error())
return nil, err
}
if task.GetContainer() == nil {
logger.Errorf(ctx, "Default Pod creation logic works for default container in the task template only.")
return nil, fmt.Errorf("container not specified in task template")
}
c, err := ToK8sContainer(ctx, task.GetContainer(), task.Interface, template.Parameters{
Task: tCtx.TaskReader(),
Inputs: tCtx.InputReader(),
OutputPath: tCtx.OutputWriter(),
TaskExecMetadata: tCtx.TaskExecutionMetadata(),
})
if err != nil {
return nil, err
}
containers := []v1.Container{
*c,
}
pod := &v1.PodSpec{
Containers: containers,
}
UpdatePod(tCtx.TaskExecutionMetadata(), []v1.ResourceRequirements{c.Resources}, pod)
if err := AddCoPilotToPod(ctx, config.GetK8sPluginConfig().CoPilot, pod, task.GetInterface(), tCtx.TaskExecutionMetadata(), tCtx.InputReader(), tCtx.OutputWriter(), task.GetContainer().GetDataConfig()); err != nil {
return nil, err
}
return pod, nil
}
func BuildPodWithSpec(podSpec *v1.PodSpec) *v1.Pod {
pod := v1.Pod{
TypeMeta: v12.TypeMeta{
Kind: PodKind,
APIVersion: v1.SchemeGroupVersion.String(),
},
Spec: *podSpec,
}
return &pod
}
func BuildIdentityPod() *v1.Pod {
return &v1.Pod{
TypeMeta: v12.TypeMeta{
Kind: PodKind,
APIVersion: v1.SchemeGroupVersion.String(),
},
}
}
// Important considerations.
// Pending Status in Pod could be for various reasons and sometimes could signal a problem
// Case I: Pending because the Image pull is failing and it is backing off
// This could be transient. So we can actually rely on the failure reason.
// The failure transitions from ErrImagePull -> ImagePullBackoff
// Case II: Not enough resources are available. This is tricky. It could be that the total number of
// resources requested is beyond the capability of the system. for this we will rely on configuration
// and hence input gates. We should not allow bad requests that request for large number of resource through.
// In the case it makes through, we will fail after timeout
func DemystifyPending(status v1.PodStatus) (pluginsCore.PhaseInfo, error) {
// Search over the difference conditions in the status object. Note that the 'Pending' this function is
// demystifying is the 'phase' of the pod status. This is different than the PodReady condition type also used below
for _, c := range status.Conditions {
switch c.Type {
case v1.PodScheduled:
if c.Status == v1.ConditionFalse {
// Waiting to be scheduled. This usually refers to inability to acquire resources.
return pluginsCore.PhaseInfoQueued(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("%s:%s", c.Reason, c.Message)), nil
}
case v1.PodReasonUnschedulable:
// We Ignore case in which we are unable to find resources on the cluster. This is because
// - The resources may be not available at the moment, but may become available eventually
// The pod scheduler will keep on looking at this pod and trying to satisfy it.
//
// Pod status looks like this:
// message: '0/1 nodes are available: 1 Insufficient memory.'
// reason: Unschedulable
// status: "False"
// type: PodScheduled
return pluginsCore.PhaseInfoQueued(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("%s:%s", c.Reason, c.Message)), nil
case v1.PodReady:
if c.Status == v1.ConditionFalse {
// This happens in the case the image is having some problems. In the following example, K8s is having
// problems downloading an image. To ensure that, we will have to iterate over all the container statuses and
// find if some container has imagepull failure
// e.g.
// - lastProbeTime: null
// lastTransitionTime: 2018-12-18T00:57:30Z
// message: 'containers with unready status: [myapp-container]'
// reason: ContainersNotReady
// status: "False"
// type: Ready
//
// e.g. Container status
// - image: blah
// imageID: ""
// lastState: {}
// name: myapp-container
// ready: false
// restartCount: 0
// state:
// waiting:
// message: Back-off pulling image "blah"
// reason: ImagePullBackOff
for _, containerStatus := range status.ContainerStatuses {
if !containerStatus.Ready {
if containerStatus.State.Waiting != nil {
// There are a variety of reasons that can cause a pod to be in this waiting state.
// Waiting state may be legitimate when the container is being downloaded, started or init containers are running
reason := containerStatus.State.Waiting.Reason
finalReason := fmt.Sprintf("%s|%s", c.Reason, reason)
finalMessage := fmt.Sprintf("%s|%s", c.Message, containerStatus.State.Waiting.Message)
switch reason {
case "ErrImagePull", "ContainerCreating", "PodInitializing":
// But, there are only two "reasons" when a pod is successfully being created and hence it is in
// waiting state
// Refer to https://github.com/kubernetes/kubernetes/blob/master/pkg/kubelet/kubelet_pods.go
// and look for the default waiting states
// We also want to allow Image pulls to be retried, so ErrImagePull will be ignored
// as it eventually enters into ImagePullBackOff
// ErrImagePull -> Transitionary phase to ImagePullBackOff
// ContainerCreating -> Image is being downloaded
// PodInitializing -> Init containers are running
return pluginsCore.PhaseInfoInitializing(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("[%s]: %s", finalReason, finalMessage), &pluginsCore.TaskInfo{OccurredAt: &c.LastTransitionTime.Time}), nil
case "CreateContainerConfigError", "CreateContainerError":
// This happens if for instance the command to the container is incorrect, ie doesn't run
t := c.LastTransitionTime.Time
return pluginsCore.PhaseInfoFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
OccurredAt: &t,
}), nil
case "ImagePullBackOff":
t := c.LastTransitionTime.Time
return pluginsCore.PhaseInfoRetryableFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
OccurredAt: &t,
}), nil
default:
// Since we are not checking for all error states, we may end up perpetually
// in the queued state returned at the bottom of this function, until the Pod is reaped
// by K8s and we get elusive 'pod not found' errors
// So be default if the container is not waiting with the PodInitializing/ContainerCreating
// reasons, then we will assume a failure reason, and fail instantly
t := c.LastTransitionTime.Time
return pluginsCore.PhaseInfoSystemRetryableFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
OccurredAt: &t,
}), nil
}
}
}
}
}
}
}
return pluginsCore.PhaseInfoQueued(time.Now(), pluginsCore.DefaultPhaseVersion, "Scheduling"), nil
}
func DemystifySuccess(status v1.PodStatus, info pluginsCore.TaskInfo) (pluginsCore.PhaseInfo, error) {
for _, status := range append(
append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) {
if status.State.Terminated != nil && strings.Contains(status.State.Terminated.Reason, OOMKilled) {
return pluginsCore.PhaseInfoRetryableFailure("OOMKilled",
"Pod reported success despite being OOMKilled", &info), nil
}
}
return pluginsCore.PhaseInfoSuccess(&info), nil
}
func DeterminePrimaryContainerPhase(primaryContainerName string, statuses []v1.ContainerStatus, info *pluginsCore.TaskInfo) pluginsCore.PhaseInfo {
for _, s := range statuses {
if s.Name == primaryContainerName {
if s.State.Waiting != nil || s.State.Running != nil {
return pluginsCore.PhaseInfoRunning(pluginsCore.DefaultPhaseVersion, info)
}
if s.State.Terminated != nil {
if s.State.Terminated.ExitCode != 0 {
return pluginsCore.PhaseInfoRetryableFailure(
s.State.Terminated.Reason, s.State.Terminated.Message, info)
}
return pluginsCore.PhaseInfoSuccess(info)
}
}
}
// If for some reason we can't find the primary container, always just return a permanent failure
return pluginsCore.PhaseInfoFailure("PrimaryContainerMissing",
fmt.Sprintf("Primary container [%s] not found in pod's container statuses", primaryContainerName), info)
}
func ConvertPodFailureToError(status v1.PodStatus) (code, message string) {
code = "UnknownError"
message = "Pod failed. No message received from kubernetes."
if len(status.Reason) > 0 {
code = status.Reason
}
if len(status.Message) > 0 {
message = status.Message
}
for _, c := range append(
append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) {
var containerState v1.ContainerState
if c.LastTerminationState.Terminated != nil {
containerState = c.LastTerminationState
} else if c.State.Terminated != nil {
containerState = c.State
}
if containerState.Terminated != nil {
if strings.Contains(c.State.Terminated.Reason, OOMKilled) {
code = OOMKilled
} else if containerState.Terminated.ExitCode == SIGKILL {
// in some setups, node termination sends SIGKILL to all the containers running on that node. Capturing and
// tagging that correctly.
code = Interrupted
}
if containerState.Terminated.ExitCode == 0 {
message += fmt.Sprintf("\r\n[%v] terminated with ExitCode 0.", c.Name) | message += fmt.Sprintf("\r\n[%v] terminated with exit code (%v). Reason [%v]. Message: \n%v.",
c.Name,
containerState.Terminated.ExitCode,
containerState.Terminated.Reason,
containerState.Terminated.Message)
}
}
}
return code, message
}
func GetLastTransitionOccurredAt(pod *v1.Pod) v12.Time {
var lastTransitionTime v12.Time
containerStatuses := append(pod.Status.ContainerStatuses, pod.Status.InitContainerStatuses...)
for _, containerStatus := range containerStatuses {
if r := containerStatus.LastTerminationState.Running; r != nil {
if r.StartedAt.Unix() > lastTransitionTime.Unix() {
lastTransitionTime = r.StartedAt
}
} else if r := containerStatus.LastTerminationState.Terminated; r != nil {
if r.FinishedAt.Unix() > lastTransitionTime.Unix() {
lastTransitionTime = r.StartedAt
}
}
}
if lastTransitionTime.IsZero() {
lastTransitionTime = v12.NewTime(time.Now())
}
return lastTransitionTime
} | } else { | random_line_split |
pod_helper.go | package flytek8s
import (
"context"
"fmt"
"strings"
"time"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/core/template"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/utils"
"github.com/flyteorg/flytestdlib/logger"
v1 "k8s.io/api/core/v1"
v12 "k8s.io/apimachinery/pkg/apis/meta/v1"
pluginsCore "github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/core"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/flytek8s/config"
)
const PodKind = "pod"
const OOMKilled = "OOMKilled"
const Interrupted = "Interrupted"
const SIGKILL = 137
// Updates the base pod spec used to execute tasks. This is configured with plugins and task metadata-specific options
func UpdatePod(taskExecutionMetadata pluginsCore.TaskExecutionMetadata,
resourceRequirements []v1.ResourceRequirements, podSpec *v1.PodSpec) {
if len(podSpec.RestartPolicy) == 0 {
podSpec.RestartPolicy = v1.RestartPolicyNever
}
podSpec.Tolerations = append(
GetPodTolerations(taskExecutionMetadata.IsInterruptible(), resourceRequirements...), podSpec.Tolerations...)
if len(podSpec.ServiceAccountName) == 0 {
podSpec.ServiceAccountName = taskExecutionMetadata.GetK8sServiceAccount()
}
if len(podSpec.SchedulerName) == 0 {
podSpec.SchedulerName = config.GetK8sPluginConfig().SchedulerName
}
podSpec.NodeSelector = utils.UnionMaps(podSpec.NodeSelector, config.GetK8sPluginConfig().DefaultNodeSelector)
if taskExecutionMetadata.IsInterruptible() {
podSpec.NodeSelector = utils.UnionMaps(podSpec.NodeSelector, config.GetK8sPluginConfig().InterruptibleNodeSelector)
}
if podSpec.Affinity == nil {
podSpec.Affinity = config.GetK8sPluginConfig().DefaultAffinity
}
}
func ToK8sPodSpec(ctx context.Context, tCtx pluginsCore.TaskExecutionContext) (*v1.PodSpec, error) {
task, err := tCtx.TaskReader().Read(ctx)
if err != nil {
logger.Warnf(ctx, "failed to read task information when trying to construct Pod, err: %s", err.Error())
return nil, err
}
if task.GetContainer() == nil {
logger.Errorf(ctx, "Default Pod creation logic works for default container in the task template only.")
return nil, fmt.Errorf("container not specified in task template")
}
c, err := ToK8sContainer(ctx, task.GetContainer(), task.Interface, template.Parameters{
Task: tCtx.TaskReader(),
Inputs: tCtx.InputReader(),
OutputPath: tCtx.OutputWriter(),
TaskExecMetadata: tCtx.TaskExecutionMetadata(),
})
if err != nil {
return nil, err
}
containers := []v1.Container{
*c,
}
pod := &v1.PodSpec{
Containers: containers,
}
UpdatePod(tCtx.TaskExecutionMetadata(), []v1.ResourceRequirements{c.Resources}, pod)
if err := AddCoPilotToPod(ctx, config.GetK8sPluginConfig().CoPilot, pod, task.GetInterface(), tCtx.TaskExecutionMetadata(), tCtx.InputReader(), tCtx.OutputWriter(), task.GetContainer().GetDataConfig()); err != nil {
return nil, err
}
return pod, nil
}
func BuildPodWithSpec(podSpec *v1.PodSpec) *v1.Pod {
pod := v1.Pod{
TypeMeta: v12.TypeMeta{
Kind: PodKind,
APIVersion: v1.SchemeGroupVersion.String(),
},
Spec: *podSpec,
}
return &pod
}
func BuildIdentityPod() *v1.Pod {
return &v1.Pod{
TypeMeta: v12.TypeMeta{
Kind: PodKind,
APIVersion: v1.SchemeGroupVersion.String(),
},
}
}
// Important considerations.
// Pending Status in Pod could be for various reasons and sometimes could signal a problem
// Case I: Pending because the Image pull is failing and it is backing off
// This could be transient. So we can actually rely on the failure reason.
// The failure transitions from ErrImagePull -> ImagePullBackoff
// Case II: Not enough resources are available. This is tricky. It could be that the total number of
// resources requested is beyond the capability of the system. for this we will rely on configuration
// and hence input gates. We should not allow bad requests that request for large number of resource through.
// In the case it makes through, we will fail after timeout
func DemystifyPending(status v1.PodStatus) (pluginsCore.PhaseInfo, error) {
// Search over the difference conditions in the status object. Note that the 'Pending' this function is
// demystifying is the 'phase' of the pod status. This is different than the PodReady condition type also used below
for _, c := range status.Conditions {
switch c.Type {
case v1.PodScheduled:
if c.Status == v1.ConditionFalse {
// Waiting to be scheduled. This usually refers to inability to acquire resources.
return pluginsCore.PhaseInfoQueued(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("%s:%s", c.Reason, c.Message)), nil
}
case v1.PodReasonUnschedulable:
// We Ignore case in which we are unable to find resources on the cluster. This is because
// - The resources may be not available at the moment, but may become available eventually
// The pod scheduler will keep on looking at this pod and trying to satisfy it.
//
// Pod status looks like this:
// message: '0/1 nodes are available: 1 Insufficient memory.'
// reason: Unschedulable
// status: "False"
// type: PodScheduled
return pluginsCore.PhaseInfoQueued(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("%s:%s", c.Reason, c.Message)), nil
case v1.PodReady:
if c.Status == v1.ConditionFalse {
// This happens in the case the image is having some problems. In the following example, K8s is having
// problems downloading an image. To ensure that, we will have to iterate over all the container statuses and
// find if some container has imagepull failure
// e.g.
// - lastProbeTime: null
// lastTransitionTime: 2018-12-18T00:57:30Z
// message: 'containers with unready status: [myapp-container]'
// reason: ContainersNotReady
// status: "False"
// type: Ready
//
// e.g. Container status
// - image: blah
// imageID: ""
// lastState: {}
// name: myapp-container
// ready: false
// restartCount: 0
// state:
// waiting:
// message: Back-off pulling image "blah"
// reason: ImagePullBackOff
for _, containerStatus := range status.ContainerStatuses {
if !containerStatus.Ready {
if containerStatus.State.Waiting != nil {
// There are a variety of reasons that can cause a pod to be in this waiting state.
// Waiting state may be legitimate when the container is being downloaded, started or init containers are running
reason := containerStatus.State.Waiting.Reason
finalReason := fmt.Sprintf("%s|%s", c.Reason, reason)
finalMessage := fmt.Sprintf("%s|%s", c.Message, containerStatus.State.Waiting.Message)
switch reason {
case "ErrImagePull", "ContainerCreating", "PodInitializing":
// But, there are only two "reasons" when a pod is successfully being created and hence it is in
// waiting state
// Refer to https://github.com/kubernetes/kubernetes/blob/master/pkg/kubelet/kubelet_pods.go
// and look for the default waiting states
// We also want to allow Image pulls to be retried, so ErrImagePull will be ignored
// as it eventually enters into ImagePullBackOff
// ErrImagePull -> Transitionary phase to ImagePullBackOff
// ContainerCreating -> Image is being downloaded
// PodInitializing -> Init containers are running
return pluginsCore.PhaseInfoInitializing(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("[%s]: %s", finalReason, finalMessage), &pluginsCore.TaskInfo{OccurredAt: &c.LastTransitionTime.Time}), nil
case "CreateContainerConfigError", "CreateContainerError":
// This happens if for instance the command to the container is incorrect, ie doesn't run
t := c.LastTransitionTime.Time
return pluginsCore.PhaseInfoFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
OccurredAt: &t,
}), nil
case "ImagePullBackOff":
t := c.LastTransitionTime.Time
return pluginsCore.PhaseInfoRetryableFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
OccurredAt: &t,
}), nil
default:
// Since we are not checking for all error states, we may end up perpetually
// in the queued state returned at the bottom of this function, until the Pod is reaped
// by K8s and we get elusive 'pod not found' errors
// So be default if the container is not waiting with the PodInitializing/ContainerCreating
// reasons, then we will assume a failure reason, and fail instantly
t := c.LastTransitionTime.Time
return pluginsCore.PhaseInfoSystemRetryableFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
OccurredAt: &t,
}), nil
}
}
}
}
}
}
}
return pluginsCore.PhaseInfoQueued(time.Now(), pluginsCore.DefaultPhaseVersion, "Scheduling"), nil
}
func DemystifySuccess(status v1.PodStatus, info pluginsCore.TaskInfo) (pluginsCore.PhaseInfo, error) {
for _, status := range append(
append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) {
if status.State.Terminated != nil && strings.Contains(status.State.Terminated.Reason, OOMKilled) |
}
return pluginsCore.PhaseInfoSuccess(&info), nil
}
func DeterminePrimaryContainerPhase(primaryContainerName string, statuses []v1.ContainerStatus, info *pluginsCore.TaskInfo) pluginsCore.PhaseInfo {
for _, s := range statuses {
if s.Name == primaryContainerName {
if s.State.Waiting != nil || s.State.Running != nil {
return pluginsCore.PhaseInfoRunning(pluginsCore.DefaultPhaseVersion, info)
}
if s.State.Terminated != nil {
if s.State.Terminated.ExitCode != 0 {
return pluginsCore.PhaseInfoRetryableFailure(
s.State.Terminated.Reason, s.State.Terminated.Message, info)
}
return pluginsCore.PhaseInfoSuccess(info)
}
}
}
// If for some reason we can't find the primary container, always just return a permanent failure
return pluginsCore.PhaseInfoFailure("PrimaryContainerMissing",
fmt.Sprintf("Primary container [%s] not found in pod's container statuses", primaryContainerName), info)
}
func ConvertPodFailureToError(status v1.PodStatus) (code, message string) {
code = "UnknownError"
message = "Pod failed. No message received from kubernetes."
if len(status.Reason) > 0 {
code = status.Reason
}
if len(status.Message) > 0 {
message = status.Message
}
for _, c := range append(
append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) {
var containerState v1.ContainerState
if c.LastTerminationState.Terminated != nil {
containerState = c.LastTerminationState
} else if c.State.Terminated != nil {
containerState = c.State
}
if containerState.Terminated != nil {
if strings.Contains(c.State.Terminated.Reason, OOMKilled) {
code = OOMKilled
} else if containerState.Terminated.ExitCode == SIGKILL {
// in some setups, node termination sends SIGKILL to all the containers running on that node. Capturing and
// tagging that correctly.
code = Interrupted
}
if containerState.Terminated.ExitCode == 0 {
message += fmt.Sprintf("\r\n[%v] terminated with ExitCode 0.", c.Name)
} else {
message += fmt.Sprintf("\r\n[%v] terminated with exit code (%v). Reason [%v]. Message: \n%v.",
c.Name,
containerState.Terminated.ExitCode,
containerState.Terminated.Reason,
containerState.Terminated.Message)
}
}
}
return code, message
}
func GetLastTransitionOccurredAt(pod *v1.Pod) v12.Time {
var lastTransitionTime v12.Time
containerStatuses := append(pod.Status.ContainerStatuses, pod.Status.InitContainerStatuses...)
for _, containerStatus := range containerStatuses {
if r := containerStatus.LastTerminationState.Running; r != nil {
if r.StartedAt.Unix() > lastTransitionTime.Unix() {
lastTransitionTime = r.StartedAt
}
} else if r := containerStatus.LastTerminationState.Terminated; r != nil {
if r.FinishedAt.Unix() > lastTransitionTime.Unix() {
lastTransitionTime = r.StartedAt
}
}
}
if lastTransitionTime.IsZero() {
lastTransitionTime = v12.NewTime(time.Now())
}
return lastTransitionTime
}
| {
return pluginsCore.PhaseInfoRetryableFailure("OOMKilled",
"Pod reported success despite being OOMKilled", &info), nil
} | conditional_block |
pod_helper.go | package flytek8s
import (
"context"
"fmt"
"strings"
"time"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/core/template"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/utils"
"github.com/flyteorg/flytestdlib/logger"
v1 "k8s.io/api/core/v1"
v12 "k8s.io/apimachinery/pkg/apis/meta/v1"
pluginsCore "github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/core"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/flytek8s/config"
)
const PodKind = "pod"
const OOMKilled = "OOMKilled"
const Interrupted = "Interrupted"
const SIGKILL = 137
// UpdatePod applies plugin- and task-metadata-driven defaults to the base pod spec used
// to execute tasks. Values already present on podSpec take precedence over the
// configured defaults; plugin-derived tolerations are prepended so user-supplied ones
// are preserved.
func UpdatePod(taskExecutionMetadata pluginsCore.TaskExecutionMetadata,
	resourceRequirements []v1.ResourceRequirements, podSpec *v1.PodSpec) {
	cfg := config.GetK8sPluginConfig()
	interruptible := taskExecutionMetadata.IsInterruptible()
	if podSpec.RestartPolicy == "" {
		podSpec.RestartPolicy = v1.RestartPolicyNever
	}
	// Plugin tolerations first; existing user tolerations are appended after them.
	podSpec.Tolerations = append(
		GetPodTolerations(interruptible, resourceRequirements...), podSpec.Tolerations...)
	if podSpec.ServiceAccountName == "" {
		podSpec.ServiceAccountName = taskExecutionMetadata.GetK8sServiceAccount()
	}
	if podSpec.SchedulerName == "" {
		podSpec.SchedulerName = cfg.SchedulerName
	}
	// Merge in the default node selector, and additionally the interruptible one when
	// the task is marked interruptible.
	nodeSelector := utils.UnionMaps(podSpec.NodeSelector, cfg.DefaultNodeSelector)
	if interruptible {
		nodeSelector = utils.UnionMaps(nodeSelector, cfg.InterruptibleNodeSelector)
	}
	podSpec.NodeSelector = nodeSelector
	if podSpec.Affinity == nil {
		podSpec.Affinity = cfg.DefaultAffinity
	}
}
// ToK8sPodSpec builds a single-container PodSpec from the task template carried by the
// execution context. It reads the task, renders its container (templating in inputs,
// outputs and metadata), applies plugin defaults via UpdatePod, and injects the CoPilot
// configuration. Returns an error if the task cannot be read or defines no container.
func ToK8sPodSpec(ctx context.Context, tCtx pluginsCore.TaskExecutionContext) (*v1.PodSpec, error) {
	task, err := tCtx.TaskReader().Read(ctx)
	if err != nil {
		logger.Warnf(ctx, "failed to read task information when trying to construct Pod, err: %s", err.Error())
		return nil, err
	}
	// This default path only supports container tasks.
	if task.GetContainer() == nil {
		logger.Errorf(ctx, "Default Pod creation logic works for default container in the task template only.")
		return nil, fmt.Errorf("container not specified in task template")
	}
	// Render the task's container, substituting templated values.
	c, err := ToK8sContainer(ctx, task.GetContainer(), task.Interface, template.Parameters{
		Task:             tCtx.TaskReader(),
		Inputs:           tCtx.InputReader(),
		OutputPath:       tCtx.OutputWriter(),
		TaskExecMetadata: tCtx.TaskExecutionMetadata(),
	})
	if err != nil {
		return nil, err
	}
	containers := []v1.Container{
		*c,
	}
	pod := &v1.PodSpec{
		Containers: containers,
	}
	// Apply plugin-configured defaults (tolerations, scheduler, node selectors, ...).
	UpdatePod(tCtx.TaskExecutionMetadata(), []v1.ResourceRequirements{c.Resources}, pod)
	// Attach the CoPilot sidecar/config for data loading, if configured.
	if err := AddCoPilotToPod(ctx, config.GetK8sPluginConfig().CoPilot, pod, task.GetInterface(), tCtx.TaskExecutionMetadata(), tCtx.InputReader(), tCtx.OutputWriter(), task.GetContainer().GetDataConfig()); err != nil {
		return nil, err
	}
	return pod, nil
}
// BuildPodWithSpec wraps the given pod spec in a v1.Pod with the TypeMeta
// (kind/apiVersion) populated and returns a pointer to it.
func BuildPodWithSpec(podSpec *v1.PodSpec) *v1.Pod {
	return &v1.Pod{
		TypeMeta: v12.TypeMeta{
			Kind:       PodKind,
			APIVersion: v1.SchemeGroupVersion.String(),
		},
		Spec: *podSpec,
	}
}
// BuildIdentityPod returns an otherwise-empty pod carrying only the TypeMeta
// (kind/apiVersion).
func BuildIdentityPod() *v1.Pod {
	pod := v1.Pod{
		TypeMeta: v12.TypeMeta{
			Kind:       PodKind,
			APIVersion: v1.SchemeGroupVersion.String(),
		},
	}
	return &pod
}
// Important considerations.
// Pending Status in Pod could be for various reasons and sometimes could signal a problem
// Case I: Pending because the Image pull is failing and it is backing off
// This could be transient. So we can actually rely on the failure reason.
// The failure transitions from ErrImagePull -> ImagePullBackoff
// Case II: Not enough resources are available. This is tricky. It could be that the total number of
// resources requested is beyond the capability of the system. for this we will rely on configuration
// and hence input gates. We should not allow bad requests that request for large number of resource through.
// In the case it makes through, we will fail after timeout
func DemystifyPending(status v1.PodStatus) (pluginsCore.PhaseInfo, error) {
	// Search over the difference conditions in the status object. Note that the 'Pending' this function is
	// demystifying is the 'phase' of the pod status. This is different than the PodReady condition type also used below
	for _, c := range status.Conditions {
		switch c.Type {
		case v1.PodScheduled:
			if c.Status == v1.ConditionFalse {
				// Waiting to be scheduled. This usually refers to inability to acquire resources.
				return pluginsCore.PhaseInfoQueued(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("%s:%s", c.Reason, c.Message)), nil
			}
		case v1.PodReasonUnschedulable:
			// We Ignore case in which we are unable to find resources on the cluster. This is because
			// - The resources may be not available at the moment, but may become available eventually
			//   The pod scheduler will keep on looking at this pod and trying to satisfy it.
			//
			// Pod status looks like this:
			//   message: '0/1 nodes are available: 1 Insufficient memory.'
			//   reason: Unschedulable
			//   status: "False"
			//   type: PodScheduled
			return pluginsCore.PhaseInfoQueued(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("%s:%s", c.Reason, c.Message)), nil
		case v1.PodReady:
			if c.Status == v1.ConditionFalse {
				// This happens in the case the image is having some problems. In the following example, K8s is having
				// problems downloading an image. To ensure that, we will have to iterate over all the container statuses and
				// find if some container has imagepull failure
				// e.g.
				//   - lastProbeTime: null
				//     lastTransitionTime: 2018-12-18T00:57:30Z
				//     message: 'containers with unready status: [myapp-container]'
				//     reason: ContainersNotReady
				//     status: "False"
				//     type: Ready
				//
				// e.g. Container status
				//   - image: blah
				//     imageID: ""
				//     lastState: {}
				//     name: myapp-container
				//     ready: false
				//     restartCount: 0
				//     state:
				//       waiting:
				//         message: Back-off pulling image "blah"
				//         reason: ImagePullBackOff
				// NOTE: only regular container statuses are scanned here (not init containers).
				for _, containerStatus := range status.ContainerStatuses {
					if !containerStatus.Ready {
						if containerStatus.State.Waiting != nil {
							// There are a variety of reasons that can cause a pod to be in this waiting state.
							// Waiting state may be legitimate when the container is being downloaded, started or init containers are running
							reason := containerStatus.State.Waiting.Reason
							finalReason := fmt.Sprintf("%s|%s", c.Reason, reason)
							finalMessage := fmt.Sprintf("%s|%s", c.Message, containerStatus.State.Waiting.Message)
							switch reason {
							case "ErrImagePull", "ContainerCreating", "PodInitializing":
								// But, there are only two "reasons" when a pod is successfully being created and hence it is in
								// waiting state
								// Refer to https://github.com/kubernetes/kubernetes/blob/master/pkg/kubelet/kubelet_pods.go
								// and look for the default waiting states
								// We also want to allow Image pulls to be retried, so ErrImagePull will be ignored
								// as it eventually enters into ImagePullBackOff
								// ErrImagePull -> Transitionary phase to ImagePullBackOff
								// ContainerCreating -> Image is being downloaded
								// PodInitializing -> Init containers are running
								return pluginsCore.PhaseInfoInitializing(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("[%s]: %s", finalReason, finalMessage), &pluginsCore.TaskInfo{OccurredAt: &c.LastTransitionTime.Time}), nil
							case "CreateContainerConfigError", "CreateContainerError":
								// This happens if for instance the command to the container is incorrect, ie doesn't run
								t := c.LastTransitionTime.Time
								return pluginsCore.PhaseInfoFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
									OccurredAt: &t,
								}), nil
							case "ImagePullBackOff":
								t := c.LastTransitionTime.Time
								return pluginsCore.PhaseInfoRetryableFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
									OccurredAt: &t,
								}), nil
							default:
								// Since we are not checking for all error states, we may end up perpetually
								// in the queued state returned at the bottom of this function, until the Pod is reaped
								// by K8s and we get elusive 'pod not found' errors
								// So be default if the container is not waiting with the PodInitializing/ContainerCreating
								// reasons, then we will assume a failure reason, and fail instantly
								t := c.LastTransitionTime.Time
								return pluginsCore.PhaseInfoSystemRetryableFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
									OccurredAt: &t,
								}), nil
							}
						}
					}
				}
			}
		}
	}
	// No condition indicated a terminal problem; the pod is simply waiting to be scheduled.
	return pluginsCore.PhaseInfoQueued(time.Now(), pluginsCore.DefaultPhaseVersion, "Scheduling"), nil
}
func | (status v1.PodStatus, info pluginsCore.TaskInfo) (pluginsCore.PhaseInfo, error) {
for _, status := range append(
append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) {
if status.State.Terminated != nil && strings.Contains(status.State.Terminated.Reason, OOMKilled) {
return pluginsCore.PhaseInfoRetryableFailure("OOMKilled",
"Pod reported success despite being OOMKilled", &info), nil
}
}
return pluginsCore.PhaseInfoSuccess(&info), nil
}
// DeterminePrimaryContainerPhase maps the primary container's status onto a task phase:
// waiting/running -> running, terminated with a non-zero exit code -> retryable
// failure, terminated cleanly -> success. A missing primary container is reported as a
// permanent failure.
func DeterminePrimaryContainerPhase(primaryContainerName string, statuses []v1.ContainerStatus, info *pluginsCore.TaskInfo) pluginsCore.PhaseInfo {
	for _, s := range statuses {
		if s.Name != primaryContainerName {
			continue
		}
		if s.State.Waiting != nil || s.State.Running != nil {
			return pluginsCore.PhaseInfoRunning(pluginsCore.DefaultPhaseVersion, info)
		}
		if term := s.State.Terminated; term != nil {
			if term.ExitCode != 0 {
				return pluginsCore.PhaseInfoRetryableFailure(term.Reason, term.Message, info)
			}
			return pluginsCore.PhaseInfoSuccess(info)
		}
	}
	// If for some reason we can't find the primary container, always just return a permanent failure.
	return pluginsCore.PhaseInfoFailure("PrimaryContainerMissing",
		fmt.Sprintf("Primary container [%s] not found in pod's container statuses", primaryContainerName), info)
}
// ConvertPodFailureToError converts a failed pod's status into an error (code, message)
// pair suitable for reporting. The code defaults to "UnknownError", is refined from the
// pod-level Reason and per-container termination states (OOMKilled, SIGKILL-as-
// interruption), and the message aggregates each terminated container's exit details.
func ConvertPodFailureToError(status v1.PodStatus) (code, message string) {
	code = "UnknownError"
	message = "Pod failed. No message received from kubernetes."
	if len(status.Reason) > 0 {
		code = status.Reason
	}
	if len(status.Message) > 0 {
		message = status.Message
	}
	for _, c := range append(
		append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) {
		var containerState v1.ContainerState
		if c.LastTerminationState.Terminated != nil {
			containerState = c.LastTerminationState
		} else if c.State.Terminated != nil {
			containerState = c.State
		}
		if containerState.Terminated != nil {
			// BUG FIX: this previously read c.State.Terminated.Reason, which panics
			// with a nil-pointer dereference when the termination info was taken from
			// LastTerminationState (c.State.Terminated may be nil in that branch).
			if strings.Contains(containerState.Terminated.Reason, OOMKilled) {
				code = OOMKilled
			} else if containerState.Terminated.ExitCode == SIGKILL {
				// in some setups, node termination sends SIGKILL to all the containers running on that node. Capturing and
				// tagging that correctly.
				code = Interrupted
			}
			if containerState.Terminated.ExitCode == 0 {
				message += fmt.Sprintf("\r\n[%v] terminated with ExitCode 0.", c.Name)
			} else {
				message += fmt.Sprintf("\r\n[%v] terminated with exit code (%v). Reason [%v]. Message: \n%v.",
					c.Name,
					containerState.Terminated.ExitCode,
					containerState.Terminated.Reason,
					containerState.Terminated.Message)
			}
		}
	}
	return code, message
}
// GetLastTransitionOccurredAt returns the most recent transition timestamp recorded in
// the pod's container statuses (regular + init containers). For each container's
// LastTerminationState it uses the start time of a running state and the finish time of
// a terminated state. If no transition is found, the current time is returned so
// callers always receive a usable timestamp.
func GetLastTransitionOccurredAt(pod *v1.Pod) v12.Time {
	var lastTransitionTime v12.Time
	containerStatuses := append(pod.Status.ContainerStatuses, pod.Status.InitContainerStatuses...)
	for _, containerStatus := range containerStatuses {
		if r := containerStatus.LastTerminationState.Running; r != nil {
			if r.StartedAt.Unix() > lastTransitionTime.Unix() {
				lastTransitionTime = r.StartedAt
			}
		} else if r := containerStatus.LastTerminationState.Terminated; r != nil {
			// BUG FIX: this branch compared FinishedAt but then recorded StartedAt,
			// so the reported "last transition" predated the event that won the
			// comparison. Record the same timestamp that was compared.
			if r.FinishedAt.Unix() > lastTransitionTime.Unix() {
				lastTransitionTime = r.FinishedAt
			}
		}
	}
	if lastTransitionTime.IsZero() {
		lastTransitionTime = v12.NewTime(time.Now())
	}
	return lastTransitionTime
}
| DemystifySuccess | identifier_name |
pod_helper.go | package flytek8s
import (
"context"
"fmt"
"strings"
"time"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/core/template"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/utils"
"github.com/flyteorg/flytestdlib/logger"
v1 "k8s.io/api/core/v1"
v12 "k8s.io/apimachinery/pkg/apis/meta/v1"
pluginsCore "github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/core"
"github.com/flyteorg/flyteplugins/go/tasks/pluginmachinery/flytek8s/config"
)
const PodKind = "pod"
const OOMKilled = "OOMKilled"
const Interrupted = "Interrupted"
const SIGKILL = 137
// UpdatePod applies plugin- and task-metadata-driven defaults to the base pod spec used
// to execute tasks. Values already present on podSpec take precedence over the
// configured defaults; plugin-derived tolerations are prepended so user-supplied ones
// are preserved.
func UpdatePod(taskExecutionMetadata pluginsCore.TaskExecutionMetadata,
	resourceRequirements []v1.ResourceRequirements, podSpec *v1.PodSpec) {
	cfg := config.GetK8sPluginConfig()
	interruptible := taskExecutionMetadata.IsInterruptible()
	if podSpec.RestartPolicy == "" {
		podSpec.RestartPolicy = v1.RestartPolicyNever
	}
	// Plugin tolerations first; existing user tolerations are appended after them.
	podSpec.Tolerations = append(
		GetPodTolerations(interruptible, resourceRequirements...), podSpec.Tolerations...)
	if podSpec.ServiceAccountName == "" {
		podSpec.ServiceAccountName = taskExecutionMetadata.GetK8sServiceAccount()
	}
	if podSpec.SchedulerName == "" {
		podSpec.SchedulerName = cfg.SchedulerName
	}
	// Merge in the default node selector, and additionally the interruptible one when
	// the task is marked interruptible.
	nodeSelector := utils.UnionMaps(podSpec.NodeSelector, cfg.DefaultNodeSelector)
	if interruptible {
		nodeSelector = utils.UnionMaps(nodeSelector, cfg.InterruptibleNodeSelector)
	}
	podSpec.NodeSelector = nodeSelector
	if podSpec.Affinity == nil {
		podSpec.Affinity = cfg.DefaultAffinity
	}
}
// ToK8sPodSpec builds a single-container PodSpec from the task template carried by the
// execution context. It reads the task, renders its container (templating in inputs,
// outputs and metadata), applies plugin defaults via UpdatePod, and injects the CoPilot
// configuration. Returns an error if the task cannot be read or defines no container.
func ToK8sPodSpec(ctx context.Context, tCtx pluginsCore.TaskExecutionContext) (*v1.PodSpec, error) {
	task, err := tCtx.TaskReader().Read(ctx)
	if err != nil {
		logger.Warnf(ctx, "failed to read task information when trying to construct Pod, err: %s", err.Error())
		return nil, err
	}
	// This default path only supports container tasks.
	if task.GetContainer() == nil {
		logger.Errorf(ctx, "Default Pod creation logic works for default container in the task template only.")
		return nil, fmt.Errorf("container not specified in task template")
	}
	// Render the task's container, substituting templated values.
	c, err := ToK8sContainer(ctx, task.GetContainer(), task.Interface, template.Parameters{
		Task:             tCtx.TaskReader(),
		Inputs:           tCtx.InputReader(),
		OutputPath:       tCtx.OutputWriter(),
		TaskExecMetadata: tCtx.TaskExecutionMetadata(),
	})
	if err != nil {
		return nil, err
	}
	containers := []v1.Container{
		*c,
	}
	pod := &v1.PodSpec{
		Containers: containers,
	}
	// Apply plugin-configured defaults (tolerations, scheduler, node selectors, ...).
	UpdatePod(tCtx.TaskExecutionMetadata(), []v1.ResourceRequirements{c.Resources}, pod)
	// Attach the CoPilot sidecar/config for data loading, if configured.
	if err := AddCoPilotToPod(ctx, config.GetK8sPluginConfig().CoPilot, pod, task.GetInterface(), tCtx.TaskExecutionMetadata(), tCtx.InputReader(), tCtx.OutputWriter(), task.GetContainer().GetDataConfig()); err != nil {
		return nil, err
	}
	return pod, nil
}
// BuildPodWithSpec wraps the given pod spec in a v1.Pod with the TypeMeta
// (kind/apiVersion) populated and returns a pointer to it.
func BuildPodWithSpec(podSpec *v1.PodSpec) *v1.Pod {
	return &v1.Pod{
		TypeMeta: v12.TypeMeta{
			Kind:       PodKind,
			APIVersion: v1.SchemeGroupVersion.String(),
		},
		Spec: *podSpec,
	}
}
// BuildIdentityPod returns an otherwise-empty pod carrying only the TypeMeta
// (kind/apiVersion).
func BuildIdentityPod() *v1.Pod {
	pod := v1.Pod{
		TypeMeta: v12.TypeMeta{
			Kind:       PodKind,
			APIVersion: v1.SchemeGroupVersion.String(),
		},
	}
	return &pod
}
// Important considerations.
// Pending Status in Pod could be for various reasons and sometimes could signal a problem
// Case I: Pending because the Image pull is failing and it is backing off
// This could be transient. So we can actually rely on the failure reason.
// The failure transitions from ErrImagePull -> ImagePullBackoff
// Case II: Not enough resources are available. This is tricky. It could be that the total number of
// resources requested is beyond the capability of the system. for this we will rely on configuration
// and hence input gates. We should not allow bad requests that request for large number of resource through.
// In the case it makes through, we will fail after timeout
func DemystifyPending(status v1.PodStatus) (pluginsCore.PhaseInfo, error) {
	// Search over the difference conditions in the status object. Note that the 'Pending' this function is
	// demystifying is the 'phase' of the pod status. This is different than the PodReady condition type also used below
	for _, c := range status.Conditions {
		switch c.Type {
		case v1.PodScheduled:
			if c.Status == v1.ConditionFalse {
				// Waiting to be scheduled. This usually refers to inability to acquire resources.
				return pluginsCore.PhaseInfoQueued(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("%s:%s", c.Reason, c.Message)), nil
			}
		case v1.PodReasonUnschedulable:
			// We Ignore case in which we are unable to find resources on the cluster. This is because
			// - The resources may be not available at the moment, but may become available eventually
			//   The pod scheduler will keep on looking at this pod and trying to satisfy it.
			//
			// Pod status looks like this:
			//   message: '0/1 nodes are available: 1 Insufficient memory.'
			//   reason: Unschedulable
			//   status: "False"
			//   type: PodScheduled
			return pluginsCore.PhaseInfoQueued(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("%s:%s", c.Reason, c.Message)), nil
		case v1.PodReady:
			if c.Status == v1.ConditionFalse {
				// This happens in the case the image is having some problems. In the following example, K8s is having
				// problems downloading an image. To ensure that, we will have to iterate over all the container statuses and
				// find if some container has imagepull failure
				// e.g.
				//   - lastProbeTime: null
				//     lastTransitionTime: 2018-12-18T00:57:30Z
				//     message: 'containers with unready status: [myapp-container]'
				//     reason: ContainersNotReady
				//     status: "False"
				//     type: Ready
				//
				// e.g. Container status
				//   - image: blah
				//     imageID: ""
				//     lastState: {}
				//     name: myapp-container
				//     ready: false
				//     restartCount: 0
				//     state:
				//       waiting:
				//         message: Back-off pulling image "blah"
				//         reason: ImagePullBackOff
				// NOTE: only regular container statuses are scanned here (not init containers).
				for _, containerStatus := range status.ContainerStatuses {
					if !containerStatus.Ready {
						if containerStatus.State.Waiting != nil {
							// There are a variety of reasons that can cause a pod to be in this waiting state.
							// Waiting state may be legitimate when the container is being downloaded, started or init containers are running
							reason := containerStatus.State.Waiting.Reason
							finalReason := fmt.Sprintf("%s|%s", c.Reason, reason)
							finalMessage := fmt.Sprintf("%s|%s", c.Message, containerStatus.State.Waiting.Message)
							switch reason {
							case "ErrImagePull", "ContainerCreating", "PodInitializing":
								// But, there are only two "reasons" when a pod is successfully being created and hence it is in
								// waiting state
								// Refer to https://github.com/kubernetes/kubernetes/blob/master/pkg/kubelet/kubelet_pods.go
								// and look for the default waiting states
								// We also want to allow Image pulls to be retried, so ErrImagePull will be ignored
								// as it eventually enters into ImagePullBackOff
								// ErrImagePull -> Transitionary phase to ImagePullBackOff
								// ContainerCreating -> Image is being downloaded
								// PodInitializing -> Init containers are running
								return pluginsCore.PhaseInfoInitializing(c.LastTransitionTime.Time, pluginsCore.DefaultPhaseVersion, fmt.Sprintf("[%s]: %s", finalReason, finalMessage), &pluginsCore.TaskInfo{OccurredAt: &c.LastTransitionTime.Time}), nil
							case "CreateContainerConfigError", "CreateContainerError":
								// This happens if for instance the command to the container is incorrect, ie doesn't run
								t := c.LastTransitionTime.Time
								return pluginsCore.PhaseInfoFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
									OccurredAt: &t,
								}), nil
							case "ImagePullBackOff":
								t := c.LastTransitionTime.Time
								return pluginsCore.PhaseInfoRetryableFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
									OccurredAt: &t,
								}), nil
							default:
								// Since we are not checking for all error states, we may end up perpetually
								// in the queued state returned at the bottom of this function, until the Pod is reaped
								// by K8s and we get elusive 'pod not found' errors
								// So be default if the container is not waiting with the PodInitializing/ContainerCreating
								// reasons, then we will assume a failure reason, and fail instantly
								t := c.LastTransitionTime.Time
								return pluginsCore.PhaseInfoSystemRetryableFailure(finalReason, finalMessage, &pluginsCore.TaskInfo{
									OccurredAt: &t,
								}), nil
							}
						}
					}
				}
			}
		}
	}
	// No condition indicated a terminal problem; the pod is simply waiting to be scheduled.
	return pluginsCore.PhaseInfoQueued(time.Now(), pluginsCore.DefaultPhaseVersion, "Scheduling"), nil
}
// DemystifySuccess inspects a pod that reported overall success and demotes it to a
// retryable failure if any container (init, regular, or ephemeral) was OOMKilled.
// Kubernetes can report pod-level success even though a container was killed by the
// OOM killer, so callers should not trust the pod phase alone.
func DemystifySuccess(status v1.PodStatus, info pluginsCore.TaskInfo) (pluginsCore.PhaseInfo, error) {
	// Use a distinct loop variable name: the original shadowed the `status` parameter,
	// which is error-prone and trips shadow analysis.
	for _, containerStatus := range append(
		append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) {
		if t := containerStatus.State.Terminated; t != nil && strings.Contains(t.Reason, OOMKilled) {
			return pluginsCore.PhaseInfoRetryableFailure(OOMKilled,
				"Pod reported success despite being OOMKilled", &info), nil
		}
	}
	return pluginsCore.PhaseInfoSuccess(&info), nil
}
// DeterminePrimaryContainerPhase maps the primary container's status onto a task phase:
// waiting/running -> running, terminated with a non-zero exit code -> retryable
// failure, terminated cleanly -> success. A missing primary container is reported as a
// permanent failure.
func DeterminePrimaryContainerPhase(primaryContainerName string, statuses []v1.ContainerStatus, info *pluginsCore.TaskInfo) pluginsCore.PhaseInfo {
	for _, s := range statuses {
		if s.Name != primaryContainerName {
			continue
		}
		if s.State.Waiting != nil || s.State.Running != nil {
			return pluginsCore.PhaseInfoRunning(pluginsCore.DefaultPhaseVersion, info)
		}
		if term := s.State.Terminated; term != nil {
			if term.ExitCode != 0 {
				return pluginsCore.PhaseInfoRetryableFailure(term.Reason, term.Message, info)
			}
			return pluginsCore.PhaseInfoSuccess(info)
		}
	}
	// If for some reason we can't find the primary container, always just return a permanent failure.
	return pluginsCore.PhaseInfoFailure("PrimaryContainerMissing",
		fmt.Sprintf("Primary container [%s] not found in pod's container statuses", primaryContainerName), info)
}
func ConvertPodFailureToError(status v1.PodStatus) (code, message string) |
// GetLastTransitionOccurredAt returns the most recent transition timestamp recorded in
// the pod's container statuses (regular + init containers). For each container's
// LastTerminationState it uses the start time of a running state and the finish time of
// a terminated state. If no transition is found, the current time is returned so
// callers always receive a usable timestamp.
func GetLastTransitionOccurredAt(pod *v1.Pod) v12.Time {
	var lastTransitionTime v12.Time
	containerStatuses := append(pod.Status.ContainerStatuses, pod.Status.InitContainerStatuses...)
	for _, containerStatus := range containerStatuses {
		if r := containerStatus.LastTerminationState.Running; r != nil {
			if r.StartedAt.Unix() > lastTransitionTime.Unix() {
				lastTransitionTime = r.StartedAt
			}
		} else if r := containerStatus.LastTerminationState.Terminated; r != nil {
			// BUG FIX: this branch compared FinishedAt but then recorded StartedAt,
			// so the reported "last transition" predated the event that won the
			// comparison. Record the same timestamp that was compared.
			if r.FinishedAt.Unix() > lastTransitionTime.Unix() {
				lastTransitionTime = r.FinishedAt
			}
		}
	}
	if lastTransitionTime.IsZero() {
		lastTransitionTime = v12.NewTime(time.Now())
	}
	return lastTransitionTime
}
| {
code = "UnknownError"
message = "Pod failed. No message received from kubernetes."
if len(status.Reason) > 0 {
code = status.Reason
}
if len(status.Message) > 0 {
message = status.Message
}
for _, c := range append(
append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) {
var containerState v1.ContainerState
if c.LastTerminationState.Terminated != nil {
containerState = c.LastTerminationState
} else if c.State.Terminated != nil {
containerState = c.State
}
if containerState.Terminated != nil {
if strings.Contains(c.State.Terminated.Reason, OOMKilled) {
code = OOMKilled
} else if containerState.Terminated.ExitCode == SIGKILL {
// in some setups, node termination sends SIGKILL to all the containers running on that node. Capturing and
// tagging that correctly.
code = Interrupted
}
if containerState.Terminated.ExitCode == 0 {
message += fmt.Sprintf("\r\n[%v] terminated with ExitCode 0.", c.Name)
} else {
message += fmt.Sprintf("\r\n[%v] terminated with exit code (%v). Reason [%v]. Message: \n%v.",
c.Name,
containerState.Terminated.ExitCode,
containerState.Terminated.Reason,
containerState.Terminated.Message)
}
}
}
return code, message
} | identifier_body |
searchByName.js | (function () {
'use strict';
angular
.module('app.searchByName')
.controller('SearchByName', SearchByName);
SearchByName.$inject = ['$scope', '$ionicConfig', '$ionicNavBarDelegate', 'SuggestFactory', '$state', '$stateParams', 'CONFIG', 'JamHelperFactory', 'AnalyticsHelper', 'LoaderFactory', 'OrientationchangeFactory', '$timeout', 'CacheFactory'];
function SearchByName($scope, $ionicConfig, $ionicNavBarDelegate, SuggestFactory, $state, $stateParams, CONFIG, JamHelperFactory, AnalyticsHelper, LoaderFactory, OrientationchangeFactory, $timeout, CacheFactory) {
/* jshint validthis: true */
/* ViewModel */
var vm = this;
// set back-button
$ionicConfig.backButton.text("Suche");
vm.searchInput = {
what: '',
where: '',
dist: 5,
results: []
};
vm.searchInputTemp = {
what: '',
where: ''
};
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
vm.showWhatCancel = false;
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
vm.showWhereCancel = false;
vm.controlInputFieldPosition = controlInputFieldPosition; // Steuert das Ein- und Ausblenden
vm.setChosenSuggestItem = setChosenSuggestItem;
vm.getSuggestData = getSuggestData;
vm.clearSearch = clearSearch;
vm.search = search;
vm.searchParamsObject = {};
// wenn iOs-App dann wird eine extra iOS-Css-Klasse benötigt im bar-header
vm.isIosApp = (CONFIG.environment == 'app' && CONFIG.deviceOs == 'iOS') ? true : false;
vm.urlPrefix = CONFIG.urlPrefix;
vm.isTablet = (CONFIG.deviceType == 'tablet') ? true : false;
vm.hideNavBar = (CONFIG.deviceOrientation == 'landscape' && vm.isTablet) ? true : false;
vm.removeFocusFromInput = removeFocusFromInput;
vm.checkKey= checkKey;
vm.bestMatchSuggest = {};
vm.clearTextInput = clearTextInput;
vm.lastSearchPersonSuggest = false;
////////////
$scope.$on('$ionicView.afterEnter', function() {
// set last search object
CacheFactory.getFromCache('searchCache').then(function(searchCache) {
vm.lastSearchPersonSuggest = CacheFactory.lastSearchPersonSuggest(searchCache);
}, function() {
// empty cache
vm.lastSearchPersonSuggest = false;
});
LoaderFactory.hideLoader();
// add event listener
OrientationchangeFactory.initListener();
// set back button
window.setTimeout(function() {
JamHelperFactory.setBackButton('burger');
}, CONFIG.backButtonDelay);
// GATRACKING
AnalyticsHelper.trackPageview('/namenssuche/');
// set canonical
JamHelperFactory.setCanonical('https://www.jameda.de/empfehlen/', true);
// reset back button cache
JamHelperFactory.setIntoCache('profileBackParams', {});
});
$scope.$on('$ionicView.beforeLeave', function() {
JamHelperFactory.resetBackButton();
});
// Handles key presses on the search input. Relies on the implicit global
// `event` (window.event) instead of a handler argument — NOTE(review): this
// is non-standard and not supported in all browsers; confirm target platforms.
function checkKey(type) {
    // Enter (13) or Escape (27): close the keyboard and, for the "what"
    // field, commit the current best-match suggestion.
    if (event.keyCode == 13 || event.keyCode == 27) {
        hideKeyboard();
        if (type == 'what') {
            vm.setChosenSuggestItem({
                searchType: vm.bestMatchSuggest.searchType,
                inputItem: vm.bestMatchSuggest.was,
                selectedItem: vm.bestMatchSuggest.was,
                gruppe_fach_param: vm.bestMatchSuggest.gruppe_fach_param,
                was_sel: vm.bestMatchSuggest.was_sel
            });
        }
    }
}
// Derives the best-matching suggestion for the given input type from the
// suggest response: a "Fachbereiche" (specialties) group wins, a "Namen"
// (names) group falls back to the raw user input, and an empty response with
// non-empty input is treated as a name search.
// NOTE(review): assumes `data` is ordered with the best group first — confirm
// against the suggest API contract.
function setBestMatchSuggest(type, data, input) {
    vm.bestMatchSuggest = {};
    if (type == 'what') {
        if (data && typeof data[0] !== 'undefined') {
            // Find the best matches.
            vm.bestMatchSuggest.searchType = 'what';
            vm.bestMatchSuggest.was_sel = 1;
            if (data[0].header == 'Fachbereiche') {
                vm.bestMatchSuggest.was = data[0].list[0].term;
                vm.bestMatchSuggest.gruppe_fach_param = data[0].list[0].select;
            } else if (data[0].header == 'Namen') {
                vm.bestMatchSuggest.was = input.what;
                vm.bestMatchSuggest.namen = input.what;
            }
        }
        // No suggestions returned: fall back to searching the raw input as a name.
        if ((typeof data === 'undefined' || data == '') && input != '') {
            vm.bestMatchSuggest.searchType = 'what';
            vm.bestMatchSuggest.was = input.what;
            vm.bestMatchSuggest.namen = input.what;
            vm.bestMatchSuggest.was_sel = 1;
        }
    }
}
// Blurs the active input (closing the on-screen keyboard) and then
// recalculates the suggest area height.
function removeFocusFromInput() {
    hideKeyboard();
    // Recompute the scrollable suggest area height after the keyboard is gone.
    setScrollHeight();
}
// Closes the on-screen keyboard by removing focus from all elements.
function hideKeyboard() {
    ionic.DomUtil.blurAll();
}
// Clears the "what" input, stashing the previous value so a cancel can restore it.
function clearTextInput() {
    // Temporarily remember the last input.
    vm.searchInputTemp.what = vm.searchInput.what;
    // Clear the input.
    vm.searchInput.what = "";
}
// Recomputes the height of the suggest scroll area once the DOM has settled
// (hence the $timeout). Android tablets get a measurement-based height because
// the suggest box may exceed the viewport.
function setScrollHeight() {
    $timeout(function () {
        var suggestScrollArea = $('ion-nav-view[name="menuContent"] ion-view[nav-view="active"] #suggest-scroll-area');
        // Only on Android tablets.
        // BUG FIX: this previously tested vm.deviceOs, which is never assigned
        // anywhere in this controller (always undefined), so the Android-tablet
        // branch could never execute. The OS is exposed on CONFIG.deviceOs
        // (see the vm.isIosApp initialization above).
        if (vm.isTablet && CONFIG.deviceOs == 'Android') {
            var tmpHeight = $('ion-view[nav-view="active"] .jam-suggest-box-tablet').height();
            if (tmpHeight > CONFIG.windowHeight) {
                suggestScrollArea.height(CONFIG.windowHeight - 75);
            } else {
                suggestScrollArea.height(CONFIG.windowHeight - 124);
            }
        } else {
            if (vm.isTablet) {
                suggestScrollArea.height(CONFIG.windowHeight - 124);
            } else {
                suggestScrollArea.height(CONFIG.windowHeight - 75);
            }
        }
    }, 100);
}
function controlInputFieldPosition(type) {
/* */
if(type == 'what') {
// GATRACKING
if (vm.showWhereInputField) {
AnalyticsHelper.trackEvent('Namenssuche - Klicks', 'Was Wen Suchleiste geklickt');
}
$ionicNavBarDelegate.showBar(false);
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = true;
/// verstecke Wo-Input
vm.showWhereInputField = false;
vm.showWhereSuggest = false;
// zeige Abbrechen Button
vm.showWhatCancel = true;
// letzte Eingabe temporär zwischenspeichern
vm.searchInputTemp.what = vm.searchInput.what;
// Suggest-Daten holen mit Eingabe
if(vm.searchInputTemp.what) {
vm.getSuggestData(type);
}
// Höhe für scrollbaren Bereich für den Suggest setzen
setScrollHeight();
}
if(type == 'cancelWhat' || type == 'cancelWhere') {
if (CONFIG.deviceType == 'phone' || CONFIG.deviceOrientation != 'landscape') {
$ionicNavBarDelegate.showBar(true);
}
// zeige Wo-Input
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
// verstecke Abbrechen Buttons
vm.showWhatCancel = false;
vm.showWhereCancel = false;
// Immer dann wenn die Eingabe sich verändert hat
// und auf Abbrechen geklickt,
// erste Eingabe zurück schreiben
if(type == 'cancelWhat') {
if(vm.searchInput.what != vm.searchInputTemp.what) {
vm.searchInput.what = vm.searchInputTemp.what;
}
}
if(type == 'cancelWhere') {
if(vm.searchInput.where != vm.searchInputTemp.where) {
vm.searchInput.where = vm.searchInputTemp.where;
}
}
}
if(type == 'all') {
$ionic | nction setSearchParamsObject(params) {
// Suche über Was / Wen
if (params.searchType == "what") {
vm.searchParamsObject.was = params.selectedItem;
vm.searchParamsObject.was_i = params.inputItem;
vm.searchParamsObject.was_sel = (typeof params.was_sel !== 'undefined' && params.was_sel) ? params.was_sel : 0;
if (typeof params.gruppe_fach_param != 'undefined') {
var params = params.gruppe_fach_param.split("&");
for (var i=0; i<params.length; i++) {
var pair = params[i].split("=");
vm.searchParamsObject[pair[0]] = pair[1];
}
vm.searchParamsObject.namen = '';
} else if (typeof params.selectedItem != 'undefined' && typeof params.deeplink == 'undefined') {
vm.searchParamsObject.namen = params.selectedItem;
vm.searchParamsObject.gruppe_fach_param = '';
} else if (typeof params.deeplink != 'undefined' && params.deeplink != '' && typeof params.selectedItem != 'undefined') {
var customerId = JamHelperFactory.strReplace('/profil/', '', params.deeplink);
customerId = JamHelperFactory.strReplace('?beta=1', '', customerId);
var customerIdClean = customerId.split("_")[0];
if (typeof params.multi !== 'undefined' && params.multi.length > 1) {
// mehrere Standorte, direkt auf die Auswahlseite leiten
$state.go('searchResultListSub', {
refId: customerIdClean,
isSearch: true
});
} else {
// direkt auf das Profil leiten, Beispiel: /profil/uebersicht/45897203100_2/
$state.go('profile', {
fullRefId: customerId,
path: 'profil',
backLinkType: 'deeplink',
isSearch: true
});
}
}
}
// Personen-Suche aus dem Cache aufrufen
if (params.searchType == "personSearchFromCache") {
// Profil
if (typeof params.objFromCache.what_name_nice.refId !== 'undefined' && params.objFromCache.what_name_nice.refId != '') {
// direkt auf das Profil leiten
$state.go('profile', {
fullRefId: params.objFromCache.what_name_nice.refId,
path: 'profil',
backLinkType: 'deeplink',
isSearch: true
});
}
}
}
// Applies a picked suggest entry: tracks the click, builds the search params,
// writes the selection back into the input model and closes the suggest view.
function setChosenSuggestItem(params) {
    // GATRACKING
    AnalyticsHelper.trackEvent('Namenssuche - Klicks', 'Namenssuggest geklickt');
    /* */
    setSearchParamsObject(params);
    if(params.searchType == 'what') {
        vm.searchInput.what = params.selectedItem;
    }
    if(params.searchType == 'where') {
        vm.searchInput.where = params.selectedItem;
    }
    // Show everything again (restore both input fields, hide the suggest lists).
    controlInputFieldPosition('all');
}
// Fetches suggest entries for the current input and toggles which suggest
// list ("what" or "where") is visible while the request is in flight.
function getSuggestData(type) {
    /* */
    // Fetch data for the suggest list.
    SuggestFactory.getSuggestDataByName(vm.searchInput).then(
        function(data) {
            data = data.data;
            setBestMatchSuggest(type, data.suggests, vm.searchInput);
            vm.searchInput.results = data.suggests;
            setScrollHeight();
        }
    );
    // Show the "what" suggest list.
    if(type == 'what') {
        vm.showWhatSuggest = true;
        vm.showWhereSuggest = false;
    }
    // Show the "where" suggest list.
    if(type == 'where') {
        vm.showWhatSuggest = false;
        vm.showWhereSuggest = true;
    }
}
function clearSearch(type) {
/* */
if(type == 'cancelWhat') {
vm.searchInput.what = '';
}
if(type == 'cancelWhere') {
vm.searchInput.where = '';
}
}
function search() {
$state.go('searchResultList', vm.searchParamsObject);
}
activate();
$scope.$on('$ionicView.enter', function() {
activateViewToForeground();
});
function activate() {
// Aufruf aus dem Fachgebiete-Verzeichnis
if($stateParams.what) {
var params = {
searchType: 'what',
selectedItem: $stateParams.what,
gruppe_fach_param: $stateParams.gruppe_fach_param
};
vm.setChosenSuggestItem(params);
}
}
function activateViewToForeground() {
// Run code for when the view comes into foreground
// On initial launch this function fires prior to activate()
}
}
})(); | NavBarDelegate.showBar(true);
// zeige Wo-Input
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
// verstecke Abbrechen Buttons
vm.showWhatCancel = false;
vm.showWhereCancel = false;
}
}
fu | conditional_block |
searchByName.js | (function () {
'use strict';
angular
.module('app.searchByName')
.controller('SearchByName', SearchByName);
SearchByName.$inject = ['$scope', '$ionicConfig', '$ionicNavBarDelegate', 'SuggestFactory', '$state', '$stateParams', 'CONFIG', 'JamHelperFactory', 'AnalyticsHelper', 'LoaderFactory', 'OrientationchangeFactory', '$timeout', 'CacheFactory'];
function SearchByName($scope, $ionicConfig, $ionicNavBarDelegate, SuggestFactory, $state, $stateParams, CONFIG, JamHelperFactory, AnalyticsHelper, LoaderFactory, OrientationchangeFactory, $timeout, CacheFactory) {
/* jshint validthis: true */
/* ViewModel */
var vm = this;
// set back-button
$ionicConfig.backButton.text("Suche");
vm.searchInput = {
what: '',
where: '',
dist: 5,
results: []
};
vm.searchInputTemp = {
what: '',
where: ''
};
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
vm.showWhatCancel = false;
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
vm.showWhereCancel = false;
vm.controlInputFieldPosition = controlInputFieldPosition; // Steuert das Ein- und Ausblenden
vm.setChosenSuggestItem = setChosenSuggestItem;
vm.getSuggestData = getSuggestData;
vm.clearSearch = clearSearch;
vm.search = search;
vm.searchParamsObject = {};
// wenn iOs-App dann wird eine extra iOS-Css-Klasse benötigt im bar-header
vm.isIosApp = (CONFIG.environment == 'app' && CONFIG.deviceOs == 'iOS') ? true : false;
vm.urlPrefix = CONFIG.urlPrefix;
vm.isTablet = (CONFIG.deviceType == 'tablet') ? true : false;
vm.hideNavBar = (CONFIG.deviceOrientation == 'landscape' && vm.isTablet) ? true : false;
vm.removeFocusFromInput = removeFocusFromInput;
vm.checkKey= checkKey;
vm.bestMatchSuggest = {};
vm.clearTextInput = clearTextInput;
vm.lastSearchPersonSuggest = false;
////////////
$scope.$on('$ionicView.afterEnter', function() {
// set last search object
CacheFactory.getFromCache('searchCache').then(function(searchCache) {
vm.lastSearchPersonSuggest = CacheFactory.lastSearchPersonSuggest(searchCache);
}, function() {
// empty cache
vm.lastSearchPersonSuggest = false;
});
LoaderFactory.hideLoader();
// add event listener
OrientationchangeFactory.initListener();
// set back button
window.setTimeout(function() {
JamHelperFactory.setBackButton('burger');
}, CONFIG.backButtonDelay);
// GATRACKING
AnalyticsHelper.trackPageview('/namenssuche/');
// set canonical
JamHelperFactory.setCanonical('https://www.jameda.de/empfehlen/', true);
// reset back button cache
JamHelperFactory.setIntoCache('profileBackParams', {});
});
$scope.$on('$ionicView.beforeLeave', function() {
JamHelperFactory.resetBackButton();
});
function checkKey(type) {
// Keyboard wurde gedrückt (13 = Enter, 27 = Escape)
if (event.keyCode == 13 || event.keyCode == 27) {
hideKeyboard();
if (type == 'what') {
vm.setChosenSuggestItem({
searchType: vm.bestMatchSuggest.searchType,
inputItem: vm.bestMatchSuggest.was,
selectedItem: vm.bestMatchSuggest.was,
gruppe_fach_param: vm.bestMatchSuggest.gruppe_fach_param,
was_sel: vm.bestMatchSuggest.was_sel
});
}
}
}
function setBestMatchSuggest(type, data, input) {
vm.bestMatchSuggest = {};
if (type == 'what') {
if (data && typeof data[0] !== 'undefined') {
// Best Matches finden
vm.bestMatchSuggest.searchType = 'what';
vm.bestMatchSuggest.was_sel = 1;
if (data[0].header == 'Fachbereiche') {
vm.bestMatchSuggest.was = data[0].list[0].term;
vm.bestMatchSuggest.gruppe_fach_param = data[0].list[0].select;
} else if (data[0].header == 'Namen') {
vm.bestMatchSuggest.was = input.what;
vm.bestMatchSuggest.namen = input.what;
}
}
if ((typeof data === 'undefined' || data == '') && input != '') {
vm.bestMatchSuggest.searchType = 'what';
vm.bestMatchSuggest.was = input.what;
vm.bestMatchSuggest.namen = input.what;
vm.bestMatchSuggest.was_sel = 1;
}
}
}
function removeFocusFromInput() {
hideKeyboard();
// Höhe für scrollbaren Bereich für den Suggest setzen
setScrollHeight();
}
function hideKeyboard() {
ionic.DomUtil.blurAll();
}
function clearTextInput() {
// letzte Eingabe temporär zwischenspeichern
vm.searchInputTemp.what = vm.searchInput.what;
// Eingabe löschen
vm.searchInput.what = "";
}
function setScrollHeight() {
$timeout(function () {
var suggestScrollArea = $('ion-nav-view[name="menuContent"] ion-view[nav-view="active"] #suggest-scroll-area');
// Only on android tablets
if (vm.isTablet && vm.deviceOs == 'Android') {
var tmpHeight = $('ion-view[nav-view="active"] .jam-suggest-box-tablet').height();
if (tmpHeight > CONFIG.windowHeight) {
suggestScrollArea.height(CONFIG.windowHeight - 75);
} else {
suggestScrollArea.height(CONFIG.windowHeight - 124);
}
} else {
if (vm.isTablet) {
suggestScrollArea.height(CONFIG.windowHeight - 124);
} else {
suggestScrollArea.height(CONFIG.windowHeight - 75);
}
}
}, 100);
}
function controlInputFieldPosition(type) {
/* */
if(type == 'what') {
// GATRACKING
if (vm.showWhereInputField) {
AnalyticsHelper.trackEvent('Namenssuche - Klicks', 'Was Wen Suchleiste geklickt');
}
$ionicNavBarDelegate.showBar(false);
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = true;
/// verstecke Wo-Input
vm.showWhereInputField = false;
vm.showWhereSuggest = false;
// zeige Abbrechen Button
vm.showWhatCancel = true;
// letzte Eingabe temporär zwischenspeichern
vm.searchInputTemp.what = vm.searchInput.what;
// Suggest-Daten holen mit Eingabe
if(vm.searchInputTemp.what) {
vm.getSuggestData(type);
}
// Höhe für scrollbaren Bereich für den Suggest setzen
setScrollHeight();
}
if(type == 'cancelWhat' || type == 'cancelWhere') {
if (CONFIG.deviceType == 'phone' || CONFIG.deviceOrientation != 'landscape') {
$ionicNavBarDelegate.showBar(true);
}
// zeige Wo-Input
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
// verstecke Abbrechen Buttons
vm.showWhatCancel = false;
vm.showWhereCancel = false;
// Immer dann wenn die Eingabe sich verändert hat
// und auf Abbrechen geklickt,
// erste Eingabe zurück schreiben
if(type == 'cancelWhat') {
if(vm.searchInput.what != vm.searchInputTemp.what) {
vm.searchInput.what = vm.searchInputTemp.what;
}
}
if(type == 'cancelWhere') {
if(vm.searchInput.where != vm.searchInputTemp.where) {
vm.searchInput.where = vm.searchInputTemp.where;
}
}
}
if(type == 'all') {
$ionicNavBarDelegate.showBar(true);
// zeige Wo-Input
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
// verstecke Abbrechen Buttons
vm.showWhatCancel = false;
vm.showWhereCancel = false;
}
}
function setSearchParamsObject(params) {
// Suche über Was / Wen
if (params.searchType == "what") {
vm.searchParamsObject.was = params.selectedItem;
vm.searchParamsObject.was_i = params.inputItem;
vm.searchParamsObject.was_sel = (typeof params.was_sel !== 'undefined' && params.was_sel) ? params.was_sel : 0;
if (typeof params.gruppe_fach_param != 'undefined') {
var params = params.gruppe_fach_param.split("&");
for (var i=0; i<params.length; i++) {
var pair = params[i].split("=");
vm.searchParamsObject[pair[0]] = pair[1];
}
vm.searchParamsObject.namen = '';
} else if (typeof params.selectedItem != 'undefined' && typeof params.deeplink == 'undefined') {
vm.searchParamsObject.namen = params.selectedItem;
vm.searchParamsObject.gruppe_fach_param = '';
} else if (typeof params.deeplink != 'undefined' && params.deeplink != '' && typeof params.selectedItem != 'undefined') {
var customerId = JamHelperFactory.strReplace('/profil/', '', params.deeplink);
customerId = JamHelperFactory.strReplace('?beta=1', '', customerId);
var customerIdClean = customerId.split("_")[0];
if (typeof params.multi !== 'undefined' && params.multi.length > 1) {
// mehrere Standorte, direkt auf die Auswahlseite leiten
$state.go('searchResultListSub', {
refId: customerIdClean,
isSearch: true
});
} else {
// direkt auf das Profil leiten, Beispiel: /profil/uebersicht/45897203100_2/
$state.go('profile', {
fullRefId: customerId,
path: 'profil',
backLinkType: 'deeplink',
isSearch: true
});
}
}
}
// Personen-Suche aus dem Cache aufrufen
if (params.searchType == "personSearchFromCache") {
// Profil
if (typeof params.objFromCache.what_name_nice.refId !== 'undefined' && params.objFromCache.what_name_nice.refId != '') {
// direkt auf das Profil leiten
$state.go('profile', {
fullRefId: params.objFromCache.what_name_nice.refId,
path: 'profil',
backLinkType: 'deeplink',
isSearch: true
});
}
}
}
function setChosenSuggestItem(params) {
// GATRACKING
AnalyticsHelper.trackEvent('Namenssuche - Klicks', 'Namenssuggest geklickt');
| if(params.searchType == 'what') {
vm.searchInput.what = params.selectedItem;
}
if(params.searchType == 'where') {
vm.searchInput.where = params.selectedItem;
}
// zeige alles
controlInputFieldPosition('all');
}
function getSuggestData(type) {
/* */
// hole Daten für Suggest
SuggestFactory.getSuggestDataByName(vm.searchInput).then(
function(data) {
data = data.data;
setBestMatchSuggest(type, data.suggests, vm.searchInput);
vm.searchInput.results = data.suggests;
setScrollHeight();
}
);
// zeige Was-Suggest
if(type == 'what') {
vm.showWhatSuggest = true;
vm.showWhereSuggest = false;
}
// zeige Wo-Suggest
if(type == 'where') {
vm.showWhatSuggest = false;
vm.showWhereSuggest = true;
}
}
function clearSearch(type) {
/* */
if(type == 'cancelWhat') {
vm.searchInput.what = '';
}
if(type == 'cancelWhere') {
vm.searchInput.where = '';
}
}
function search() {
$state.go('searchResultList', vm.searchParamsObject);
}
activate();
$scope.$on('$ionicView.enter', function() {
activateViewToForeground();
});
function activate() {
// Aufruf aus dem Fachgebiete-Verzeichnis
if($stateParams.what) {
var params = {
searchType: 'what',
selectedItem: $stateParams.what,
gruppe_fach_param: $stateParams.gruppe_fach_param
};
vm.setChosenSuggestItem(params);
}
}
function activateViewToForeground() {
// Run code for when the view comes into foreground
// On initial launch this function fires prior to activate()
}
}
})(); |
/* */
setSearchParamsObject(params);
| random_line_split |
searchByName.js | (function () {
'use strict';
angular
.module('app.searchByName')
.controller('SearchByName', SearchByName);
SearchByName.$inject = ['$scope', '$ionicConfig', '$ionicNavBarDelegate', 'SuggestFactory', '$state', '$stateParams', 'CONFIG', 'JamHelperFactory', 'AnalyticsHelper', 'LoaderFactory', 'OrientationchangeFactory', '$timeout', 'CacheFactory'];
function SearchByName($scope, $ionicConfig, $ionicNavBarDelegate, SuggestFactory, $state, $stateParams, CONFIG, JamHelperFactory, AnalyticsHelper, LoaderFactory, OrientationchangeFactory, $timeout, CacheFactory) {
/* jshint validthis: true */
/* ViewModel */
var vm = this;
// set back-button
$ionicConfig.backButton.text("Suche");
vm.searchInput = {
what: '',
where: '',
dist: 5,
results: []
};
vm.searchInputTemp = {
what: '',
where: ''
};
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
vm.showWhatCancel = false;
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
vm.showWhereCancel = false;
vm.controlInputFieldPosition = controlInputFieldPosition; // Steuert das Ein- und Ausblenden
vm.setChosenSuggestItem = setChosenSuggestItem;
vm.getSuggestData = getSuggestData;
vm.clearSearch = clearSearch;
vm.search = search;
vm.searchParamsObject = {};
// wenn iOs-App dann wird eine extra iOS-Css-Klasse benötigt im bar-header
vm.isIosApp = (CONFIG.environment == 'app' && CONFIG.deviceOs == 'iOS') ? true : false;
vm.urlPrefix = CONFIG.urlPrefix;
vm.isTablet = (CONFIG.deviceType == 'tablet') ? true : false;
vm.hideNavBar = (CONFIG.deviceOrientation == 'landscape' && vm.isTablet) ? true : false;
vm.removeFocusFromInput = removeFocusFromInput;
vm.checkKey= checkKey;
vm.bestMatchSuggest = {};
vm.clearTextInput = clearTextInput;
vm.lastSearchPersonSuggest = false;
////////////
$scope.$on('$ionicView.afterEnter', function() {
// set last search object
CacheFactory.getFromCache('searchCache').then(function(searchCache) {
vm.lastSearchPersonSuggest = CacheFactory.lastSearchPersonSuggest(searchCache);
}, function() {
// empty cache
vm.lastSearchPersonSuggest = false;
});
LoaderFactory.hideLoader();
// add event listener
OrientationchangeFactory.initListener();
// set back button
window.setTimeout(function() {
JamHelperFactory.setBackButton('burger');
}, CONFIG.backButtonDelay);
// GATRACKING
AnalyticsHelper.trackPageview('/namenssuche/');
// set canonical
JamHelperFactory.setCanonical('https://www.jameda.de/empfehlen/', true);
// reset back button cache
JamHelperFactory.setIntoCache('profileBackParams', {});
});
$scope.$on('$ionicView.beforeLeave', function() {
JamHelperFactory.resetBackButton();
});
function checkKey(type) {
// Keyboard wurde gedrückt (13 = Enter, 27 = Escape)
if (event.keyCode == 13 || event.keyCode == 27) {
hideKeyboard();
if (type == 'what') {
vm.setChosenSuggestItem({
searchType: vm.bestMatchSuggest.searchType,
inputItem: vm.bestMatchSuggest.was,
selectedItem: vm.bestMatchSuggest.was,
gruppe_fach_param: vm.bestMatchSuggest.gruppe_fach_param,
was_sel: vm.bestMatchSuggest.was_sel
});
}
}
}
function setBestMatchSuggest(type, data, input) {
vm.bestMatchSuggest = {};
if (type == 'what') {
if (data && typeof data[0] !== 'undefined') {
// Best Matches finden
vm.bestMatchSuggest.searchType = 'what';
vm.bestMatchSuggest.was_sel = 1;
if (data[0].header == 'Fachbereiche') {
vm.bestMatchSuggest.was = data[0].list[0].term;
vm.bestMatchSuggest.gruppe_fach_param = data[0].list[0].select;
} else if (data[0].header == 'Namen') {
vm.bestMatchSuggest.was = input.what;
vm.bestMatchSuggest.namen = input.what;
}
}
if ((typeof data === 'undefined' || data == '') && input != '') {
vm.bestMatchSuggest.searchType = 'what';
vm.bestMatchSuggest.was = input.what;
vm.bestMatchSuggest.namen = input.what;
vm.bestMatchSuggest.was_sel = 1;
}
}
}
function removeFocusFromInput() {
hideKeyboard();
// Höhe für scrollbaren Bereich für den Suggest setzen
setScrollHeight();
}
function hideKeyboard() {
ionic.DomUtil.blurAll();
}
function clearTextInput() {
// letzte Eingabe temporär zwischenspeichern
vm.searchInputTemp.what = vm.searchInput.what;
// Eingabe löschen
vm.searchInput.what = "";
}
function setScro |
$timeout(function () {
var suggestScrollArea = $('ion-nav-view[name="menuContent"] ion-view[nav-view="active"] #suggest-scroll-area');
// Only on android tablets
if (vm.isTablet && vm.deviceOs == 'Android') {
var tmpHeight = $('ion-view[nav-view="active"] .jam-suggest-box-tablet').height();
if (tmpHeight > CONFIG.windowHeight) {
suggestScrollArea.height(CONFIG.windowHeight - 75);
} else {
suggestScrollArea.height(CONFIG.windowHeight - 124);
}
} else {
if (vm.isTablet) {
suggestScrollArea.height(CONFIG.windowHeight - 124);
} else {
suggestScrollArea.height(CONFIG.windowHeight - 75);
}
}
}, 100);
}
function controlInputFieldPosition(type) {
/* */
if(type == 'what') {
// GATRACKING
if (vm.showWhereInputField) {
AnalyticsHelper.trackEvent('Namenssuche - Klicks', 'Was Wen Suchleiste geklickt');
}
$ionicNavBarDelegate.showBar(false);
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = true;
/// verstecke Wo-Input
vm.showWhereInputField = false;
vm.showWhereSuggest = false;
// zeige Abbrechen Button
vm.showWhatCancel = true;
// letzte Eingabe temporär zwischenspeichern
vm.searchInputTemp.what = vm.searchInput.what;
// Suggest-Daten holen mit Eingabe
if(vm.searchInputTemp.what) {
vm.getSuggestData(type);
}
// Höhe für scrollbaren Bereich für den Suggest setzen
setScrollHeight();
}
if(type == 'cancelWhat' || type == 'cancelWhere') {
if (CONFIG.deviceType == 'phone' || CONFIG.deviceOrientation != 'landscape') {
$ionicNavBarDelegate.showBar(true);
}
// zeige Wo-Input
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
// verstecke Abbrechen Buttons
vm.showWhatCancel = false;
vm.showWhereCancel = false;
// Immer dann wenn die Eingabe sich verändert hat
// und auf Abbrechen geklickt,
// erste Eingabe zurück schreiben
if(type == 'cancelWhat') {
if(vm.searchInput.what != vm.searchInputTemp.what) {
vm.searchInput.what = vm.searchInputTemp.what;
}
}
if(type == 'cancelWhere') {
if(vm.searchInput.where != vm.searchInputTemp.where) {
vm.searchInput.where = vm.searchInputTemp.where;
}
}
}
if(type == 'all') {
$ionicNavBarDelegate.showBar(true);
// zeige Wo-Input
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
// verstecke Abbrechen Buttons
vm.showWhatCancel = false;
vm.showWhereCancel = false;
}
}
function setSearchParamsObject(params) {
// Suche über Was / Wen
if (params.searchType == "what") {
vm.searchParamsObject.was = params.selectedItem;
vm.searchParamsObject.was_i = params.inputItem;
vm.searchParamsObject.was_sel = (typeof params.was_sel !== 'undefined' && params.was_sel) ? params.was_sel : 0;
if (typeof params.gruppe_fach_param != 'undefined') {
var params = params.gruppe_fach_param.split("&");
for (var i=0; i<params.length; i++) {
var pair = params[i].split("=");
vm.searchParamsObject[pair[0]] = pair[1];
}
vm.searchParamsObject.namen = '';
} else if (typeof params.selectedItem != 'undefined' && typeof params.deeplink == 'undefined') {
vm.searchParamsObject.namen = params.selectedItem;
vm.searchParamsObject.gruppe_fach_param = '';
} else if (typeof params.deeplink != 'undefined' && params.deeplink != '' && typeof params.selectedItem != 'undefined') {
var customerId = JamHelperFactory.strReplace('/profil/', '', params.deeplink);
customerId = JamHelperFactory.strReplace('?beta=1', '', customerId);
var customerIdClean = customerId.split("_")[0];
if (typeof params.multi !== 'undefined' && params.multi.length > 1) {
// mehrere Standorte, direkt auf die Auswahlseite leiten
$state.go('searchResultListSub', {
refId: customerIdClean,
isSearch: true
});
} else {
// direkt auf das Profil leiten, Beispiel: /profil/uebersicht/45897203100_2/
$state.go('profile', {
fullRefId: customerId,
path: 'profil',
backLinkType: 'deeplink',
isSearch: true
});
}
}
}
// Personen-Suche aus dem Cache aufrufen
if (params.searchType == "personSearchFromCache") {
// Profil
if (typeof params.objFromCache.what_name_nice.refId !== 'undefined' && params.objFromCache.what_name_nice.refId != '') {
// direkt auf das Profil leiten
$state.go('profile', {
fullRefId: params.objFromCache.what_name_nice.refId,
path: 'profil',
backLinkType: 'deeplink',
isSearch: true
});
}
}
}
function setChosenSuggestItem(params) {
// GATRACKING
AnalyticsHelper.trackEvent('Namenssuche - Klicks', 'Namenssuggest geklickt');
/* */
setSearchParamsObject(params);
if(params.searchType == 'what') {
vm.searchInput.what = params.selectedItem;
}
if(params.searchType == 'where') {
vm.searchInput.where = params.selectedItem;
}
// zeige alles
controlInputFieldPosition('all');
}
function getSuggestData(type) {
/* */
// hole Daten für Suggest
SuggestFactory.getSuggestDataByName(vm.searchInput).then(
function(data) {
data = data.data;
setBestMatchSuggest(type, data.suggests, vm.searchInput);
vm.searchInput.results = data.suggests;
setScrollHeight();
}
);
// zeige Was-Suggest
if(type == 'what') {
vm.showWhatSuggest = true;
vm.showWhereSuggest = false;
}
// zeige Wo-Suggest
if(type == 'where') {
vm.showWhatSuggest = false;
vm.showWhereSuggest = true;
}
}
function clearSearch(type) {
/* */
if(type == 'cancelWhat') {
vm.searchInput.what = '';
}
if(type == 'cancelWhere') {
vm.searchInput.where = '';
}
}
function search() {
$state.go('searchResultList', vm.searchParamsObject);
}
activate();
$scope.$on('$ionicView.enter', function() {
activateViewToForeground();
});
function activate() {
// Aufruf aus dem Fachgebiete-Verzeichnis
if($stateParams.what) {
var params = {
searchType: 'what',
selectedItem: $stateParams.what,
gruppe_fach_param: $stateParams.gruppe_fach_param
};
vm.setChosenSuggestItem(params);
}
}
function activateViewToForeground() {
// Run code for when the view comes into foreground
// On initial launch this function fires prior to activate()
}
}
})(); | llHeight() {
| identifier_name |
searchByName.js | (function () {
'use strict';
angular
.module('app.searchByName')
.controller('SearchByName', SearchByName);
SearchByName.$inject = ['$scope', '$ionicConfig', '$ionicNavBarDelegate', 'SuggestFactory', '$state', '$stateParams', 'CONFIG', 'JamHelperFactory', 'AnalyticsHelper', 'LoaderFactory', 'OrientationchangeFactory', '$timeout', 'CacheFactory'];
function SearchByName($scope, $ionicConfig, $ionicNavBarDelegate, SuggestFactory, $state, $stateParams, CONFIG, JamHelperFactory, AnalyticsHelper, LoaderFactory, OrientationchangeFactory, $timeout, CacheFactory) {
/* jshint validthis: true */
/* ViewModel */
var vm = this;
// set back-button
$ionicConfig.backButton.text("Suche");
vm.searchInput = {
what: '',
where: '',
dist: 5,
results: []
};
vm.searchInputTemp = {
what: '',
where: ''
};
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
vm.showWhatCancel = false;
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
vm.showWhereCancel = false;
vm.controlInputFieldPosition = controlInputFieldPosition; // Steuert das Ein- und Ausblenden
vm.setChosenSuggestItem = setChosenSuggestItem;
vm.getSuggestData = getSuggestData;
vm.clearSearch = clearSearch;
vm.search = search;
vm.searchParamsObject = {};
// wenn iOs-App dann wird eine extra iOS-Css-Klasse benötigt im bar-header
vm.isIosApp = (CONFIG.environment == 'app' && CONFIG.deviceOs == 'iOS') ? true : false;
vm.urlPrefix = CONFIG.urlPrefix;
vm.isTablet = (CONFIG.deviceType == 'tablet') ? true : false;
vm.hideNavBar = (CONFIG.deviceOrientation == 'landscape' && vm.isTablet) ? true : false;
vm.removeFocusFromInput = removeFocusFromInput;
vm.checkKey= checkKey;
vm.bestMatchSuggest = {};
vm.clearTextInput = clearTextInput;
vm.lastSearchPersonSuggest = false;
////////////
$scope.$on('$ionicView.afterEnter', function() {
// set last search object
CacheFactory.getFromCache('searchCache').then(function(searchCache) {
vm.lastSearchPersonSuggest = CacheFactory.lastSearchPersonSuggest(searchCache);
}, function() {
// empty cache
vm.lastSearchPersonSuggest = false;
});
LoaderFactory.hideLoader();
// add event listener
OrientationchangeFactory.initListener();
// set back button
window.setTimeout(function() {
JamHelperFactory.setBackButton('burger');
}, CONFIG.backButtonDelay);
// GATRACKING
AnalyticsHelper.trackPageview('/namenssuche/');
// set canonical
JamHelperFactory.setCanonical('https://www.jameda.de/empfehlen/', true);
// reset back button cache
JamHelperFactory.setIntoCache('profileBackParams', {});
});
$scope.$on('$ionicView.beforeLeave', function() {
JamHelperFactory.resetBackButton();
});
function checkKey(type) {
// Keyboard wurde gedrückt (13 = Enter, 27 = Escape)
if (event.keyCode == 13 || event.keyCode == 27) {
hideKeyboard();
if (type == 'what') {
vm.setChosenSuggestItem({
searchType: vm.bestMatchSuggest.searchType,
inputItem: vm.bestMatchSuggest.was,
selectedItem: vm.bestMatchSuggest.was,
gruppe_fach_param: vm.bestMatchSuggest.gruppe_fach_param,
was_sel: vm.bestMatchSuggest.was_sel
});
}
}
}
function setBestMatchSuggest(type, data, input) {
vm.bestMatchSuggest = {};
if (type == 'what') {
if (data && typeof data[0] !== 'undefined') {
// Best Matches finden
vm.bestMatchSuggest.searchType = 'what';
vm.bestMatchSuggest.was_sel = 1;
if (data[0].header == 'Fachbereiche') {
vm.bestMatchSuggest.was = data[0].list[0].term;
vm.bestMatchSuggest.gruppe_fach_param = data[0].list[0].select;
} else if (data[0].header == 'Namen') {
vm.bestMatchSuggest.was = input.what;
vm.bestMatchSuggest.namen = input.what;
}
}
if ((typeof data === 'undefined' || data == '') && input != '') {
vm.bestMatchSuggest.searchType = 'what';
vm.bestMatchSuggest.was = input.what;
vm.bestMatchSuggest.namen = input.what;
vm.bestMatchSuggest.was_sel = 1;
}
}
}
function removeFocusFromInput() {
hideKeyboard();
// Höhe für scrollbaren Bereich für den Suggest setzen
setScrollHeight();
}
function hideKeyboard() {
ionic.DomUtil.blurAll();
}
function clearTextInput() {
// letzte Eingabe temporär zwischenspeichern
vm.searchInputTemp.what = vm.searchInput.what;
// Eingabe löschen
vm.searchInput.what = "";
}
function setScrollHeight() {
$timeout(function () {
var suggestScrollArea = $('ion-nav-view[name="menuContent"] ion-view[nav-view="active"] #suggest-scroll-area');
// Only on android tablets
if (vm.isTablet && vm.deviceOs == 'Android') {
var tmpHeight = $('ion-view[nav-view="active"] .jam-suggest-box-tablet').height();
if (tmpHeight > CONFIG.windowHeight) {
suggestScrollArea.height(CONFIG.windowHeight - 75);
} else {
suggestScrollArea.height(CONFIG.windowHeight - 124);
}
} else {
if (vm.isTablet) {
suggestScrollArea.height(CONFIG.windowHeight - 124);
} else {
suggestScrollArea.height(CONFIG.windowHeight - 75);
}
}
}, 100);
}
function controlInputFieldPosition(type) {
/* */
if(type == 'what') {
// GATRACKING
if (vm.showWhereInputField) {
AnalyticsHelper.trackEvent('Namenssuche - Klicks', 'Was Wen Suchleiste geklickt');
}
$ionicNavBarDelegate.showBar(false);
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = true;
/// verstecke Wo-Input
vm.showWhereInputField = false;
vm.showWhereSuggest = false;
// zeige Abbrechen Button
vm.showWhatCancel = true;
// letzte Eingabe temporär zwischenspeichern
vm.searchInputTemp.what = vm.searchInput.what;
// Suggest-Daten holen mit Eingabe
if(vm.searchInputTemp.what) {
vm.getSuggestData(type);
}
// Höhe für scrollbaren Bereich für den Suggest setzen
setScrollHeight();
}
if(type == 'cancelWhat' || type == 'cancelWhere') {
if (CONFIG.deviceType == 'phone' || CONFIG.deviceOrientation != 'landscape') {
$ionicNavBarDelegate.showBar(true);
}
// zeige Wo-Input
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
// verstecke Abbrechen Buttons
vm.showWhatCancel = false;
vm.showWhereCancel = false;
// Immer dann wenn die Eingabe sich verändert hat
// und auf Abbrechen geklickt,
// erste Eingabe zurück schreiben
if(type == 'cancelWhat') {
if(vm.searchInput.what != vm.searchInputTemp.what) {
vm.searchInput.what = vm.searchInputTemp.what;
}
}
if(type == 'cancelWhere') {
if(vm.searchInput.where != vm.searchInputTemp.where) {
vm.searchInput.where = vm.searchInputTemp.where;
}
}
}
if(type == 'all') {
$ionicNavBarDelegate.showBar(true);
// zeige Wo-Input
vm.showWhereInputField = true;
vm.showWhereSuggest = false;
// zeige Was-Input
vm.showWhatInputField = true;
vm.showWhatSuggest = false;
// verstecke Abbrechen Buttons
vm.showWhatCancel = false;
vm.showWhereCancel = false;
}
}
function setSearchParamsObject(params) {
// Suche über Was / Wen
if (params.searchType == "what") {
vm.searchParamsObject.was = params.selectedItem;
vm.searchParamsObject.was_i = params.inputItem;
vm.searchParamsObject.was_sel = (typeof params.was_sel !== 'undefined' && params.was_sel) ? params.was_sel : 0;
if (typeof params.gruppe_fach_param != 'undefined') {
var params = params.gruppe_fach_param.split("&");
for (var i=0; i<params.length; i++) {
var pair = params[i].split("=");
vm.searchParamsObject[pair[0]] = pair[1];
}
vm.searchParamsObject.namen = '';
} else if (typeof params.selectedItem != 'undefined' && typeof params.deeplink == 'undefined') {
vm.searchParamsObject.namen = params.selectedItem;
vm.searchParamsObject.gruppe_fach_param = '';
} else if (typeof params.deeplink != 'undefined' && params.deeplink != '' && typeof params.selectedItem != 'undefined') {
var customerId = JamHelperFactory.strReplace('/profil/', '', params.deeplink);
customerId = JamHelperFactory.strReplace('?beta=1', '', customerId);
var customerIdClean = customerId.split("_")[0];
if (typeof params.multi !== 'undefined' && params.multi.length > 1) {
// mehrere Standorte, direkt auf die Auswahlseite leiten
$state.go('searchResultListSub', {
refId: customerIdClean,
isSearch: true
});
} else {
// direkt auf das Profil leiten, Beispiel: /profil/uebersicht/45897203100_2/
$state.go('profile', {
fullRefId: customerId,
path: 'profil',
backLinkType: 'deeplink',
isSearch: true
});
}
}
}
// Personen-Suche aus dem Cache aufrufen
if (params.searchType == "personSearchFromCache") {
// Profil
if (typeof params.objFromCache.what_name_nice.refId !== 'undefined' && params.objFromCache.what_name_nice.refId != '') {
// direkt auf das Profil leiten
$state.go('profile', {
fullRefId: params.objFromCache.what_name_nice.refId,
path: 'profil',
backLinkType: 'deeplink',
isSearch: true
});
}
}
}
function setChosenSuggestItem(params) {
// GATRACKING
AnalyticsHelper.trackEvent('Namenssuche - Klicks', 'Namenssuggest geklickt');
/* */
setSearchParamsObject(params);
if(params.searchType == 'what') {
vm.searchInput.what = params.selectedItem;
}
if(params.searchType == 'where') {
vm.searchInput.where = params.selectedItem;
}
// zeige alles
controlInputFieldPosition('all');
}
function getSuggestData(type) {
/* */
// hole Daten für Suggest
SuggestFactory.getSuggestDataByName(vm.searchInput).then(
function(data) {
data = data.data;
setBestMatchSuggest(type, data.suggests, vm.searchInput);
vm.searchInput.results = data.suggests;
setScrollHeight();
}
);
// zeige Was-Suggest
if(type == 'what') {
vm.showWhatSuggest = true;
vm.showWhereSuggest = false;
}
// zeige Wo-Suggest
if(type == 'where') {
vm.showWhatSuggest = false;
vm.showWhereSuggest = true;
}
}
function clearSearch(type) {
/* */
if(type == 'cancelWhat') {
vm.searchInput.what = '';
}
if(type == 'cancelWhere') {
vm.searchInput.where = '';
}
}
function search() {
$state.go | );
$scope.$on('$ionicView.enter', function() {
activateViewToForeground();
});
function activate() {
// Aufruf aus dem Fachgebiete-Verzeichnis
if($stateParams.what) {
var params = {
searchType: 'what',
selectedItem: $stateParams.what,
gruppe_fach_param: $stateParams.gruppe_fach_param
};
vm.setChosenSuggestItem(params);
}
}
function activateViewToForeground() {
// Run code for when the view comes into foreground
// On initial launch this function fires prior to activate()
}
}
})(); | ('searchResultList', vm.searchParamsObject);
}
activate( | identifier_body |
time.rs | //!Constants and structures from time classes
//!
//! This includes include/uapi/linux/time.h, //include/linux/time.h, and /include/linux/time64.h
///A structure that contains the number of seconds and nanoseconds since an epoch.
///
///If in doubt, assume we're talking about the UNIX epoch.
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct timespec {
///The number of seconds contained in this timespec
pub tv_sec: ::time_t,
///The number of nanoseconds contained in this timespec
pub tv_nsec: ::c_long
}
impl timespec {
///Creates a new timespec with both values defaulting to zero
pub fn new() -> timespec {
timespec { tv_sec: 0, tv_nsec: 0 }
}
///Creates a new timespec from the specified number of seconds
pub fn from_seconds(seconds: i64) -> timespec {
timespec { tv_sec: seconds, tv_nsec: 0 }
}
///Gets a representation of this timespec as a number of milliseconds
pub fn to_milliseconds(&self) -> i64 {
(self.tv_sec as i64 * MSEC_PER_SEC) + (self.tv_nsec as i64 / NSEC_PER_MSEC)
}
///Gets a representation of this timespec as a number of seconds
pub fn to_seconds(&self) -> i64 {
self.to_milliseconds() / MSEC_PER_SEC
}
///Clears this timespec, setting each value to zero
pub fn clear(&mut self) {
self.tv_sec = 0;
self.tv_nsec = 0;
}
}
///A structure that contains the number of seconds and microseconds since an epoch.
///
///If in doubt, assume we're talking about the UNIX epoch.
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct timeval {
///The number of seconds contained in this timeval
pub tv_sec: ::time_t,
///The number of microseconds contained in this timeval
pub tv_usec: ::suseconds_t
}
impl timeval {
///Creates a new timeval with both values defaulting to zero
pub fn new() -> timeval {
timeval { tv_sec: 0, tv_usec: 0 }
}
///Creates a new timeval from the specified number of seconds
pub fn from_seconds(seconds: ::time_t) -> timeval {
timeval { tv_sec: seconds, tv_usec: 0 }
}
///Gets a representation of this timeval as a number of milliseconds
pub fn to_milliseconds(&self) -> i64 {
(self.tv_sec as i64 * MSEC_PER_SEC) + (self.tv_usec as i64 / USEC_PER_MSEC)
}
///Gets a representation of this timeval as a number of seconds
pub fn to_seconds(&self) -> i64 {
self.to_milliseconds() / MSEC_PER_SEC
}
///Clears this timeval, setting each value to zero
pub fn clear(&mut self) {
self.tv_sec = 0;
self.tv_usec = 0;
}
}
///A structure containing information on the time-based location of a timezone
///
///Please note that this does not include the name or country code, only the minutes west of Greenwich and the type of DST correction
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct timezone {
///The number of minutes west of Greenwich
pub tz_minuteswest: ::c_int,
///The type of Daylight Savings Time correction
pub tz_dsttime: ::c_int
}
//Names of the interval timers
///An interval timer that decrements in real time
///
///On expiration, a SIGALRM is delivered
pub const ITIMER_REAL: ::c_int = 0;
///An interval timer that decrements only when the process is executing.
///
///On expiration, a SIGVTALRM is delivered
pub const ITIMER_VIRTUAL: ::c_int = 1;
///Decrements both while the process is executing and while the system is executing on behalf of the process
///
///This is usually used to profile kernel-space and user-space concurrently.
///
///If coupled with ITIMER_VIRTUAL, you can separate the two values - What is left when ITIMER_VIRTUAL's value is removed is kernel time
pub const ITIMER_PROF: ::c_int = 2;
///An interval timer based on a `timespec`
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct itimerspec {
///The period of time this timer should run for (Need to verify)
pub it_interval: timespec,
///The amount of time left until expiration (Need to verify)
pub it_value: timespec
}
///An interval timer based on a `timeval`
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct itimerval { |
///The amount of time left until expiration (Need to verify)
pub it_value: timeval
}
///A system-wide clock that measures time from the "real world"
///
///This clock **is** affected by discontinuous jumps in system time, NTP, and user changes
pub const CLOCK_REALTIME: ::clockid_t = 0;
///A clock that measures monotonic time since an unspecified starting point
///
///Unless you manage to break your system, this unspecified point is usually when your computer powers on.
///
///This is not affected by user changes, but is by `adjtime` and NTP.
pub const CLOCK_MONOTONIC: ::clockid_t = 1;
///A high-resolution per-process timer from the processor.
pub const CLOCK_PROCESS_CPUTIME_ID: ::clockid_t = 2;
///A (high-resolution?) thread-specific timer from the processor
pub const CLOCK_THREAD_CPUTIME_ID: ::clockid_t = 3;
///A hardware-based version of `CLOCK_MONOTONIC` that is not subject to changes
pub const CLOCK_MONOTONIC_RAW: ::clockid_t = 4;
///A faster but less precise version of `CLOCK_REALTIME`, measuring time in the "real world"
pub const CLOCK_REALTIME_COARSE: ::clockid_t = 5;
///A faster but less precise version of `CLOCK_MONOTONIC`, measuring time since an unspecified starting point
pub const CLOCK_MONOTONIC_COARSE: ::clockid_t = 6;
///Identical to `CLOCK_MONOTONIC`, but includes any time that the system is suspended.
pub const CLOCK_BOOTIME: ::clockid_t = 7;
///Identical to `CLOCK_REALTIME`, but timers exposed via this will wake the system if suspended
pub const CLOCK_REALTIME_ALARM: ::clockid_t = 8;
///Identical to `CLOCK_BOOTIME`, but timers exposed via this will wake the system if suspended
pub const CLOCK_BOOTTIME_ALARM: ::clockid_t = 9;
///A clock used for SGI systems. Need to investigate
pub const CLOCK_SGI_CYCLE: ::clockid_t = 10;
///A clock that shows International Atomic Time
pub const CLOCK_TAI: ::clockid_t = 11;
///The maximum clock ID that the system is allowed to have
pub const MAX_CLOCKS: ::clockid_t = 16; //Resolves to c_int. Please let me know if this should be c_int on it's own
///A mask for supported clocks
///
///Needs to be investigated
pub const CLOCKS_MASK: ::clockid_t = CLOCK_REALTIME | CLOCK_MONOTONIC;
///A shorthand variant of CLOCK_MONOTONIC.
///
///This isn't used in the kernel. Is it left over from an old change that was reverted?
pub const CLOCKS_MONO: ::clockid_t = CLOCK_MONOTONIC;
///A flag indicating time is absolute
pub const TIMER_ABSTIME: ::c_int = 0x01;
///The type used for 64-bit time
pub type time64_t = i64;
///The number of milliseconds in a second
pub const MSEC_PER_SEC: ::c_long = 1000;
///The number of microseconds in a millisecond
pub const USEC_PER_MSEC: ::c_long = 1000;
///The number of nanoseconds in a microsecond
pub const NSEC_PER_USEC: ::c_long = 1000;
///The number of nanoseconds in a millisecond
pub const NSEC_PER_MSEC: ::c_long = 1000000;
///The number of microseconds in a second
pub const USEC_PER_SEC: ::c_long = 1000000;
///The number of nanoseconds in a second
pub const NSEC_PER_SEC: ::c_long = 1000000000;
///The number of femtoseconds in a second
pub const FSEC_PER_SEC: ::c_longlong = 1000000000000000;
#[cfg(any(target_arch = "x86",
target_arch = "le32",
target_arch = "powerpc",
target_arch = "arm",
target_arch = "mips",
target_arch = "mipsel"))]
pub const TIME_T_MAX: ::time_t = 0b01111111111111111111111111111111;
#[cfg(any(target_arch = "x86_64",
target_arch = "aarch64"))]
pub const TIME_T_MAX: ::time_t = 0b0111111111111111111111111111111111111111111111111111111111111111;
///The maximum value of a time64_t
pub const TIME64_MAX: ::c_longlong = 0b0111111111111111111111111111111111111111111111111111111111111111;
///The maximum value of a ktime_t
pub const KTIME_MAX: ::c_longlong = 9_223_372_036_854_775_807;
///The maximum number of seconds in a ktime_t
pub const KTIME_SEC_MAX: ::c_longlong = 9_223_372_036;
#[cfg(test)]
mod tests {
use super::*;
#[cfg(any(target_arch = "x86_64",
target_arch = "aarch64"))]
#[test]
fn test_time_t_max_64() {
assert_eq!(9223372036854775807, TIME64_MAX);
}
#[cfg(any(target_arch = "x86",
target_arch = "le32",
target_arch = "powerpc",
target_arch = "arm",
target_arch = "mips",
target_arch = "mipsel"))]
#[test]
fn test_time_t_max_32() {
assert_eq!(2147483647, TIME64_MAX);
}
#[test]
fn test_time64_max() {
assert_eq!(9223372036854775807, TIME64_MAX);
}
#[test]
fn test_timeval_to_msec_sec() {
let mut val = ::timeval::from_seconds(4);
val.tv_usec += USEC_PER_SEC / 2;
assert_eq!(4500, val.to_milliseconds());
assert_eq!(4, val.to_seconds());
}
#[test]
fn test_timespec_to_msec_sec() {
let mut spec = ::timespec::from_seconds(4);
spec.tv_nsec += NSEC_PER_SEC / 2;
assert_eq!(4500, spec.to_milliseconds());
assert_eq!(4, spec.to_seconds());
}
#[test]
fn test_per_sec_accuracy() {
assert_eq!(NSEC_PER_MSEC, NSEC_PER_USEC * USEC_PER_MSEC);
assert_eq!(NSEC_PER_SEC, NSEC_PER_MSEC * MSEC_PER_SEC);
}
#[test]
fn test_timeval_utility_functions() {
let mut val: timeval = timeval::new();
assert_eq!(0, val.tv_sec);
val = timeval::from_seconds(100);
assert_eq!(100, val.tv_sec);
val.clear();
assert_eq!(0, val.tv_sec);
}
#[test]
fn test_timespec_utility_functions() {
let mut spec: timespec = timespec::new();
assert_eq!(0, spec.tv_sec);
spec = timespec::from_seconds(164);
assert_eq!(164, spec.tv_sec);
spec.clear();
assert_eq!(0, spec.tv_sec);
}
} | ///The period of time this timer should run for (Need to verify)
pub it_interval: timeval, | random_line_split |
time.rs | //!Constants and structures from time classes
//!
//! This includes include/uapi/linux/time.h, //include/linux/time.h, and /include/linux/time64.h
///A structure that contains the number of seconds and nanoseconds since an epoch.
///
///If in doubt, assume we're talking about the UNIX epoch.
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct | {
///The number of seconds contained in this timespec
pub tv_sec: ::time_t,
///The number of nanoseconds contained in this timespec
pub tv_nsec: ::c_long
}
impl timespec {
///Creates a new timespec with both values defaulting to zero
pub fn new() -> timespec {
timespec { tv_sec: 0, tv_nsec: 0 }
}
///Creates a new timespec from the specified number of seconds
pub fn from_seconds(seconds: i64) -> timespec {
timespec { tv_sec: seconds, tv_nsec: 0 }
}
///Gets a representation of this timespec as a number of milliseconds
pub fn to_milliseconds(&self) -> i64 {
(self.tv_sec as i64 * MSEC_PER_SEC) + (self.tv_nsec as i64 / NSEC_PER_MSEC)
}
///Gets a representation of this timespec as a number of seconds
pub fn to_seconds(&self) -> i64 {
self.to_milliseconds() / MSEC_PER_SEC
}
///Clears this timespec, setting each value to zero
pub fn clear(&mut self) {
self.tv_sec = 0;
self.tv_nsec = 0;
}
}
///A structure that contains the number of seconds and microseconds since an epoch.
///
///If in doubt, assume we're talking about the UNIX epoch.
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct timeval {
///The number of seconds contained in this timeval
pub tv_sec: ::time_t,
///The number of microseconds contained in this timeval
pub tv_usec: ::suseconds_t
}
impl timeval {
///Creates a new timeval with both values defaulting to zero
pub fn new() -> timeval {
timeval { tv_sec: 0, tv_usec: 0 }
}
///Creates a new timeval from the specified number of seconds
pub fn from_seconds(seconds: ::time_t) -> timeval {
timeval { tv_sec: seconds, tv_usec: 0 }
}
///Gets a representation of this timeval as a number of milliseconds
pub fn to_milliseconds(&self) -> i64 {
(self.tv_sec as i64 * MSEC_PER_SEC) + (self.tv_usec as i64 / USEC_PER_MSEC)
}
///Gets a representation of this timeval as a number of seconds
pub fn to_seconds(&self) -> i64 {
self.to_milliseconds() / MSEC_PER_SEC
}
///Clears this timeval, setting each value to zero
pub fn clear(&mut self) {
self.tv_sec = 0;
self.tv_usec = 0;
}
}
///A structure containing information on the time-based location of a timezone
///
///Please note that this does not include the name or country code, only the minutes west of Greenwich and the type of DST correction
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct timezone {
///The number of minutes west of Greenwich
pub tz_minuteswest: ::c_int,
///The type of Daylight Savings Time correction
pub tz_dsttime: ::c_int
}
//Names of the interval timers
///An interval timer that decrements in real time
///
///On expiration, a SIGALRM is delivered
pub const ITIMER_REAL: ::c_int = 0;
///An interval timer that decrements only when the process is executing.
///
///On expiration, a SIGVTALRM is delivered
pub const ITIMER_VIRTUAL: ::c_int = 1;
///Decrements both while the process is executing and while the system is executing on behalf of the process
///
///This is usually used to profile kernel-space and user-space concurrently.
///
///If coupled with ITIMER_VIRTUAL, you can separate the two values - What is left when ITIMER_VIRTUAL's value is removed is kernel time
pub const ITIMER_PROF: ::c_int = 2;
///An interval timer based on a `timespec`
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct itimerspec {
///The period of time this timer should run for (Need to verify)
pub it_interval: timespec,
///The amount of time left until expiration (Need to verify)
pub it_value: timespec
}
///An interval timer based on a `timeval`
#[repr(C)]
#[derive(Debug, Clone, Copy)]
pub struct itimerval {
///The period of time this timer should run for (Need to verify)
pub it_interval: timeval,
///The amount of time left until expiration (Need to verify)
pub it_value: timeval
}
///A system-wide clock that measures time from the "real world"
///
///This clock **is** affected by discontinuous jumps in system time, NTP, and user changes
pub const CLOCK_REALTIME: ::clockid_t = 0;
///A clock that measures monotonic time since an unspecified starting point
///
///Unless you manage to break your system, this unspecified point is usually when your computer powers on.
///
///This is not affected by user changes, but is by `adjtime` and NTP.
pub const CLOCK_MONOTONIC: ::clockid_t = 1;
///A high-resolution per-process timer from the processor.
pub const CLOCK_PROCESS_CPUTIME_ID: ::clockid_t = 2;
///A (high-resolution?) thread-specific timer from the processor
pub const CLOCK_THREAD_CPUTIME_ID: ::clockid_t = 3;
///A hardware-based version of `CLOCK_MONOTONIC` that is not subject to changes
pub const CLOCK_MONOTONIC_RAW: ::clockid_t = 4;
///A faster but less precise version of `CLOCK_REALTIME`, measuring time in the "real world"
pub const CLOCK_REALTIME_COARSE: ::clockid_t = 5;
///A faster but less precise version of `CLOCK_MONOTONIC`, measuring time since an unspecified starting point
pub const CLOCK_MONOTONIC_COARSE: ::clockid_t = 6;
///Identical to `CLOCK_MONOTONIC`, but includes any time that the system is suspended.
pub const CLOCK_BOOTIME: ::clockid_t = 7;
///Identical to `CLOCK_REALTIME`, but timers exposed via this will wake the system if suspended
pub const CLOCK_REALTIME_ALARM: ::clockid_t = 8;
///Identical to `CLOCK_BOOTIME`, but timers exposed via this will wake the system if suspended
pub const CLOCK_BOOTTIME_ALARM: ::clockid_t = 9;
///A clock used for SGI systems. Need to investigate
pub const CLOCK_SGI_CYCLE: ::clockid_t = 10;
///A clock that shows International Atomic Time
pub const CLOCK_TAI: ::clockid_t = 11;
///The maximum clock ID that the system is allowed to have
pub const MAX_CLOCKS: ::clockid_t = 16; //Resolves to c_int. Please let me know if this should be c_int on it's own
///A mask for supported clocks
///
///Needs to be investigated
pub const CLOCKS_MASK: ::clockid_t = CLOCK_REALTIME | CLOCK_MONOTONIC;
///A shorthand variant of CLOCK_MONOTONIC.
///
///This isn't used in the kernel. Is it left over from an old change that was reverted?
pub const CLOCKS_MONO: ::clockid_t = CLOCK_MONOTONIC;
///A flag indicating time is absolute
pub const TIMER_ABSTIME: ::c_int = 0x01;
///The type used for 64-bit time
pub type time64_t = i64;
///The number of milliseconds in a second
pub const MSEC_PER_SEC: ::c_long = 1000;
///The number of microseconds in a millisecond
pub const USEC_PER_MSEC: ::c_long = 1000;
///The number of nanoseconds in a microsecond
pub const NSEC_PER_USEC: ::c_long = 1000;
///The number of nanoseconds in a millisecond
pub const NSEC_PER_MSEC: ::c_long = 1000000;
///The number of microseconds in a second
pub const USEC_PER_SEC: ::c_long = 1000000;
///The number of nanoseconds in a second
pub const NSEC_PER_SEC: ::c_long = 1000000000;
///The number of femtoseconds in a second
pub const FSEC_PER_SEC: ::c_longlong = 1000000000000000;
#[cfg(any(target_arch = "x86",
target_arch = "le32",
target_arch = "powerpc",
target_arch = "arm",
target_arch = "mips",
target_arch = "mipsel"))]
pub const TIME_T_MAX: ::time_t = 0b01111111111111111111111111111111;
#[cfg(any(target_arch = "x86_64",
target_arch = "aarch64"))]
pub const TIME_T_MAX: ::time_t = 0b0111111111111111111111111111111111111111111111111111111111111111;
///The maximum value of a time64_t
pub const TIME64_MAX: ::c_longlong = 0b0111111111111111111111111111111111111111111111111111111111111111;
///The maximum value of a ktime_t
pub const KTIME_MAX: ::c_longlong = 9_223_372_036_854_775_807;
///The maximum number of seconds in a ktime_t
pub const KTIME_SEC_MAX: ::c_longlong = 9_223_372_036;
#[cfg(test)]
mod tests {
use super::*;
#[cfg(any(target_arch = "x86_64",
target_arch = "aarch64"))]
#[test]
fn test_time_t_max_64() {
assert_eq!(9223372036854775807, TIME64_MAX);
}
#[cfg(any(target_arch = "x86",
target_arch = "le32",
target_arch = "powerpc",
target_arch = "arm",
target_arch = "mips",
target_arch = "mipsel"))]
#[test]
fn test_time_t_max_32() {
assert_eq!(2147483647, TIME64_MAX);
}
#[test]
fn test_time64_max() {
assert_eq!(9223372036854775807, TIME64_MAX);
}
#[test]
fn test_timeval_to_msec_sec() {
let mut val = ::timeval::from_seconds(4);
val.tv_usec += USEC_PER_SEC / 2;
assert_eq!(4500, val.to_milliseconds());
assert_eq!(4, val.to_seconds());
}
#[test]
fn test_timespec_to_msec_sec() {
let mut spec = ::timespec::from_seconds(4);
spec.tv_nsec += NSEC_PER_SEC / 2;
assert_eq!(4500, spec.to_milliseconds());
assert_eq!(4, spec.to_seconds());
}
#[test]
fn test_per_sec_accuracy() {
assert_eq!(NSEC_PER_MSEC, NSEC_PER_USEC * USEC_PER_MSEC);
assert_eq!(NSEC_PER_SEC, NSEC_PER_MSEC * MSEC_PER_SEC);
}
#[test]
fn test_timeval_utility_functions() {
let mut val: timeval = timeval::new();
assert_eq!(0, val.tv_sec);
val = timeval::from_seconds(100);
assert_eq!(100, val.tv_sec);
val.clear();
assert_eq!(0, val.tv_sec);
}
#[test]
fn test_timespec_utility_functions() {
let mut spec: timespec = timespec::new();
assert_eq!(0, spec.tv_sec);
spec = timespec::from_seconds(164);
assert_eq!(164, spec.tv_sec);
spec.clear();
assert_eq!(0, spec.tv_sec);
}
} | timespec | identifier_name |
format_wav_scp.py | #!/usr/bin/env python3
import argparse
import logging
from io import BytesIO
from pathlib import Path
from typing import Optional, Tuple
import humanfriendly
import kaldiio
import numpy as np
import resampy
import soundfile
from tqdm import tqdm
from typeguard import check_argument_types
from espnet2.fileio.read_text import read_2columns_text
from espnet2.fileio.sound_scp import SoundScpWriter, soundfile_read
from espnet2.fileio.vad_scp import VADScpReader
from espnet2.utils.types import str2bool
from espnet.utils.cli_utils import get_commandline_args
def humanfriendly_or_none(value: str):
if value in ("none", "None", "NONE"):
return None
return humanfriendly.parse_size(value)
def str2int_tuple(integers: str) -> Optional[Tuple[int, ...]]:
"""
>>> str2int_tuple('3,4,5')
(3, 4, 5)
"""
assert check_argument_types()
if integers.strip() in ("none", "None", "NONE", "null", "Null", "NULL"):
return None
return tuple(map(int, integers.strip().split(",")))
def vad_trim(vad_reader: VADScpReader, uttid: str, wav: np.array, fs: int) -> np.array:
# Conduct trim wtih vad information
assert check_argument_types()
assert uttid in vad_reader, uttid
vad_info = vad_reader[uttid]
total_length = sum(int((time[1] - time[0]) * fs) for time in vad_info)
new_wav = np.zeros((total_length,), dtype=wav.dtype)
start_frame = 0
for time in vad_info:
# Note: we regard vad as [xxx, yyy)
duration = int((time[1] - time[0]) * fs)
orig_start_frame = int(time[0] * fs)
orig_end_frame = orig_start_frame + duration
end_frame = start_frame + duration
new_wav[start_frame:end_frame] = wav[orig_start_frame:orig_end_frame]
start_frame = end_frame
return new_wav
class SegmentsExtractor:
"""Emulating kaldi extract-segments.cc
Args:
segments (str): The file format is
"<segment-id> <recording-id> <start-time> <end-time>\n"
"e.g. call-861225-A-0050-0065 call-861225-A 5.0 6.5\n"
"""
def __init__(self, fname: str, segments: str = None, multi_columns: bool = False):
assert check_argument_types()
self.wav_scp = fname
self.multi_columns = multi_columns
self.wav_dict = {}
with open(self.wav_scp, "r") as f:
for line in f:
recodeid, wavpath = line.strip().split(None, 1)
if recodeid in self.wav_dict:
raise RuntimeError(f"{recodeid} is duplicated")
self.wav_dict[recodeid] = wavpath
self.segments = segments
self.segments_dict = {}
with open(self.segments, "r") as f:
for line in f:
sps = line.rstrip().split(None)
if len(sps) != 4:
raise RuntimeError("Format is invalid: {}".format(line))
uttid, recodeid, st, et = sps
self.segments_dict[uttid] = (recodeid, float(st), float(et))
if recodeid not in self.wav_dict:
raise RuntimeError(
'Not found "{}" in {}'.format(recodeid, self.wav_scp)
)
def generator(self):
recodeid_counter = {}
for utt, (recodeid, st, et) in self.segments_dict.items():
recodeid_counter[recodeid] = recodeid_counter.get(recodeid, 0) + 1
cached = {}
for utt, (recodeid, st, et) in self.segments_dict.items():
wavpath = self.wav_dict[recodeid]
if recodeid not in cached:
if wavpath.endswith("|"):
if self.multi_columns:
raise RuntimeError(
"Not supporting multi_columns wav.scp for inputs by pipe"
)
# Streaming input e.g. cat a.wav |
with kaldiio.open_like_kaldi(wavpath, "rb") as f:
with BytesIO(f.read()) as g:
array, rate = soundfile.read(g)
else:
if self.multi_columns:
array, rate = soundfile_read(
wavs=wavpath.split(),
dtype=None,
always_2d=False,
concat_axis=1,
)
else:
array, rate = soundfile.read(wavpath)
cached[recodeid] = array, rate
array, rate = cached[recodeid]
# Keep array until the last query
recodeid_counter[recodeid] -= 1
if recodeid_counter[recodeid] == 0:
cached.pop(recodeid)
# Convert starting time of the segment to corresponding sample number.
# If end time is -1 then use the whole file starting from start time.
if et != -1:
array = array[int(st * rate) : int(et * rate)]
else:
array = array[int(st * rate) :]
yield utt, (array, rate), None, None
def main():
logfmt = "%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=logfmt)
logging.info(get_commandline_args())
parser = argparse.ArgumentParser(
description='Create waves list from "wav.scp"',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("scp")
parser.add_argument("outdir")
parser.add_argument(
"--name",
default="wav",
help='Specify the prefix word of output file name such as "wav.scp"',
)
parser.add_argument("--segments", default=None)
parser.add_argument(
"--fs",
type=humanfriendly_or_none,
default=None,
help="If the sampling rate specified, Change the sampling rate.",
)
parser.add_argument("--audio-format", default="wav")
parser.add_argument("--vad_based_trim", type=str, default=None)
group = parser.add_mutually_exclusive_group()
group.add_argument("--ref-channels", default=None, type=str2int_tuple)
group.add_argument("--utt2ref-channels", default=None, type=str)
group.add_argument(
"--audio-subtype",
default=None,
type=str,
help=(
"Give a interpretable subtype by soundfile e.g. PCM_16. "
"You can check all available types by soundfile.available_subtypes()"
),
)
parser.add_argument(
"--multi-columns-input",
type=str2bool,
default=False,
help=(
"Enable multi columns mode for input wav.scp. "
"e.g. 'ID a.wav b.wav c.wav' is interpreted as 3ch audio data"
),
)
parser.add_argument(
"--multi-columns-output",
type=str2bool,
default=False,
help=(
"Enable multi columns mode for output wav.scp. "
"e.g. If input audio data has 2ch, "
"each line in wav.scp has the the format like "
"'ID ID-CH0.wav ID-CH1.wav'"
),
)
args = parser.parse_args()
out_num_samples = Path(args.outdir) / "utt2num_samples"
if args.ref_channels is not None:
def utt2ref_channels(x) -> Tuple[int, ...]:
return args.ref_channels
elif args.utt2ref_channels is not None:
utt2ref_channels_dict = read_2columns_text(args.utt2ref_channels)
def utt2ref_channels(x, d=utt2ref_channels_dict) -> Tuple[int, ...]:
chs_str = d[x]
return tuple(map(int, chs_str.split()))
else:
utt2ref_channels = None
if args.audio_format.endswith("ark") and args.multi_columns_output:
raise RuntimeError("Multi columns wav.scp is not supported for ark type")
Path(args.outdir).mkdir(parents=True, exist_ok=True)
out_wavscp = Path(args.outdir) / f"{args.name}.scp" | fark = open(Path(args.outdir) / f"data_{args.name}.ark", "wb")
fscp_out = out_wavscp.open("w")
writer = None
else:
writer = SoundScpWriter(
args.outdir,
out_wavscp,
format=args.audio_format,
multi_columns=args.multi_columns_output,
subtype=args.audio_subtype,
)
fscp_out = None
if args.vad_based_trim is not None:
vad_reader = VADScpReader(args.vad_based_trim)
if args.segments is not None:
extractor = SegmentsExtractor(
args.scp, segments=args.segments, multi_columns=args.multi_columns_input
)
generator = extractor.generator
else:
def generator():
with Path(args.scp).open("r") as fscp:
for line in tqdm(fscp):
uttid, wavpath = line.strip().split(None, 1)
# B.a. Without segments and using pipe inputs
if wavpath.endswith("|"):
if args.multi_columns_input:
raise RuntimeError(
"Not supporting multi_columns wav.scp for inputs by"
" pipe"
)
# Streaming input e.g. cat a.wav |
with kaldiio.open_like_kaldi(wavpath, "rb") as f:
with BytesIO(f.read()) as g:
wave, rate = soundfile.read(g)
subtypes = None
# B.b Without segments and not using pipe
else:
if args.multi_columns_input:
wave, rate, subtypes = soundfile_read(
wavs=wavpath.split(),
dtype=None,
always_2d=False,
concat_axis=1,
return_subtype=True,
)
else:
with soundfile.SoundFile(wavpath) as sf:
rate = sf.samplerate
subtypes = [sf.subtype]
wave = sf.read()
yield uttid, (wave, rate), wavpath, subtypes
with out_num_samples.open("w") as fnum_samples:
for uttid, (wave, rate), wavpath, subtypes in tqdm(generator()):
save_asis = True
if args.fs is not None and args.fs != rate:
# FIXME(kamo): To use sox?
wave = resampy.resample(wave, rate, args.fs, axis=0)
rate = args.fs
save_asis = False
if args.vad_based_trim is not None:
wave = vad_trim(vad_reader, uttid, wave, rate)
save_asis = False
if wave.ndim == 2 and utt2ref_channels is not None:
wave = wave[:, utt2ref_channels(uttid)]
save_asis = False
if args.segments is not None:
save_asis = False
if args.audio_format.endswith("ark"):
save_asis = False
if args.multi_columns_input:
if args.multi_columns_output:
if wavpath is not None:
for _wavpath in wavpath.split():
if Path(_wavpath).suffix != "." + args.audio_format:
save_asis = False
break
if wave.ndim == 1:
_num_ch = 1
else:
_num_ch = wave.shape[1]
if len(wavpath.split()) != _num_ch:
save_asis = False
else:
if wavpath is not None and len(wavpath.split()) > 1:
save_asis = False
elif args.multi_columns_output:
if wave.ndim == 2 and wave.shape[1] > 1:
save_asis = False
if wavpath is not None and wavpath.endswith("|"):
save_asis = False
if wavpath is not None and Path(wavpath).suffix != "." + args.audio_format:
save_asis = False
if not args.audio_format.endswith("ark") and subtypes is not None:
if args.audio_subtype is None:
subtype2 = soundfile.default_subtype(args.audio_format)
else:
subtype2 = args.audio_subtype
for subtype in subtypes:
if subtype != subtype2:
save_asis = False
break
if save_asis:
writer.fscp.write(f"{uttid} {wavpath}\n")
elif args.audio_format.endswith("ark"):
for name in soundfile.available_formats():
if name.lower() in args.audio_format.lower():
suf = name.lower()
break
else:
raise RuntimeError(f"{args.audio_format} is not supported.")
# NOTE(kamo): Using extended ark format style here.
# This format is incompatible with Kaldi
kaldiio.save_ark(
fark,
{uttid: (wave, rate)},
scp=fscp_out,
append=True,
write_function="soundfile",
write_kwargs={"format": suf, "subtype": args.audio_subtype},
)
else:
writer[uttid] = rate, wave
fnum_samples.write(f"{uttid} {len(wave)}\n")
if __name__ == "__main__":
main() |
if args.audio_format.endswith("ark"): | random_line_split |
format_wav_scp.py | #!/usr/bin/env python3
import argparse
import logging
from io import BytesIO
from pathlib import Path
from typing import Optional, Tuple
import humanfriendly
import kaldiio
import numpy as np
import resampy
import soundfile
from tqdm import tqdm
from typeguard import check_argument_types
from espnet2.fileio.read_text import read_2columns_text
from espnet2.fileio.sound_scp import SoundScpWriter, soundfile_read
from espnet2.fileio.vad_scp import VADScpReader
from espnet2.utils.types import str2bool
from espnet.utils.cli_utils import get_commandline_args
def humanfriendly_or_none(value: str):
if value in ("none", "None", "NONE"):
return None
return humanfriendly.parse_size(value)
def str2int_tuple(integers: str) -> Optional[Tuple[int, ...]]:
"""
>>> str2int_tuple('3,4,5')
(3, 4, 5)
"""
assert check_argument_types()
if integers.strip() in ("none", "None", "NONE", "null", "Null", "NULL"):
return None
return tuple(map(int, integers.strip().split(",")))
def vad_trim(vad_reader: VADScpReader, uttid: str, wav: np.array, fs: int) -> np.array:
# Conduct trim wtih vad information
assert check_argument_types()
assert uttid in vad_reader, uttid
vad_info = vad_reader[uttid]
total_length = sum(int((time[1] - time[0]) * fs) for time in vad_info)
new_wav = np.zeros((total_length,), dtype=wav.dtype)
start_frame = 0
for time in vad_info:
# Note: we regard vad as [xxx, yyy)
duration = int((time[1] - time[0]) * fs)
orig_start_frame = int(time[0] * fs)
orig_end_frame = orig_start_frame + duration
end_frame = start_frame + duration
new_wav[start_frame:end_frame] = wav[orig_start_frame:orig_end_frame]
start_frame = end_frame
return new_wav
class SegmentsExtractor:
"""Emulating kaldi extract-segments.cc
Args:
segments (str): The file format is
"<segment-id> <recording-id> <start-time> <end-time>\n"
"e.g. call-861225-A-0050-0065 call-861225-A 5.0 6.5\n"
"""
def __init__(self, fname: str, segments: str = None, multi_columns: bool = False):
assert check_argument_types()
self.wav_scp = fname
self.multi_columns = multi_columns
self.wav_dict = {}
with open(self.wav_scp, "r") as f:
for line in f:
recodeid, wavpath = line.strip().split(None, 1)
if recodeid in self.wav_dict:
raise RuntimeError(f"{recodeid} is duplicated")
self.wav_dict[recodeid] = wavpath
self.segments = segments
self.segments_dict = {}
with open(self.segments, "r") as f:
for line in f:
sps = line.rstrip().split(None)
if len(sps) != 4:
raise RuntimeError("Format is invalid: {}".format(line))
uttid, recodeid, st, et = sps
self.segments_dict[uttid] = (recodeid, float(st), float(et))
if recodeid not in self.wav_dict:
raise RuntimeError(
'Not found "{}" in {}'.format(recodeid, self.wav_scp)
)
def generator(self):
recodeid_counter = {}
for utt, (recodeid, st, et) in self.segments_dict.items():
recodeid_counter[recodeid] = recodeid_counter.get(recodeid, 0) + 1
cached = {}
for utt, (recodeid, st, et) in self.segments_dict.items():
wavpath = self.wav_dict[recodeid]
if recodeid not in cached:
if wavpath.endswith("|"):
if self.multi_columns:
raise RuntimeError(
"Not supporting multi_columns wav.scp for inputs by pipe"
)
# Streaming input e.g. cat a.wav |
with kaldiio.open_like_kaldi(wavpath, "rb") as f:
with BytesIO(f.read()) as g:
array, rate = soundfile.read(g)
else:
if self.multi_columns:
array, rate = soundfile_read(
wavs=wavpath.split(),
dtype=None,
always_2d=False,
concat_axis=1,
)
else:
array, rate = soundfile.read(wavpath)
cached[recodeid] = array, rate
array, rate = cached[recodeid]
# Keep array until the last query
recodeid_counter[recodeid] -= 1
if recodeid_counter[recodeid] == 0:
cached.pop(recodeid)
# Convert starting time of the segment to corresponding sample number.
# If end time is -1 then use the whole file starting from start time.
if et != -1:
|
else:
array = array[int(st * rate) :]
yield utt, (array, rate), None, None
def main():
logfmt = "%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=logfmt)
logging.info(get_commandline_args())
parser = argparse.ArgumentParser(
description='Create waves list from "wav.scp"',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("scp")
parser.add_argument("outdir")
parser.add_argument(
"--name",
default="wav",
help='Specify the prefix word of output file name such as "wav.scp"',
)
parser.add_argument("--segments", default=None)
parser.add_argument(
"--fs",
type=humanfriendly_or_none,
default=None,
help="If the sampling rate specified, Change the sampling rate.",
)
parser.add_argument("--audio-format", default="wav")
parser.add_argument("--vad_based_trim", type=str, default=None)
group = parser.add_mutually_exclusive_group()
group.add_argument("--ref-channels", default=None, type=str2int_tuple)
group.add_argument("--utt2ref-channels", default=None, type=str)
group.add_argument(
"--audio-subtype",
default=None,
type=str,
help=(
"Give a interpretable subtype by soundfile e.g. PCM_16. "
"You can check all available types by soundfile.available_subtypes()"
),
)
parser.add_argument(
"--multi-columns-input",
type=str2bool,
default=False,
help=(
"Enable multi columns mode for input wav.scp. "
"e.g. 'ID a.wav b.wav c.wav' is interpreted as 3ch audio data"
),
)
parser.add_argument(
"--multi-columns-output",
type=str2bool,
default=False,
help=(
"Enable multi columns mode for output wav.scp. "
"e.g. If input audio data has 2ch, "
"each line in wav.scp has the the format like "
"'ID ID-CH0.wav ID-CH1.wav'"
),
)
args = parser.parse_args()
out_num_samples = Path(args.outdir) / "utt2num_samples"
if args.ref_channels is not None:
def utt2ref_channels(x) -> Tuple[int, ...]:
return args.ref_channels
elif args.utt2ref_channels is not None:
utt2ref_channels_dict = read_2columns_text(args.utt2ref_channels)
def utt2ref_channels(x, d=utt2ref_channels_dict) -> Tuple[int, ...]:
chs_str = d[x]
return tuple(map(int, chs_str.split()))
else:
utt2ref_channels = None
if args.audio_format.endswith("ark") and args.multi_columns_output:
raise RuntimeError("Multi columns wav.scp is not supported for ark type")
Path(args.outdir).mkdir(parents=True, exist_ok=True)
out_wavscp = Path(args.outdir) / f"{args.name}.scp"
if args.audio_format.endswith("ark"):
fark = open(Path(args.outdir) / f"data_{args.name}.ark", "wb")
fscp_out = out_wavscp.open("w")
writer = None
else:
writer = SoundScpWriter(
args.outdir,
out_wavscp,
format=args.audio_format,
multi_columns=args.multi_columns_output,
subtype=args.audio_subtype,
)
fscp_out = None
if args.vad_based_trim is not None:
vad_reader = VADScpReader(args.vad_based_trim)
if args.segments is not None:
extractor = SegmentsExtractor(
args.scp, segments=args.segments, multi_columns=args.multi_columns_input
)
generator = extractor.generator
else:
def generator():
with Path(args.scp).open("r") as fscp:
for line in tqdm(fscp):
uttid, wavpath = line.strip().split(None, 1)
# B.a. Without segments and using pipe inputs
if wavpath.endswith("|"):
if args.multi_columns_input:
raise RuntimeError(
"Not supporting multi_columns wav.scp for inputs by"
" pipe"
)
# Streaming input e.g. cat a.wav |
with kaldiio.open_like_kaldi(wavpath, "rb") as f:
with BytesIO(f.read()) as g:
wave, rate = soundfile.read(g)
subtypes = None
# B.b Without segments and not using pipe
else:
if args.multi_columns_input:
wave, rate, subtypes = soundfile_read(
wavs=wavpath.split(),
dtype=None,
always_2d=False,
concat_axis=1,
return_subtype=True,
)
else:
with soundfile.SoundFile(wavpath) as sf:
rate = sf.samplerate
subtypes = [sf.subtype]
wave = sf.read()
yield uttid, (wave, rate), wavpath, subtypes
with out_num_samples.open("w") as fnum_samples:
for uttid, (wave, rate), wavpath, subtypes in tqdm(generator()):
save_asis = True
if args.fs is not None and args.fs != rate:
# FIXME(kamo): To use sox?
wave = resampy.resample(wave, rate, args.fs, axis=0)
rate = args.fs
save_asis = False
if args.vad_based_trim is not None:
wave = vad_trim(vad_reader, uttid, wave, rate)
save_asis = False
if wave.ndim == 2 and utt2ref_channels is not None:
wave = wave[:, utt2ref_channels(uttid)]
save_asis = False
if args.segments is not None:
save_asis = False
if args.audio_format.endswith("ark"):
save_asis = False
if args.multi_columns_input:
if args.multi_columns_output:
if wavpath is not None:
for _wavpath in wavpath.split():
if Path(_wavpath).suffix != "." + args.audio_format:
save_asis = False
break
if wave.ndim == 1:
_num_ch = 1
else:
_num_ch = wave.shape[1]
if len(wavpath.split()) != _num_ch:
save_asis = False
else:
if wavpath is not None and len(wavpath.split()) > 1:
save_asis = False
elif args.multi_columns_output:
if wave.ndim == 2 and wave.shape[1] > 1:
save_asis = False
if wavpath is not None and wavpath.endswith("|"):
save_asis = False
if wavpath is not None and Path(wavpath).suffix != "." + args.audio_format:
save_asis = False
if not args.audio_format.endswith("ark") and subtypes is not None:
if args.audio_subtype is None:
subtype2 = soundfile.default_subtype(args.audio_format)
else:
subtype2 = args.audio_subtype
for subtype in subtypes:
if subtype != subtype2:
save_asis = False
break
if save_asis:
writer.fscp.write(f"{uttid} {wavpath}\n")
elif args.audio_format.endswith("ark"):
for name in soundfile.available_formats():
if name.lower() in args.audio_format.lower():
suf = name.lower()
break
else:
raise RuntimeError(f"{args.audio_format} is not supported.")
# NOTE(kamo): Using extended ark format style here.
# This format is incompatible with Kaldi
kaldiio.save_ark(
fark,
{uttid: (wave, rate)},
scp=fscp_out,
append=True,
write_function="soundfile",
write_kwargs={"format": suf, "subtype": args.audio_subtype},
)
else:
writer[uttid] = rate, wave
fnum_samples.write(f"{uttid} {len(wave)}\n")
if __name__ == "__main__":
main()
| array = array[int(st * rate) : int(et * rate)] | conditional_block |
format_wav_scp.py | #!/usr/bin/env python3
import argparse
import logging
from io import BytesIO
from pathlib import Path
from typing import Optional, Tuple
import humanfriendly
import kaldiio
import numpy as np
import resampy
import soundfile
from tqdm import tqdm
from typeguard import check_argument_types
from espnet2.fileio.read_text import read_2columns_text
from espnet2.fileio.sound_scp import SoundScpWriter, soundfile_read
from espnet2.fileio.vad_scp import VADScpReader
from espnet2.utils.types import str2bool
from espnet.utils.cli_utils import get_commandline_args
def humanfriendly_or_none(value: str):
|
def str2int_tuple(integers: str) -> Optional[Tuple[int, ...]]:
"""
>>> str2int_tuple('3,4,5')
(3, 4, 5)
"""
assert check_argument_types()
if integers.strip() in ("none", "None", "NONE", "null", "Null", "NULL"):
return None
return tuple(map(int, integers.strip().split(",")))
def vad_trim(vad_reader: VADScpReader, uttid: str, wav: np.array, fs: int) -> np.array:
# Conduct trim wtih vad information
assert check_argument_types()
assert uttid in vad_reader, uttid
vad_info = vad_reader[uttid]
total_length = sum(int((time[1] - time[0]) * fs) for time in vad_info)
new_wav = np.zeros((total_length,), dtype=wav.dtype)
start_frame = 0
for time in vad_info:
# Note: we regard vad as [xxx, yyy)
duration = int((time[1] - time[0]) * fs)
orig_start_frame = int(time[0] * fs)
orig_end_frame = orig_start_frame + duration
end_frame = start_frame + duration
new_wav[start_frame:end_frame] = wav[orig_start_frame:orig_end_frame]
start_frame = end_frame
return new_wav
class SegmentsExtractor:
"""Emulating kaldi extract-segments.cc
Args:
segments (str): The file format is
"<segment-id> <recording-id> <start-time> <end-time>\n"
"e.g. call-861225-A-0050-0065 call-861225-A 5.0 6.5\n"
"""
def __init__(self, fname: str, segments: str = None, multi_columns: bool = False):
assert check_argument_types()
self.wav_scp = fname
self.multi_columns = multi_columns
self.wav_dict = {}
with open(self.wav_scp, "r") as f:
for line in f:
recodeid, wavpath = line.strip().split(None, 1)
if recodeid in self.wav_dict:
raise RuntimeError(f"{recodeid} is duplicated")
self.wav_dict[recodeid] = wavpath
self.segments = segments
self.segments_dict = {}
with open(self.segments, "r") as f:
for line in f:
sps = line.rstrip().split(None)
if len(sps) != 4:
raise RuntimeError("Format is invalid: {}".format(line))
uttid, recodeid, st, et = sps
self.segments_dict[uttid] = (recodeid, float(st), float(et))
if recodeid not in self.wav_dict:
raise RuntimeError(
'Not found "{}" in {}'.format(recodeid, self.wav_scp)
)
def generator(self):
recodeid_counter = {}
for utt, (recodeid, st, et) in self.segments_dict.items():
recodeid_counter[recodeid] = recodeid_counter.get(recodeid, 0) + 1
cached = {}
for utt, (recodeid, st, et) in self.segments_dict.items():
wavpath = self.wav_dict[recodeid]
if recodeid not in cached:
if wavpath.endswith("|"):
if self.multi_columns:
raise RuntimeError(
"Not supporting multi_columns wav.scp for inputs by pipe"
)
# Streaming input e.g. cat a.wav |
with kaldiio.open_like_kaldi(wavpath, "rb") as f:
with BytesIO(f.read()) as g:
array, rate = soundfile.read(g)
else:
if self.multi_columns:
array, rate = soundfile_read(
wavs=wavpath.split(),
dtype=None,
always_2d=False,
concat_axis=1,
)
else:
array, rate = soundfile.read(wavpath)
cached[recodeid] = array, rate
array, rate = cached[recodeid]
# Keep array until the last query
recodeid_counter[recodeid] -= 1
if recodeid_counter[recodeid] == 0:
cached.pop(recodeid)
# Convert starting time of the segment to corresponding sample number.
# If end time is -1 then use the whole file starting from start time.
if et != -1:
array = array[int(st * rate) : int(et * rate)]
else:
array = array[int(st * rate) :]
yield utt, (array, rate), None, None
def main():
logfmt = "%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=logfmt)
logging.info(get_commandline_args())
parser = argparse.ArgumentParser(
description='Create waves list from "wav.scp"',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("scp")
parser.add_argument("outdir")
parser.add_argument(
"--name",
default="wav",
help='Specify the prefix word of output file name such as "wav.scp"',
)
parser.add_argument("--segments", default=None)
parser.add_argument(
"--fs",
type=humanfriendly_or_none,
default=None,
help="If the sampling rate specified, Change the sampling rate.",
)
parser.add_argument("--audio-format", default="wav")
parser.add_argument("--vad_based_trim", type=str, default=None)
group = parser.add_mutually_exclusive_group()
group.add_argument("--ref-channels", default=None, type=str2int_tuple)
group.add_argument("--utt2ref-channels", default=None, type=str)
group.add_argument(
"--audio-subtype",
default=None,
type=str,
help=(
"Give a interpretable subtype by soundfile e.g. PCM_16. "
"You can check all available types by soundfile.available_subtypes()"
),
)
parser.add_argument(
"--multi-columns-input",
type=str2bool,
default=False,
help=(
"Enable multi columns mode for input wav.scp. "
"e.g. 'ID a.wav b.wav c.wav' is interpreted as 3ch audio data"
),
)
parser.add_argument(
"--multi-columns-output",
type=str2bool,
default=False,
help=(
"Enable multi columns mode for output wav.scp. "
"e.g. If input audio data has 2ch, "
"each line in wav.scp has the the format like "
"'ID ID-CH0.wav ID-CH1.wav'"
),
)
args = parser.parse_args()
out_num_samples = Path(args.outdir) / "utt2num_samples"
if args.ref_channels is not None:
def utt2ref_channels(x) -> Tuple[int, ...]:
return args.ref_channels
elif args.utt2ref_channels is not None:
utt2ref_channels_dict = read_2columns_text(args.utt2ref_channels)
def utt2ref_channels(x, d=utt2ref_channels_dict) -> Tuple[int, ...]:
chs_str = d[x]
return tuple(map(int, chs_str.split()))
else:
utt2ref_channels = None
if args.audio_format.endswith("ark") and args.multi_columns_output:
raise RuntimeError("Multi columns wav.scp is not supported for ark type")
Path(args.outdir).mkdir(parents=True, exist_ok=True)
out_wavscp = Path(args.outdir) / f"{args.name}.scp"
if args.audio_format.endswith("ark"):
fark = open(Path(args.outdir) / f"data_{args.name}.ark", "wb")
fscp_out = out_wavscp.open("w")
writer = None
else:
writer = SoundScpWriter(
args.outdir,
out_wavscp,
format=args.audio_format,
multi_columns=args.multi_columns_output,
subtype=args.audio_subtype,
)
fscp_out = None
if args.vad_based_trim is not None:
vad_reader = VADScpReader(args.vad_based_trim)
if args.segments is not None:
extractor = SegmentsExtractor(
args.scp, segments=args.segments, multi_columns=args.multi_columns_input
)
generator = extractor.generator
else:
def generator():
with Path(args.scp).open("r") as fscp:
for line in tqdm(fscp):
uttid, wavpath = line.strip().split(None, 1)
# B.a. Without segments and using pipe inputs
if wavpath.endswith("|"):
if args.multi_columns_input:
raise RuntimeError(
"Not supporting multi_columns wav.scp for inputs by"
" pipe"
)
# Streaming input e.g. cat a.wav |
with kaldiio.open_like_kaldi(wavpath, "rb") as f:
with BytesIO(f.read()) as g:
wave, rate = soundfile.read(g)
subtypes = None
# B.b Without segments and not using pipe
else:
if args.multi_columns_input:
wave, rate, subtypes = soundfile_read(
wavs=wavpath.split(),
dtype=None,
always_2d=False,
concat_axis=1,
return_subtype=True,
)
else:
with soundfile.SoundFile(wavpath) as sf:
rate = sf.samplerate
subtypes = [sf.subtype]
wave = sf.read()
yield uttid, (wave, rate), wavpath, subtypes
with out_num_samples.open("w") as fnum_samples:
for uttid, (wave, rate), wavpath, subtypes in tqdm(generator()):
save_asis = True
if args.fs is not None and args.fs != rate:
# FIXME(kamo): To use sox?
wave = resampy.resample(wave, rate, args.fs, axis=0)
rate = args.fs
save_asis = False
if args.vad_based_trim is not None:
wave = vad_trim(vad_reader, uttid, wave, rate)
save_asis = False
if wave.ndim == 2 and utt2ref_channels is not None:
wave = wave[:, utt2ref_channels(uttid)]
save_asis = False
if args.segments is not None:
save_asis = False
if args.audio_format.endswith("ark"):
save_asis = False
if args.multi_columns_input:
if args.multi_columns_output:
if wavpath is not None:
for _wavpath in wavpath.split():
if Path(_wavpath).suffix != "." + args.audio_format:
save_asis = False
break
if wave.ndim == 1:
_num_ch = 1
else:
_num_ch = wave.shape[1]
if len(wavpath.split()) != _num_ch:
save_asis = False
else:
if wavpath is not None and len(wavpath.split()) > 1:
save_asis = False
elif args.multi_columns_output:
if wave.ndim == 2 and wave.shape[1] > 1:
save_asis = False
if wavpath is not None and wavpath.endswith("|"):
save_asis = False
if wavpath is not None and Path(wavpath).suffix != "." + args.audio_format:
save_asis = False
if not args.audio_format.endswith("ark") and subtypes is not None:
if args.audio_subtype is None:
subtype2 = soundfile.default_subtype(args.audio_format)
else:
subtype2 = args.audio_subtype
for subtype in subtypes:
if subtype != subtype2:
save_asis = False
break
if save_asis:
writer.fscp.write(f"{uttid} {wavpath}\n")
elif args.audio_format.endswith("ark"):
for name in soundfile.available_formats():
if name.lower() in args.audio_format.lower():
suf = name.lower()
break
else:
raise RuntimeError(f"{args.audio_format} is not supported.")
# NOTE(kamo): Using extended ark format style here.
# This format is incompatible with Kaldi
kaldiio.save_ark(
fark,
{uttid: (wave, rate)},
scp=fscp_out,
append=True,
write_function="soundfile",
write_kwargs={"format": suf, "subtype": args.audio_subtype},
)
else:
writer[uttid] = rate, wave
fnum_samples.write(f"{uttid} {len(wave)}\n")
if __name__ == "__main__":
main()
| if value in ("none", "None", "NONE"):
return None
return humanfriendly.parse_size(value) | identifier_body |
format_wav_scp.py | #!/usr/bin/env python3
import argparse
import logging
from io import BytesIO
from pathlib import Path
from typing import Optional, Tuple
import humanfriendly
import kaldiio
import numpy as np
import resampy
import soundfile
from tqdm import tqdm
from typeguard import check_argument_types
from espnet2.fileio.read_text import read_2columns_text
from espnet2.fileio.sound_scp import SoundScpWriter, soundfile_read
from espnet2.fileio.vad_scp import VADScpReader
from espnet2.utils.types import str2bool
from espnet.utils.cli_utils import get_commandline_args
def humanfriendly_or_none(value: str):
if value in ("none", "None", "NONE"):
return None
return humanfriendly.parse_size(value)
def str2int_tuple(integers: str) -> Optional[Tuple[int, ...]]:
"""
>>> str2int_tuple('3,4,5')
(3, 4, 5)
"""
assert check_argument_types()
if integers.strip() in ("none", "None", "NONE", "null", "Null", "NULL"):
return None
return tuple(map(int, integers.strip().split(",")))
def vad_trim(vad_reader: VADScpReader, uttid: str, wav: np.array, fs: int) -> np.array:
# Conduct trim wtih vad information
assert check_argument_types()
assert uttid in vad_reader, uttid
vad_info = vad_reader[uttid]
total_length = sum(int((time[1] - time[0]) * fs) for time in vad_info)
new_wav = np.zeros((total_length,), dtype=wav.dtype)
start_frame = 0
for time in vad_info:
# Note: we regard vad as [xxx, yyy)
duration = int((time[1] - time[0]) * fs)
orig_start_frame = int(time[0] * fs)
orig_end_frame = orig_start_frame + duration
end_frame = start_frame + duration
new_wav[start_frame:end_frame] = wav[orig_start_frame:orig_end_frame]
start_frame = end_frame
return new_wav
class SegmentsExtractor:
"""Emulating kaldi extract-segments.cc
Args:
segments (str): The file format is
"<segment-id> <recording-id> <start-time> <end-time>\n"
"e.g. call-861225-A-0050-0065 call-861225-A 5.0 6.5\n"
"""
def __init__(self, fname: str, segments: str = None, multi_columns: bool = False):
assert check_argument_types()
self.wav_scp = fname
self.multi_columns = multi_columns
self.wav_dict = {}
with open(self.wav_scp, "r") as f:
for line in f:
recodeid, wavpath = line.strip().split(None, 1)
if recodeid in self.wav_dict:
raise RuntimeError(f"{recodeid} is duplicated")
self.wav_dict[recodeid] = wavpath
self.segments = segments
self.segments_dict = {}
with open(self.segments, "r") as f:
for line in f:
sps = line.rstrip().split(None)
if len(sps) != 4:
raise RuntimeError("Format is invalid: {}".format(line))
uttid, recodeid, st, et = sps
self.segments_dict[uttid] = (recodeid, float(st), float(et))
if recodeid not in self.wav_dict:
raise RuntimeError(
'Not found "{}" in {}'.format(recodeid, self.wav_scp)
)
def generator(self):
recodeid_counter = {}
for utt, (recodeid, st, et) in self.segments_dict.items():
recodeid_counter[recodeid] = recodeid_counter.get(recodeid, 0) + 1
cached = {}
for utt, (recodeid, st, et) in self.segments_dict.items():
wavpath = self.wav_dict[recodeid]
if recodeid not in cached:
if wavpath.endswith("|"):
if self.multi_columns:
raise RuntimeError(
"Not supporting multi_columns wav.scp for inputs by pipe"
)
# Streaming input e.g. cat a.wav |
with kaldiio.open_like_kaldi(wavpath, "rb") as f:
with BytesIO(f.read()) as g:
array, rate = soundfile.read(g)
else:
if self.multi_columns:
array, rate = soundfile_read(
wavs=wavpath.split(),
dtype=None,
always_2d=False,
concat_axis=1,
)
else:
array, rate = soundfile.read(wavpath)
cached[recodeid] = array, rate
array, rate = cached[recodeid]
# Keep array until the last query
recodeid_counter[recodeid] -= 1
if recodeid_counter[recodeid] == 0:
cached.pop(recodeid)
# Convert starting time of the segment to corresponding sample number.
# If end time is -1 then use the whole file starting from start time.
if et != -1:
array = array[int(st * rate) : int(et * rate)]
else:
array = array[int(st * rate) :]
yield utt, (array, rate), None, None
def main():
logfmt = "%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s"
logging.basicConfig(level=logging.INFO, format=logfmt)
logging.info(get_commandline_args())
parser = argparse.ArgumentParser(
description='Create waves list from "wav.scp"',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("scp")
parser.add_argument("outdir")
parser.add_argument(
"--name",
default="wav",
help='Specify the prefix word of output file name such as "wav.scp"',
)
parser.add_argument("--segments", default=None)
parser.add_argument(
"--fs",
type=humanfriendly_or_none,
default=None,
help="If the sampling rate specified, Change the sampling rate.",
)
parser.add_argument("--audio-format", default="wav")
parser.add_argument("--vad_based_trim", type=str, default=None)
group = parser.add_mutually_exclusive_group()
group.add_argument("--ref-channels", default=None, type=str2int_tuple)
group.add_argument("--utt2ref-channels", default=None, type=str)
group.add_argument(
"--audio-subtype",
default=None,
type=str,
help=(
"Give a interpretable subtype by soundfile e.g. PCM_16. "
"You can check all available types by soundfile.available_subtypes()"
),
)
parser.add_argument(
"--multi-columns-input",
type=str2bool,
default=False,
help=(
"Enable multi columns mode for input wav.scp. "
"e.g. 'ID a.wav b.wav c.wav' is interpreted as 3ch audio data"
),
)
parser.add_argument(
"--multi-columns-output",
type=str2bool,
default=False,
help=(
"Enable multi columns mode for output wav.scp. "
"e.g. If input audio data has 2ch, "
"each line in wav.scp has the the format like "
"'ID ID-CH0.wav ID-CH1.wav'"
),
)
args = parser.parse_args()
out_num_samples = Path(args.outdir) / "utt2num_samples"
if args.ref_channels is not None:
def utt2ref_channels(x) -> Tuple[int, ...]:
return args.ref_channels
elif args.utt2ref_channels is not None:
utt2ref_channels_dict = read_2columns_text(args.utt2ref_channels)
def | (x, d=utt2ref_channels_dict) -> Tuple[int, ...]:
chs_str = d[x]
return tuple(map(int, chs_str.split()))
else:
utt2ref_channels = None
if args.audio_format.endswith("ark") and args.multi_columns_output:
raise RuntimeError("Multi columns wav.scp is not supported for ark type")
Path(args.outdir).mkdir(parents=True, exist_ok=True)
out_wavscp = Path(args.outdir) / f"{args.name}.scp"
if args.audio_format.endswith("ark"):
fark = open(Path(args.outdir) / f"data_{args.name}.ark", "wb")
fscp_out = out_wavscp.open("w")
writer = None
else:
writer = SoundScpWriter(
args.outdir,
out_wavscp,
format=args.audio_format,
multi_columns=args.multi_columns_output,
subtype=args.audio_subtype,
)
fscp_out = None
if args.vad_based_trim is not None:
vad_reader = VADScpReader(args.vad_based_trim)
if args.segments is not None:
extractor = SegmentsExtractor(
args.scp, segments=args.segments, multi_columns=args.multi_columns_input
)
generator = extractor.generator
else:
def generator():
with Path(args.scp).open("r") as fscp:
for line in tqdm(fscp):
uttid, wavpath = line.strip().split(None, 1)
# B.a. Without segments and using pipe inputs
if wavpath.endswith("|"):
if args.multi_columns_input:
raise RuntimeError(
"Not supporting multi_columns wav.scp for inputs by"
" pipe"
)
# Streaming input e.g. cat a.wav |
with kaldiio.open_like_kaldi(wavpath, "rb") as f:
with BytesIO(f.read()) as g:
wave, rate = soundfile.read(g)
subtypes = None
# B.b Without segments and not using pipe
else:
if args.multi_columns_input:
wave, rate, subtypes = soundfile_read(
wavs=wavpath.split(),
dtype=None,
always_2d=False,
concat_axis=1,
return_subtype=True,
)
else:
with soundfile.SoundFile(wavpath) as sf:
rate = sf.samplerate
subtypes = [sf.subtype]
wave = sf.read()
yield uttid, (wave, rate), wavpath, subtypes
with out_num_samples.open("w") as fnum_samples:
for uttid, (wave, rate), wavpath, subtypes in tqdm(generator()):
save_asis = True
if args.fs is not None and args.fs != rate:
# FIXME(kamo): To use sox?
wave = resampy.resample(wave, rate, args.fs, axis=0)
rate = args.fs
save_asis = False
if args.vad_based_trim is not None:
wave = vad_trim(vad_reader, uttid, wave, rate)
save_asis = False
if wave.ndim == 2 and utt2ref_channels is not None:
wave = wave[:, utt2ref_channels(uttid)]
save_asis = False
if args.segments is not None:
save_asis = False
if args.audio_format.endswith("ark"):
save_asis = False
if args.multi_columns_input:
if args.multi_columns_output:
if wavpath is not None:
for _wavpath in wavpath.split():
if Path(_wavpath).suffix != "." + args.audio_format:
save_asis = False
break
if wave.ndim == 1:
_num_ch = 1
else:
_num_ch = wave.shape[1]
if len(wavpath.split()) != _num_ch:
save_asis = False
else:
if wavpath is not None and len(wavpath.split()) > 1:
save_asis = False
elif args.multi_columns_output:
if wave.ndim == 2 and wave.shape[1] > 1:
save_asis = False
if wavpath is not None and wavpath.endswith("|"):
save_asis = False
if wavpath is not None and Path(wavpath).suffix != "." + args.audio_format:
save_asis = False
if not args.audio_format.endswith("ark") and subtypes is not None:
if args.audio_subtype is None:
subtype2 = soundfile.default_subtype(args.audio_format)
else:
subtype2 = args.audio_subtype
for subtype in subtypes:
if subtype != subtype2:
save_asis = False
break
if save_asis:
writer.fscp.write(f"{uttid} {wavpath}\n")
elif args.audio_format.endswith("ark"):
for name in soundfile.available_formats():
if name.lower() in args.audio_format.lower():
suf = name.lower()
break
else:
raise RuntimeError(f"{args.audio_format} is not supported.")
# NOTE(kamo): Using extended ark format style here.
# This format is incompatible with Kaldi
kaldiio.save_ark(
fark,
{uttid: (wave, rate)},
scp=fscp_out,
append=True,
write_function="soundfile",
write_kwargs={"format": suf, "subtype": args.audio_subtype},
)
else:
writer[uttid] = rate, wave
fnum_samples.write(f"{uttid} {len(wave)}\n")
if __name__ == "__main__":
main()
| utt2ref_channels | identifier_name |
cv4ag.py | #!usr/bin/env python
"""
Top Layer for the Computer Vision 4 Agriculture (cv4ag) framework
Lukas Arnold
WB-DIME
Jan 30 2017
The framework consists of four parts
1. Parsing input data (parse)
2. Downloading satellite images (get_satellite)
3. Overlaying data with satellite images (overlay)
4. Training (train)
5. Application (ml)
"""
import argparse,sys,os
#import subdirectories to python path
sys.path.append('scripts')
sys.path.append('modules')
sys.path.append('lib')
import parse,get_satellite,overlay,clean
#----------------------------------------------------------------------
#Main
if __name__ == "__main__":
# get options from command line
class myParse(argparse.ArgumentParser): # override error message to show usage
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
cmdParser = myParse(\
description='Machine Learning Framework for Agricultural Data.',
add_help=True)
cmdParser.add_argument('module',
metavar='OPTION',
type=str,default=False,
help='The modules to be loaded. OPTION: \n\
all - all modules (except clear).\n\
parse - input file parser.\n\
satellite - get satellite data.\n\
overlay - overlay classification with satellite data. \n\
train - train.\n\
ml - apply machine learning algorithm.\n\
clear - clear generated data from previous run on input file')
cmdParser.add_argument('mapbox_token',
metavar='MAPBOX_TOKEN',
type=str,default=False,nargs='?',
help='Mapbox token to download satellite images .')
cmdParser.add_argument('-i',
type=str,default=None,metavar='FILE',
help='Input file. Do not give if data obtained by script.')
cmdParser.add_argument('-s',metavar='FILE',
type=str,default=None,
help='Script file to obtain data')
cmdParser.add_argument('-o',metavar='PATH',
type=str,default="data/",
help='Output folder. Satellite data are put in and read from\
PATH/sat/.')
cmdParser.add_argument('-c',metavar='N',
type=int,default=1000,
help='Number of satellite images to download.')
cmdParser.add_argument('-z',metavar='N',
type=int,default=17,
help='Zoom level. Min=15, Max=19. See libs/satellite_resolutions.csv for resolutions.')
cmdParser.add_argument('-x',metavar='N',
type=int,default=480,
help='Images have width N pixel.')
cmdParser.add_argument('-y',metavar='N',
type=int,default=360,
help='Images have height N pixel.')
cmdParser.add_argument('-d',metavar='FILETYPE_CODE',
type=str,default=None,
help='Specify file type. Will find to detect filetype automatically. \
Will not prompt for vector conversion if not given.\
See www.gdal.org/formats_list.html or\
www.gdal.org/ogr_formats.html \
(or libs/*_formats.csv for FILETYPE_CODEs.')
cmdParser.add_argument('-n',metavar='N',
type=int,default=1,
help='Accuracy of neural net. 0: lowest. 3: highest.')
cmdParser.add_argument('--lonshift',metavar='N.N',
type=float,default=0,
help='Longitudanal shift of training data.')
cmdParser.add_argument('--latshift',metavar='N.N',
type=float,default=0,
help='Lateral shift of training data .')
cmdParser.add_argument('--shiftformat',metavar='N',
type=int,default=0,
help='Format of longitudinal/lateral shift.\
0: As fraction of image. 1: Georeferenced unites.')
cmdParser.add_argument('--top',metavar='N',
type=int,default=15,
help='Get N most frequent classes.')
cmdParser.add_argument('--key',
type=str,default='Descriptio',
help='Set parameter key for category in GIS file to classify data.')
cmdParser.add_argument('--epsg',metavar='N',
type=int,default=None,
help='EPSG format for GIS data. Is read from data if not set.')
cmdParser.add_argument('--layer',metavar='N',
type=int,default=None,
help='Number of layers to be trained on.')
cmdParser.add_argument('--mode',
type=str,default='gpu',
help='GPU (default) or CPU mode')
cmdParser.add_argument('--sat',
type=str,default=None,
help='Folder with training satellite images, if not in standard location.')
cmdParser.add_argument('--batchsize',metavar='N',
type=int,default=None,
help='Size of training batch (1-4)')
cmdParser.add_argument('--stepsize',metavar='N.N',
type=float,default=None,
help='Size of training step')
cmdParser.add_argument('--maxiter',metavar='N',
type=int,default=None,
help='Maximum iterations at training stage')
cmdParser.add_argument('--datatype',
type=str,default='PNG',
help='Datatype of training input date (PNG, LMDB, LMDB2 or HDF5)')
cmdParser.add_argument('--arg1',
type=str,default=None,
help='Argument 1 for script.')
cmdParser.add_argument('--arg2',
type=str,default=None,
help='Argument 2 for script.')
cmdParser.add_argument('--arg3',
type=str,default=None,
help='Argument 3 for script.')
cmdParser.add_argument('--arg4',
type=str,default=None,
help='Argument 4 for script.')
testParser = cmdParser.add_mutually_exclusive_group(required=False)
testParser.add_argument('--test', dest='test', action='store_true',help='Create test set.')
testParser.add_argument('--no-test', dest='test', action='store_false',help='Do not create test set (default)')
cmdParser.set_defaults(test=False)
backgroundParser = cmdParser.add_mutually_exclusive_group(required=False)
backgroundParser.add_argument('--background', dest='b', action='store_false',help='Classify background for training (default)')
backgroundParser.add_argument('--no-background', dest='b', action='store_true',help='Ignore background for training.')
cmdParser.set_defaults(b=False)
randomParser = cmdParser.add_mutually_exclusive_group(required=False)
randomParser.add_argument('--random', dest='randomImages', action='store_true',help='Use random images within GIS boundary box.')
randomParser.add_argument('--no-random', dest='randomImages', action='store_false',help='Only use images with features (default).')
cmdParser.set_defaults(randomImages=False)
weightParser = cmdParser.add_mutually_exclusive_group(required=False)
weightParser.add_argument('--weights', dest='initweights', action='store_true',help='Initialize weights according to frequency statistics (default).')
weightParser.add_argument('--no-weights', dest='initweights', action='store_false',help='Do not initialize weights.')
cmdParser.set_defaults(initweights=True)
compareParser = cmdParser.add_mutually_exclusive_group(required=False)
compareParser.add_argument('--compares', dest='compare', action='store_true',help='Compare classified results with labels')
compareParser.add_argument('--no-compares', dest='compare', action='store_false',help='Do not compare classified results with labels')
cmdParser.set_defaults(compare=True)
cmdArgs = vars(cmdParser.parse_args())
selectedModule = cmdArgs.get('module')
mapboxtoken = cmdArgs.get('mapbox_token')
inputFile = cmdArgs.get('i')
outputFolder = cmdArgs.get('o')
zoomLevel= cmdArgs.get('z')
datatype = cmdArgs.get('d')
satelliteCount = cmdArgs.get('c')
xpixel = cmdArgs.get('x')
ypixel = cmdArgs.get('y')
scriptFile = cmdArgs.get('s')
net = cmdArgs.get('n')
scriptArg1 = cmdArgs.get('arg1')
scriptArg2 = cmdArgs.get('arg2')
scriptArg3 = cmdArgs.get('arg3')
scriptArg4 = cmdArgs.get('arg4')
scriptArg4 = cmdArgs.get('arg4')
lonshift= cmdArgs.get('lonshift')
latshift= cmdArgs.get('latshift')
layernumber = cmdArgs.get('layer')
shiftformat = cmdArgs.get('shiftformat')
key = cmdArgs.get('key')
mode = cmdArgs.get('mode')
top = cmdArgs.get('top')
epsg = cmdArgs.get('epsg')
test = cmdArgs.get('test')
batchsize = cmdArgs.get('batchsize')
maxiter = cmdArgs.get('maxiter')
stepsize = cmdArgs.get('stepsize')
datatype = cmdArgs.get('datatype')
sat = cmdArgs.get('sat')
b = cmdArgs.get('b')
randomImages = cmdArgs.get('randomImages')
initweights = cmdArgs.get('initweights')
compare = cmdArgs.get('compare')
# Execute according to options
print "Option:",selectedModule
#only import caffe if needed
if selectedModule == 'all' or selectedModule=='train' or selectedModule=='ml':
import train,applyml
if selectedModule == 'all':
inputFile,stats,freq,elements=\
parse.parse(inputFile=inputFile,outputFolder=outputFolder,
scriptFile=scriptFile,datatype=datatype,top=top,layernumber=layernumber,
key=key,
scriptArg1=scriptArg1,scriptArg2=scriptArg2,
scriptArg3=scriptArg3,scriptArg4=scriptArg4)
get_satellite.get_satellite(inputFile=inputFile,
mapboxtoken=mapboxtoken,
count=satelliteCount,
zoomLevel=zoomLevel,
outputFolder=outputFolder,
epsg=epsg,
xpixel=xpixel,
ypixel=ypixel,
randomImages=randomImages,
elements=elements)
overlay.overlay(outputFolder,inputFile,
xpixel=xpixel,
ypixel=ypixel,
zoomLevel=zoomLevel,
lonshift=lonshift,latshift=latshift,
shiftformat=shiftformat,
top=top,
stats=stats,
count=satelliteCount,
epsg=epsg,
key=key,
sat=sat,
randomImages=randomImages,
elements=elements\
)
train.train(outputFolder=outputFolder,
inputFile=inputFile,
net=net,
top=top,
key=key,
mode=mode,
xpixel=xpixel,
ypixel=ypixel,
stats=stats,
freq=freq,
elements=elements,
ignorebackground=b,
initweights=initweights,
batchsize=batchsize,
maxiter=maxiter,
stepsize=stepsize,
datatype=datatype,
createTest=test\
)
applyml.apply(outputFolder,
inputFile,
mode=mode,
ignorebackground=b,
#stats=stats,
epsg=epsg,
top=top,
compare=compare,
key=key)
elif selectedModule == 'parse':
parse.parse(inputFile=inputFile,outputFolder=outputFolder,
scriptFile=scriptFile,datatype=datatype,top=top,layernumber=layernumber,
key=key,
scriptArg1=scriptArg1,scriptArg2=scriptArg2,
scriptArg3=scriptArg3,scriptArg4=scriptArg4)
elif selectedModule == 'satellite':
get_satellite.get_satellite(inputFile=inputFile,
mapboxtoken=mapboxtoken,
count=satelliteCount,
zoomLevel=zoomLevel,
epsg=epsg,
outputFolder=outputFolder,
randomImages=randomImages,
xpixel=xpixel,
ypixel=ypixel)
elif selectedModule == 'overlay':
overlay.overlay(outputFolder,inputFile,
xpixel=xpixel,
ypixel=ypixel,
zoomLevel=zoomLevel,
lonshift=lonshift,latshift=latshift,
shiftformat=shiftformat,
top=top,
epsg=epsg,
sat=sat,
count=satelliteCount,
randomImages=randomImages,
key=key
)
elif selectedModule == 'train':
|
elif selectedModule == 'ml':
applyml.apply(\
outputFolder,
inputFile,
mode=mode,
ignorebackground=b,
epsg=epsg,
compare=compare,
top=top)
#key=key)
elif selectedModule == 'clear':
clean.clear(inputFile)
else:
print "error - no valid option"
cmdParser.print_help()
| train.train(outputFolder=outputFolder,
inputFile=inputFile,
net=net,
top=top,
key=key,
mode=mode,
xpixel=xpixel,
ypixel=ypixel,
ignorebackground=b,
batchsize=batchsize,
maxiter=maxiter,
datatype=datatype,
stepsize=stepsize,
initweights=initweights,
createTest=test\
) | conditional_block |
cv4ag.py | #!usr/bin/env python
"""
Top Layer for the Computer Vision 4 Agriculture (cv4ag) framework
Lukas Arnold
WB-DIME
Jan 30 2017
The framework consists of four parts
1. Parsing input data (parse)
2. Downloading satellite images (get_satellite)
3. Overlaying data with satellite images (overlay)
4. Training (train)
5. Application (ml)
"""
import argparse,sys,os
#import subdirectories to python path
sys.path.append('scripts')
sys.path.append('modules')
sys.path.append('lib')
import parse,get_satellite,overlay,clean
#----------------------------------------------------------------------
#Main
if __name__ == "__main__":
# get options from command line
class | (argparse.ArgumentParser): # override error message to show usage
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
cmdParser = myParse(\
description='Machine Learning Framework for Agricultural Data.',
add_help=True)
cmdParser.add_argument('module',
metavar='OPTION',
type=str,default=False,
help='The modules to be loaded. OPTION: \n\
all - all modules (except clear).\n\
parse - input file parser.\n\
satellite - get satellite data.\n\
overlay - overlay classification with satellite data. \n\
train - train.\n\
ml - apply machine learning algorithm.\n\
clear - clear generated data from previous run on input file')
cmdParser.add_argument('mapbox_token',
metavar='MAPBOX_TOKEN',
type=str,default=False,nargs='?',
help='Mapbox token to download satellite images .')
cmdParser.add_argument('-i',
type=str,default=None,metavar='FILE',
help='Input file. Do not give if data obtained by script.')
cmdParser.add_argument('-s',metavar='FILE',
type=str,default=None,
help='Script file to obtain data')
cmdParser.add_argument('-o',metavar='PATH',
type=str,default="data/",
help='Output folder. Satellite data are put in and read from\
PATH/sat/.')
cmdParser.add_argument('-c',metavar='N',
type=int,default=1000,
help='Number of satellite images to download.')
cmdParser.add_argument('-z',metavar='N',
type=int,default=17,
help='Zoom level. Min=15, Max=19. See libs/satellite_resolutions.csv for resolutions.')
cmdParser.add_argument('-x',metavar='N',
type=int,default=480,
help='Images have width N pixel.')
cmdParser.add_argument('-y',metavar='N',
type=int,default=360,
help='Images have height N pixel.')
cmdParser.add_argument('-d',metavar='FILETYPE_CODE',
type=str,default=None,
help='Specify file type. Will find to detect filetype automatically. \
Will not prompt for vector conversion if not given.\
See www.gdal.org/formats_list.html or\
www.gdal.org/ogr_formats.html \
(or libs/*_formats.csv for FILETYPE_CODEs.')
cmdParser.add_argument('-n',metavar='N',
type=int,default=1,
help='Accuracy of neural net. 0: lowest. 3: highest.')
cmdParser.add_argument('--lonshift',metavar='N.N',
type=float,default=0,
help='Longitudanal shift of training data.')
cmdParser.add_argument('--latshift',metavar='N.N',
type=float,default=0,
help='Lateral shift of training data .')
cmdParser.add_argument('--shiftformat',metavar='N',
type=int,default=0,
help='Format of longitudinal/lateral shift.\
0: As fraction of image. 1: Georeferenced unites.')
cmdParser.add_argument('--top',metavar='N',
type=int,default=15,
help='Get N most frequent classes.')
cmdParser.add_argument('--key',
type=str,default='Descriptio',
help='Set parameter key for category in GIS file to classify data.')
cmdParser.add_argument('--epsg',metavar='N',
type=int,default=None,
help='EPSG format for GIS data. Is read from data if not set.')
cmdParser.add_argument('--layer',metavar='N',
type=int,default=None,
help='Number of layers to be trained on.')
cmdParser.add_argument('--mode',
type=str,default='gpu',
help='GPU (default) or CPU mode')
cmdParser.add_argument('--sat',
type=str,default=None,
help='Folder with training satellite images, if not in standard location.')
cmdParser.add_argument('--batchsize',metavar='N',
type=int,default=None,
help='Size of training batch (1-4)')
cmdParser.add_argument('--stepsize',metavar='N.N',
type=float,default=None,
help='Size of training step')
cmdParser.add_argument('--maxiter',metavar='N',
type=int,default=None,
help='Maximum iterations at training stage')
cmdParser.add_argument('--datatype',
type=str,default='PNG',
help='Datatype of training input date (PNG, LMDB, LMDB2 or HDF5)')
cmdParser.add_argument('--arg1',
type=str,default=None,
help='Argument 1 for script.')
cmdParser.add_argument('--arg2',
type=str,default=None,
help='Argument 2 for script.')
cmdParser.add_argument('--arg3',
type=str,default=None,
help='Argument 3 for script.')
cmdParser.add_argument('--arg4',
type=str,default=None,
help='Argument 4 for script.')
testParser = cmdParser.add_mutually_exclusive_group(required=False)
testParser.add_argument('--test', dest='test', action='store_true',help='Create test set.')
testParser.add_argument('--no-test', dest='test', action='store_false',help='Do not create test set (default)')
cmdParser.set_defaults(test=False)
backgroundParser = cmdParser.add_mutually_exclusive_group(required=False)
backgroundParser.add_argument('--background', dest='b', action='store_false',help='Classify background for training (default)')
backgroundParser.add_argument('--no-background', dest='b', action='store_true',help='Ignore background for training.')
cmdParser.set_defaults(b=False)
randomParser = cmdParser.add_mutually_exclusive_group(required=False)
randomParser.add_argument('--random', dest='randomImages', action='store_true',help='Use random images within GIS boundary box.')
randomParser.add_argument('--no-random', dest='randomImages', action='store_false',help='Only use images with features (default).')
cmdParser.set_defaults(randomImages=False)
weightParser = cmdParser.add_mutually_exclusive_group(required=False)
weightParser.add_argument('--weights', dest='initweights', action='store_true',help='Initialize weights according to frequency statistics (default).')
weightParser.add_argument('--no-weights', dest='initweights', action='store_false',help='Do not initialize weights.')
cmdParser.set_defaults(initweights=True)
compareParser = cmdParser.add_mutually_exclusive_group(required=False)
compareParser.add_argument('--compares', dest='compare', action='store_true',help='Compare classified results with labels')
compareParser.add_argument('--no-compares', dest='compare', action='store_false',help='Do not compare classified results with labels')
cmdParser.set_defaults(compare=True)
cmdArgs = vars(cmdParser.parse_args())
selectedModule = cmdArgs.get('module')
mapboxtoken = cmdArgs.get('mapbox_token')
inputFile = cmdArgs.get('i')
outputFolder = cmdArgs.get('o')
zoomLevel= cmdArgs.get('z')
datatype = cmdArgs.get('d')
satelliteCount = cmdArgs.get('c')
xpixel = cmdArgs.get('x')
ypixel = cmdArgs.get('y')
scriptFile = cmdArgs.get('s')
net = cmdArgs.get('n')
scriptArg1 = cmdArgs.get('arg1')
scriptArg2 = cmdArgs.get('arg2')
scriptArg3 = cmdArgs.get('arg3')
scriptArg4 = cmdArgs.get('arg4')
scriptArg4 = cmdArgs.get('arg4')
lonshift= cmdArgs.get('lonshift')
latshift= cmdArgs.get('latshift')
layernumber = cmdArgs.get('layer')
shiftformat = cmdArgs.get('shiftformat')
key = cmdArgs.get('key')
mode = cmdArgs.get('mode')
top = cmdArgs.get('top')
epsg = cmdArgs.get('epsg')
test = cmdArgs.get('test')
batchsize = cmdArgs.get('batchsize')
maxiter = cmdArgs.get('maxiter')
stepsize = cmdArgs.get('stepsize')
datatype = cmdArgs.get('datatype')
sat = cmdArgs.get('sat')
b = cmdArgs.get('b')
randomImages = cmdArgs.get('randomImages')
initweights = cmdArgs.get('initweights')
compare = cmdArgs.get('compare')
# Execute according to options
print "Option:",selectedModule
#only import caffe if needed
if selectedModule == 'all' or selectedModule=='train' or selectedModule=='ml':
import train,applyml
if selectedModule == 'all':
inputFile,stats,freq,elements=\
parse.parse(inputFile=inputFile,outputFolder=outputFolder,
scriptFile=scriptFile,datatype=datatype,top=top,layernumber=layernumber,
key=key,
scriptArg1=scriptArg1,scriptArg2=scriptArg2,
scriptArg3=scriptArg3,scriptArg4=scriptArg4)
get_satellite.get_satellite(inputFile=inputFile,
mapboxtoken=mapboxtoken,
count=satelliteCount,
zoomLevel=zoomLevel,
outputFolder=outputFolder,
epsg=epsg,
xpixel=xpixel,
ypixel=ypixel,
randomImages=randomImages,
elements=elements)
overlay.overlay(outputFolder,inputFile,
xpixel=xpixel,
ypixel=ypixel,
zoomLevel=zoomLevel,
lonshift=lonshift,latshift=latshift,
shiftformat=shiftformat,
top=top,
stats=stats,
count=satelliteCount,
epsg=epsg,
key=key,
sat=sat,
randomImages=randomImages,
elements=elements\
)
train.train(outputFolder=outputFolder,
inputFile=inputFile,
net=net,
top=top,
key=key,
mode=mode,
xpixel=xpixel,
ypixel=ypixel,
stats=stats,
freq=freq,
elements=elements,
ignorebackground=b,
initweights=initweights,
batchsize=batchsize,
maxiter=maxiter,
stepsize=stepsize,
datatype=datatype,
createTest=test\
)
applyml.apply(outputFolder,
inputFile,
mode=mode,
ignorebackground=b,
#stats=stats,
epsg=epsg,
top=top,
compare=compare,
key=key)
elif selectedModule == 'parse':
parse.parse(inputFile=inputFile,outputFolder=outputFolder,
scriptFile=scriptFile,datatype=datatype,top=top,layernumber=layernumber,
key=key,
scriptArg1=scriptArg1,scriptArg2=scriptArg2,
scriptArg3=scriptArg3,scriptArg4=scriptArg4)
elif selectedModule == 'satellite':
get_satellite.get_satellite(inputFile=inputFile,
mapboxtoken=mapboxtoken,
count=satelliteCount,
zoomLevel=zoomLevel,
epsg=epsg,
outputFolder=outputFolder,
randomImages=randomImages,
xpixel=xpixel,
ypixel=ypixel)
elif selectedModule == 'overlay':
overlay.overlay(outputFolder,inputFile,
xpixel=xpixel,
ypixel=ypixel,
zoomLevel=zoomLevel,
lonshift=lonshift,latshift=latshift,
shiftformat=shiftformat,
top=top,
epsg=epsg,
sat=sat,
count=satelliteCount,
randomImages=randomImages,
key=key
)
elif selectedModule == 'train':
train.train(outputFolder=outputFolder,
inputFile=inputFile,
net=net,
top=top,
key=key,
mode=mode,
xpixel=xpixel,
ypixel=ypixel,
ignorebackground=b,
batchsize=batchsize,
maxiter=maxiter,
datatype=datatype,
stepsize=stepsize,
initweights=initweights,
createTest=test\
)
elif selectedModule == 'ml':
applyml.apply(\
outputFolder,
inputFile,
mode=mode,
ignorebackground=b,
epsg=epsg,
compare=compare,
top=top)
#key=key)
elif selectedModule == 'clear':
clean.clear(inputFile)
else:
print "error - no valid option"
cmdParser.print_help()
| myParse | identifier_name |
cv4ag.py | #!usr/bin/env python
"""
Top Layer for the Computer Vision 4 Agriculture (cv4ag) framework
Lukas Arnold
WB-DIME
Jan 30 2017
The framework consists of four parts
1. Parsing input data (parse)
2. Downloading satellite images (get_satellite)
3. Overlaying data with satellite images (overlay)
4. Training (train)
5. Application (ml)
"""
import argparse,sys,os
#import subdirectories to python path
sys.path.append('scripts')
sys.path.append('modules')
sys.path.append('lib')
import parse,get_satellite,overlay,clean
#----------------------------------------------------------------------
#Main
if __name__ == "__main__":
# get options from command line
class myParse(argparse.ArgumentParser): # override error message to show usage
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
cmdParser = myParse(\
description='Machine Learning Framework for Agricultural Data.',
add_help=True)
cmdParser.add_argument('module',
metavar='OPTION',
type=str,default=False,
help='The modules to be loaded. OPTION: \n\
all - all modules (except clear).\n\
parse - input file parser.\n\
satellite - get satellite data.\n\
overlay - overlay classification with satellite data. \n\
train - train.\n\
ml - apply machine learning algorithm.\n\
clear - clear generated data from previous run on input file')
cmdParser.add_argument('mapbox_token',
metavar='MAPBOX_TOKEN',
type=str,default=False,nargs='?',
help='Mapbox token to download satellite images .')
cmdParser.add_argument('-i',
type=str,default=None,metavar='FILE',
help='Input file. Do not give if data obtained by script.')
cmdParser.add_argument('-s',metavar='FILE',
type=str,default=None,
help='Script file to obtain data')
cmdParser.add_argument('-o',metavar='PATH',
type=str,default="data/",
help='Output folder. Satellite data are put in and read from\
PATH/sat/.')
cmdParser.add_argument('-c',metavar='N',
type=int,default=1000,
help='Number of satellite images to download.')
cmdParser.add_argument('-z',metavar='N',
type=int,default=17,
help='Zoom level. Min=15, Max=19. See libs/satellite_resolutions.csv for resolutions.')
cmdParser.add_argument('-x',metavar='N',
type=int,default=480,
help='Images have width N pixel.')
cmdParser.add_argument('-y',metavar='N',
type=int,default=360,
help='Images have height N pixel.')
cmdParser.add_argument('-d',metavar='FILETYPE_CODE',
type=str,default=None,
help='Specify file type. Will find to detect filetype automatically. \
Will not prompt for vector conversion if not given.\
See www.gdal.org/formats_list.html or\
www.gdal.org/ogr_formats.html \
(or libs/*_formats.csv for FILETYPE_CODEs.')
cmdParser.add_argument('-n',metavar='N',
type=int,default=1,
help='Accuracy of neural net. 0: lowest. 3: highest.')
cmdParser.add_argument('--lonshift',metavar='N.N',
type=float,default=0,
help='Longitudanal shift of training data.')
cmdParser.add_argument('--latshift',metavar='N.N',
type=float,default=0,
help='Lateral shift of training data .')
cmdParser.add_argument('--shiftformat',metavar='N',
type=int,default=0,
help='Format of longitudinal/lateral shift.\
0: As fraction of image. 1: Georeferenced unites.')
cmdParser.add_argument('--top',metavar='N',
type=int,default=15,
help='Get N most frequent classes.')
cmdParser.add_argument('--key',
type=str,default='Descriptio',
help='Set parameter key for category in GIS file to classify data.')
cmdParser.add_argument('--epsg',metavar='N',
type=int,default=None,
help='EPSG format for GIS data. Is read from data if not set.')
cmdParser.add_argument('--layer',metavar='N',
type=int,default=None,
help='Number of layers to be trained on.')
cmdParser.add_argument('--mode',
type=str,default='gpu',
help='GPU (default) or CPU mode')
cmdParser.add_argument('--sat',
type=str,default=None,
help='Folder with training satellite images, if not in standard location.')
cmdParser.add_argument('--batchsize',metavar='N',
type=int,default=None,
help='Size of training batch (1-4)')
cmdParser.add_argument('--stepsize',metavar='N.N',
type=float,default=None,
help='Size of training step')
cmdParser.add_argument('--maxiter',metavar='N',
type=int,default=None,
help='Maximum iterations at training stage')
cmdParser.add_argument('--datatype',
type=str,default='PNG',
help='Datatype of training input date (PNG, LMDB, LMDB2 or HDF5)')
cmdParser.add_argument('--arg1',
type=str,default=None,
help='Argument 1 for script.')
cmdParser.add_argument('--arg2',
type=str,default=None,
help='Argument 2 for script.')
cmdParser.add_argument('--arg3',
type=str,default=None,
help='Argument 3 for script.')
cmdParser.add_argument('--arg4',
type=str,default=None,
help='Argument 4 for script.')
testParser = cmdParser.add_mutually_exclusive_group(required=False)
testParser.add_argument('--test', dest='test', action='store_true',help='Create test set.')
testParser.add_argument('--no-test', dest='test', action='store_false',help='Do not create test set (default)') | cmdParser.set_defaults(b=False)
randomParser = cmdParser.add_mutually_exclusive_group(required=False)
randomParser.add_argument('--random', dest='randomImages', action='store_true',help='Use random images within GIS boundary box.')
randomParser.add_argument('--no-random', dest='randomImages', action='store_false',help='Only use images with features (default).')
cmdParser.set_defaults(randomImages=False)
weightParser = cmdParser.add_mutually_exclusive_group(required=False)
weightParser.add_argument('--weights', dest='initweights', action='store_true',help='Initialize weights according to frequency statistics (default).')
weightParser.add_argument('--no-weights', dest='initweights', action='store_false',help='Do not initialize weights.')
cmdParser.set_defaults(initweights=True)
compareParser = cmdParser.add_mutually_exclusive_group(required=False)
compareParser.add_argument('--compares', dest='compare', action='store_true',help='Compare classified results with labels')
compareParser.add_argument('--no-compares', dest='compare', action='store_false',help='Do not compare classified results with labels')
cmdParser.set_defaults(compare=True)
cmdArgs = vars(cmdParser.parse_args())
selectedModule = cmdArgs.get('module')
mapboxtoken = cmdArgs.get('mapbox_token')
inputFile = cmdArgs.get('i')
outputFolder = cmdArgs.get('o')
zoomLevel= cmdArgs.get('z')
datatype = cmdArgs.get('d')
satelliteCount = cmdArgs.get('c')
xpixel = cmdArgs.get('x')
ypixel = cmdArgs.get('y')
scriptFile = cmdArgs.get('s')
net = cmdArgs.get('n')
scriptArg1 = cmdArgs.get('arg1')
scriptArg2 = cmdArgs.get('arg2')
scriptArg3 = cmdArgs.get('arg3')
scriptArg4 = cmdArgs.get('arg4')
scriptArg4 = cmdArgs.get('arg4')
lonshift= cmdArgs.get('lonshift')
latshift= cmdArgs.get('latshift')
layernumber = cmdArgs.get('layer')
shiftformat = cmdArgs.get('shiftformat')
key = cmdArgs.get('key')
mode = cmdArgs.get('mode')
top = cmdArgs.get('top')
epsg = cmdArgs.get('epsg')
test = cmdArgs.get('test')
batchsize = cmdArgs.get('batchsize')
maxiter = cmdArgs.get('maxiter')
stepsize = cmdArgs.get('stepsize')
datatype = cmdArgs.get('datatype')
sat = cmdArgs.get('sat')
b = cmdArgs.get('b')
randomImages = cmdArgs.get('randomImages')
initweights = cmdArgs.get('initweights')
compare = cmdArgs.get('compare')
# Execute according to options
print "Option:",selectedModule
#only import caffe if needed
if selectedModule == 'all' or selectedModule=='train' or selectedModule=='ml':
import train,applyml
if selectedModule == 'all':
inputFile,stats,freq,elements=\
parse.parse(inputFile=inputFile,outputFolder=outputFolder,
scriptFile=scriptFile,datatype=datatype,top=top,layernumber=layernumber,
key=key,
scriptArg1=scriptArg1,scriptArg2=scriptArg2,
scriptArg3=scriptArg3,scriptArg4=scriptArg4)
get_satellite.get_satellite(inputFile=inputFile,
mapboxtoken=mapboxtoken,
count=satelliteCount,
zoomLevel=zoomLevel,
outputFolder=outputFolder,
epsg=epsg,
xpixel=xpixel,
ypixel=ypixel,
randomImages=randomImages,
elements=elements)
overlay.overlay(outputFolder,inputFile,
xpixel=xpixel,
ypixel=ypixel,
zoomLevel=zoomLevel,
lonshift=lonshift,latshift=latshift,
shiftformat=shiftformat,
top=top,
stats=stats,
count=satelliteCount,
epsg=epsg,
key=key,
sat=sat,
randomImages=randomImages,
elements=elements\
)
train.train(outputFolder=outputFolder,
inputFile=inputFile,
net=net,
top=top,
key=key,
mode=mode,
xpixel=xpixel,
ypixel=ypixel,
stats=stats,
freq=freq,
elements=elements,
ignorebackground=b,
initweights=initweights,
batchsize=batchsize,
maxiter=maxiter,
stepsize=stepsize,
datatype=datatype,
createTest=test\
)
applyml.apply(outputFolder,
inputFile,
mode=mode,
ignorebackground=b,
#stats=stats,
epsg=epsg,
top=top,
compare=compare,
key=key)
elif selectedModule == 'parse':
parse.parse(inputFile=inputFile,outputFolder=outputFolder,
scriptFile=scriptFile,datatype=datatype,top=top,layernumber=layernumber,
key=key,
scriptArg1=scriptArg1,scriptArg2=scriptArg2,
scriptArg3=scriptArg3,scriptArg4=scriptArg4)
elif selectedModule == 'satellite':
get_satellite.get_satellite(inputFile=inputFile,
mapboxtoken=mapboxtoken,
count=satelliteCount,
zoomLevel=zoomLevel,
epsg=epsg,
outputFolder=outputFolder,
randomImages=randomImages,
xpixel=xpixel,
ypixel=ypixel)
elif selectedModule == 'overlay':
overlay.overlay(outputFolder,inputFile,
xpixel=xpixel,
ypixel=ypixel,
zoomLevel=zoomLevel,
lonshift=lonshift,latshift=latshift,
shiftformat=shiftformat,
top=top,
epsg=epsg,
sat=sat,
count=satelliteCount,
randomImages=randomImages,
key=key
)
elif selectedModule == 'train':
train.train(outputFolder=outputFolder,
inputFile=inputFile,
net=net,
top=top,
key=key,
mode=mode,
xpixel=xpixel,
ypixel=ypixel,
ignorebackground=b,
batchsize=batchsize,
maxiter=maxiter,
datatype=datatype,
stepsize=stepsize,
initweights=initweights,
createTest=test\
)
elif selectedModule == 'ml':
applyml.apply(\
outputFolder,
inputFile,
mode=mode,
ignorebackground=b,
epsg=epsg,
compare=compare,
top=top)
#key=key)
elif selectedModule == 'clear':
clean.clear(inputFile)
else:
print "error - no valid option"
cmdParser.print_help() | cmdParser.set_defaults(test=False)
backgroundParser = cmdParser.add_mutually_exclusive_group(required=False)
backgroundParser.add_argument('--background', dest='b', action='store_false',help='Classify background for training (default)')
backgroundParser.add_argument('--no-background', dest='b', action='store_true',help='Ignore background for training.') | random_line_split |
cv4ag.py | #!usr/bin/env python
"""
Top Layer for the Computer Vision 4 Agriculture (cv4ag) framework
Lukas Arnold
WB-DIME
Jan 30 2017
The framework consists of four parts
1. Parsing input data (parse)
2. Downloading satellite images (get_satellite)
3. Overlaying data with satellite images (overlay)
4. Training (train)
5. Application (ml)
"""
import argparse,sys,os
#import subdirectories to python path
sys.path.append('scripts')
sys.path.append('modules')
sys.path.append('lib')
import parse,get_satellite,overlay,clean
#----------------------------------------------------------------------
#Main
if __name__ == "__main__":
# get options from command line
class myParse(argparse.ArgumentParser): # override error message to show usage
|
cmdParser = myParse(\
description='Machine Learning Framework for Agricultural Data.',
add_help=True)
cmdParser.add_argument('module',
metavar='OPTION',
type=str,default=False,
help='The modules to be loaded. OPTION: \n\
all - all modules (except clear).\n\
parse - input file parser.\n\
satellite - get satellite data.\n\
overlay - overlay classification with satellite data. \n\
train - train.\n\
ml - apply machine learning algorithm.\n\
clear - clear generated data from previous run on input file')
cmdParser.add_argument('mapbox_token',
metavar='MAPBOX_TOKEN',
type=str,default=False,nargs='?',
help='Mapbox token to download satellite images .')
cmdParser.add_argument('-i',
type=str,default=None,metavar='FILE',
help='Input file. Do not give if data obtained by script.')
cmdParser.add_argument('-s',metavar='FILE',
type=str,default=None,
help='Script file to obtain data')
cmdParser.add_argument('-o',metavar='PATH',
type=str,default="data/",
help='Output folder. Satellite data are put in and read from\
PATH/sat/.')
cmdParser.add_argument('-c',metavar='N',
type=int,default=1000,
help='Number of satellite images to download.')
cmdParser.add_argument('-z',metavar='N',
type=int,default=17,
help='Zoom level. Min=15, Max=19. See libs/satellite_resolutions.csv for resolutions.')
cmdParser.add_argument('-x',metavar='N',
type=int,default=480,
help='Images have width N pixel.')
cmdParser.add_argument('-y',metavar='N',
type=int,default=360,
help='Images have height N pixel.')
cmdParser.add_argument('-d',metavar='FILETYPE_CODE',
type=str,default=None,
help='Specify file type. Will find to detect filetype automatically. \
Will not prompt for vector conversion if not given.\
See www.gdal.org/formats_list.html or\
www.gdal.org/ogr_formats.html \
(or libs/*_formats.csv for FILETYPE_CODEs.')
cmdParser.add_argument('-n',metavar='N',
type=int,default=1,
help='Accuracy of neural net. 0: lowest. 3: highest.')
cmdParser.add_argument('--lonshift',metavar='N.N',
type=float,default=0,
help='Longitudanal shift of training data.')
cmdParser.add_argument('--latshift',metavar='N.N',
type=float,default=0,
help='Lateral shift of training data .')
cmdParser.add_argument('--shiftformat',metavar='N',
type=int,default=0,
help='Format of longitudinal/lateral shift.\
0: As fraction of image. 1: Georeferenced unites.')
cmdParser.add_argument('--top',metavar='N',
type=int,default=15,
help='Get N most frequent classes.')
cmdParser.add_argument('--key',
type=str,default='Descriptio',
help='Set parameter key for category in GIS file to classify data.')
cmdParser.add_argument('--epsg',metavar='N',
type=int,default=None,
help='EPSG format for GIS data. Is read from data if not set.')
cmdParser.add_argument('--layer',metavar='N',
type=int,default=None,
help='Number of layers to be trained on.')
cmdParser.add_argument('--mode',
type=str,default='gpu',
help='GPU (default) or CPU mode')
cmdParser.add_argument('--sat',
type=str,default=None,
help='Folder with training satellite images, if not in standard location.')
cmdParser.add_argument('--batchsize',metavar='N',
type=int,default=None,
help='Size of training batch (1-4)')
cmdParser.add_argument('--stepsize',metavar='N.N',
type=float,default=None,
help='Size of training step')
cmdParser.add_argument('--maxiter',metavar='N',
type=int,default=None,
help='Maximum iterations at training stage')
cmdParser.add_argument('--datatype',
type=str,default='PNG',
help='Datatype of training input date (PNG, LMDB, LMDB2 or HDF5)')
cmdParser.add_argument('--arg1',
type=str,default=None,
help='Argument 1 for script.')
cmdParser.add_argument('--arg2',
type=str,default=None,
help='Argument 2 for script.')
cmdParser.add_argument('--arg3',
type=str,default=None,
help='Argument 3 for script.')
cmdParser.add_argument('--arg4',
type=str,default=None,
help='Argument 4 for script.')
testParser = cmdParser.add_mutually_exclusive_group(required=False)
testParser.add_argument('--test', dest='test', action='store_true',help='Create test set.')
testParser.add_argument('--no-test', dest='test', action='store_false',help='Do not create test set (default)')
cmdParser.set_defaults(test=False)
backgroundParser = cmdParser.add_mutually_exclusive_group(required=False)
backgroundParser.add_argument('--background', dest='b', action='store_false',help='Classify background for training (default)')
backgroundParser.add_argument('--no-background', dest='b', action='store_true',help='Ignore background for training.')
cmdParser.set_defaults(b=False)
randomParser = cmdParser.add_mutually_exclusive_group(required=False)
randomParser.add_argument('--random', dest='randomImages', action='store_true',help='Use random images within GIS boundary box.')
randomParser.add_argument('--no-random', dest='randomImages', action='store_false',help='Only use images with features (default).')
cmdParser.set_defaults(randomImages=False)
weightParser = cmdParser.add_mutually_exclusive_group(required=False)
weightParser.add_argument('--weights', dest='initweights', action='store_true',help='Initialize weights according to frequency statistics (default).')
weightParser.add_argument('--no-weights', dest='initweights', action='store_false',help='Do not initialize weights.')
cmdParser.set_defaults(initweights=True)
compareParser = cmdParser.add_mutually_exclusive_group(required=False)
compareParser.add_argument('--compares', dest='compare', action='store_true',help='Compare classified results with labels')
compareParser.add_argument('--no-compares', dest='compare', action='store_false',help='Do not compare classified results with labels')
cmdParser.set_defaults(compare=True)
cmdArgs = vars(cmdParser.parse_args())
selectedModule = cmdArgs.get('module')
mapboxtoken = cmdArgs.get('mapbox_token')
inputFile = cmdArgs.get('i')
outputFolder = cmdArgs.get('o')
zoomLevel= cmdArgs.get('z')
datatype = cmdArgs.get('d')
satelliteCount = cmdArgs.get('c')
xpixel = cmdArgs.get('x')
ypixel = cmdArgs.get('y')
scriptFile = cmdArgs.get('s')
net = cmdArgs.get('n')
scriptArg1 = cmdArgs.get('arg1')
scriptArg2 = cmdArgs.get('arg2')
scriptArg3 = cmdArgs.get('arg3')
scriptArg4 = cmdArgs.get('arg4')
scriptArg4 = cmdArgs.get('arg4')
lonshift= cmdArgs.get('lonshift')
latshift= cmdArgs.get('latshift')
layernumber = cmdArgs.get('layer')
shiftformat = cmdArgs.get('shiftformat')
key = cmdArgs.get('key')
mode = cmdArgs.get('mode')
top = cmdArgs.get('top')
epsg = cmdArgs.get('epsg')
test = cmdArgs.get('test')
batchsize = cmdArgs.get('batchsize')
maxiter = cmdArgs.get('maxiter')
stepsize = cmdArgs.get('stepsize')
datatype = cmdArgs.get('datatype')
sat = cmdArgs.get('sat')
b = cmdArgs.get('b')
randomImages = cmdArgs.get('randomImages')
initweights = cmdArgs.get('initweights')
compare = cmdArgs.get('compare')
# Execute according to options
print "Option:",selectedModule
#only import caffe if needed
if selectedModule == 'all' or selectedModule=='train' or selectedModule=='ml':
import train,applyml
if selectedModule == 'all':
inputFile,stats,freq,elements=\
parse.parse(inputFile=inputFile,outputFolder=outputFolder,
scriptFile=scriptFile,datatype=datatype,top=top,layernumber=layernumber,
key=key,
scriptArg1=scriptArg1,scriptArg2=scriptArg2,
scriptArg3=scriptArg3,scriptArg4=scriptArg4)
get_satellite.get_satellite(inputFile=inputFile,
mapboxtoken=mapboxtoken,
count=satelliteCount,
zoomLevel=zoomLevel,
outputFolder=outputFolder,
epsg=epsg,
xpixel=xpixel,
ypixel=ypixel,
randomImages=randomImages,
elements=elements)
overlay.overlay(outputFolder,inputFile,
xpixel=xpixel,
ypixel=ypixel,
zoomLevel=zoomLevel,
lonshift=lonshift,latshift=latshift,
shiftformat=shiftformat,
top=top,
stats=stats,
count=satelliteCount,
epsg=epsg,
key=key,
sat=sat,
randomImages=randomImages,
elements=elements\
)
train.train(outputFolder=outputFolder,
inputFile=inputFile,
net=net,
top=top,
key=key,
mode=mode,
xpixel=xpixel,
ypixel=ypixel,
stats=stats,
freq=freq,
elements=elements,
ignorebackground=b,
initweights=initweights,
batchsize=batchsize,
maxiter=maxiter,
stepsize=stepsize,
datatype=datatype,
createTest=test\
)
applyml.apply(outputFolder,
inputFile,
mode=mode,
ignorebackground=b,
#stats=stats,
epsg=epsg,
top=top,
compare=compare,
key=key)
elif selectedModule == 'parse':
parse.parse(inputFile=inputFile,outputFolder=outputFolder,
scriptFile=scriptFile,datatype=datatype,top=top,layernumber=layernumber,
key=key,
scriptArg1=scriptArg1,scriptArg2=scriptArg2,
scriptArg3=scriptArg3,scriptArg4=scriptArg4)
elif selectedModule == 'satellite':
get_satellite.get_satellite(inputFile=inputFile,
mapboxtoken=mapboxtoken,
count=satelliteCount,
zoomLevel=zoomLevel,
epsg=epsg,
outputFolder=outputFolder,
randomImages=randomImages,
xpixel=xpixel,
ypixel=ypixel)
elif selectedModule == 'overlay':
overlay.overlay(outputFolder,inputFile,
xpixel=xpixel,
ypixel=ypixel,
zoomLevel=zoomLevel,
lonshift=lonshift,latshift=latshift,
shiftformat=shiftformat,
top=top,
epsg=epsg,
sat=sat,
count=satelliteCount,
randomImages=randomImages,
key=key
)
elif selectedModule == 'train':
train.train(outputFolder=outputFolder,
inputFile=inputFile,
net=net,
top=top,
key=key,
mode=mode,
xpixel=xpixel,
ypixel=ypixel,
ignorebackground=b,
batchsize=batchsize,
maxiter=maxiter,
datatype=datatype,
stepsize=stepsize,
initweights=initweights,
createTest=test\
)
elif selectedModule == 'ml':
applyml.apply(\
outputFolder,
inputFile,
mode=mode,
ignorebackground=b,
epsg=epsg,
compare=compare,
top=top)
#key=key)
elif selectedModule == 'clear':
clean.clear(inputFile)
else:
print "error - no valid option"
cmdParser.print_help()
| def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2) | identifier_body |
movie_review_NaiveBayes.py | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import string
import re
from nltk.stem.porter import PorterStemmer
import random
from sklearn.model_selection import train_test_split
import os
from random import shuffle
# In[ ]:
# In[ ]:
def get_files(files_path, review_type):
try:
output = []
files = os.listdir(files_path)
for file in files:
f = open(files_path + file, 'r', encoding="utf8")
output.append((f.read(), review_type))
return output
except IOError:
print('Problem opening file')
finally:
f.close()
# In[ ]:
# Load training data
train_pos = get_files('../datasets/movie_reviews/data/alle/train/pos/', 0)
train_neg = get_files('../datasets/movie_reviews/data/alle/train/neg/', 1)
#train_pos = get_files('../datasets/movie_reviews/data/subset/train/pos/', 0)
#train_neg = get_files('../datasets/movie_reviews/data/subset/train/neg/', 1)
print('* TRAINING DATA * ')
print('# positives reviews: ', len(train_pos))
print('# negatives reviews', len(train_neg))
train_data = train_pos + train_neg
print('# total reviews: ', len(train_data))
print('-----------------')
# Load test data
test_pos = get_files('../datasets/movie_reviews/data/alle/test/pos/', 0)
test_neg = get_files('../datasets/movie_reviews/data/alle/test/neg/', 1)
#test_pos = get_files('../datasets/movie_reviews/data/subset/test/pos/', 0)
#test_neg = get_files('../datasets/movie_reviews/data/subset/test/neg/', 1)
print('* TEST DATA * ')
print('# positives reviews: ', len(test_pos))
print('# negatives reviews', len(test_neg))
test_data = test_pos + test_neg
print('# total reviews: ', len(test_data))
# Does not want a 50/50 split between training and test
# Therefore creates one big set of data that later will be split into 80/20 train- and testdata
# a = train_data[::2]
# b = train_data[1::2]
# c = test_data[::2]
# d = test_data[1::2]
# all_reviews = a + b + c + d'
all_r = train_data + test_data
shuffle(all_r)
# In[ ]:
all_reviews = all_r
# In[ ]:
stopwords = []
try:
f = open('../datasets/stopwords.txt', 'r')
stopwords = f.read().split(',')
except IOError:
print('Problem opening file')
finally:
f.close()
# In[ ]:
# * * * PREPROCESSING * * *
stemmer = PorterStemmer()
preprocessed_reviews = []
for t in all_reviews:
#print(len(preprocessed_reviews))
review = t[0]
review_type = t[1]
# Remove whitespace and punctutation
text = re.sub('[' + string.punctuation + ']', ' ', review)
text = re.sub('[\n\t\r]', '', text)
# Split words into list
words = text.split()
new = []
# Remove stopwords and stem remaining words
for word in words:
stemmed_word = stemmer.stem(word.lower())
if stemmed_word not in stopwords and len(stemmed_word) > 2:
new.append(stemmed_word)
# Add to preproccesed list
preprocessed_reviews.append((new, review_type))
# In[ ]:
count = 0
for r in preprocessed_reviews:
words = r[0]
for w in words:
if w in stopwords:
count += 1
a = 191569
count
# In[ ]:
# Splitting data in trainingdata and testdata (80-20 ratio)
total = len(preprocessed_reviews) #Total number of reviews
test_number = int(0.20 * total) # Number of testing reviews
# Picking randomly
print(test_number)
copy = preprocessed_reviews[:]
test_set = []
taken = {}
while len(test_set) < test_number:
#print(len(train_texts))
num = random.randint(0, test_number - 1)
if num not in taken.keys():
test_set.append(copy.pop(num))
taken[num] = 1
train_set = copy[:] # Trainset is the remaining reviews
len(train_set)/total, len(test_set)/total, len(train_set), len(test_set)
# In[ ]:
# * * * TRAINING THE MODEL * * *
# meaning: Computing probabilities needed for P(Positive|Word)
def total_goods_and_bads(tset):
goods = 0
bads = 0
for t in tset:
goods += 1 if t[1] == 0 else 0
bads += 1 if t[1] == 1 else 0
return goods, bads
total_positive = total_goods_and_bads(train_set)[0]
total_negative = total_goods_and_bads(train_set)[1]
print(total_positive)
print(total_negative)
# In[ ]:
# First making a word counter for pos and neg reviews
pos_word_counter = {}
neg_word_counter = {}
total_words = 0
for t in train_set:
review = t[0]
review_type = t[1]
already_counted = []
for word in review:
total_words += 1
if review_type == 0:
if word not in pos_word_counter:
pos_word_counter[word] = 1
else:
if word not in already_counted:
pos_word_counter[word] += 1
else:
if word not in neg_word_counter:
neg_word_counter[word] = 1
else:
|
already_counted.append(word)
total_words
# In[ ]:
# Removes words that are not inluded in at least 0.15% of the reviews
removed_words = 0
for j in range(len(train_set)):
words = train_set[j][0]
i = 0
while i < len(words):
word = words[i]
word_removed = False
if word in pos_word_counter:
if pos_word_counter[word] < 0.0015*len(train_set):
train_set[j][0].remove(word)
word_removed = True
removed_words += 1
elif word in neg_word_counter:
if neg_word_counter[word] < 0.0015*len(train_set):
train_set[j][0].remove(word)
word_removed = True
removed_words += 1
if not word_removed:
i += 1
j += 1
removed_words
# In[ ]:
def sort_dict(dicti, end):
# Sorterer etter value i dict, gir liste med tupler
most_common_words = sorted(dicti.items(), key = lambda kv: kv[1])
most_common_words.reverse()
most_common_words = most_common_words[:end]
# Lager dict på formen {word: count, ...}
# Vil ha dict fremfor liste med tupler, pga. senere søk
return dict(most_common_words)
most_used_words_pos = sort_dict(pos_word_counter, 25)
most_used_words_neg = sort_dict(neg_word_counter, 25)
most_used_words_pos
# In[ ]:
# Need these 4 probabilities
# 1) Probability that a word appears in positive reviews
# 2) Probability that a word appears in negative reviews
# 3) Overall probability that any given review is positive
# 4) Overall probability that any given reviews is negative
# # Making a dictionary with probabilities for different words appearing in good and bad reviews
# # Example: {'bad': (0.0881, 0.3226)}
probability_appearing = {}
for t in train_set:
text = t[0]
for word in text:
if word not in probability_appearing:
if word in pos_word_counter:
p_appearing_good = pos_word_counter[word]/total_positive
else:
p_appearing_good = 0.1
if word in neg_word_counter:
p_appearing_bad = neg_word_counter[word]/total_negative
else:
p_appearing_bad = 0.1
probability_appearing[word] = (p_appearing_good, p_appearing_bad)
p_pos = total_positive/len(train_set)
p_neg = total_negative/len(train_set)
print(p_good)
print(p_bad)
# Finally we can compute P(Positive | Word)
def p_is_positive_given_word(word):
return (probability_appearing[word][0]*p_pos)/((probability_appearing[word][0]*p_pos + probability_appearing[word][1]*p_neg))
def p_is_negative_given_word(word):
return (probability_appearing[word][1]*p_neg)/((probability_appearing[word][1]*p_neg + probability_appearing[word][0]*p_pos))
p_is_positive_given_word('bad'), p_is_negative_given_word('bad')
# In[ ]:
probabilities = {}
for t in train_set:
text = t[0]
for word in text:
if word not in probabilities:
p_pos = p_is_positive_given_word(word)
p_neg = p_is_negative_given_word(word)
if p_pos == 0:
p_pos = 0.1 # tweaking this value
if p_pos == 1:
p_pos = 0.98
if p_neg == 0:
p_neg = 0.1
if p_neg == 1:
p_neg = 0.98
probabilities[word] = (p_pos, p_neg)
# In[ ]:
# Filter out words that are not informative (probabilities between 0.45 and 0.55)
print(len(probabilities))
for word in list(probabilities):
probs = probabilities[word]
if 0.40 < probs[0] and probs[0] < 0.60 and 0.40 < probs[1] and probs[1] < 0.60:
del probabilities[word]
print(len(probabilities))
# In[ ]:
probabilities
# In[ ]:
# COMBINING INDIVIDUAL PROBABILITIES
# Determining whether a message is spam or ham based only on the presence of one word is error-prone,
# must try to consider all the words (or the most interesting) in the message
from functools import reduce
def p_is_type(words):
words = list(filter(lambda x: x in probabilities, words)) # Filter out words not met during training-fase
pos_probs = []
neg_probs = []
for word in words:
pos_probs.append(probabilities[word][0])
neg_probs.append(probabilities[word][1])
#else:
# probs.append(0.5) # tweaking this value
pos_probs_not = list(map(lambda prob: 1-prob, pos_probs))
neg_probs_not = list(map(lambda prob: 1-prob, neg_probs))
pos_product = reduce(lambda x, y: x * y, pos_probs, 1)
neg_product = reduce(lambda x, y: x * y, neg_probs, 1)
pos_product_not = reduce(lambda x, y: x * y, pos_probs_not, 1)
neg_product_not = reduce(lambda x, y: x * y, neg_probs_not, 1)
return pos_product/(pos_product + pos_product_not), neg_product/(neg_product + neg_product_not)
p_is_type(['good', 'enjoy', 'well']), p_is_type(['terribl', 'hate'])
# In[ ]:
# * * * TESTING THE MODEL * * *
total_correct = 0
true_good_as_good = 0
true_good_as_bad = 0
true_bad_as_bad = 0
true_bad_as_good = 0
count = 0
for t in test_set:
guess = -1
words = t[0]
answer = t[1]
try:
p_positive = p_is_type(words)[0]
p_negative = p_is_type(words)[1]
except:
count += 1
#print(words)
guess = 0 if p_positive > p_negative else 1
if guess == answer:
total_correct += 1
if answer == 0: # true negative
true_good_as_good += 1
else: # true positive
true_bad_as_bad += 1
else:
#print(words, answer)
if answer == 0: # false positive
true_good_as_bad += 1
else: # true negative
true_bad_as_good += 1
true_positives = total_goods_and_bads(test_set)[0]
true_negatives = total_goods_and_bads(test_set)[1]
print('Total test texts: ', len(test_set))
print('Number of correct: ', total_correct)
print('Accuracy: ', total_correct*100/(true_positives+true_negatives))
print('-------------------------------')
print('Positives precision: ', true_good_as_good/(true_good_as_good + true_bad_as_good))
print('Positives recall: ', true_good_as_good/(true_good_as_good + true_good_as_bad))
print('Negatives precision: ', true_bad_as_bad/(true_bad_as_bad + true_good_as_bad))
print('Negatives recall: ', true_bad_as_bad/(true_bad_as_bad + true_bad_as_good))
print('-------------------------------')
# In[ ]:
# * * * VISUALISATIONS * * *
from wordcloud import WordCloud
pos_reviews = ""
neg_reviews = ""
revs = all_reviews[:100]
for t in revs:
review = t[0].split()
s = ""
for word in review:
if len(word) > 2:
s += word + ' '
text = re.sub('[' + string.punctuation + ']', ' ', s)
text = re.sub('[\n\t\r]', '', text)
if t[1] == 0:
pos_reviews += text
else:
neg_reviews += text
# Generate a word cloud image
pos_wordcloud = WordCloud(width=600, height=400).generate(pos_reviews)
#neg_wordcloud = WordCloud(width=600, height=400).generate(neg_reviews)
# In[ ]:
#Spam Word cloud
plt.figure(figsize=(10,8), facecolor='k')
plt.imshow(pos_wordcloud)
plt.axis("off")
plt.tight_layout(pad=0)
# In[ ]:
| if word not in already_counted:
neg_word_counter[word] += 1 | conditional_block |
movie_review_NaiveBayes.py | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import string
import re
from nltk.stem.porter import PorterStemmer
import random
from sklearn.model_selection import train_test_split
import os
from random import shuffle
# In[ ]:
# In[ ]:
def get_files(files_path, review_type):
try:
output = []
files = os.listdir(files_path)
for file in files:
f = open(files_path + file, 'r', encoding="utf8")
output.append((f.read(), review_type))
return output
except IOError:
print('Problem opening file')
finally:
f.close()
# In[ ]:
# Load training data
train_pos = get_files('../datasets/movie_reviews/data/alle/train/pos/', 0)
train_neg = get_files('../datasets/movie_reviews/data/alle/train/neg/', 1)
#train_pos = get_files('../datasets/movie_reviews/data/subset/train/pos/', 0)
#train_neg = get_files('../datasets/movie_reviews/data/subset/train/neg/', 1)
print('* TRAINING DATA * ')
print('# positives reviews: ', len(train_pos))
print('# negatives reviews', len(train_neg))
train_data = train_pos + train_neg
print('# total reviews: ', len(train_data))
print('-----------------')
# Load test data
test_pos = get_files('../datasets/movie_reviews/data/alle/test/pos/', 0)
test_neg = get_files('../datasets/movie_reviews/data/alle/test/neg/', 1)
#test_pos = get_files('../datasets/movie_reviews/data/subset/test/pos/', 0)
#test_neg = get_files('../datasets/movie_reviews/data/subset/test/neg/', 1)
print('* TEST DATA * ')
print('# positives reviews: ', len(test_pos))
print('# negatives reviews', len(test_neg))
test_data = test_pos + test_neg
print('# total reviews: ', len(test_data))
# Does not want a 50/50 split between training and test
# Therefore creates one big set of data that later will be split into 80/20 train- and testdata
# a = train_data[::2]
# b = train_data[1::2]
# c = test_data[::2]
# d = test_data[1::2]
# all_reviews = a + b + c + d'
all_r = train_data + test_data
shuffle(all_r)
# In[ ]:
all_reviews = all_r
# In[ ]:
stopwords = []
try:
f = open('../datasets/stopwords.txt', 'r')
stopwords = f.read().split(',')
except IOError:
print('Problem opening file')
finally:
f.close()
# In[ ]:
# * * * PREPROCESSING * * *
stemmer = PorterStemmer()
preprocessed_reviews = []
for t in all_reviews:
#print(len(preprocessed_reviews))
review = t[0]
review_type = t[1]
# Remove whitespace and punctutation
text = re.sub('[' + string.punctuation + ']', ' ', review)
text = re.sub('[\n\t\r]', '', text)
# Split words into list
words = text.split()
new = []
# Remove stopwords and stem remaining words
for word in words:
stemmed_word = stemmer.stem(word.lower())
if stemmed_word not in stopwords and len(stemmed_word) > 2:
new.append(stemmed_word)
# Add to preproccesed list
preprocessed_reviews.append((new, review_type))
# In[ ]:
count = 0
for r in preprocessed_reviews:
words = r[0]
for w in words:
if w in stopwords:
count += 1
a = 191569
count
# In[ ]:
# Splitting data in trainingdata and testdata (80-20 ratio)
total = len(preprocessed_reviews) #Total number of reviews
test_number = int(0.20 * total) # Number of testing reviews
# Picking randomly
print(test_number)
copy = preprocessed_reviews[:]
test_set = []
taken = {}
while len(test_set) < test_number:
#print(len(train_texts))
num = random.randint(0, test_number - 1)
if num not in taken.keys():
test_set.append(copy.pop(num))
taken[num] = 1
train_set = copy[:] # Trainset is the remaining reviews
len(train_set)/total, len(test_set)/total, len(train_set), len(test_set)
# In[ ]:
# * * * TRAINING THE MODEL * * *
# meaning: Computing probabilities needed for P(Positive|Word)
def total_goods_and_bads(tset):
goods = 0
bads = 0
for t in tset:
goods += 1 if t[1] == 0 else 0
bads += 1 if t[1] == 1 else 0
return goods, bads
total_positive = total_goods_and_bads(train_set)[0]
total_negative = total_goods_and_bads(train_set)[1]
print(total_positive)
print(total_negative)
# In[ ]:
# First making a word counter for pos and neg reviews
pos_word_counter = {}
neg_word_counter = {}
total_words = 0
for t in train_set:
review = t[0]
review_type = t[1]
already_counted = []
for word in review:
total_words += 1
if review_type == 0:
if word not in pos_word_counter:
pos_word_counter[word] = 1
else:
if word not in already_counted:
pos_word_counter[word] += 1
else:
if word not in neg_word_counter:
neg_word_counter[word] = 1
else:
if word not in already_counted:
neg_word_counter[word] += 1
already_counted.append(word)
total_words
# In[ ]:
# Removes words that are not inluded in at least 0.15% of the reviews
removed_words = 0
for j in range(len(train_set)):
words = train_set[j][0]
i = 0
while i < len(words):
word = words[i]
word_removed = False
if word in pos_word_counter:
if pos_word_counter[word] < 0.0015*len(train_set):
train_set[j][0].remove(word)
word_removed = True
removed_words += 1
elif word in neg_word_counter:
if neg_word_counter[word] < 0.0015*len(train_set):
train_set[j][0].remove(word)
word_removed = True
removed_words += 1
if not word_removed:
i += 1
j += 1
removed_words
# In[ ]:
def sort_dict(dicti, end):
# Sorterer etter value i dict, gir liste med tupler
most_common_words = sorted(dicti.items(), key = lambda kv: kv[1])
most_common_words.reverse()
most_common_words = most_common_words[:end]
# Lager dict på formen {word: count, ...}
# Vil ha dict fremfor liste med tupler, pga. senere søk
return dict(most_common_words)
most_used_words_pos = sort_dict(pos_word_counter, 25)
most_used_words_neg = sort_dict(neg_word_counter, 25)
most_used_words_pos
# In[ ]:
# Need these 4 probabilities
# 1) Probability that a word appears in positive reviews
# 2) Probability that a word appears in negative reviews
# 3) Overall probability that any given review is positive
# 4) Overall probability that any given reviews is negative
# # Making a dictionary with probabilities for different words appearing in good and bad reviews
# # Example: {'bad': (0.0881, 0.3226)}
probability_appearing = {}
for t in train_set:
text = t[0]
for word in text:
if word not in probability_appearing:
if word in pos_word_counter:
p_appearing_good = pos_word_counter[word]/total_positive
else:
p_appearing_good = 0.1
if word in neg_word_counter:
p_appearing_bad = neg_word_counter[word]/total_negative
else:
p_appearing_bad = 0.1
probability_appearing[word] = (p_appearing_good, p_appearing_bad)
p_pos = total_positive/len(train_set)
p_neg = total_negative/len(train_set)
print(p_good)
print(p_bad)
# Finally we can compute P(Positive | Word)
def p_ | ord):
return (probability_appearing[word][0]*p_pos)/((probability_appearing[word][0]*p_pos + probability_appearing[word][1]*p_neg))
def p_is_negative_given_word(word):
return (probability_appearing[word][1]*p_neg)/((probability_appearing[word][1]*p_neg + probability_appearing[word][0]*p_pos))
p_is_positive_given_word('bad'), p_is_negative_given_word('bad')
# In[ ]:
probabilities = {}
for t in train_set:
text = t[0]
for word in text:
if word not in probabilities:
p_pos = p_is_positive_given_word(word)
p_neg = p_is_negative_given_word(word)
if p_pos == 0:
p_pos = 0.1 # tweaking this value
if p_pos == 1:
p_pos = 0.98
if p_neg == 0:
p_neg = 0.1
if p_neg == 1:
p_neg = 0.98
probabilities[word] = (p_pos, p_neg)
# In[ ]:
# Filter out words that are not informative (probabilities between 0.45 and 0.55)
print(len(probabilities))
for word in list(probabilities):
probs = probabilities[word]
if 0.40 < probs[0] and probs[0] < 0.60 and 0.40 < probs[1] and probs[1] < 0.60:
del probabilities[word]
print(len(probabilities))
# In[ ]:
probabilities
# In[ ]:
# COMBINING INDIVIDUAL PROBABILITIES
# Determining whether a message is spam or ham based only on the presence of one word is error-prone,
# must try to consider all the words (or the most interesting) in the message
from functools import reduce
def p_is_type(words):
words = list(filter(lambda x: x in probabilities, words)) # Filter out words not met during training-fase
pos_probs = []
neg_probs = []
for word in words:
pos_probs.append(probabilities[word][0])
neg_probs.append(probabilities[word][1])
#else:
# probs.append(0.5) # tweaking this value
pos_probs_not = list(map(lambda prob: 1-prob, pos_probs))
neg_probs_not = list(map(lambda prob: 1-prob, neg_probs))
pos_product = reduce(lambda x, y: x * y, pos_probs, 1)
neg_product = reduce(lambda x, y: x * y, neg_probs, 1)
pos_product_not = reduce(lambda x, y: x * y, pos_probs_not, 1)
neg_product_not = reduce(lambda x, y: x * y, neg_probs_not, 1)
return pos_product/(pos_product + pos_product_not), neg_product/(neg_product + neg_product_not)
p_is_type(['good', 'enjoy', 'well']), p_is_type(['terribl', 'hate'])
# In[ ]:
# * * * TESTING THE MODEL * * *
total_correct = 0
true_good_as_good = 0
true_good_as_bad = 0
true_bad_as_bad = 0
true_bad_as_good = 0
count = 0
for t in test_set:
guess = -1
words = t[0]
answer = t[1]
try:
p_positive = p_is_type(words)[0]
p_negative = p_is_type(words)[1]
except:
count += 1
#print(words)
guess = 0 if p_positive > p_negative else 1
if guess == answer:
total_correct += 1
if answer == 0: # true negative
true_good_as_good += 1
else: # true positive
true_bad_as_bad += 1
else:
#print(words, answer)
if answer == 0: # false positive
true_good_as_bad += 1
else: # true negative
true_bad_as_good += 1
true_positives = total_goods_and_bads(test_set)[0]
true_negatives = total_goods_and_bads(test_set)[1]
print('Total test texts: ', len(test_set))
print('Number of correct: ', total_correct)
print('Accuracy: ', total_correct*100/(true_positives+true_negatives))
print('-------------------------------')
print('Positives precision: ', true_good_as_good/(true_good_as_good + true_bad_as_good))
print('Positives recall: ', true_good_as_good/(true_good_as_good + true_good_as_bad))
print('Negatives precision: ', true_bad_as_bad/(true_bad_as_bad + true_good_as_bad))
print('Negatives recall: ', true_bad_as_bad/(true_bad_as_bad + true_bad_as_good))
print('-------------------------------')
# In[ ]:
# * * * VISUALISATIONS * * *
from wordcloud import WordCloud
pos_reviews = ""
neg_reviews = ""
revs = all_reviews[:100]
for t in revs:
review = t[0].split()
s = ""
for word in review:
if len(word) > 2:
s += word + ' '
text = re.sub('[' + string.punctuation + ']', ' ', s)
text = re.sub('[\n\t\r]', '', text)
if t[1] == 0:
pos_reviews += text
else:
neg_reviews += text
# Generate a word cloud image
pos_wordcloud = WordCloud(width=600, height=400).generate(pos_reviews)
#neg_wordcloud = WordCloud(width=600, height=400).generate(neg_reviews)
# In[ ]:
#Spam Word cloud
plt.figure(figsize=(10,8), facecolor='k')
plt.imshow(pos_wordcloud)
plt.axis("off")
plt.tight_layout(pad=0)
# In[ ]:
| is_positive_given_word(w | identifier_name |
movie_review_NaiveBayes.py | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import string
import re
from nltk.stem.porter import PorterStemmer
import random
from sklearn.model_selection import train_test_split
import os
from random import shuffle
# In[ ]:
# In[ ]:
def get_files(files_path, review_type):
try:
output = []
files = os.listdir(files_path)
for file in files:
f = open(files_path + file, 'r', encoding="utf8")
output.append((f.read(), review_type))
return output
except IOError:
print('Problem opening file')
finally:
f.close()
# In[ ]:
# Load training data
train_pos = get_files('../datasets/movie_reviews/data/alle/train/pos/', 0)
train_neg = get_files('../datasets/movie_reviews/data/alle/train/neg/', 1)
#train_pos = get_files('../datasets/movie_reviews/data/subset/train/pos/', 0)
#train_neg = get_files('../datasets/movie_reviews/data/subset/train/neg/', 1)
print('* TRAINING DATA * ')
print('# positives reviews: ', len(train_pos))
print('# negatives reviews', len(train_neg))
train_data = train_pos + train_neg
print('# total reviews: ', len(train_data))
print('-----------------')
# Load test data
test_pos = get_files('../datasets/movie_reviews/data/alle/test/pos/', 0)
test_neg = get_files('../datasets/movie_reviews/data/alle/test/neg/', 1)
#test_pos = get_files('../datasets/movie_reviews/data/subset/test/pos/', 0)
#test_neg = get_files('../datasets/movie_reviews/data/subset/test/neg/', 1)
print('* TEST DATA * ')
print('# positives reviews: ', len(test_pos))
print('# negatives reviews', len(test_neg))
test_data = test_pos + test_neg
print('# total reviews: ', len(test_data))
# Does not want a 50/50 split between training and test
# Therefore creates one big set of data that later will be split into 80/20 train- and testdata
# a = train_data[::2]
# b = train_data[1::2]
# c = test_data[::2]
# d = test_data[1::2]
# all_reviews = a + b + c + d'
all_r = train_data + test_data
shuffle(all_r)
# In[ ]:
all_reviews = all_r
# In[ ]:
stopwords = []
try:
f = open('../datasets/stopwords.txt', 'r')
stopwords = f.read().split(',')
except IOError:
print('Problem opening file')
finally:
f.close()
# In[ ]:
# * * * PREPROCESSING * * *
stemmer = PorterStemmer()
preprocessed_reviews = []
for t in all_reviews:
#print(len(preprocessed_reviews))
review = t[0]
review_type = t[1]
# Remove whitespace and punctutation
text = re.sub('[' + string.punctuation + ']', ' ', review)
text = re.sub('[\n\t\r]', '', text)
# Split words into list
words = text.split()
new = []
# Remove stopwords and stem remaining words
for word in words:
stemmed_word = stemmer.stem(word.lower())
if stemmed_word not in stopwords and len(stemmed_word) > 2:
new.append(stemmed_word)
# Add to preproccesed list
preprocessed_reviews.append((new, review_type))
# In[ ]:
count = 0
for r in preprocessed_reviews:
words = r[0]
for w in words:
if w in stopwords:
count += 1
a = 191569
count
# In[ ]:
# Splitting data in trainingdata and testdata (80-20 ratio)
total = len(preprocessed_reviews) #Total number of reviews
test_number = int(0.20 * total) # Number of testing reviews
# Picking randomly
print(test_number)
copy = preprocessed_reviews[:]
test_set = []
taken = {}
while len(test_set) < test_number:
#print(len(train_texts))
num = random.randint(0, test_number - 1)
if num not in taken.keys():
test_set.append(copy.pop(num))
taken[num] = 1
train_set = copy[:] # Trainset is the remaining reviews
len(train_set)/total, len(test_set)/total, len(train_set), len(test_set)
# In[ ]:
# * * * TRAINING THE MODEL * * *
# meaning: Computing probabilities needed for P(Positive|Word)
def total_goods_and_bads(tset):
goods = 0
bads = 0
for t in tset:
goods += 1 if t[1] == 0 else 0
bads += 1 if t[1] == 1 else 0
return goods, bads
total_positive = total_goods_and_bads(train_set)[0]
total_negative = total_goods_and_bads(train_set)[1]
print(total_positive)
print(total_negative)
# In[ ]:
# First making a word counter for pos and neg reviews
pos_word_counter = {}
neg_word_counter = {}
total_words = 0
for t in train_set:
review = t[0]
review_type = t[1]
already_counted = []
for word in review:
total_words += 1
if review_type == 0:
if word not in pos_word_counter:
pos_word_counter[word] = 1
else:
if word not in already_counted:
pos_word_counter[word] += 1
else:
if word not in neg_word_counter:
neg_word_counter[word] = 1
else:
if word not in already_counted:
neg_word_counter[word] += 1
already_counted.append(word)
total_words
# In[ ]:
# Removes words that are not inluded in at least 0.15% of the reviews
removed_words = 0
for j in range(len(train_set)):
words = train_set[j][0]
i = 0
while i < len(words):
word = words[i]
word_removed = False
if word in pos_word_counter:
if pos_word_counter[word] < 0.0015*len(train_set):
train_set[j][0].remove(word)
word_removed = True
removed_words += 1
elif word in neg_word_counter:
if neg_word_counter[word] < 0.0015*len(train_set):
train_set[j][0].remove(word)
word_removed = True
removed_words += 1
if not word_removed:
i += 1
j += 1
removed_words
# In[ ]:
def sort_dict(dicti, end):
# Sorterer etter value i dict, gir liste med tupler
|
most_used_words_pos = sort_dict(pos_word_counter, 25)
most_used_words_neg = sort_dict(neg_word_counter, 25)
most_used_words_pos
# In[ ]:
# Need these 4 probabilities
# 1) Probability that a word appears in positive reviews
# 2) Probability that a word appears in negative reviews
# 3) Overall probability that any given review is positive
# 4) Overall probability that any given reviews is negative
# # Making a dictionary with probabilities for different words appearing in good and bad reviews
# # Example: {'bad': (0.0881, 0.3226)}
probability_appearing = {}
for t in train_set:
text = t[0]
for word in text:
if word not in probability_appearing:
if word in pos_word_counter:
p_appearing_good = pos_word_counter[word]/total_positive
else:
p_appearing_good = 0.1
if word in neg_word_counter:
p_appearing_bad = neg_word_counter[word]/total_negative
else:
p_appearing_bad = 0.1
probability_appearing[word] = (p_appearing_good, p_appearing_bad)
p_pos = total_positive/len(train_set)
p_neg = total_negative/len(train_set)
print(p_good)
print(p_bad)
# Finally we can compute P(Positive | Word)
def p_is_positive_given_word(word):
return (probability_appearing[word][0]*p_pos)/((probability_appearing[word][0]*p_pos + probability_appearing[word][1]*p_neg))
def p_is_negative_given_word(word):
return (probability_appearing[word][1]*p_neg)/((probability_appearing[word][1]*p_neg + probability_appearing[word][0]*p_pos))
p_is_positive_given_word('bad'), p_is_negative_given_word('bad')
# In[ ]:
probabilities = {}
for t in train_set:
text = t[0]
for word in text:
if word not in probabilities:
p_pos = p_is_positive_given_word(word)
p_neg = p_is_negative_given_word(word)
if p_pos == 0:
p_pos = 0.1 # tweaking this value
if p_pos == 1:
p_pos = 0.98
if p_neg == 0:
p_neg = 0.1
if p_neg == 1:
p_neg = 0.98
probabilities[word] = (p_pos, p_neg)
# In[ ]:
# Filter out words that are not informative (probabilities between 0.45 and 0.55)
print(len(probabilities))
for word in list(probabilities):
probs = probabilities[word]
if 0.40 < probs[0] and probs[0] < 0.60 and 0.40 < probs[1] and probs[1] < 0.60:
del probabilities[word]
print(len(probabilities))
# In[ ]:
probabilities
# In[ ]:
# COMBINING INDIVIDUAL PROBABILITIES
# Determining whether a message is spam or ham based only on the presence of one word is error-prone,
# must try to consider all the words (or the most interesting) in the message
from functools import reduce
def p_is_type(words):
words = list(filter(lambda x: x in probabilities, words)) # Filter out words not met during training-fase
pos_probs = []
neg_probs = []
for word in words:
pos_probs.append(probabilities[word][0])
neg_probs.append(probabilities[word][1])
#else:
# probs.append(0.5) # tweaking this value
pos_probs_not = list(map(lambda prob: 1-prob, pos_probs))
neg_probs_not = list(map(lambda prob: 1-prob, neg_probs))
pos_product = reduce(lambda x, y: x * y, pos_probs, 1)
neg_product = reduce(lambda x, y: x * y, neg_probs, 1)
pos_product_not = reduce(lambda x, y: x * y, pos_probs_not, 1)
neg_product_not = reduce(lambda x, y: x * y, neg_probs_not, 1)
return pos_product/(pos_product + pos_product_not), neg_product/(neg_product + neg_product_not)
p_is_type(['good', 'enjoy', 'well']), p_is_type(['terribl', 'hate'])
# In[ ]:
# * * * TESTING THE MODEL * * *
total_correct = 0
true_good_as_good = 0
true_good_as_bad = 0
true_bad_as_bad = 0
true_bad_as_good = 0
count = 0
for t in test_set:
guess = -1
words = t[0]
answer = t[1]
try:
p_positive = p_is_type(words)[0]
p_negative = p_is_type(words)[1]
except:
count += 1
#print(words)
guess = 0 if p_positive > p_negative else 1
if guess == answer:
total_correct += 1
if answer == 0: # true negative
true_good_as_good += 1
else: # true positive
true_bad_as_bad += 1
else:
#print(words, answer)
if answer == 0: # false positive
true_good_as_bad += 1
else: # true negative
true_bad_as_good += 1
true_positives = total_goods_and_bads(test_set)[0]
true_negatives = total_goods_and_bads(test_set)[1]
print('Total test texts: ', len(test_set))
print('Number of correct: ', total_correct)
print('Accuracy: ', total_correct*100/(true_positives+true_negatives))
print('-------------------------------')
print('Positives precision: ', true_good_as_good/(true_good_as_good + true_bad_as_good))
print('Positives recall: ', true_good_as_good/(true_good_as_good + true_good_as_bad))
print('Negatives precision: ', true_bad_as_bad/(true_bad_as_bad + true_good_as_bad))
print('Negatives recall: ', true_bad_as_bad/(true_bad_as_bad + true_bad_as_good))
print('-------------------------------')
# In[ ]:
# * * * VISUALISATIONS * * *
from wordcloud import WordCloud
pos_reviews = ""
neg_reviews = ""
revs = all_reviews[:100]
for t in revs:
review = t[0].split()
s = ""
for word in review:
if len(word) > 2:
s += word + ' '
text = re.sub('[' + string.punctuation + ']', ' ', s)
text = re.sub('[\n\t\r]', '', text)
if t[1] == 0:
pos_reviews += text
else:
neg_reviews += text
# Generate a word cloud image
pos_wordcloud = WordCloud(width=600, height=400).generate(pos_reviews)
#neg_wordcloud = WordCloud(width=600, height=400).generate(neg_reviews)
# In[ ]:
#Spam Word cloud
plt.figure(figsize=(10,8), facecolor='k')
plt.imshow(pos_wordcloud)
plt.axis("off")
plt.tight_layout(pad=0)
# In[ ]:
| most_common_words = sorted(dicti.items(), key = lambda kv: kv[1])
most_common_words.reverse()
most_common_words = most_common_words[:end]
# Lager dict på formen {word: count, ...}
# Vil ha dict fremfor liste med tupler, pga. senere søk
return dict(most_common_words) | identifier_body |
movie_review_NaiveBayes.py | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import string
import re
from nltk.stem.porter import PorterStemmer
import random
from sklearn.model_selection import train_test_split
import os
from random import shuffle
# In[ ]:
# In[ ]:
def get_files(files_path, review_type):
try:
output = []
files = os.listdir(files_path)
for file in files:
f = open(files_path + file, 'r', encoding="utf8")
output.append((f.read(), review_type))
return output
except IOError:
print('Problem opening file')
finally:
f.close()
# In[ ]:
# Load training data
train_pos = get_files('../datasets/movie_reviews/data/alle/train/pos/', 0)
train_neg = get_files('../datasets/movie_reviews/data/alle/train/neg/', 1)
#train_pos = get_files('../datasets/movie_reviews/data/subset/train/pos/', 0)
#train_neg = get_files('../datasets/movie_reviews/data/subset/train/neg/', 1)
print('* TRAINING DATA * ')
print('# positives reviews: ', len(train_pos))
print('# negatives reviews', len(train_neg))
train_data = train_pos + train_neg
print('# total reviews: ', len(train_data))
print('-----------------')
# Load test data
test_pos = get_files('../datasets/movie_reviews/data/alle/test/pos/', 0)
test_neg = get_files('../datasets/movie_reviews/data/alle/test/neg/', 1)
#test_pos = get_files('../datasets/movie_reviews/data/subset/test/pos/', 0)
#test_neg = get_files('../datasets/movie_reviews/data/subset/test/neg/', 1)
print('* TEST DATA * ')
print('# positives reviews: ', len(test_pos))
print('# negatives reviews', len(test_neg))
test_data = test_pos + test_neg
print('# total reviews: ', len(test_data))
# Does not want a 50/50 split between training and test
# Therefore creates one big set of data that later will be split into 80/20 train- and testdata
# a = train_data[::2]
# b = train_data[1::2]
# c = test_data[::2]
# d = test_data[1::2]
# all_reviews = a + b + c + d'
all_r = train_data + test_data
shuffle(all_r)
# In[ ]:
all_reviews = all_r
# In[ ]:
stopwords = []
try:
f = open('../datasets/stopwords.txt', 'r')
stopwords = f.read().split(',')
except IOError:
print('Problem opening file')
finally:
f.close()
# In[ ]:
# * * * PREPROCESSING * * *
stemmer = PorterStemmer()
preprocessed_reviews = []
for t in all_reviews:
#print(len(preprocessed_reviews))
review = t[0]
review_type = t[1]
# Remove whitespace and punctutation
text = re.sub('[' + string.punctuation + ']', ' ', review)
text = re.sub('[\n\t\r]', '', text)
# Split words into list
words = text.split()
new = []
# Remove stopwords and stem remaining words
for word in words:
stemmed_word = stemmer.stem(word.lower())
if stemmed_word not in stopwords and len(stemmed_word) > 2:
new.append(stemmed_word)
# Add to preproccesed list
preprocessed_reviews.append((new, review_type))
# In[ ]:
count = 0
for r in preprocessed_reviews:
words = r[0]
for w in words:
if w in stopwords:
count += 1
a = 191569
count
# In[ ]:
# Splitting data in trainingdata and testdata (80-20 ratio)
total = len(preprocessed_reviews) #Total number of reviews
test_number = int(0.20 * total) # Number of testing reviews
# Picking randomly
print(test_number)
copy = preprocessed_reviews[:]
test_set = []
taken = {}
while len(test_set) < test_number:
#print(len(train_texts))
num = random.randint(0, test_number - 1)
if num not in taken.keys():
test_set.append(copy.pop(num))
taken[num] = 1
train_set = copy[:] # Trainset is the remaining reviews
len(train_set)/total, len(test_set)/total, len(train_set), len(test_set)
# In[ ]:
# * * * TRAINING THE MODEL * * *
# meaning: Computing probabilities needed for P(Positive|Word)
def total_goods_and_bads(tset):
goods = 0
bads = 0
for t in tset:
goods += 1 if t[1] == 0 else 0
bads += 1 if t[1] == 1 else 0
return goods, bads
total_positive = total_goods_and_bads(train_set)[0]
total_negative = total_goods_and_bads(train_set)[1]
print(total_positive)
print(total_negative)
# In[ ]:
# First making a word counter for pos and neg reviews
pos_word_counter = {}
neg_word_counter = {}
total_words = 0
for t in train_set:
review = t[0]
review_type = t[1]
already_counted = []
for word in review:
total_words += 1
if review_type == 0:
if word not in pos_word_counter:
pos_word_counter[word] = 1
else:
if word not in already_counted:
pos_word_counter[word] += 1
else:
if word not in neg_word_counter:
neg_word_counter[word] = 1
else:
if word not in already_counted:
neg_word_counter[word] += 1
already_counted.append(word)
total_words
# In[ ]:
# Removes words that are not inluded in at least 0.15% of the reviews
removed_words = 0
for j in range(len(train_set)):
words = train_set[j][0]
i = 0
while i < len(words):
word = words[i]
word_removed = False
if word in pos_word_counter:
if pos_word_counter[word] < 0.0015*len(train_set):
train_set[j][0].remove(word)
word_removed = True
removed_words += 1
elif word in neg_word_counter:
if neg_word_counter[word] < 0.0015*len(train_set):
train_set[j][0].remove(word) | word_removed = True
removed_words += 1
if not word_removed:
i += 1
j += 1
removed_words
# In[ ]:
def sort_dict(dicti, end):
# Sorterer etter value i dict, gir liste med tupler
most_common_words = sorted(dicti.items(), key = lambda kv: kv[1])
most_common_words.reverse()
most_common_words = most_common_words[:end]
# Lager dict på formen {word: count, ...}
# Vil ha dict fremfor liste med tupler, pga. senere søk
return dict(most_common_words)
most_used_words_pos = sort_dict(pos_word_counter, 25)
most_used_words_neg = sort_dict(neg_word_counter, 25)
most_used_words_pos
# In[ ]:
# Need these 4 probabilities
# 1) Probability that a word appears in positive reviews
# 2) Probability that a word appears in negative reviews
# 3) Overall probability that any given review is positive
# 4) Overall probability that any given reviews is negative
# # Making a dictionary with probabilities for different words appearing in good and bad reviews
# # Example: {'bad': (0.0881, 0.3226)}
probability_appearing = {}
for t in train_set:
text = t[0]
for word in text:
if word not in probability_appearing:
if word in pos_word_counter:
p_appearing_good = pos_word_counter[word]/total_positive
else:
p_appearing_good = 0.1
if word in neg_word_counter:
p_appearing_bad = neg_word_counter[word]/total_negative
else:
p_appearing_bad = 0.1
probability_appearing[word] = (p_appearing_good, p_appearing_bad)
p_pos = total_positive/len(train_set)
p_neg = total_negative/len(train_set)
print(p_good)
print(p_bad)
# Finally we can compute P(Positive | Word)
def p_is_positive_given_word(word):
return (probability_appearing[word][0]*p_pos)/((probability_appearing[word][0]*p_pos + probability_appearing[word][1]*p_neg))
def p_is_negative_given_word(word):
return (probability_appearing[word][1]*p_neg)/((probability_appearing[word][1]*p_neg + probability_appearing[word][0]*p_pos))
p_is_positive_given_word('bad'), p_is_negative_given_word('bad')
# In[ ]:
probabilities = {}
for t in train_set:
text = t[0]
for word in text:
if word not in probabilities:
p_pos = p_is_positive_given_word(word)
p_neg = p_is_negative_given_word(word)
if p_pos == 0:
p_pos = 0.1 # tweaking this value
if p_pos == 1:
p_pos = 0.98
if p_neg == 0:
p_neg = 0.1
if p_neg == 1:
p_neg = 0.98
probabilities[word] = (p_pos, p_neg)
# In[ ]:
# Filter out words that are not informative (probabilities between 0.45 and 0.55)
print(len(probabilities))
for word in list(probabilities):
probs = probabilities[word]
if 0.40 < probs[0] and probs[0] < 0.60 and 0.40 < probs[1] and probs[1] < 0.60:
del probabilities[word]
print(len(probabilities))
# In[ ]:
probabilities
# In[ ]:
# COMBINING INDIVIDUAL PROBABILITIES
# Determining whether a message is spam or ham based only on the presence of one word is error-prone,
# must try to consider all the words (or the most interesting) in the message
from functools import reduce
def p_is_type(words):
words = list(filter(lambda x: x in probabilities, words)) # Filter out words not met during training-fase
pos_probs = []
neg_probs = []
for word in words:
pos_probs.append(probabilities[word][0])
neg_probs.append(probabilities[word][1])
#else:
# probs.append(0.5) # tweaking this value
pos_probs_not = list(map(lambda prob: 1-prob, pos_probs))
neg_probs_not = list(map(lambda prob: 1-prob, neg_probs))
pos_product = reduce(lambda x, y: x * y, pos_probs, 1)
neg_product = reduce(lambda x, y: x * y, neg_probs, 1)
pos_product_not = reduce(lambda x, y: x * y, pos_probs_not, 1)
neg_product_not = reduce(lambda x, y: x * y, neg_probs_not, 1)
return pos_product/(pos_product + pos_product_not), neg_product/(neg_product + neg_product_not)
p_is_type(['good', 'enjoy', 'well']), p_is_type(['terribl', 'hate'])
# In[ ]:
# * * * TESTING THE MODEL * * *
total_correct = 0
true_good_as_good = 0
true_good_as_bad = 0
true_bad_as_bad = 0
true_bad_as_good = 0
count = 0
for t in test_set:
guess = -1
words = t[0]
answer = t[1]
try:
p_positive = p_is_type(words)[0]
p_negative = p_is_type(words)[1]
except:
count += 1
#print(words)
guess = 0 if p_positive > p_negative else 1
if guess == answer:
total_correct += 1
if answer == 0: # true negative
true_good_as_good += 1
else: # true positive
true_bad_as_bad += 1
else:
#print(words, answer)
if answer == 0: # false positive
true_good_as_bad += 1
else: # true negative
true_bad_as_good += 1
true_positives = total_goods_and_bads(test_set)[0]
true_negatives = total_goods_and_bads(test_set)[1]
print('Total test texts: ', len(test_set))
print('Number of correct: ', total_correct)
print('Accuracy: ', total_correct*100/(true_positives+true_negatives))
print('-------------------------------')
print('Positives precision: ', true_good_as_good/(true_good_as_good + true_bad_as_good))
print('Positives recall: ', true_good_as_good/(true_good_as_good + true_good_as_bad))
print('Negatives precision: ', true_bad_as_bad/(true_bad_as_bad + true_good_as_bad))
print('Negatives recall: ', true_bad_as_bad/(true_bad_as_bad + true_bad_as_good))
print('-------------------------------')
# In[ ]:
# * * * VISUALISATIONS * * *
from wordcloud import WordCloud
pos_reviews = ""
neg_reviews = ""
revs = all_reviews[:100]
for t in revs:
review = t[0].split()
s = ""
for word in review:
if len(word) > 2:
s += word + ' '
text = re.sub('[' + string.punctuation + ']', ' ', s)
text = re.sub('[\n\t\r]', '', text)
if t[1] == 0:
pos_reviews += text
else:
neg_reviews += text
# Generate a word cloud image
pos_wordcloud = WordCloud(width=600, height=400).generate(pos_reviews)
#neg_wordcloud = WordCloud(width=600, height=400).generate(neg_reviews)
# In[ ]:
#Spam Word cloud
plt.figure(figsize=(10,8), facecolor='k')
plt.imshow(pos_wordcloud)
plt.axis("off")
plt.tight_layout(pad=0)
# In[ ]: | random_line_split | |
tls-server.rs | // SPDX-FileCopyrightText: Copyright (c) 2017-2023 slowtec GmbH <post@slowtec.de>
// SPDX-License-Identifier: MIT OR Apache-2.0
// load_certs() and particially load_keys() functions were copied from an example of the tokio tls library, available at:
// https://github.com/tokio-rs/tls/blob/master/tokio-rustls/examples/server/src/main.rs
//! TCP server example
use std::{
collections::HashMap,
fs::File,
io::{self, BufReader},
net::SocketAddr,
path::Path,
sync::{Arc, Mutex},
time::Duration,
};
use futures::future;
use pkcs8::der::Decode;
use rustls_pemfile::{certs, pkcs8_private_keys};
use tokio::net::{TcpListener, TcpStream};
use tokio_modbus::{prelude::*, server::tcp::Server};
use tokio_rustls::rustls::{self, Certificate, OwnedTrustAnchor, PrivateKey};
use tokio_rustls::{TlsAcceptor, TlsConnector};
use webpki::TrustAnchor;
fn load_certs(path: &Path) -> io::Result<Vec<Certificate>> {
certs(&mut BufReader::new(File::open(path)?))
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid cert"))
.map(|mut certs| certs.drain(..).map(Certificate).collect())
}
fn load_keys(path: &Path, password: Option<&str>) -> io::Result<Vec<PrivateKey>> {
let expected_tag = match &password {
Some(_) => "ENCRYPTED PRIVATE KEY",
None => "PRIVATE KEY",
};
if expected_tag.eq("PRIVATE KEY") {
pkcs8_private_keys(&mut BufReader::new(File::open(path)?))
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid key"))
.map(|mut keys| keys.drain(..).map(PrivateKey).collect())
} else {
let content = std::fs::read(path)?;
let mut iter = pem::parse_many(content)
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err.to_string()))?
.into_iter()
.filter(|x| x.tag() == expected_tag)
.map(|x| x.contents().to_vec());
match iter.next() {
Some(key) => match password {
Some(password) => {
let encrypted =
pkcs8::EncryptedPrivateKeyInfo::from_der(&key).map_err(|err| {
io::Error::new(io::ErrorKind::InvalidData, err.to_string())
})?;
let decrypted = encrypted.decrypt(password).map_err(|err| {
io::Error::new(io::ErrorKind::InvalidData, err.to_string())
})?;
let key = decrypted.as_bytes().to_vec();
let key = rustls::PrivateKey(key);
let private_keys = vec![key];
io::Result::Ok(private_keys)
}
None => io::Result::Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid key")),
},
None => io::Result::Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid key")),
}
}
}
struct ExampleService {
input_registers: Arc<Mutex<HashMap<u16, u16>>>,
holding_registers: Arc<Mutex<HashMap<u16, u16>>>,
}
impl tokio_modbus::server::Service for ExampleService {
type Request = Request<'static>;
type Response = Response;
type Error = std::io::Error;
type Future = future::Ready<Result<Self::Response, Self::Error>>;
fn call(&self, req: Self::Request) -> Self::Future {
match req {
Request::ReadInputRegisters(addr, cnt) => {
match register_read(&self.input_registers.lock().unwrap(), addr, cnt) {
Ok(values) => future::ready(Ok(Response::ReadInputRegisters(values))),
Err(err) => future::ready(Err(err)),
}
}
Request::ReadHoldingRegisters(addr, cnt) => {
match register_read(&self.holding_registers.lock().unwrap(), addr, cnt) {
Ok(values) => future::ready(Ok(Response::ReadHoldingRegisters(values))),
Err(err) => future::ready(Err(err)),
}
}
Request::WriteMultipleRegisters(addr, values) => {
match register_write(&mut self.holding_registers.lock().unwrap(), addr, &values) {
Ok(_) => future::ready(Ok(Response::WriteMultipleRegisters(
addr,
values.len() as u16,
))),
Err(err) => future::ready(Err(err)),
}
}
Request::WriteSingleRegister(addr, value) => {
match register_write(
&mut self.holding_registers.lock().unwrap(),
addr,
std::slice::from_ref(&value),
) {
Ok(_) => future::ready(Ok(Response::WriteSingleRegister(addr, value))),
Err(err) => future::ready(Err(err)),
}
}
_ => {
println!("SERVER: Exception::IllegalFunction - Unimplemented function code in request: {req:?}");
// TODO: We want to return a Modbus Exception response `IllegalFunction`. https://github.com/slowtec/tokio-modbus/issues/165
future::ready(Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
"Unimplemented function code in request".to_string(),
)))
}
}
}
}
impl ExampleService {
fn new() -> Self {
// Insert some test data as register values.
let mut input_registers = HashMap::new();
input_registers.insert(0, 1234);
input_registers.insert(1, 5678);
let mut holding_registers = HashMap::new();
holding_registers.insert(0, 10);
holding_registers.insert(1, 20);
holding_registers.insert(2, 30);
holding_registers.insert(3, 40);
Self {
input_registers: Arc::new(Mutex::new(input_registers)),
holding_registers: Arc::new(Mutex::new(holding_registers)),
}
}
}
/// Helper function implementing reading registers from a HashMap.
fn register_read(
registers: &HashMap<u16, u16>,
addr: u16,
cnt: u16,
) -> Result<Vec<u16>, std::io::Error> {
let mut response_values = vec![0; cnt.into()];
for i in 0..cnt {
let reg_addr = addr + i;
if let Some(r) = registers.get(®_addr) {
response_values[i as usize] = *r;
} else {
// TODO: Return a Modbus Exception response `IllegalDataAddress` https://github.com/slowtec/tokio-modbus/issues/165
println!("SERVER: Exception::IllegalDataAddress");
return Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
format!("no register at address {reg_addr}"),
));
}
}
Ok(response_values)
}
/// Write a holding register. Used by both the write single register
/// and write multiple registers requests.
fn register_write(
registers: &mut HashMap<u16, u16>,
addr: u16,
values: &[u16],
) -> Result<(), std::io::Error> {
for (i, value) in values.iter().enumerate() {
let reg_addr = addr + i as u16;
if let Some(r) = registers.get_mut(®_addr) {
*r = *value;
} else {
// TODO: Return a Modbus Exception response `IllegalDataAddress` https://github.com/slowtec/tokio-modbus/issues/165
println!("SERVER: Exception::IllegalDataAddress");
return Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
format!("no register at address {reg_addr}"),
));
}
}
Ok(())
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let socket_addr = "127.0.0.1:8802".parse()?;
tokio::select! {
_ = server_context(socket_addr) => unreachable!(),
_ = client_context(socket_addr) => println!("Exiting"),
}
Ok(())
}
async fn | (socket_addr: SocketAddr) -> anyhow::Result<()> {
println!("Starting up server on {socket_addr}");
let listener = TcpListener::bind(socket_addr).await?;
let server = Server::new(listener);
let on_connected = |stream, _socket_addr| async move {
let cert_path = Path::new("./pki/server.pem");
let key_path = Path::new("./pki/server.key");
let certs = load_certs(cert_path)?;
let mut keys = load_keys(key_path, None)?;
let config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(certs, keys.remove(0))
.map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))?;
let acceptor = TlsAcceptor::from(Arc::new(config));
let service = ExampleService::new();
let stream = acceptor.accept(stream).await;
match stream {
Ok(stream) => Ok(Some((service, stream))),
Err(_) => Ok(None),
}
};
let on_process_error = |err| {
eprintln!("{err}");
};
server.serve(&on_connected, on_process_error).await?;
Ok(())
}
async fn client_context(socket_addr: SocketAddr) {
use tokio_modbus::prelude::*;
tokio::join!(
async {
// Give the server some time for starting up
tokio::time::sleep(Duration::from_secs(1)).await;
println!("Connecting client...");
let mut root_cert_store = rustls::RootCertStore::empty();
let ca_path = Path::new("./pki/ca.pem");
let mut pem = BufReader::new(File::open(ca_path).unwrap());
let certs = rustls_pemfile::certs(&mut pem).unwrap();
let trust_anchors = certs.iter().map(|cert| {
let ta = TrustAnchor::try_from_cert_der(&cert[..]).unwrap();
OwnedTrustAnchor::from_subject_spki_name_constraints(
ta.subject,
ta.spki,
ta.name_constraints,
)
});
root_cert_store.add_trust_anchors(trust_anchors);
let domain = "localhost";
let cert_path = Path::new("./pki/client.pem");
let key_path = Path::new("./pki/client.key");
let certs = load_certs(cert_path).unwrap();
let mut keys = load_keys(key_path, None).unwrap();
let config = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(root_cert_store)
.with_client_auth_cert(certs, keys.remove(0))
.map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))
.unwrap();
let connector = TlsConnector::from(Arc::new(config));
let stream = TcpStream::connect(&socket_addr).await.unwrap();
stream.set_nodelay(true).unwrap();
let domain = rustls::ServerName::try_from(domain)
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid dnsname"))
.unwrap();
let transport = connector.connect(domain, stream).await.unwrap();
// Tokio modbus transport layer setup
let mut ctx = tcp::attach(transport);
println!("CLIENT: Reading 2 input registers...");
let response = ctx.read_input_registers(0x00, 2).await.unwrap();
println!("CLIENT: The result is '{response:?}'");
assert_eq!(response, [1234, 5678]);
println!("CLIENT: Writing 2 holding registers...");
ctx.write_multiple_registers(0x01, &[7777, 8888])
.await
.unwrap();
// Read back a block including the two registers we wrote.
println!("CLIENT: Reading 4 holding registers...");
let response = ctx.read_holding_registers(0x00, 4).await.unwrap();
println!("CLIENT: The result is '{response:?}'");
assert_eq!(response, [10, 7777, 8888, 40]);
// Now we try to read with an invalid register address.
// This should return a Modbus exception response with the code
// IllegalDataAddress.
println!("CLIENT: Reading nonexisting holding register address... (should return IllegalDataAddress)");
let response = ctx.read_holding_registers(0x100, 1).await;
println!("CLIENT: The result is '{response:?}'");
assert!(response.is_err());
// TODO: How can Modbus client identify Modbus exception responses? E.g. here we expect IllegalDataAddress
// Question here: https://github.com/slowtec/tokio-modbus/issues/169
println!("CLIENT: Done.")
},
tokio::time::sleep(Duration::from_secs(5))
);
}
| server_context | identifier_name |
tls-server.rs | // SPDX-FileCopyrightText: Copyright (c) 2017-2023 slowtec GmbH <post@slowtec.de>
// SPDX-License-Identifier: MIT OR Apache-2.0
// load_certs() and particially load_keys() functions were copied from an example of the tokio tls library, available at:
// https://github.com/tokio-rs/tls/blob/master/tokio-rustls/examples/server/src/main.rs
//! TCP server example
use std::{
collections::HashMap,
fs::File,
io::{self, BufReader},
net::SocketAddr,
path::Path,
sync::{Arc, Mutex},
time::Duration,
};
use futures::future;
use pkcs8::der::Decode;
use rustls_pemfile::{certs, pkcs8_private_keys};
use tokio::net::{TcpListener, TcpStream};
use tokio_modbus::{prelude::*, server::tcp::Server};
use tokio_rustls::rustls::{self, Certificate, OwnedTrustAnchor, PrivateKey};
use tokio_rustls::{TlsAcceptor, TlsConnector};
use webpki::TrustAnchor;
fn load_certs(path: &Path) -> io::Result<Vec<Certificate>> {
certs(&mut BufReader::new(File::open(path)?))
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid cert"))
.map(|mut certs| certs.drain(..).map(Certificate).collect())
}
fn load_keys(path: &Path, password: Option<&str>) -> io::Result<Vec<PrivateKey>> {
let expected_tag = match &password {
Some(_) => "ENCRYPTED PRIVATE KEY",
None => "PRIVATE KEY",
};
if expected_tag.eq("PRIVATE KEY") {
pkcs8_private_keys(&mut BufReader::new(File::open(path)?))
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid key"))
.map(|mut keys| keys.drain(..).map(PrivateKey).collect())
} else {
let content = std::fs::read(path)?;
let mut iter = pem::parse_many(content)
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err.to_string()))?
.into_iter()
.filter(|x| x.tag() == expected_tag)
.map(|x| x.contents().to_vec());
match iter.next() {
Some(key) => match password {
Some(password) => {
let encrypted =
pkcs8::EncryptedPrivateKeyInfo::from_der(&key).map_err(|err| {
io::Error::new(io::ErrorKind::InvalidData, err.to_string())
})?;
let decrypted = encrypted.decrypt(password).map_err(|err| {
io::Error::new(io::ErrorKind::InvalidData, err.to_string())
})?;
let key = decrypted.as_bytes().to_vec();
let key = rustls::PrivateKey(key);
let private_keys = vec![key];
io::Result::Ok(private_keys)
}
None => io::Result::Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid key")),
},
None => io::Result::Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid key")),
}
}
}
struct ExampleService {
input_registers: Arc<Mutex<HashMap<u16, u16>>>,
holding_registers: Arc<Mutex<HashMap<u16, u16>>>,
}
impl tokio_modbus::server::Service for ExampleService {
type Request = Request<'static>;
type Response = Response;
type Error = std::io::Error;
type Future = future::Ready<Result<Self::Response, Self::Error>>;
| Ok(values) => future::ready(Ok(Response::ReadInputRegisters(values))),
Err(err) => future::ready(Err(err)),
}
}
Request::ReadHoldingRegisters(addr, cnt) => {
match register_read(&self.holding_registers.lock().unwrap(), addr, cnt) {
Ok(values) => future::ready(Ok(Response::ReadHoldingRegisters(values))),
Err(err) => future::ready(Err(err)),
}
}
Request::WriteMultipleRegisters(addr, values) => {
match register_write(&mut self.holding_registers.lock().unwrap(), addr, &values) {
Ok(_) => future::ready(Ok(Response::WriteMultipleRegisters(
addr,
values.len() as u16,
))),
Err(err) => future::ready(Err(err)),
}
}
Request::WriteSingleRegister(addr, value) => {
match register_write(
&mut self.holding_registers.lock().unwrap(),
addr,
std::slice::from_ref(&value),
) {
Ok(_) => future::ready(Ok(Response::WriteSingleRegister(addr, value))),
Err(err) => future::ready(Err(err)),
}
}
_ => {
println!("SERVER: Exception::IllegalFunction - Unimplemented function code in request: {req:?}");
// TODO: We want to return a Modbus Exception response `IllegalFunction`. https://github.com/slowtec/tokio-modbus/issues/165
future::ready(Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
"Unimplemented function code in request".to_string(),
)))
}
}
}
}
impl ExampleService {
fn new() -> Self {
// Insert some test data as register values.
let mut input_registers = HashMap::new();
input_registers.insert(0, 1234);
input_registers.insert(1, 5678);
let mut holding_registers = HashMap::new();
holding_registers.insert(0, 10);
holding_registers.insert(1, 20);
holding_registers.insert(2, 30);
holding_registers.insert(3, 40);
Self {
input_registers: Arc::new(Mutex::new(input_registers)),
holding_registers: Arc::new(Mutex::new(holding_registers)),
}
}
}
/// Helper function implementing reading registers from a HashMap.
fn register_read(
registers: &HashMap<u16, u16>,
addr: u16,
cnt: u16,
) -> Result<Vec<u16>, std::io::Error> {
let mut response_values = vec![0; cnt.into()];
for i in 0..cnt {
let reg_addr = addr + i;
if let Some(r) = registers.get(®_addr) {
response_values[i as usize] = *r;
} else {
// TODO: Return a Modbus Exception response `IllegalDataAddress` https://github.com/slowtec/tokio-modbus/issues/165
println!("SERVER: Exception::IllegalDataAddress");
return Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
format!("no register at address {reg_addr}"),
));
}
}
Ok(response_values)
}
/// Write a holding register. Used by both the write single register
/// and write multiple registers requests.
fn register_write(
registers: &mut HashMap<u16, u16>,
addr: u16,
values: &[u16],
) -> Result<(), std::io::Error> {
for (i, value) in values.iter().enumerate() {
let reg_addr = addr + i as u16;
if let Some(r) = registers.get_mut(®_addr) {
*r = *value;
} else {
// TODO: Return a Modbus Exception response `IllegalDataAddress` https://github.com/slowtec/tokio-modbus/issues/165
println!("SERVER: Exception::IllegalDataAddress");
return Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
format!("no register at address {reg_addr}"),
));
}
}
Ok(())
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let socket_addr = "127.0.0.1:8802".parse()?;
tokio::select! {
_ = server_context(socket_addr) => unreachable!(),
_ = client_context(socket_addr) => println!("Exiting"),
}
Ok(())
}
async fn server_context(socket_addr: SocketAddr) -> anyhow::Result<()> {
println!("Starting up server on {socket_addr}");
let listener = TcpListener::bind(socket_addr).await?;
let server = Server::new(listener);
let on_connected = |stream, _socket_addr| async move {
let cert_path = Path::new("./pki/server.pem");
let key_path = Path::new("./pki/server.key");
let certs = load_certs(cert_path)?;
let mut keys = load_keys(key_path, None)?;
let config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(certs, keys.remove(0))
.map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))?;
let acceptor = TlsAcceptor::from(Arc::new(config));
let service = ExampleService::new();
let stream = acceptor.accept(stream).await;
match stream {
Ok(stream) => Ok(Some((service, stream))),
Err(_) => Ok(None),
}
};
let on_process_error = |err| {
eprintln!("{err}");
};
server.serve(&on_connected, on_process_error).await?;
Ok(())
}
async fn client_context(socket_addr: SocketAddr) {
use tokio_modbus::prelude::*;
tokio::join!(
async {
// Give the server some time for starting up
tokio::time::sleep(Duration::from_secs(1)).await;
println!("Connecting client...");
let mut root_cert_store = rustls::RootCertStore::empty();
let ca_path = Path::new("./pki/ca.pem");
let mut pem = BufReader::new(File::open(ca_path).unwrap());
let certs = rustls_pemfile::certs(&mut pem).unwrap();
let trust_anchors = certs.iter().map(|cert| {
let ta = TrustAnchor::try_from_cert_der(&cert[..]).unwrap();
OwnedTrustAnchor::from_subject_spki_name_constraints(
ta.subject,
ta.spki,
ta.name_constraints,
)
});
root_cert_store.add_trust_anchors(trust_anchors);
let domain = "localhost";
let cert_path = Path::new("./pki/client.pem");
let key_path = Path::new("./pki/client.key");
let certs = load_certs(cert_path).unwrap();
let mut keys = load_keys(key_path, None).unwrap();
let config = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(root_cert_store)
.with_client_auth_cert(certs, keys.remove(0))
.map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))
.unwrap();
let connector = TlsConnector::from(Arc::new(config));
let stream = TcpStream::connect(&socket_addr).await.unwrap();
stream.set_nodelay(true).unwrap();
let domain = rustls::ServerName::try_from(domain)
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid dnsname"))
.unwrap();
let transport = connector.connect(domain, stream).await.unwrap();
// Tokio modbus transport layer setup
let mut ctx = tcp::attach(transport);
println!("CLIENT: Reading 2 input registers...");
let response = ctx.read_input_registers(0x00, 2).await.unwrap();
println!("CLIENT: The result is '{response:?}'");
assert_eq!(response, [1234, 5678]);
println!("CLIENT: Writing 2 holding registers...");
ctx.write_multiple_registers(0x01, &[7777, 8888])
.await
.unwrap();
// Read back a block including the two registers we wrote.
println!("CLIENT: Reading 4 holding registers...");
let response = ctx.read_holding_registers(0x00, 4).await.unwrap();
println!("CLIENT: The result is '{response:?}'");
assert_eq!(response, [10, 7777, 8888, 40]);
// Now we try to read with an invalid register address.
// This should return a Modbus exception response with the code
// IllegalDataAddress.
println!("CLIENT: Reading nonexisting holding register address... (should return IllegalDataAddress)");
let response = ctx.read_holding_registers(0x100, 1).await;
println!("CLIENT: The result is '{response:?}'");
assert!(response.is_err());
// TODO: How can Modbus client identify Modbus exception responses? E.g. here we expect IllegalDataAddress
// Question here: https://github.com/slowtec/tokio-modbus/issues/169
println!("CLIENT: Done.")
},
tokio::time::sleep(Duration::from_secs(5))
);
} | fn call(&self, req: Self::Request) -> Self::Future {
match req {
Request::ReadInputRegisters(addr, cnt) => {
match register_read(&self.input_registers.lock().unwrap(), addr, cnt) { | random_line_split |
tls-server.rs | // SPDX-FileCopyrightText: Copyright (c) 2017-2023 slowtec GmbH <post@slowtec.de>
// SPDX-License-Identifier: MIT OR Apache-2.0
// load_certs() and particially load_keys() functions were copied from an example of the tokio tls library, available at:
// https://github.com/tokio-rs/tls/blob/master/tokio-rustls/examples/server/src/main.rs
//! TCP server example
use std::{
collections::HashMap,
fs::File,
io::{self, BufReader},
net::SocketAddr,
path::Path,
sync::{Arc, Mutex},
time::Duration,
};
use futures::future;
use pkcs8::der::Decode;
use rustls_pemfile::{certs, pkcs8_private_keys};
use tokio::net::{TcpListener, TcpStream};
use tokio_modbus::{prelude::*, server::tcp::Server};
use tokio_rustls::rustls::{self, Certificate, OwnedTrustAnchor, PrivateKey};
use tokio_rustls::{TlsAcceptor, TlsConnector};
use webpki::TrustAnchor;
fn load_certs(path: &Path) -> io::Result<Vec<Certificate>> {
certs(&mut BufReader::new(File::open(path)?))
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid cert"))
.map(|mut certs| certs.drain(..).map(Certificate).collect())
}
fn load_keys(path: &Path, password: Option<&str>) -> io::Result<Vec<PrivateKey>> {
let expected_tag = match &password {
Some(_) => "ENCRYPTED PRIVATE KEY",
None => "PRIVATE KEY",
};
if expected_tag.eq("PRIVATE KEY") {
pkcs8_private_keys(&mut BufReader::new(File::open(path)?))
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid key"))
.map(|mut keys| keys.drain(..).map(PrivateKey).collect())
} else {
let content = std::fs::read(path)?;
let mut iter = pem::parse_many(content)
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err.to_string()))?
.into_iter()
.filter(|x| x.tag() == expected_tag)
.map(|x| x.contents().to_vec());
match iter.next() {
Some(key) => match password {
Some(password) => {
let encrypted =
pkcs8::EncryptedPrivateKeyInfo::from_der(&key).map_err(|err| {
io::Error::new(io::ErrorKind::InvalidData, err.to_string())
})?;
let decrypted = encrypted.decrypt(password).map_err(|err| {
io::Error::new(io::ErrorKind::InvalidData, err.to_string())
})?;
let key = decrypted.as_bytes().to_vec();
let key = rustls::PrivateKey(key);
let private_keys = vec![key];
io::Result::Ok(private_keys)
}
None => io::Result::Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid key")),
},
None => io::Result::Err(io::Error::new(io::ErrorKind::InvalidInput, "invalid key")),
}
}
}
struct ExampleService {
input_registers: Arc<Mutex<HashMap<u16, u16>>>,
holding_registers: Arc<Mutex<HashMap<u16, u16>>>,
}
impl tokio_modbus::server::Service for ExampleService {
type Request = Request<'static>;
type Response = Response;
type Error = std::io::Error;
type Future = future::Ready<Result<Self::Response, Self::Error>>;
fn call(&self, req: Self::Request) -> Self::Future |
}
impl ExampleService {
fn new() -> Self {
// Insert some test data as register values.
let mut input_registers = HashMap::new();
input_registers.insert(0, 1234);
input_registers.insert(1, 5678);
let mut holding_registers = HashMap::new();
holding_registers.insert(0, 10);
holding_registers.insert(1, 20);
holding_registers.insert(2, 30);
holding_registers.insert(3, 40);
Self {
input_registers: Arc::new(Mutex::new(input_registers)),
holding_registers: Arc::new(Mutex::new(holding_registers)),
}
}
}
/// Helper function implementing reading registers from a HashMap.
fn register_read(
registers: &HashMap<u16, u16>,
addr: u16,
cnt: u16,
) -> Result<Vec<u16>, std::io::Error> {
let mut response_values = vec![0; cnt.into()];
for i in 0..cnt {
let reg_addr = addr + i;
if let Some(r) = registers.get(®_addr) {
response_values[i as usize] = *r;
} else {
// TODO: Return a Modbus Exception response `IllegalDataAddress` https://github.com/slowtec/tokio-modbus/issues/165
println!("SERVER: Exception::IllegalDataAddress");
return Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
format!("no register at address {reg_addr}"),
));
}
}
Ok(response_values)
}
/// Write a holding register. Used by both the write single register
/// and write multiple registers requests.
fn register_write(
registers: &mut HashMap<u16, u16>,
addr: u16,
values: &[u16],
) -> Result<(), std::io::Error> {
for (i, value) in values.iter().enumerate() {
let reg_addr = addr + i as u16;
if let Some(r) = registers.get_mut(®_addr) {
*r = *value;
} else {
// TODO: Return a Modbus Exception response `IllegalDataAddress` https://github.com/slowtec/tokio-modbus/issues/165
println!("SERVER: Exception::IllegalDataAddress");
return Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
format!("no register at address {reg_addr}"),
));
}
}
Ok(())
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let socket_addr = "127.0.0.1:8802".parse()?;
tokio::select! {
_ = server_context(socket_addr) => unreachable!(),
_ = client_context(socket_addr) => println!("Exiting"),
}
Ok(())
}
async fn server_context(socket_addr: SocketAddr) -> anyhow::Result<()> {
println!("Starting up server on {socket_addr}");
let listener = TcpListener::bind(socket_addr).await?;
let server = Server::new(listener);
let on_connected = |stream, _socket_addr| async move {
let cert_path = Path::new("./pki/server.pem");
let key_path = Path::new("./pki/server.key");
let certs = load_certs(cert_path)?;
let mut keys = load_keys(key_path, None)?;
let config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(certs, keys.remove(0))
.map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))?;
let acceptor = TlsAcceptor::from(Arc::new(config));
let service = ExampleService::new();
let stream = acceptor.accept(stream).await;
match stream {
Ok(stream) => Ok(Some((service, stream))),
Err(_) => Ok(None),
}
};
let on_process_error = |err| {
eprintln!("{err}");
};
server.serve(&on_connected, on_process_error).await?;
Ok(())
}
async fn client_context(socket_addr: SocketAddr) {
use tokio_modbus::prelude::*;
tokio::join!(
async {
// Give the server some time for starting up
tokio::time::sleep(Duration::from_secs(1)).await;
println!("Connecting client...");
let mut root_cert_store = rustls::RootCertStore::empty();
let ca_path = Path::new("./pki/ca.pem");
let mut pem = BufReader::new(File::open(ca_path).unwrap());
let certs = rustls_pemfile::certs(&mut pem).unwrap();
let trust_anchors = certs.iter().map(|cert| {
let ta = TrustAnchor::try_from_cert_der(&cert[..]).unwrap();
OwnedTrustAnchor::from_subject_spki_name_constraints(
ta.subject,
ta.spki,
ta.name_constraints,
)
});
root_cert_store.add_trust_anchors(trust_anchors);
let domain = "localhost";
let cert_path = Path::new("./pki/client.pem");
let key_path = Path::new("./pki/client.key");
let certs = load_certs(cert_path).unwrap();
let mut keys = load_keys(key_path, None).unwrap();
let config = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(root_cert_store)
.with_client_auth_cert(certs, keys.remove(0))
.map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))
.unwrap();
let connector = TlsConnector::from(Arc::new(config));
let stream = TcpStream::connect(&socket_addr).await.unwrap();
stream.set_nodelay(true).unwrap();
let domain = rustls::ServerName::try_from(domain)
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid dnsname"))
.unwrap();
let transport = connector.connect(domain, stream).await.unwrap();
// Tokio modbus transport layer setup
let mut ctx = tcp::attach(transport);
println!("CLIENT: Reading 2 input registers...");
let response = ctx.read_input_registers(0x00, 2).await.unwrap();
println!("CLIENT: The result is '{response:?}'");
assert_eq!(response, [1234, 5678]);
println!("CLIENT: Writing 2 holding registers...");
ctx.write_multiple_registers(0x01, &[7777, 8888])
.await
.unwrap();
// Read back a block including the two registers we wrote.
println!("CLIENT: Reading 4 holding registers...");
let response = ctx.read_holding_registers(0x00, 4).await.unwrap();
println!("CLIENT: The result is '{response:?}'");
assert_eq!(response, [10, 7777, 8888, 40]);
// Now we try to read with an invalid register address.
// This should return a Modbus exception response with the code
// IllegalDataAddress.
println!("CLIENT: Reading nonexisting holding register address... (should return IllegalDataAddress)");
let response = ctx.read_holding_registers(0x100, 1).await;
println!("CLIENT: The result is '{response:?}'");
assert!(response.is_err());
// TODO: How can Modbus client identify Modbus exception responses? E.g. here we expect IllegalDataAddress
// Question here: https://github.com/slowtec/tokio-modbus/issues/169
println!("CLIENT: Done.")
},
tokio::time::sleep(Duration::from_secs(5))
);
}
| {
match req {
Request::ReadInputRegisters(addr, cnt) => {
match register_read(&self.input_registers.lock().unwrap(), addr, cnt) {
Ok(values) => future::ready(Ok(Response::ReadInputRegisters(values))),
Err(err) => future::ready(Err(err)),
}
}
Request::ReadHoldingRegisters(addr, cnt) => {
match register_read(&self.holding_registers.lock().unwrap(), addr, cnt) {
Ok(values) => future::ready(Ok(Response::ReadHoldingRegisters(values))),
Err(err) => future::ready(Err(err)),
}
}
Request::WriteMultipleRegisters(addr, values) => {
match register_write(&mut self.holding_registers.lock().unwrap(), addr, &values) {
Ok(_) => future::ready(Ok(Response::WriteMultipleRegisters(
addr,
values.len() as u16,
))),
Err(err) => future::ready(Err(err)),
}
}
Request::WriteSingleRegister(addr, value) => {
match register_write(
&mut self.holding_registers.lock().unwrap(),
addr,
std::slice::from_ref(&value),
) {
Ok(_) => future::ready(Ok(Response::WriteSingleRegister(addr, value))),
Err(err) => future::ready(Err(err)),
}
}
_ => {
println!("SERVER: Exception::IllegalFunction - Unimplemented function code in request: {req:?}");
// TODO: We want to return a Modbus Exception response `IllegalFunction`. https://github.com/slowtec/tokio-modbus/issues/165
future::ready(Err(std::io::Error::new(
std::io::ErrorKind::AddrNotAvailable,
"Unimplemented function code in request".to_string(),
)))
}
}
} | identifier_body |
program.py | import discord
import os
import zlib
import io
import re
import aiohttp
from util import fuzzy
from discord.ext import commands
from pistonapi import PistonAPI
piston = PistonAPI()
class SphinxObjectFileReader:
# Inspired by Sphinx's InventoryFileReader
BUFSIZE = 16 * 1024
def __init__(self, buffer):
self.stream = io.BytesIO(buffer)
def readline(self):
return self.stream.readline().decode('utf-8')
def skipline(self):
self.stream.readline()
def read_compressed_chunks(self):
decompressor = zlib.decompressobj()
while True:
chunk = self.stream.read(self.BUFSIZE)
if len(chunk) == 0:
break
yield decompressor.decompress(chunk)
yield decompressor.flush()
def read_compressed_lines(self):
buf = b''
for chunk in self.read_compressed_chunks():
buf += chunk
pos = buf.find(b'\n')
while pos != -1:
yield buf[:pos].decode('utf-8')
buf = buf[pos + 1:]
pos = buf.find(b'\n')
class ProgrammingCog(commands.Cog, name="Programming"):
"""Commands for programmers"""
def __init__(self, client):
self.client = client
self.bot = client
self.client.session = aiohttp.ClientSession(loop=self.client.loop)
self.regex = re.compile(r"(\w*)\s*(?:```)(\w*)?([\s\S]*)(?:```$)")
@property
def session(self):
return self.bot.http._HTTPClient__session | async def _run_code(self, *, lang: str, code: str):
res = await self.session.post(
"https://emkc.org/api/v1/piston/execute",
json={"language": lang, "source": code})
return await res.json()
@commands.command()
async def run(self, ctx: commands.Context, *, codeblock: str):
"""
Run code and get results instantly
**Note**: You must use codeblocks around the code
Supported languages: awk, bash, brainfuck, c, cpp, crystal, csharp, d, dash, deno, elixer, emacs, go, haskell, java, jelly, julia, kotlin, lisp, lua, nasm, nasm64, nim, node, osabie, paradoc, perl, php, prolog, python2, python, ruby, rust, scala, swift, typescript, zig
"""
matches = self.regex.findall(codeblock)
if not matches:
return await ctx.reply(embed=discord.Embed(title="Uh-oh", description="Couldn't quite see your codeblock"))
lang = matches[0][0] or matches[0][1]
if not lang:
return await ctx.reply(embed=discord.Embed(title="Uh-oh",
description="Couldn't find the language hinted in the codeblock or before it"))
code = matches[0][2]
result = await self._run_code(lang=lang, code=code)
await self._send_result(ctx, result)
@commands.command()
async def runl(self, ctx: commands.Context, lang: str, *, code: str):
"""
Run a single line of code, **must** specify language as first argument
Supported languages: awk, bash, brainfuck, c, cpp, crystal, csharp, d, dash, deno, elixer, emacs, go, haskell, java, jelly, julia, kotlin, lisp, lua, nasm, nasm64, nim, node, osabie, paradoc, perl, php, prolog, python2, python, ruby, rust, scala, swift, typescript, zig
"""
result = await self._run_code(lang=lang, code=code)
await self._send_result(ctx, result)
async def _send_result(self, ctx: commands.Context, result: dict):
if "message" in result:
return await ctx.reply(embed=discord.Embed(title="Uh-oh", description=result["message"]))
output = result['output']
# if len(output) > 2000:
# url = await create_guest_paste_bin(self.session, output)
# return await ctx.reply("Your output was too long, so here's the pastebin link " + url)
embed = discord.Embed(
title=f"{result['language'][0].upper() + result['language'][1:]}")
newline = '\n'
rep = {"python3": "py", "python2": "py", 'node': 'js'}
rep = dict((re.escape(k), v) for k, v in rep.items())
pattern = re.compile("|".join(rep.keys()))
converted_language = pattern.sub(lambda m: rep[re.escape(m.group(0))], result['language'])
limit = 1024 - (29 + len(converted_language))
output = f"```{converted_language}\n{output[:limit]}```{(len(output) > limit) * (newline + '**Output shortened**')}"
embed.add_field(name="Output", value=output or "**No output**")
try:
await ctx.reply(embed=embed)
except:
await ctx.reply(output)
def parse_object_inv(self, stream, url):
# key: URL
# n.b.: key doesn't have `discord` or `discord.ext.commands` namespaces
result = {}
# first line is version info
inv_version = stream.readline().rstrip()
if inv_version != '# Sphinx inventory version 2':
raise RuntimeError('Invalid objects.inv file version.')
# next line is "# Project: <name>"
# then after that is "# Version: <version>"
projname = stream.readline().rstrip()[11:]
version = stream.readline().rstrip()[11:]
# next line says if it's a zlib header
line = stream.readline()
if 'zlib' not in line:
raise RuntimeError('Invalid objects.inv file, not z-lib compatible.')
# This code mostly comes from the Sphinx repository.
entry_regex = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+(\S+)\s+(.*)')
for line in stream.read_compressed_lines():
match = entry_regex.match(line.rstrip())
if not match:
continue
name, directive, prio, location, dispname = match.groups()
domain, _, subdirective = directive.partition(':')
if directive == 'py:module' and name in result:
# From the Sphinx Repository:
# due to a bug in 1.1 and below,
# two inventory entries are created
# for Python modules, and the first
# one is correct
continue
# Most documentation pages have a label
if directive == 'std:doc':
subdirective = 'label'
if location.endswith('$'):
location = location[:-1] + name
key = name if dispname == '-' else dispname
prefix = f'{subdirective}:' if domain == 'std' else ''
if projname == 'discord.py':
key = key.replace('discord.ext.commands.', '').replace('discord.', '')
result[f'{prefix}{key}'] = os.path.join(url, location)
return result
async def build_rtfm_lookup_table(self, page_types):
cache = {}
for key, page in page_types.items():
sub = cache[key] = {}
async with self.bot.session.get(page + '/objects.inv') as resp:
if resp.status != 200:
raise RuntimeError('Cannot build rtfm lookup table, try again later.')
stream = SphinxObjectFileReader(await resp.read())
cache[key] = self.parse_object_inv(stream, page)
self._rtfm_cache = cache
async def do_rtfm(self, ctx, key, obj):
page_types = {
'latest': 'https://discordpy.readthedocs.io/en/latest',
'latest-jp': 'https://discordpy.readthedocs.io/ja/latest',
'python': 'https://docs.python.org/3',
'python-jp': 'https://docs.python.org/ja/3',
}
if obj is None:
await ctx.send(page_types[key])
return
if not hasattr(self, '_rtfm_cache'):
await ctx.trigger_typing()
await self.build_rtfm_lookup_table(page_types)
obj = re.sub(r'^(?:discord\.(?:ext\.)?)?(?:commands\.)?(.+)', r'\1', obj)
if key.startswith('latest'):
# point the abc.Messageable types properly:
q = obj.lower()
for name in dir(discord.abc.Messageable):
if name[0] == '_':
continue
if q == name:
obj = f'abc.Messageable.{name}'
break
cache = list(self._rtfm_cache[key].items())
def transform(tup):
return tup[0]
matches = fuzzy.finder(obj, cache, key=lambda t: t[0], lazy=False)[:8]
e = discord.Embed(colour=discord.Colour.blurple())
if len(matches) == 0:
return await ctx.send('Could not find anything. Sorry.')
e.description = '\n'.join(f'[`{key}`]({url})' for key, url in matches)
await ctx.send(embed=e)
def transform_rtfm_language_key(self, ctx, prefix):
if ctx.guild is not None:
# 日本語 category
if ctx.channel.category_id == 490287576670928914:
return prefix + '-jp'
# d.py unofficial JP
elif ctx.guild.id == 463986890190749698:
return prefix + '-jp'
return prefix
@commands.group(aliases=['rtfd'], invoke_without_command=True)
async def rtfm(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a discord.py entity.
Events, objects, and functions are all supported through a
a cruddy fuzzy algorithm.
"""
key = self.transform_rtfm_language_key(ctx, 'latest')
await self.do_rtfm(ctx, key, obj)
@rtfm.command(name='jp')
async def rtfm_jp(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a discord.py entity (Japanese)."""
await self.do_rtfm(ctx, 'latest-jp', obj)
@rtfm.command(name='python', aliases=['py'])
async def rtfm_python(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a Python entity."""
key = self.transform_rtfm_language_key(ctx, 'python')
await self.do_rtfm(ctx, key, obj)
@rtfm.command(name='py-jp', aliases=['py-ja'])
async def rtfm_python_jp(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a Python entity (Japanese)."""
await self.do_rtfm(ctx, 'python-jp', obj)
async def _member_stats(self, ctx, member, total_uses):
e = discord.Embed(title='RTFM Stats')
e.set_author(name=str(member), icon_url=member.avatar_url)
@commands.command(name="evalute",description="コードを評価します")
async def evalute(self,ctx, n, *, code):
nm = n.lower()
a = code.replace("```", "")
if nm == "py":
b = (piston.execute(language="py", version="3.9", code=a))
c = str(b)
em = discord.Embed(title="Python Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "java":
b = (piston.execute(language="java", version="15.0.2", code=a))
c = str(b)
em = discord.Embed(title="Java Code Output!",
description=f'```py\n出力:\n{c}```',
color=discord.Color.red())
elif nm == "js":
b = (piston.execute(language="js", version="15.10.0", code=a))
c = str(b)
em = discord.Embed(title="JavaScript Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "go":
b = (piston.execute(language="go", version="1.16.2", code=a))
c = str(b)
em = discord.Embed(title="Go Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "ts":
b = (piston.execute(language="typescript", version="4.2.3", code=a))
c = str(b)
em = discord.Embed(title="TypeScript Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "bf":
b = (piston.execute(language="brainfuck", version="2.7.3", code=a))
c = str(b)
em = discord.Embed(title="BrainFuck Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "php":
b = (piston.execute(language="php",version="8.0",code=a))
c = str(b)
em = discord.Embed(title="PHP code出力",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
else:
em = discord.Embed(title="その言語は対応していません")
await ctx.send(embed=em)
def setup(client):
client.add_cog(ProgrammingCog(client)) | random_line_split | |
program.py | import discord
import os
import zlib
import io
import re
import aiohttp
from util import fuzzy
from discord.ext import commands
from pistonapi import PistonAPI
piston = PistonAPI()
class SphinxObjectFileReader:
# Inspired by Sphinx's InventoryFileReader
BUFSIZE = 16 * 1024
def __init__(self, buffer):
self.stream = io.BytesIO(buffer)
def readline(self):
return self.stream.readline().decode('utf-8')
def skipline(self):
self.stream.readline()
def read_compressed_chunks(self):
decompressor = zlib.decompressobj()
while True:
chunk = self.stream.read(self.BUFSIZE)
if len(chunk) == 0:
break
yield decompressor.decompress(chunk)
yield decompressor.flush()
def read_compressed_lines(self):
buf = b''
for chunk in self.read_compressed_chunks():
buf += chunk
pos = buf.find(b'\n')
while pos != -1:
yield buf[:pos].decode('utf-8')
buf = buf[pos + 1:]
pos = buf.find(b'\n')
class ProgrammingCog(commands.Cog, name="Programming"):
"""Commands for programmers"""
def __init__(self, client):
self.client = client
self.bot = client
self.client.session = aiohttp.ClientSession(loop=self.client.loop)
self.regex = re.compile(r"(\w*)\s*(?:```)(\w*)?([\s\S]*)(?:```$)")
@property
def session(self):
return self.bot.http._HTTPClient__session
async def _run_code(self, *, lang: str, code: str):
res = await self.session.post(
"https://emkc.org/api/v1/piston/execute",
json={"language": lang, "source": code})
return await res.json()
@commands.command()
async def run(self, ctx: commands.Context, *, codeblock: str):
"""
Run code and get results instantly
**Note**: You must use codeblocks around the code
Supported languages: awk, bash, brainfuck, c, cpp, crystal, csharp, d, dash, deno, elixer, emacs, go, haskell, java, jelly, julia, kotlin, lisp, lua, nasm, nasm64, nim, node, osabie, paradoc, perl, php, prolog, python2, python, ruby, rust, scala, swift, typescript, zig
"""
matches = self.regex.findall(codeblock)
if not matches:
return await ctx.reply(embed=discord.Embed(title="Uh-oh", description="Couldn't quite see your codeblock"))
lang = matches[0][0] or matches[0][1]
if not lang:
return await ctx.reply(embed=discord.Embed(title="Uh-oh",
description="Couldn't find the language hinted in the codeblock or before it"))
code = matches[0][2]
result = await self._run_code(lang=lang, code=code)
await self._send_result(ctx, result)
@commands.command()
async def runl(self, ctx: commands.Context, lang: str, *, code: str):
"""
Run a single line of code, **must** specify language as first argument
Supported languages: awk, bash, brainfuck, c, cpp, crystal, csharp, d, dash, deno, elixer, emacs, go, haskell, java, jelly, julia, kotlin, lisp, lua, nasm, nasm64, nim, node, osabie, paradoc, perl, php, prolog, python2, python, ruby, rust, scala, swift, typescript, zig
"""
result = await self._run_code(lang=lang, code=code)
await self._send_result(ctx, result)
async def _send_result(self, ctx: commands.Context, result: dict):
if "message" in result:
return await ctx.reply(embed=discord.Embed(title="Uh-oh", description=result["message"]))
output = result['output']
# if len(output) > 2000:
# url = await create_guest_paste_bin(self.session, output)
# return await ctx.reply("Your output was too long, so here's the pastebin link " + url)
embed = discord.Embed(
title=f"{result['language'][0].upper() + result['language'][1:]}")
newline = '\n'
rep = {"python3": "py", "python2": "py", 'node': 'js'}
rep = dict((re.escape(k), v) for k, v in rep.items())
pattern = re.compile("|".join(rep.keys()))
converted_language = pattern.sub(lambda m: rep[re.escape(m.group(0))], result['language'])
limit = 1024 - (29 + len(converted_language))
output = f"```{converted_language}\n{output[:limit]}```{(len(output) > limit) * (newline + '**Output shortened**')}"
embed.add_field(name="Output", value=output or "**No output**")
try:
await ctx.reply(embed=embed)
except:
await ctx.reply(output)
def parse_object_inv(self, stream, url):
# key: URL
# n.b.: key doesn't have `discord` or `discord.ext.commands` namespaces
result = {}
# first line is version info
inv_version = stream.readline().rstrip()
if inv_version != '# Sphinx inventory version 2':
raise RuntimeError('Invalid objects.inv file version.')
# next line is "# Project: <name>"
# then after that is "# Version: <version>"
projname = stream.readline().rstrip()[11:]
version = stream.readline().rstrip()[11:]
# next line says if it's a zlib header
line = stream.readline()
if 'zlib' not in line:
raise RuntimeError('Invalid objects.inv file, not z-lib compatible.')
# This code mostly comes from the Sphinx repository.
entry_regex = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+(\S+)\s+(.*)')
for line in stream.read_compressed_lines():
match = entry_regex.match(line.rstrip())
if not match:
continue
name, directive, prio, location, dispname = match.groups()
domain, _, subdirective = directive.partition(':')
if directive == 'py:module' and name in result:
# From the Sphinx Repository:
# due to a bug in 1.1 and below,
# two inventory entries are created
# for Python modules, and the first
# one is correct
continue
# Most documentation pages have a label
if directive == 'std:doc':
subdirective = 'label'
if location.endswith('$'):
location = location[:-1] + name
key = name if dispname == '-' else dispname
prefix = f'{subdirective}:' if domain == 'std' else ''
if projname == 'discord.py':
key = key.replace('discord.ext.commands.', '').replace('discord.', '')
result[f'{prefix}{key}'] = os.path.join(url, location)
return result
async def build_rtfm_lookup_table(self, page_types):
cache = {}
for key, page in page_types.items():
sub = cache[key] = {}
async with self.bot.session.get(page + '/objects.inv') as resp:
if resp.status != 200:
raise RuntimeError('Cannot build rtfm lookup table, try again later.')
stream = SphinxObjectFileReader(await resp.read())
cache[key] = self.parse_object_inv(stream, page)
self._rtfm_cache = cache
async def do_rtfm(self, ctx, key, obj):
page_types = {
'latest': 'https://discordpy.readthedocs.io/en/latest',
'latest-jp': 'https://discordpy.readthedocs.io/ja/latest',
'python': 'https://docs.python.org/3',
'python-jp': 'https://docs.python.org/ja/3',
}
if obj is None:
await ctx.send(page_types[key])
return
if not hasattr(self, '_rtfm_cache'):
await ctx.trigger_typing()
await self.build_rtfm_lookup_table(page_types)
obj = re.sub(r'^(?:discord\.(?:ext\.)?)?(?:commands\.)?(.+)', r'\1', obj)
if key.startswith('latest'):
# point the abc.Messageable types properly:
q = obj.lower()
for name in dir(discord.abc.Messageable):
if name[0] == '_':
continue
if q == name:
obj = f'abc.Messageable.{name}'
break
cache = list(self._rtfm_cache[key].items())
def transform(tup):
return tup[0]
matches = fuzzy.finder(obj, cache, key=lambda t: t[0], lazy=False)[:8]
e = discord.Embed(colour=discord.Colour.blurple())
if len(matches) == 0:
return await ctx.send('Could not find anything. Sorry.')
e.description = '\n'.join(f'[`{key}`]({url})' for key, url in matches)
await ctx.send(embed=e)
def transform_rtfm_language_key(self, ctx, prefix):
if ctx.guild is not None:
# 日本語 category
if ctx.channel.category_id == 490287576670928914:
return prefix + '-jp'
# d.py unofficial JP
elif ctx.guild.id == 463986890190749698:
return prefix + '-jp'
return prefix
@commands.group(aliases=['rtfd'], invoke_without_command=True)
async def rtfm(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a discord.py entity.
Events, objects, and functions are all supported through a
a cruddy fuzzy algorithm.
"""
key = self.transform_rtfm_language_key(ctx, 'latest')
await self.do_rtfm(ctx, key, obj)
@rtfm.command(name='jp')
async def rtfm_jp(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a discord.py entity (Japanese)."""
await self.do_rtfm(ctx, 'latest-jp', obj)
@rtfm.command(name='python', aliases=['py'])
async def rtfm_python(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a Python entity."""
key = self.transform_rtfm_language_key(ctx, 'python')
await self.do_rtfm(ctx, key, obj)
@rtfm.command(name='py-jp', aliases=['py-ja'])
async def rtfm_p | ctx, *, obj: str = None):
"""Gives you a documentation link for a Python entity (Japanese)."""
await self.do_rtfm(ctx, 'python-jp', obj)
async def _member_stats(self, ctx, member, total_uses):
e = discord.Embed(title='RTFM Stats')
e.set_author(name=str(member), icon_url=member.avatar_url)
@commands.command(name="evalute",description="コードを評価します")
async def evalute(self,ctx, n, *, code):
nm = n.lower()
a = code.replace("```", "")
if nm == "py":
b = (piston.execute(language="py", version="3.9", code=a))
c = str(b)
em = discord.Embed(title="Python Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "java":
b = (piston.execute(language="java", version="15.0.2", code=a))
c = str(b)
em = discord.Embed(title="Java Code Output!",
description=f'```py\n出力:\n{c}```',
color=discord.Color.red())
elif nm == "js":
b = (piston.execute(language="js", version="15.10.0", code=a))
c = str(b)
em = discord.Embed(title="JavaScript Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "go":
b = (piston.execute(language="go", version="1.16.2", code=a))
c = str(b)
em = discord.Embed(title="Go Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "ts":
b = (piston.execute(language="typescript", version="4.2.3", code=a))
c = str(b)
em = discord.Embed(title="TypeScript Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "bf":
b = (piston.execute(language="brainfuck", version="2.7.3", code=a))
c = str(b)
em = discord.Embed(title="BrainFuck Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "php":
b = (piston.execute(language="php",version="8.0",code=a))
c = str(b)
em = discord.Embed(title="PHP code出力",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
else:
em = discord.Embed(title="その言語は対応していません")
await ctx.send(embed=em)
def setup(client):
client.add_cog(ProgrammingCog(client)) | ython_jp(self, | identifier_name |
program.py | import discord
import os
import zlib
import io
import re
import aiohttp
from util import fuzzy
from discord.ext import commands
from pistonapi import PistonAPI
piston = PistonAPI()
class SphinxObjectFileReader:
# Inspired by Sphinx's InventoryFileReader
BUFSIZE = 16 * 1024
def __init__(self, buffer):
self.stream = io.BytesIO(buffer)
def readline(self):
return self.stream.readline().decode('utf-8')
def skipline(self):
self.stream.readline()
def read_compressed_chunks(self):
decompressor = zlib.decompressobj()
while True:
chunk = self.stream.read(self.BUFSIZE)
if len(chunk) == 0:
break
yield decompressor.decompress(chunk)
yield decompressor.flush()
def read_compressed_lines(self):
buf = b''
for chunk in self.read_compressed_chunks():
buf += chunk
pos = buf.find(b'\n')
while pos != -1:
yield buf[:pos].decode('utf-8')
buf = buf[pos + 1:]
pos = buf.find(b'\n')
class ProgrammingCog(commands.Cog, name="Programming"):
"""Commands for programmers"""
def __init__(self, client):
self.client = client
self.bot = client
self.client.session = aiohttp.ClientSession(loop=self.client.loop)
self.regex = re.compile(r"(\w*)\s*(?:```)(\w*)?([\s\S]*)(?:```$)")
@property
def session(self):
return self.bot.http._HTTPClient__session
async def _run_code(self, *, lang: str, code: str):
res = await self.session.post(
"https://emkc.org/api/v1/piston/execute",
json={"language": lang, "source": code})
return await res.json()
@commands.command()
async def run(self, ctx: commands.Context, *, codeblock: str):
"""
Run code and get results instantly
**Note**: You must use codeblocks around the code
Supported languages: awk, bash, brainfuck, c, cpp, crystal, csharp, d, dash, deno, elixer, emacs, go, haskell, java, jelly, julia, kotlin, lisp, lua, nasm, nasm64, nim, node, osabie, paradoc, perl, php, prolog, python2, python, ruby, rust, scala, swift, typescript, zig
"""
matches = self.regex.findall(codeblock)
if not matches:
return await ctx.reply(embed=discord.Embed(title="Uh-oh", description="Couldn't quite see your codeblock"))
lang = matches[0][0] or matches[0][1]
if not lang:
return await ctx.reply(embed=discord.Embed(title="Uh-oh",
description="Couldn't find the language hinted in the codeblock or before it"))
code = matches[0][2]
result = await self._run_code(lang=lang, code=code)
await self._send_result(ctx, result)
@commands.command()
async def runl(self, ctx: commands.Context, lang: str, *, code: str):
"""
Run a single line of code, **must** specify language as first argument
Supported languages: awk, bash, brainfuck, c, cpp, crystal, csharp, d, dash, deno, elixer, emacs, go, haskell, java, jelly, julia, kotlin, lisp, lua, nasm, nasm64, nim, node, osabie, paradoc, perl, php, prolog, python2, python, ruby, rust, scala, swift, typescript, zig
"""
result = await self._run_code(lang=lang, code=code)
await self._send_result(ctx, result)
async def _send_result(self, ctx: commands.Context, result: dict):
if "message" in result:
return await ctx.reply(embed=discord.Embed(title="Uh-oh", description=result["message"]))
output = result['output']
# if len(output) > 2000:
# url = await create_guest_paste_bin(self.session, output)
# return await ctx.reply("Your output was too long, so here's the pastebin link " + url)
embed = discord.Embed(
title=f"{result['language'][0].upper() + result['language'][1:]}")
newline = '\n'
rep = {"python3": "py", "python2": "py", 'node': 'js'}
rep = dict((re.escape(k), v) for k, v in rep.items())
pattern = re.compile("|".join(rep.keys()))
converted_language = pattern.sub(lambda m: rep[re.escape(m.group(0))], result['language'])
limit = 1024 - (29 + len(converted_language))
output = f"```{converted_language}\n{output[:limit]}```{(len(output) > limit) * (newline + '**Output shortened**')}"
embed.add_field(name="Output", value=output or "**No output**")
try:
await ctx.reply(embed=embed)
except:
await ctx.reply(output)
def parse_object_inv(self, stream, url):
# key: URL
# n.b.: key doesn't have `discord` or `discord.ext.commands` namespaces
result = {}
# first line is version info
inv_version = stream.readline().rstrip()
if inv_version != '# Sphinx inventory version 2':
raise RuntimeError('Invalid objects.inv file version.')
# next line is "# Project: <name>"
# then after that is "# Version: <version>"
projname = stream.readline().rstrip()[11:]
version = stream.readline().rstrip()[11:]
# next line says if it's a zlib header
line = stream.readline()
if 'zlib' not in line:
raise RuntimeError('Invalid objects.inv file, not z-lib compatible.')
# This code mostly comes from the Sphinx repository.
entry_regex = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+(\S+)\s+(.*)')
for line in stream.read_compressed_lines():
match = entry_regex.match(line.rstrip())
if not match:
continue
name, directive, prio, location, dispname = match.groups()
domain, _, subdirective = directive.partition(':')
if directive == 'py:module' and name in result:
# From the Sphinx Repository:
# due to a bug in 1.1 and below,
# two inventory entries are created
# for Python modules, and the first
# one is correct
continue
# Most documentation pages have a label
if directive == 'std:doc':
subdirective = 'label'
if location.endswith('$'):
location = location[:-1] + name
key = name if dispname == '-' else dispname
prefix = f'{subdirective}:' if domain == 'std' else ''
if projname == 'discord.py':
key = key.replace('discord.ext.commands.', '').replace('discord.', '')
result[f'{prefix}{key}'] = os.path.join(url, location)
return result
async def build_rtfm_lookup_table(self, page_types):
cache = {}
for key, page in page_types.items():
sub = cache[key] = {}
async with self.bot.session.get(page + '/objects.inv') as resp:
if resp.status != 200:
raise RuntimeError('Cannot build rtfm lookup table, try again later.')
stream = SphinxObjectFileReader(await resp.read())
cache[key] = self.parse_object_inv(stream, page)
self._rtfm_cache = cache
async def do_rtfm(self, ctx, key, obj):
page_types = {
'latest': 'https://discordpy.readthedocs.io/en/latest',
'latest-jp': 'https://discordpy.readthedocs.io/ja/latest',
'python': 'https://docs.python.org/3',
'python-jp': 'https://docs.python.org/ja/3',
}
if obj is None:
await ctx.send(page_types[key])
return
if not hasattr(self, '_rtfm_cache'):
await ctx.trigger_typing()
await self.build_rtfm_lookup_table(page_types)
obj = re.sub(r'^(?:discord\.(?:ext\.)?)?(?:commands\.)?(.+)', r'\1', obj)
if key.startswith('latest'):
# point the abc.Messageable types properly:
q = obj.lower()
for name in dir(discord.abc.Messageable):
if name[0] == '_':
continue
if q == name:
obj = f'abc.Messageable.{name}'
break
cache = list(self._rtfm_cache[key].items())
def transform(tup):
return tup[0]
matches = fuzzy.finder(obj, cache, key=lambda t: t[0], lazy=False)[:8]
e = discord.Embed(colour=discord.Colour.blurple())
if len(matches) == 0:
return await ctx.send('Could not find anything. Sorry.')
e.description = '\n'.join(f'[`{key}`]({url})' for key, url in matches)
await ctx.send(embed=e)
def transform_rtfm_language_key(self, ctx, prefix):
if ctx.guild is not None:
# 日本語 category
if ctx.channel.category_id == 490287576670928914:
return prefix + '-jp'
# d.py unofficial JP
elif ctx.guild.id == 463986890190749698:
return prefix + '-jp'
return prefix
@commands.group(aliases=['rtfd'], invoke_without_command=True)
async def rtfm(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a discord.py entity.
Events, objects, and functions are all supported through a
a cruddy fuzzy algorithm.
"""
key = self.transform_rtfm_language_key(ctx, 'latest')
await self.do_rtfm(ctx, key, obj)
@rtfm.command(name='jp')
async def rtfm_jp(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a discord.py entity (Japanese)."""
await self.do_rtfm(ctx, 'latest-jp', obj)
@rtfm.command(name='python', aliases=['py'])
async def rtfm_python(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a Python entity."""
key = self.transform_rtfm_language_key(ctx, 'python')
await self.do_rtfm(ctx, key, obj)
@rtfm.command(name='py-jp', aliases=['py-ja'])
async def rtfm_python_jp(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a Python entity (Japanese)."""
await self.do_rtfm(ctx, 'python-jp', obj)
async def _member_stats(self, ctx, member, total_uses):
e = discord.Embed(title='RTFM Stats')
e.set_author(name=str(member), icon_url=member.avatar_url)
@commands.command(name="evalute",description="コードを評価します")
async def evalute(self,ctx, n, *, code):
nm = n.lower()
a = code.replace("```", "")
if nm == "py":
b = (piston.execute(language="py", version="3.9", code=a))
c = str(b)
em = discord.Embed(title="Python Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "java":
b = (piston.execute(language="java", version="15.0.2", code=a))
c = str(b)
em = discord.Embed(title="Java Code Output!",
description=f'```py\n出力:\n{c}```',
color=discord.Color.red())
elif nm == "js":
b = (piston.execute(language="js", version="15.10.0", code=a))
c = str(b)
em = discord.Embed(title="JavaScript Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "go":
b = (piston.execute(language="go", version="1.16.2", code=a))
c = str(b)
em = discord.Embed(title="Go Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "ts":
b = (piston.execute(language="typescript", version="4.2.3", code=a))
c = str(b)
em = discord.Embed(title="TypeScript Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "bf":
b = (piston.execute(language="brainfuck", version="2.7.3", code=a))
c = str(b)
em = discord.Embed(title="BrainFuck Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "php":
b = (piston.execute(language="php",version="8.0",code=a))
c = str(b)
em = discord.Embed(title="PHP code出力",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
else:
em = discord.Embed(title="その言語は対応していません")
await ctx.send(embed=em)
def setup(client):
client.add_cog(ProgrammingCog(client)) | identifier_body | ||
program.py | import discord
import os
import zlib
import io
import re
import aiohttp
from util import fuzzy
from discord.ext import commands
from pistonapi import PistonAPI
piston = PistonAPI()
class SphinxObjectFileReader:
# Inspired by Sphinx's InventoryFileReader
BUFSIZE = 16 * 1024
def __init__(self, buffer):
self.stream = io.BytesIO(buffer)
def readline(self):
return self.stream.readline().decode('utf-8')
def skipline(self):
self.stream.readline()
def read_compressed_chunks(self):
decompressor = zlib.decompressobj()
while True:
chunk = self.stream.read(self.BUFSIZE)
if len(chunk) == 0:
break
yield decompressor.decompress(chunk)
yield decompressor.flush()
def read_compressed_lines(self):
buf = b''
for chunk in self.read_compressed_chunks():
buf += chunk
pos = buf.find(b'\n')
while pos != -1:
yield buf[:pos].decode('utf-8')
buf = buf[pos + 1:]
pos = buf.find(b'\n')
class ProgrammingCog(commands.Cog, name="Programming"):
"""Commands for programmers"""
def __init__(self, client):
self.client = client
self.bot = client
self.client.session = aiohttp.ClientSession(loop=self.client.loop)
self.regex = re.compile(r"(\w*)\s*(?:```)(\w*)?([\s\S]*)(?:```$)")
@property
def session(self):
return self.bot.http._HTTPClient__session
async def _run_code(self, *, lang: str, code: str):
res = await self.session.post(
"https://emkc.org/api/v1/piston/execute",
json={"language": lang, "source": code})
return await res.json()
@commands.command()
async def run(self, ctx: commands.Context, *, codeblock: str):
"""
Run code and get results instantly
**Note**: You must use codeblocks around the code
Supported languages: awk, bash, brainfuck, c, cpp, crystal, csharp, d, dash, deno, elixer, emacs, go, haskell, java, jelly, julia, kotlin, lisp, lua, nasm, nasm64, nim, node, osabie, paradoc, perl, php, prolog, python2, python, ruby, rust, scala, swift, typescript, zig
"""
matches = self.regex.findall(codeblock)
if not matches:
return await ctx.reply(embed=discord.Embed(title="Uh-oh", description="Couldn't quite see your codeblock"))
lang = matches[0][0] or matches[0][1]
if not lang:
return await ctx.reply(embed=discord.Embed(title="Uh-oh",
description="Couldn't find the language hinted in the codeblock or before it"))
code = matches[0][2]
result = await self._run_code(lang=lang, code=code)
await self._send_result(ctx, result)
@commands.command()
async def runl(self, ctx: commands.Context, lang: str, *, code: str):
"""
Run a single line of code, **must** specify language as first argument
Supported languages: awk, bash, brainfuck, c, cpp, crystal, csharp, d, dash, deno, elixer, emacs, go, haskell, java, jelly, julia, kotlin, lisp, lua, nasm, nasm64, nim, node, osabie, paradoc, perl, php, prolog, python2, python, ruby, rust, scala, swift, typescript, zig
"""
result = await self._run_code(lang=lang, code=code)
await self._send_result(ctx, result)
async def _send_result(self, ctx: commands.Context, result: dict):
if "message" in result:
return await ctx.reply(embed=discord.Embed(title="Uh-oh", description=result["message"]))
output = result['output']
# if len(output) > 2000:
# url = await create_guest_paste_bin(self.session, output)
# return await ctx.reply("Your output was too long, so here's the pastebin link " + url)
embed = discord.Embed(
title=f"{result['language'][0].upper() + result['language'][1:]}")
newline = '\n'
rep = {"python3": "py", "python2": "py", 'node': 'js'}
rep = dict((re.escape(k), v) for k, v in rep.items())
pattern = re.compile("|".join(rep.keys()))
converted_language = pattern.sub(lambda m: rep[re.escape(m.group(0))], result['language'])
limit = 1024 - (29 + len(converted_language))
output = f"```{converted_language}\n{output[:limit]}```{(len(output) > limit) * (newline + '**Output shortened**')}"
embed.add_field(name="Output", value=output or "**No output**")
try:
await ctx.reply(embed=embed)
except:
await ctx.reply(output)
def parse_object_inv(self, stream, url):
# key: URL
# n.b.: key doesn't have `discord` or `discord.ext.commands` namespaces
result = {}
# first line is version info
inv_version = stream.readline().rstrip()
if inv_version != '# Sphinx inventory version 2':
raise RuntimeError('Invalid objects.inv file version.')
# next line is "# Project: <name>"
# then after that is "# Version: <version>"
projname = stream.readline().rstrip()[11:]
version = stream.readline().rstrip()[11:]
# next line says if it's a zlib header
line = stream.readline()
if 'zlib' not in line:
|
# This code mostly comes from the Sphinx repository.
entry_regex = re.compile(r'(?x)(.+?)\s+(\S*:\S*)\s+(-?\d+)\s+(\S+)\s+(.*)')
for line in stream.read_compressed_lines():
match = entry_regex.match(line.rstrip())
if not match:
continue
name, directive, prio, location, dispname = match.groups()
domain, _, subdirective = directive.partition(':')
if directive == 'py:module' and name in result:
# From the Sphinx Repository:
# due to a bug in 1.1 and below,
# two inventory entries are created
# for Python modules, and the first
# one is correct
continue
# Most documentation pages have a label
if directive == 'std:doc':
subdirective = 'label'
if location.endswith('$'):
location = location[:-1] + name
key = name if dispname == '-' else dispname
prefix = f'{subdirective}:' if domain == 'std' else ''
if projname == 'discord.py':
key = key.replace('discord.ext.commands.', '').replace('discord.', '')
result[f'{prefix}{key}'] = os.path.join(url, location)
return result
async def build_rtfm_lookup_table(self, page_types):
cache = {}
for key, page in page_types.items():
sub = cache[key] = {}
async with self.bot.session.get(page + '/objects.inv') as resp:
if resp.status != 200:
raise RuntimeError('Cannot build rtfm lookup table, try again later.')
stream = SphinxObjectFileReader(await resp.read())
cache[key] = self.parse_object_inv(stream, page)
self._rtfm_cache = cache
async def do_rtfm(self, ctx, key, obj):
page_types = {
'latest': 'https://discordpy.readthedocs.io/en/latest',
'latest-jp': 'https://discordpy.readthedocs.io/ja/latest',
'python': 'https://docs.python.org/3',
'python-jp': 'https://docs.python.org/ja/3',
}
if obj is None:
await ctx.send(page_types[key])
return
if not hasattr(self, '_rtfm_cache'):
await ctx.trigger_typing()
await self.build_rtfm_lookup_table(page_types)
obj = re.sub(r'^(?:discord\.(?:ext\.)?)?(?:commands\.)?(.+)', r'\1', obj)
if key.startswith('latest'):
# point the abc.Messageable types properly:
q = obj.lower()
for name in dir(discord.abc.Messageable):
if name[0] == '_':
continue
if q == name:
obj = f'abc.Messageable.{name}'
break
cache = list(self._rtfm_cache[key].items())
def transform(tup):
return tup[0]
matches = fuzzy.finder(obj, cache, key=lambda t: t[0], lazy=False)[:8]
e = discord.Embed(colour=discord.Colour.blurple())
if len(matches) == 0:
return await ctx.send('Could not find anything. Sorry.')
e.description = '\n'.join(f'[`{key}`]({url})' for key, url in matches)
await ctx.send(embed=e)
def transform_rtfm_language_key(self, ctx, prefix):
if ctx.guild is not None:
# 日本語 category
if ctx.channel.category_id == 490287576670928914:
return prefix + '-jp'
# d.py unofficial JP
elif ctx.guild.id == 463986890190749698:
return prefix + '-jp'
return prefix
@commands.group(aliases=['rtfd'], invoke_without_command=True)
async def rtfm(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a discord.py entity.
Events, objects, and functions are all supported through a
a cruddy fuzzy algorithm.
"""
key = self.transform_rtfm_language_key(ctx, 'latest')
await self.do_rtfm(ctx, key, obj)
@rtfm.command(name='jp')
async def rtfm_jp(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a discord.py entity (Japanese)."""
await self.do_rtfm(ctx, 'latest-jp', obj)
@rtfm.command(name='python', aliases=['py'])
async def rtfm_python(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a Python entity."""
key = self.transform_rtfm_language_key(ctx, 'python')
await self.do_rtfm(ctx, key, obj)
@rtfm.command(name='py-jp', aliases=['py-ja'])
async def rtfm_python_jp(self, ctx, *, obj: str = None):
"""Gives you a documentation link for a Python entity (Japanese)."""
await self.do_rtfm(ctx, 'python-jp', obj)
async def _member_stats(self, ctx, member, total_uses):
e = discord.Embed(title='RTFM Stats')
e.set_author(name=str(member), icon_url=member.avatar_url)
@commands.command(name="evalute",description="コードを評価します")
async def evalute(self,ctx, n, *, code):
nm = n.lower()
a = code.replace("```", "")
if nm == "py":
b = (piston.execute(language="py", version="3.9", code=a))
c = str(b)
em = discord.Embed(title="Python Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "java":
b = (piston.execute(language="java", version="15.0.2", code=a))
c = str(b)
em = discord.Embed(title="Java Code Output!",
description=f'```py\n出力:\n{c}```',
color=discord.Color.red())
elif nm == "js":
b = (piston.execute(language="js", version="15.10.0", code=a))
c = str(b)
em = discord.Embed(title="JavaScript Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "go":
b = (piston.execute(language="go", version="1.16.2", code=a))
c = str(b)
em = discord.Embed(title="Go Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "ts":
b = (piston.execute(language="typescript", version="4.2.3", code=a))
c = str(b)
em = discord.Embed(title="TypeScript Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "bf":
b = (piston.execute(language="brainfuck", version="2.7.3", code=a))
c = str(b)
em = discord.Embed(title="BrainFuck Code Output!",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
elif nm == "php":
b = (piston.execute(language="php",version="8.0",code=a))
c = str(b)
em = discord.Embed(title="PHP code出力",
description=f'```py\nOutput:\n{c}```',
color=discord.Color.red())
else:
em = discord.Embed(title="その言語は対応していません")
await ctx.send(embed=em)
def setup(client):
client.add_cog(ProgrammingCog(client)) | raise RuntimeError('Invalid objects.inv file, not z-lib compatible.') | conditional_block |
packer.rs | // Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::Error;
use crate::{Slate, SlateVersion, Slatepack, SlatepackArmor};
use ed25519_dalek::PublicKey as DalekPublicKey;
use ed25519_dalek::SecretKey as DalekSecretKey;
use crate::slatepack::slatepack::SlatePurpose;
#[derive(Clone, Debug)]
/// Arguments, mostly for encrypting decrypting a slatepack
pub struct Slatepacker {
/// Sender address, None for wrapped
pub sender: Option<DalekPublicKey>,
/// Recipient addresses, None for wrapped
pub recipient: Option<DalekPublicKey>,
/// The content purpose. It customize serializer/deserializer for us.
pub content: SlatePurpose,
/// Slate data.
pub slate: Slate,
}
impl Slatepacker {
/// Swap a slate with the packer. Slate is expecte to be full
pub fn wrap_slate(slate: Slate) -> Self {
Self {
sender: None,
recipient: None,
content: SlatePurpose::FullSlate,
slate,
}
}
/// Pack everything into the armored slatepack
pub fn encrypt_to_send(
slate: Slate,
slate_version: SlateVersion,
content: SlatePurpose,
sender: DalekPublicKey,
recipient: Option<DalekPublicKey>, // Encrypted only if recipient is some
secret: &DalekSecretKey,
use_test_rng: bool,
) -> Result<String, Error> {
let pack = Slatepack {
sender: Some(sender),
recipient: recipient,
content,
slate: slate,
};
let (slate_bin, encrypted) = pack.to_binary(slate_version, secret, use_test_rng)?;
SlatepackArmor::encode(&slate_bin, encrypted)
}
/// return slatepack
pub fn decrypt_slatepack(data: &[u8], dec_key: &DalekSecretKey) -> Result<Self, Error> {
let (slate_bytes, encrypted) = SlatepackArmor::decode(data)?;
let slatepack = Slatepack::from_binary(&slate_bytes, encrypted, dec_key)?;
let Slatepack {
sender,
recipient,
content,
slate,
} = slatepack;
Ok(Self {
sender,
recipient,
content,
slate,
})
}
/// Get Transaction ID related into form this slatepack
pub fn get_content(&self) -> SlatePurpose {
self.content.clone()
}
/// Get Sender info. It is needed to send the response back
pub fn get_sender(&self) -> Option<DalekPublicKey> |
/// Get Sender info. It is needed to send the response back
pub fn get_recipient(&self) -> Option<DalekPublicKey> {
self.recipient.clone()
}
/// Convert this slate back to the resulting slate. Since the slate pack contain only the change set,
/// to recover the data it is required original slate to merge with.
pub fn to_result_slate(self) -> Slate {
self.slate
}
}
#[test]
fn slatepack_io_test() {
use crate::grin_core::core::KernelFeatures;
use crate::grin_core::core::{Input, Output, OutputFeatures, Transaction, TxKernel};
use crate::grin_core::global;
use crate::grin_keychain::BlindingFactor;
use crate::grin_keychain::ExtKeychain;
use crate::grin_util as util;
use crate::grin_util::secp::pedersen::{Commitment, RangeProof};
use crate::grin_util::secp::Signature;
use crate::grin_util::secp::{PublicKey, Secp256k1, SecretKey};
use crate::proof::proofaddress;
use crate::proof::proofaddress::ProvableAddress;
use crate::slate::{PaymentInfo, VersionCompatInfo};
use crate::ParticipantData;
use uuid::Uuid;
use x25519_dalek::PublicKey as xDalekPublicKey;
global::set_local_chain_type(global::ChainTypes::AutomatedTesting);
let bytes_16: [u8; 16] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
let bytes_32: [u8; 32] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32,
];
let bytes_32_2: [u8; 32] = [
2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29, 30, 31, 32, 33,
];
let bytes_33: [u8; 33] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 33,
];
let bytes_64: [u8; 64] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
];
let sk = SecretKey::from_slice(&bytes_32).unwrap();
let secp = Secp256k1::new();
let dalek_sk = DalekSecretKey::from_bytes(&bytes_32).unwrap();
let dalek_pk = DalekPublicKey::from(&dalek_sk);
let dalek_sk2 = DalekSecretKey::from_bytes(&bytes_32_2).unwrap();
let dalek_pk2 = DalekPublicKey::from(&dalek_sk2);
// Let's test out Dalec 2 xDalec algebra.
let dalek_xpk = proofaddress::tor_pub_2_slatepack_pub(&dalek_pk).unwrap();
let dalek_xpk2 = proofaddress::tor_pub_2_slatepack_pub(&dalek_pk2).unwrap();
let dalek_xsk = proofaddress::tor_secret_2_slatepack_secret(&dalek_sk);
let dalek_xsk2 = proofaddress::tor_secret_2_slatepack_secret(&dalek_sk2);
let builded_xpk = xDalekPublicKey::from(&dalek_xsk);
let builded_xpk2 = xDalekPublicKey::from(&dalek_xsk2);
assert_eq!(dalek_xpk.as_bytes(), builded_xpk.as_bytes());
assert_eq!(dalek_xpk2.as_bytes(), builded_xpk2.as_bytes());
// check if Diffie Hoffman works...
let shared_secret1 = dalek_xsk.diffie_hellman(&dalek_xpk2);
let shared_secret2 = dalek_xsk2.diffie_hellman(&dalek_xpk);
assert_eq!(shared_secret1.as_bytes(), shared_secret2.as_bytes());
// Note, Slate Data is fake. Just some randome numbers, it will not pass validation of any type
let mut slate_enc = Slate {
compact_slate: true, // Slatepack works only for compact models.
num_participants: 2,
id: Uuid::from_bytes(bytes_16),
tx: Transaction::empty()
.with_offset(BlindingFactor::from_slice(&bytes_32) )
.with_input( Input::new( OutputFeatures::Plain, Commitment(bytes_33)) )
.with_output( Output::new(OutputFeatures::Plain, Commitment(bytes_33), RangeProof::zero()))
.with_kernel( TxKernel::with_features(KernelFeatures::Plain { fee: 321 }) ),
offset: BlindingFactor::from_slice(&bytes_32),
amount: 30000000000000000,
fee: 321,
height: 67,
lock_height: 0,
ttl_cutoff_height: Some(54),
participant_data: vec![
ParticipantData {
id: 0,
public_blind_excess: PublicKey::from_secret_key( &secp, &sk).unwrap(),
public_nonce: PublicKey::from_secret_key( &secp, &sk).unwrap(),
part_sig: None,
message: Some("message 1 to send".to_string()),
message_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
},
ParticipantData {
id: 1,
public_blind_excess: PublicKey::from_secret_key( &secp, &sk).unwrap(),
public_nonce: PublicKey::from_secret_key( &secp, &sk).unwrap(),
part_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
message: Some("message 2 to send".to_string()),
message_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
}
],
version_info: VersionCompatInfo {
version: 3,
block_header_version: 1,
},
payment_proof: Some(PaymentInfo {
sender_address: ProvableAddress::from_str("a5ib4b2l5snzdgxzpdzouwxwvn4c3setpp5t5j2tr37n3uy3665qwnqd").unwrap(),
receiver_address: ProvableAddress::from_str("a5ib4b2l5snzdgxzpdzouwxwvn4c3setpp5t5j2tr37n3uy3665qwnqd").unwrap(),
receiver_signature: Some( util::to_hex(&bytes_64) ),
}),
};
// updating kernel excess
slate_enc.tx.body.kernels[0].excess = slate_enc.calc_excess::<ExtKeychain>(None).unwrap();
let slate_enc_str = format!("{:?}", slate_enc);
println!("start encrypted slate = {}", slate_enc_str);
// Not encoded, just want to review the data...
let slatepack_string_encrypted = Slatepacker::encrypt_to_send(
slate_enc.clone(),
SlateVersion::SP,
SlatePurpose::FullSlate,
dalek_pk.clone(),
Some(dalek_pk2.clone()), // sending to self, should be fine...
&dalek_sk,
true,
)
.unwrap();
println!("slatepack encrypted = {}", slatepack_string_encrypted);
// Not encoded, just want to review the data...
let slatepack_string_binary = Slatepacker::encrypt_to_send(
slate_enc.clone(),
SlateVersion::SP,
SlatePurpose::FullSlate,
dalek_pk.clone(),
None, // No recipient, should trigger non encrypted mode.
&dalek_sk,
true,
)
.unwrap();
println!("slatepack binary = {}", slatepack_string_binary);
assert!(slatepack_string_encrypted.len() > slatepack_string_binary.len());
// Testing if can open from a backup
let slatepack =
Slatepacker::decrypt_slatepack(slatepack_string_encrypted.as_bytes(), &dalek_sk).unwrap();
let res_slate = slatepack.to_result_slate();
let slate2_str = format!("{:?}", res_slate);
println!("res_slate = {:?}", slate2_str);
assert_eq!(slate_enc_str, slate2_str);
// Testing if another party can open it
let slatepack =
Slatepacker::decrypt_slatepack(slatepack_string_encrypted.as_bytes(), &dalek_sk2).unwrap();
let res_slate = slatepack.to_result_slate();
let slate2_str = format!("{:?}", res_slate);
println!("res_slate2 = {:?}", slate2_str);
assert_eq!(slate_enc_str, slate2_str);
// Testing if can decode form the binary
let slatepack = Slatepacker::decrypt_slatepack(
slatepack_string_binary.as_bytes(),
&DalekSecretKey::from_bytes(&[1; 32]).unwrap(),
)
.unwrap();
let res_slate = slatepack.to_result_slate();
let slate3_str = format!("{:?}", res_slate);
println!("slate3_str = {:?}", slate3_str);
assert_eq!(slate_enc_str, slate3_str);
}
| {
self.sender.clone()
} | identifier_body |
packer.rs | // Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::Error;
use crate::{Slate, SlateVersion, Slatepack, SlatepackArmor};
use ed25519_dalek::PublicKey as DalekPublicKey;
use ed25519_dalek::SecretKey as DalekSecretKey;
use crate::slatepack::slatepack::SlatePurpose;
#[derive(Clone, Debug)]
/// Arguments, mostly for encrypting decrypting a slatepack
pub struct Slatepacker {
/// Sender address, None for wrapped
pub sender: Option<DalekPublicKey>,
/// Recipient addresses, None for wrapped
pub recipient: Option<DalekPublicKey>,
/// The content purpose. It customize serializer/deserializer for us.
pub content: SlatePurpose,
/// Slate data.
pub slate: Slate,
}
impl Slatepacker {
/// Swap a slate with the packer. Slate is expecte to be full
pub fn wrap_slate(slate: Slate) -> Self {
Self {
sender: None,
recipient: None,
content: SlatePurpose::FullSlate,
slate,
}
}
/// Pack everything into the armored slatepack
pub fn encrypt_to_send(
slate: Slate,
slate_version: SlateVersion,
content: SlatePurpose,
sender: DalekPublicKey,
recipient: Option<DalekPublicKey>, // Encrypted only if recipient is some
secret: &DalekSecretKey,
use_test_rng: bool,
) -> Result<String, Error> {
let pack = Slatepack {
sender: Some(sender),
recipient: recipient,
content,
slate: slate,
};
let (slate_bin, encrypted) = pack.to_binary(slate_version, secret, use_test_rng)?;
SlatepackArmor::encode(&slate_bin, encrypted)
}
/// return slatepack
pub fn decrypt_slatepack(data: &[u8], dec_key: &DalekSecretKey) -> Result<Self, Error> {
let (slate_bytes, encrypted) = SlatepackArmor::decode(data)?;
let slatepack = Slatepack::from_binary(&slate_bytes, encrypted, dec_key)?;
let Slatepack {
sender,
recipient,
content,
slate,
} = slatepack;
Ok(Self {
sender,
recipient,
content,
slate,
})
}
/// Get Transaction ID related into form this slatepack
pub fn | (&self) -> SlatePurpose {
self.content.clone()
}
/// Get Sender info. It is needed to send the response back
pub fn get_sender(&self) -> Option<DalekPublicKey> {
self.sender.clone()
}
/// Get Sender info. It is needed to send the response back
pub fn get_recipient(&self) -> Option<DalekPublicKey> {
self.recipient.clone()
}
/// Convert this slate back to the resulting slate. Since the slate pack contain only the change set,
/// to recover the data it is required original slate to merge with.
pub fn to_result_slate(self) -> Slate {
self.slate
}
}
#[test]
fn slatepack_io_test() {
    use crate::grin_core::core::KernelFeatures;
    use crate::grin_core::core::{Input, Output, OutputFeatures, Transaction, TxKernel};
    use crate::grin_core::global;
    use crate::grin_keychain::BlindingFactor;
    use crate::grin_keychain::ExtKeychain;
    use crate::grin_util as util;
    use crate::grin_util::secp::pedersen::{Commitment, RangeProof};
    use crate::grin_util::secp::Signature;
    use crate::grin_util::secp::{PublicKey, Secp256k1, SecretKey};
    use crate::proof::proofaddress;
    use crate::proof::proofaddress::ProvableAddress;
    use crate::slate::{PaymentInfo, VersionCompatInfo};
    use crate::ParticipantData;
    use uuid::Uuid;
    use x25519_dalek::PublicKey as xDalekPublicKey;
    global::set_local_chain_type(global::ChainTypes::AutomatedTesting);
    // Deterministic byte patterns used to build keys, commitments and signatures.
    let bytes_16: [u8; 16] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
    let bytes_32: [u8; 32] = [
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
        26, 27, 28, 29, 30, 31, 32,
    ];
    let bytes_32_2: [u8; 32] = [
        2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
        27, 28, 29, 30, 31, 32, 33,
    ];
    let bytes_33: [u8; 33] = [
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
        26, 27, 28, 29, 30, 31, 32, 33,
    ];
    let bytes_64: [u8; 64] = [
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
        26, 27, 28, 29, 30, 31, 32, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
        19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
    ];
    let sk = SecretKey::from_slice(&bytes_32).unwrap();
    let secp = Secp256k1::new();
    // Two ed25519 keypairs: one acting as the sender, one as the recipient.
    let dalek_sk = DalekSecretKey::from_bytes(&bytes_32).unwrap();
    let dalek_pk = DalekPublicKey::from(&dalek_sk);
    let dalek_sk2 = DalekSecretKey::from_bytes(&bytes_32_2).unwrap();
    let dalek_pk2 = DalekPublicKey::from(&dalek_sk2);
    // Check the ed25519 (Dalek) to x25519 (xDalek) key conversion algebra:
    // converting a public key directly must agree with converting the secret
    // key and re-deriving the public key.
    let dalek_xpk = proofaddress::tor_pub_2_slatepack_pub(&dalek_pk).unwrap();
    let dalek_xpk2 = proofaddress::tor_pub_2_slatepack_pub(&dalek_pk2).unwrap();
    let dalek_xsk = proofaddress::tor_secret_2_slatepack_secret(&dalek_sk);
    let dalek_xsk2 = proofaddress::tor_secret_2_slatepack_secret(&dalek_sk2);
    let builded_xpk = xDalekPublicKey::from(&dalek_xsk);
    let builded_xpk2 = xDalekPublicKey::from(&dalek_xsk2);
    assert_eq!(dalek_xpk.as_bytes(), builded_xpk.as_bytes());
    assert_eq!(dalek_xpk2.as_bytes(), builded_xpk2.as_bytes());
    // Check that Diffie-Hellman works: both sides must derive the same secret.
    let shared_secret1 = dalek_xsk.diffie_hellman(&dalek_xpk2);
    let shared_secret2 = dalek_xsk2.diffie_hellman(&dalek_xpk);
    assert_eq!(shared_secret1.as_bytes(), shared_secret2.as_bytes());
    // Note, Slate data is fake — just some random numbers; it will not pass
    // validation of any type. We only test serialization round trips here.
    let mut slate_enc = Slate {
        compact_slate: true, // Slatepack works only for compact models.
        num_participants: 2,
        id: Uuid::from_bytes(bytes_16),
        tx: Transaction::empty()
            .with_offset(BlindingFactor::from_slice(&bytes_32) )
            .with_input( Input::new( OutputFeatures::Plain, Commitment(bytes_33)) )
            .with_output( Output::new(OutputFeatures::Plain, Commitment(bytes_33), RangeProof::zero()))
            .with_kernel( TxKernel::with_features(KernelFeatures::Plain { fee: 321 }) ),
        offset: BlindingFactor::from_slice(&bytes_32),
        amount: 30000000000000000,
        fee: 321,
        height: 67,
        lock_height: 0,
        ttl_cutoff_height: Some(54),
        participant_data: vec![
            ParticipantData {
                id: 0,
                public_blind_excess: PublicKey::from_secret_key( &secp, &sk).unwrap(),
                public_nonce: PublicKey::from_secret_key( &secp, &sk).unwrap(),
                part_sig: None,
                message: Some("message 1 to send".to_string()),
                message_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
            },
            ParticipantData {
                id: 1,
                public_blind_excess: PublicKey::from_secret_key( &secp, &sk).unwrap(),
                public_nonce: PublicKey::from_secret_key( &secp, &sk).unwrap(),
                part_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
                message: Some("message 2 to send".to_string()),
                message_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
            }
        ],
        version_info: VersionCompatInfo {
            version: 3,
            block_header_version: 1,
        },
        payment_proof: Some(PaymentInfo {
            sender_address: ProvableAddress::from_str("a5ib4b2l5snzdgxzpdzouwxwvn4c3setpp5t5j2tr37n3uy3665qwnqd").unwrap(),
            receiver_address: ProvableAddress::from_str("a5ib4b2l5snzdgxzpdzouwxwvn4c3setpp5t5j2tr37n3uy3665qwnqd").unwrap(),
            receiver_signature: Some( util::to_hex(&bytes_64) ),
        }),
    };
    // Updating kernel excess so the slate has an internally consistent kernel.
    slate_enc.tx.body.kernels[0].excess = slate_enc.calc_excess::<ExtKeychain>(None).unwrap();
    let slate_enc_str = format!("{:?}", slate_enc);
    println!("start encrypted slate = {}", slate_enc_str);
    // Encrypted round trip: sender -> recipient (here: to self).
    let slatepack_string_encrypted = Slatepacker::encrypt_to_send(
        slate_enc.clone(),
        SlateVersion::SP,
        SlatePurpose::FullSlate,
        dalek_pk.clone(),
        Some(dalek_pk2.clone()), // sending to self, should be fine...
        &dalek_sk,
        true,
    )
    .unwrap();
    println!("slatepack encrypted = {}", slatepack_string_encrypted);
    // Plain (binary, non-encrypted) variant for comparison.
    let slatepack_string_binary = Slatepacker::encrypt_to_send(
        slate_enc.clone(),
        SlateVersion::SP,
        SlatePurpose::FullSlate,
        dalek_pk.clone(),
        None, // No recipient, should trigger non encrypted mode.
        &dalek_sk,
        true,
    )
    .unwrap();
    println!("slatepack binary = {}", slatepack_string_binary);
    // Encryption adds overhead, so the encrypted form must be longer.
    assert!(slatepack_string_encrypted.len() > slatepack_string_binary.len());
    // Testing if the sender can open it back (restore from a backup).
    let slatepack =
        Slatepacker::decrypt_slatepack(slatepack_string_encrypted.as_bytes(), &dalek_sk).unwrap();
    let res_slate = slatepack.to_result_slate();
    let slate2_str = format!("{:?}", res_slate);
    println!("res_slate = {:?}", slate2_str);
    assert_eq!(slate_enc_str, slate2_str);
    // Testing if the other party (the recipient) can open it.
    let slatepack =
        Slatepacker::decrypt_slatepack(slatepack_string_encrypted.as_bytes(), &dalek_sk2).unwrap();
    let res_slate = slatepack.to_result_slate();
    let slate2_str = format!("{:?}", res_slate);
    println!("res_slate2 = {:?}", slate2_str);
    assert_eq!(slate_enc_str, slate2_str);
    // Testing decode of the non-encrypted binary form; the key should be
    // irrelevant there, so an arbitrary key is used.
    let slatepack = Slatepacker::decrypt_slatepack(
        slatepack_string_binary.as_bytes(),
        &DalekSecretKey::from_bytes(&[1; 32]).unwrap(),
    )
    .unwrap();
    let res_slate = slatepack.to_result_slate();
    let slate3_str = format!("{:?}", res_slate);
    println!("slate3_str = {:?}", slate3_str);
    assert_eq!(slate_enc_str, slate3_str);
}
| get_content | identifier_name |
packer.rs | // Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::Error;
use crate::{Slate, SlateVersion, Slatepack, SlatepackArmor};
use ed25519_dalek::PublicKey as DalekPublicKey;
use ed25519_dalek::SecretKey as DalekSecretKey;
use crate::slatepack::slatepack::SlatePurpose;
#[derive(Clone, Debug)]
/// Slatepack packer/unpacker state: the slate payload together with the
/// metadata (sender, recipient, purpose) needed to encrypt, decrypt and
/// route it.
pub struct Slatepacker {
    /// Sender address, None for wrapped
    pub sender: Option<DalekPublicKey>,
    /// Recipient address, None for wrapped
    pub recipient: Option<DalekPublicKey>,
    /// The content purpose. It customizes the serializer/deserializer for us.
    pub content: SlatePurpose,
    /// Slate data.
    pub slate: Slate,
}
impl Slatepacker {
    /// Wrap a slate with the packer. The slate is expected to be full.
    pub fn wrap_slate(slate: Slate) -> Self {
        Self {
            sender: None,
            recipient: None,
            content: SlatePurpose::FullSlate,
            slate,
        }
    }

    /// Pack everything into the armored slatepack string.
    pub fn encrypt_to_send(
        slate: Slate,
        slate_version: SlateVersion,
        content: SlatePurpose,
        sender: DalekPublicKey,
        recipient: Option<DalekPublicKey>, // Encrypted only if recipient is some
        secret: &DalekSecretKey,
        use_test_rng: bool,
    ) -> Result<String, Error> {
        // Field-init shorthand instead of `recipient: recipient` / `slate: slate`.
        let pack = Slatepack {
            sender: Some(sender),
            recipient,
            content,
            slate,
        };
        let (slate_bin, encrypted) = pack.to_binary(slate_version, secret, use_test_rng)?;
        SlatepackArmor::encode(&slate_bin, encrypted)
    }

    /// Decode armored slatepack data (decrypting it when it is encrypted)
    /// and return the resulting slatepack.
    pub fn decrypt_slatepack(data: &[u8], dec_key: &DalekSecretKey) -> Result<Self, Error> {
        let (slate_bytes, encrypted) = SlatepackArmor::decode(data)?;
        let slatepack = Slatepack::from_binary(&slate_bytes, encrypted, dec_key)?;
        let Slatepack {
            sender,
            recipient,
            content,
            slate,
        } = slatepack;
        Ok(Self {
            sender,
            recipient,
            content,
            slate,
        })
    }

    /// Get the content purpose of this slatepack.
    // (Previous doc comment incorrectly talked about a "Transaction ID".)
    pub fn get_content(&self) -> SlatePurpose {
        self.content.clone()
    }

    /// Get sender info. It is needed to send the response back.
    pub fn get_sender(&self) -> Option<DalekPublicKey> {
        self.sender.clone()
    }

    /// Get recipient info, if the slatepack was addressed to somebody.
    pub fn get_recipient(&self) -> Option<DalekPublicKey> {
        self.recipient.clone()
    }

    /// Consume the packer and return the contained slate. Since the slatepack
    /// may contain only a change set, recovering the full data can require
    /// merging with the original slate.
    pub fn to_result_slate(self) -> Slate {
        self.slate
    }
}
#[test]
fn slatepack_io_test() {
use crate::grin_core::core::KernelFeatures;
use crate::grin_core::core::{Input, Output, OutputFeatures, Transaction, TxKernel};
use crate::grin_core::global;
use crate::grin_keychain::BlindingFactor;
use crate::grin_keychain::ExtKeychain;
use crate::grin_util as util;
use crate::grin_util::secp::pedersen::{Commitment, RangeProof};
use crate::grin_util::secp::Signature;
use crate::grin_util::secp::{PublicKey, Secp256k1, SecretKey};
use crate::proof::proofaddress;
use crate::proof::proofaddress::ProvableAddress;
use crate::slate::{PaymentInfo, VersionCompatInfo};
use crate::ParticipantData;
use uuid::Uuid;
use x25519_dalek::PublicKey as xDalekPublicKey;
global::set_local_chain_type(global::ChainTypes::AutomatedTesting);
let bytes_16: [u8; 16] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
let bytes_32: [u8; 32] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32,
];
let bytes_32_2: [u8; 32] = [
2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29, 30, 31, 32, 33,
];
let bytes_33: [u8; 33] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 33,
];
let bytes_64: [u8; 64] = [ | 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32,
];
let sk = SecretKey::from_slice(&bytes_32).unwrap();
let secp = Secp256k1::new();
let dalek_sk = DalekSecretKey::from_bytes(&bytes_32).unwrap();
let dalek_pk = DalekPublicKey::from(&dalek_sk);
let dalek_sk2 = DalekSecretKey::from_bytes(&bytes_32_2).unwrap();
let dalek_pk2 = DalekPublicKey::from(&dalek_sk2);
// Let's test out Dalec 2 xDalec algebra.
let dalek_xpk = proofaddress::tor_pub_2_slatepack_pub(&dalek_pk).unwrap();
let dalek_xpk2 = proofaddress::tor_pub_2_slatepack_pub(&dalek_pk2).unwrap();
let dalek_xsk = proofaddress::tor_secret_2_slatepack_secret(&dalek_sk);
let dalek_xsk2 = proofaddress::tor_secret_2_slatepack_secret(&dalek_sk2);
let builded_xpk = xDalekPublicKey::from(&dalek_xsk);
let builded_xpk2 = xDalekPublicKey::from(&dalek_xsk2);
assert_eq!(dalek_xpk.as_bytes(), builded_xpk.as_bytes());
assert_eq!(dalek_xpk2.as_bytes(), builded_xpk2.as_bytes());
// check if Diffie Hoffman works...
let shared_secret1 = dalek_xsk.diffie_hellman(&dalek_xpk2);
let shared_secret2 = dalek_xsk2.diffie_hellman(&dalek_xpk);
assert_eq!(shared_secret1.as_bytes(), shared_secret2.as_bytes());
// Note, Slate Data is fake. Just some randome numbers, it will not pass validation of any type
let mut slate_enc = Slate {
compact_slate: true, // Slatepack works only for compact models.
num_participants: 2,
id: Uuid::from_bytes(bytes_16),
tx: Transaction::empty()
.with_offset(BlindingFactor::from_slice(&bytes_32) )
.with_input( Input::new( OutputFeatures::Plain, Commitment(bytes_33)) )
.with_output( Output::new(OutputFeatures::Plain, Commitment(bytes_33), RangeProof::zero()))
.with_kernel( TxKernel::with_features(KernelFeatures::Plain { fee: 321 }) ),
offset: BlindingFactor::from_slice(&bytes_32),
amount: 30000000000000000,
fee: 321,
height: 67,
lock_height: 0,
ttl_cutoff_height: Some(54),
participant_data: vec![
ParticipantData {
id: 0,
public_blind_excess: PublicKey::from_secret_key( &secp, &sk).unwrap(),
public_nonce: PublicKey::from_secret_key( &secp, &sk).unwrap(),
part_sig: None,
message: Some("message 1 to send".to_string()),
message_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
},
ParticipantData {
id: 1,
public_blind_excess: PublicKey::from_secret_key( &secp, &sk).unwrap(),
public_nonce: PublicKey::from_secret_key( &secp, &sk).unwrap(),
part_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
message: Some("message 2 to send".to_string()),
message_sig: Some(Signature::from_compact(&util::from_hex("89cc3c1480fea655f29d300fcf68d0cfbf53f96a1d6b1219486b64385ed7ed89acf96f1532b31ac8309e611583b1ecf37090e79700fae3683cf682c0043b3029").unwrap()).unwrap()),
}
],
version_info: VersionCompatInfo {
version: 3,
block_header_version: 1,
},
payment_proof: Some(PaymentInfo {
sender_address: ProvableAddress::from_str("a5ib4b2l5snzdgxzpdzouwxwvn4c3setpp5t5j2tr37n3uy3665qwnqd").unwrap(),
receiver_address: ProvableAddress::from_str("a5ib4b2l5snzdgxzpdzouwxwvn4c3setpp5t5j2tr37n3uy3665qwnqd").unwrap(),
receiver_signature: Some( util::to_hex(&bytes_64) ),
}),
};
// updating kernel excess
slate_enc.tx.body.kernels[0].excess = slate_enc.calc_excess::<ExtKeychain>(None).unwrap();
let slate_enc_str = format!("{:?}", slate_enc);
println!("start encrypted slate = {}", slate_enc_str);
// Not encoded, just want to review the data...
let slatepack_string_encrypted = Slatepacker::encrypt_to_send(
slate_enc.clone(),
SlateVersion::SP,
SlatePurpose::FullSlate,
dalek_pk.clone(),
Some(dalek_pk2.clone()), // sending to self, should be fine...
&dalek_sk,
true,
)
.unwrap();
println!("slatepack encrypted = {}", slatepack_string_encrypted);
// Not encoded, just want to review the data...
let slatepack_string_binary = Slatepacker::encrypt_to_send(
slate_enc.clone(),
SlateVersion::SP,
SlatePurpose::FullSlate,
dalek_pk.clone(),
None, // No recipient, should trigger non encrypted mode.
&dalek_sk,
true,
)
.unwrap();
println!("slatepack binary = {}", slatepack_string_binary);
assert!(slatepack_string_encrypted.len() > slatepack_string_binary.len());
// Testing if can open from a backup
let slatepack =
Slatepacker::decrypt_slatepack(slatepack_string_encrypted.as_bytes(), &dalek_sk).unwrap();
let res_slate = slatepack.to_result_slate();
let slate2_str = format!("{:?}", res_slate);
println!("res_slate = {:?}", slate2_str);
assert_eq!(slate_enc_str, slate2_str);
// Testing if another party can open it
let slatepack =
Slatepacker::decrypt_slatepack(slatepack_string_encrypted.as_bytes(), &dalek_sk2).unwrap();
let res_slate = slatepack.to_result_slate();
let slate2_str = format!("{:?}", res_slate);
println!("res_slate2 = {:?}", slate2_str);
assert_eq!(slate_enc_str, slate2_str);
// Testing if can decode form the binary
let slatepack = Slatepacker::decrypt_slatepack(
slatepack_string_binary.as_bytes(),
&DalekSecretKey::from_bytes(&[1; 32]).unwrap(),
)
.unwrap();
let res_slate = slatepack.to_result_slate();
let slate3_str = format!("{:?}", res_slate);
println!("slate3_str = {:?}", slate3_str);
assert_eq!(slate_enc_str, slate3_str);
} | random_line_split | |
json.go | // Copyright 2020 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gerrit
import (
"fmt"
"net/url"
"sort"
"strconv"
"strings"
"google.golang.org/protobuf/types/known/timestamppb"
"go.chromium.org/luci/common/errors"
gerritpb "go.chromium.org/luci/common/proto/gerrit"
)
// This file contains code related to JSON representations of messages that are
// used for requests to the Gerrit REST API, and unmarshalling code to convert
// from the JSON representations to protos defined in `gerritpb`.
//
// Each of these structs corresponds to an entity described at
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#json-entities
// and also to a message in `gerritpb`, and each has a unmarshalling method
// called ToProto.
// NOTE(review): the following comment appears to describe the Timestamp type
// used throughout this file for Gerrit's timestamp JSON format, not the
// accountInfo struct below it — it may have drifted from its declaration;
// confirm placement.
// timestamp implements customized JSON marshal/unmarshal behavior that matches
// the timestamp format used in Gerrit.

// accountInfo represents the JSON wire form of a gerritpb.AccountInfo
// (Gerrit REST AccountInfo entity).
type accountInfo struct {
	Name            string   `json:"name,omitempty"`
	Email           string   `json:"email,omitempty"`
	SecondaryEmails []string `json:"secondary_emails,omitempty"`
	Username        string   `json:"username,omitempty"`
	AccountID       int64    `json:"_account_id,omitempty"`
}
// ToProto converts the wire-format account into its gerritpb equivalent.
// A nil receiver maps to a nil proto, so optional account fields can be
// converted without a pre-check.
func (a *accountInfo) ToProto() *gerritpb.AccountInfo {
	if a == nil {
		return nil
	}
	out := &gerritpb.AccountInfo{}
	out.Name = a.Name
	out.Email = a.Email
	out.SecondaryEmails = a.SecondaryEmails
	out.Username = a.Username
	out.AccountId = a.AccountID
	return out
}
// ownerInfo wraps an accountInfo for REST responses that nest the owner
// account under an "account" key.
type ownerInfo struct {
	Account accountInfo `json:"account,omitempty"`
}
// changeInfo represents JSON for a gerritpb.ChangeInfo on the wire.
type changeInfo struct {
	Number   int64        `json:"_number"`
	Owner    *accountInfo `json:"owner"`
	Project  string       `json:"project"`
	Branch   string       `json:"branch"`
	ChangeID string       `json:"change_id"`

	// Reviewers is keyed by reviewer state: "REVIEWER", "CC" or "REMOVED".
	Reviewers map[string][]*accountInfo `json:"reviewers"`
	Hashtags  []string                  `json:"hashtags"`
	Subject   string                    `json:"subject"`

	// json.Unmarshal cannot convert enum string to value,
	// so this field is handled specially in ToProto.
	Status string `json:"status"`

	CurrentRevision    string                         `json:"current_revision"`
	Revisions          map[string]*revisionInfo       `json:"revisions"`
	Labels             map[string]*labelInfo          `json:"labels"`
	Messages           []changeMessageInfo            `json:"messages"`
	Requirements       []requirement                  `json:"requirements"`
	SubmitRequirements []*submitRequirementResultInfo `json:"submit_requirements"`

	// Created/Updated/Submitted use the custom Gerrit timestamp format.
	Created   Timestamp `json:"created"`
	Updated   Timestamp `json:"updated"`
	Submitted Timestamp `json:"submitted"`

	Submittable        bool   `json:"submittable,omitempty"`
	IsPrivate          bool   `json:"is_private,omitempty"`
	MetaRevID          string `json:"meta_rev_id,omitempty"`
	RevertOf           int64  `json:"revert_of,omitempty"`
	CherryPickOfChange int64  `json:"cherry_pick_of_change,omitempty"`

	// MoreChanges may be set on the last change in a response to a query for
	// changes, but this is not a property of the change itself and is not
	// needed in gerritpb.ChangeInfo.
	MoreChanges bool `json:"_more_changes"`
}
// ToProto converts the wire-format change into a gerritpb.ChangeInfo.
// The REST status string is mapped through the ChangeStatus enum table
// (unknown strings map to the zero value). Returns an error when a nested
// requirement entity cannot be converted.
func (ci *changeInfo) ToProto() (*gerritpb.ChangeInfo, error) {
	ret := &gerritpb.ChangeInfo{
		Number:             ci.Number,
		Owner:              ci.Owner.ToProto(),
		Project:            ci.Project,
		Ref:                branchToRef(ci.Branch),
		Subject:            ci.Subject,
		Status:             gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[ci.Status]),
		Hashtags:           ci.Hashtags,
		CurrentRevision:    ci.CurrentRevision,
		Submittable:        ci.Submittable,
		IsPrivate:          ci.IsPrivate,
		MetaRevId:          ci.MetaRevID,
		Created:            timestamppb.New(ci.Created.Time),
		Updated:            timestamppb.New(ci.Updated.Time),
		Submitted:          timestamppb.New(ci.Submitted.Time),
		RevertOf:           ci.RevertOf,
		CherryPickOfChange: ci.CherryPickOfChange,
		Branch:             ci.Branch,
	}
	if ci.Revisions != nil {
		ret.Revisions = make(map[string]*gerritpb.RevisionInfo, len(ci.Revisions))
		for rev, info := range ci.Revisions {
			ret.Revisions[rev] = info.ToProto()
		}
	}
	if ci.Labels != nil {
		ret.Labels = make(map[string]*gerritpb.LabelInfo, len(ci.Labels))
		for label, info := range ci.Labels {
			ret.Labels[label] = info.ToProto()
		}
	}
	if ci.Messages != nil {
		ret.Messages = make([]*gerritpb.ChangeMessageInfo, len(ci.Messages))
		for i, msg := range ci.Messages {
			ret.Messages[i] = msg.ToProto()
		}
	}
	var err error
	if ci.Requirements != nil {
		ret.Requirements = make([]*gerritpb.Requirement, len(ci.Requirements))
		for i, r := range ci.Requirements {
			if ret.Requirements[i], err = r.ToProto(); err != nil {
				return nil, err
			}
		}
	}
	if ci.SubmitRequirements != nil {
		ret.SubmitRequirements = make([]*gerritpb.SubmitRequirementResultInfo,
			len(ci.SubmitRequirements))
		for i, r := range ci.SubmitRequirements {
			if ret.SubmitRequirements[i], err = r.ToProto(); err != nil {
				return nil, err
			}
		}
	}
	if ci.Reviewers != nil {
		ret.Reviewers = &gerritpb.ReviewerStatusMap{}
		if accs, exist := ci.Reviewers["REVIEWER"]; exist {
			ret.Reviewers.Reviewers = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				ret.Reviewers.Reviewers[i] = acc.ToProto()
			}
		}
		if accs, exist := ci.Reviewers["CC"]; exist {
			ret.Reviewers.Ccs = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				ret.Reviewers.Ccs[i] = acc.ToProto()
			}
		}
		if accs, exist := ci.Reviewers["REMOVED"]; exist {
			ret.Reviewers.Removed = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				// BUG FIX: previously wrote into ret.Reviewers.Ccs[i]
				// (copy-paste from the "CC" branch), which dropped removed
				// reviewers and could panic when Ccs was shorter than accs.
				ret.Reviewers.Removed[i] = acc.ToProto()
			}
		}
	}
	return ret, nil
}
// labelInfo represents the JSON wire form of a gerritpb.LabelInfo
// (Gerrit REST LabelInfo entity).
type labelInfo struct {
	Optional bool `json:"optional"`

	// The accounts that approved/rejected/etc. the label, if any.
	Approved    *accountInfo `json:"approved"`
	Rejected    *accountInfo `json:"rejected"`
	Recommended *accountInfo `json:"recommended"`
	Disliked    *accountInfo `json:"disliked"`

	Blocking     bool  `json:"blocking"`
	Value        int32 `json:"value"`
	DefaultValue int32 `json:"default_value"`

	// All lists one approvalInfo per reviewer.
	All []*approvalInfo `json:"all"`
	// Values maps a numeric score (as a possibly space-padded string, e.g.
	// " 0" or "+1") to its description.
	Values map[string]string `json:"values"`
}
// ToProto converts labelInfo to a gerritpb.LabelInfo. The string score keys
// of Values (e.g. " 0", "+1") are parsed into int32 map keys; entries that
// fail to parse are dropped silently.
func (li *labelInfo) ToProto() *gerritpb.LabelInfo {
	ret := &gerritpb.LabelInfo{
		Optional:     li.Optional,
		Approved:     li.Approved.ToProto(),
		Rejected:     li.Rejected.ToProto(),
		Recommended:  li.Recommended.ToProto(),
		Disliked:     li.Disliked.ToProto(),
		Blocking:     li.Blocking,
		Value:        li.Value,
		DefaultValue: li.DefaultValue,
	}
	if len(li.All) > 0 {
		ret.All = make([]*gerritpb.ApprovalInfo, len(li.All))
		for i, a := range li.All {
			ret.All[i] = a.ToProto()
		}
	}
	if li.Values != nil {
		ret.Values = make(map[int32]string, len(li.Values))
		for value, description := range li.Values {
			i, err := strconv.ParseInt(strings.TrimSpace(value), 10, 32)
			// Error is silently ignored for consistency with other parts of code.
			if err == nil {
				ret.Values[int32(i)] = description
			}
		}
	}
	return ret
}
// approvalInfo represents the JSON wire form of a gerritpb.ApprovalInfo:
// an accountInfo (embedded, flattened into the same JSON object) plus the
// vote that account cast on a label.
type approvalInfo struct {
	accountInfo
	Value                int32                     `json:"value"`
	PermittedVotingRange *gerritpb.VotingRangeInfo `json:"permitted_voting_range"`
	Date                 Timestamp                 `json:"date"`
	Tag                  string                    `json:"tag"`
	PostSubmit           bool                      `json:"post_submit"`
}
// ToProto converts approvalInfo to a gerritpb.ApprovalInfo; the embedded
// account becomes the User field.
func (ai *approvalInfo) ToProto() *gerritpb.ApprovalInfo {
	return &gerritpb.ApprovalInfo{
		User:                 ai.accountInfo.ToProto(),
		Value:                ai.Value,
		PermittedVotingRange: ai.PermittedVotingRange,
		Date:                 timestamppb.New(ai.Date.Time),
		Tag:                  ai.Tag,
		PostSubmit:           ai.PostSubmit,
	}
}
// changeMessageInfo represents the JSON wire form of a
// gerritpb.ChangeMessageInfo (Gerrit REST ChangeMessageInfo entity).
type changeMessageInfo struct {
	ID         string       `json:"id"`
	Author     *accountInfo `json:"author"`
	RealAuthor *accountInfo `json:"real_author"`
	Date       Timestamp    `json:"date"`
	Message    string       `json:"message"`
	Tag        string       `json:"tag"`
}
func (cmi *changeMessageInfo) | () *gerritpb.ChangeMessageInfo {
if cmi == nil {
return nil
}
return &gerritpb.ChangeMessageInfo{
Id: cmi.ID,
Author: cmi.Author.ToProto(),
RealAuthor: cmi.RealAuthor.ToProto(),
Date: timestamppb.New(cmi.Date.Time),
Message: cmi.Message,
Tag: cmi.Tag,
}
}
// requirement represents the JSON wire form of a gerritpb.Requirement.
// Status is an enum string ("OK", "NOT_READY", ...) converted in ToProto.
type requirement struct {
	Status       string `json:"status"`
	FallbackText string `json:"fallback_text"`
	Type         string `json:"type"`
}
// ToProto converts requirement to a gerritpb.Requirement, mapping the REST
// status string onto the REQUIREMENT_STATUS_* enum. Returns an error for an
// unknown status.
func (r *requirement) ToProto() (*gerritpb.Requirement, error) {
	enumName := "REQUIREMENT_STATUS_" + r.Status
	status, ok := gerritpb.Requirement_Status_value[enumName]
	if !ok {
		return nil, errors.Reason("no Status enum value for %q", r.Status).Err()
	}
	out := &gerritpb.Requirement{
		Status:       gerritpb.Requirement_Status(status),
		FallbackText: r.FallbackText,
		Type:         r.Type,
	}
	return out, nil
}
// fileInfo represents the JSON wire form of a gerritpb.FileInfo
// (per-file diff statistics).
type fileInfo struct {
	LinesInserted int32 `json:"lines_inserted"`
	LinesDeleted  int32 `json:"lines_deleted"`
	SizeDelta     int64 `json:"size_delta"`
	Size          int64 `json:"size"`
}
// ToProto converts fileInfo to its gerritpb.FileInfo equivalent.
func (fi *fileInfo) ToProto() *gerritpb.FileInfo {
	out := new(gerritpb.FileInfo)
	out.LinesInserted = fi.LinesInserted
	out.LinesDeleted = fi.LinesDeleted
	out.SizeDelta = fi.SizeDelta
	out.Size = fi.Size
	return out
}
// revisionInfo represents the JSON wire form of a gerritpb.RevisionInfo
// (one patchset of a change).
type revisionInfo struct {
	// Kind is an enum string (e.g. "REWORK", "NO_CODE_CHANGE"); converted
	// specially in ToProto.
	Kind        string               `json:"kind"`
	Number      int                  `json:"_number"`
	Uploader    *accountInfo         `json:"uploader"`
	Ref         string               `json:"ref"`
	Created     Timestamp            `json:"created"`
	Description string               `json:"description"`
	Files       map[string]*fileInfo `json:"files"`
	Commit      *commitInfo          `json:"commit"`
}
// ToProto converts revisionInfo to a gerritpb.RevisionInfo. An unknown Kind
// string leaves the proto Kind at its zero value; Files and Commit are
// converted only when present.
func (ri *revisionInfo) ToProto() *gerritpb.RevisionInfo {
	ret := &gerritpb.RevisionInfo{
		Number:      int32(ri.Number),
		Uploader:    ri.Uploader.ToProto(),
		Ref:         ri.Ref,
		Created:     timestamppb.New(ri.Created.Time),
		Description: ri.Description,
	}
	if v, ok := gerritpb.RevisionInfo_Kind_value[ri.Kind]; ok {
		ret.Kind = gerritpb.RevisionInfo_Kind(v)
	}
	if ri.Files != nil {
		ret.Files = make(map[string]*gerritpb.FileInfo, len(ri.Files))
		for i, fi := range ri.Files {
			ret.Files[i] = fi.ToProto()
		}
	}
	if ri.Commit != nil {
		ret.Commit = ri.Commit.ToProto()
	}
	return ret
}
// gitPersonInfo represents the author/committer of a git commit.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#git-person-info
type gitPersonInfo struct {
	Name  string `json:"name"`
	Email string `json:"email"`
}
// ToProto converts gitPersonInfo to a gerritpb.GitPersonInfo.
// A nil receiver maps to a nil proto — this mirrors accountInfo.ToProto, so
// optional fields (e.g. commitInfo.Author on partial REST responses) convert
// without a nil-pointer panic.
func (g *gitPersonInfo) ToProto() *gerritpb.GitPersonInfo {
	if g == nil {
		return nil
	}
	return &gerritpb.GitPersonInfo{
		Name:  g.Name,
		Email: g.Email,
	}
}
// commitInfo represents a git commit as returned by Gerrit.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#commit-info
type commitInfo struct {
	Commit string `json:"commit"`
	// Parents carry only their Commit IDs when nested here.
	Parents   []*commitInfo  `json:"parents"`
	Author    *gitPersonInfo `json:"author"`
	Committer *gitPersonInfo `json:"committer"`
	Subject   string         `json:"subject"`
	Message   string         `json:"message"`
}
// ToProto converts commitInfo to a gerritpb.CommitInfo. Only parent IDs are
// propagated; Committer and Subject are currently not mapped (see TODO).
func (c *commitInfo) ToProto() *gerritpb.CommitInfo {
	parents := make([]*gerritpb.CommitInfo_Parent, len(c.Parents))
	for i, p := range c.Parents {
		parents[i] = &gerritpb.CommitInfo_Parent{Id: p.Commit}
	}
	return &gerritpb.CommitInfo{
		Id:      c.Commit,
		Parents: parents,
		Message: c.Message,
		Author:  c.Author.ToProto(),
		// TODO(tandrii): support other fields once added.
	}
}
// relatedChangeAndCommitInfo represents one entry of a related-changes query.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#related-change-and-commit-info
type relatedChangeAndCommitInfo struct {
	Project         string     `json:"project"`
	ChangeID        string     `json:"change_id"`
	Commit          commitInfo `json:"commit"`
	Number          int64      `json:"_change_number"`
	Patchset        int64      `json:"_revision_number"`
	CurrentPatchset int64      `json:"_current_revision_number"`

	// json.Unmarshal cannot convert enum string to value,
	// so this field is handled specially in ToProto.
	Status string `json:"status"`
}
// ToProto converts the entry into a
// gerritpb.GetRelatedChangesResponse_ChangeAndCommit. The REST status string
// is mapped through the ChangeStatus enum table (unknown strings map to the
// zero value).
func (r *relatedChangeAndCommitInfo) ToProto() *gerritpb.GetRelatedChangesResponse_ChangeAndCommit {
	out := &gerritpb.GetRelatedChangesResponse_ChangeAndCommit{}
	out.Project = r.Project
	out.Number = r.Number
	out.Patchset = r.Patchset
	out.CurrentPatchset = r.CurrentPatchset
	out.Commit = r.Commit.ToProto()
	out.Status = gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[r.Status])
	return out
}
// mergeableInfo represents the JSON wire form of a gerritpb.MergeableInfo
// (Gerrit REST MergeableInfo entity).
type mergeableInfo struct {
	SubmitType string `json:"submit_type"`
	// Strategy uses lowercase-hyphenated names, e.g. "simple-two-way-in-core".
	Strategy      string   `json:"strategy"`
	Mergeable     bool     `json:"mergeable"`
	CommitMerged  bool     `json:"commit_merged"`
	ContentMerged bool     `json:"content_merged"`
	Conflicts     []string `json:"conflicts"`
	MergeableInto []string `json:"mergeable_into"`
}
// ToProto converts mergeableInfo to a gerritpb.MergeableInfo, mapping the REST
// strategy and submit-type strings onto their proto enums. Returns an error if
// either enum name is unknown.
func (mi *mergeableInfo) ToProto() (*gerritpb.MergeableInfo, error) {
	// Convert something like 'simple-two-way-in-core' to 'SIMPLE_TWO_WAY_IN_CORE'.
	// strings.ReplaceAll is the idiomatic form of strings.Replace(..., -1).
	strategyEnumName := strings.ReplaceAll(strings.ToUpper(mi.Strategy), "-", "_")
	strategyEnumNum, found := gerritpb.MergeableStrategy_value[strategyEnumName]
	if !found {
		return nil, errors.Reason("no MergeableStrategy enum value for %q", strategyEnumName).Err()
	}
	submitTypeEnumNum, found := gerritpb.MergeableInfo_SubmitType_value[mi.SubmitType]
	if !found {
		return nil, errors.Reason("no SubmitType enum value for %q", mi.SubmitType).Err()
	}
	return &gerritpb.MergeableInfo{
		SubmitType:    gerritpb.MergeableInfo_SubmitType(submitTypeEnumNum),
		Strategy:      gerritpb.MergeableStrategy(strategyEnumNum),
		Mergeable:     mi.Mergeable,
		CommitMerged:  mi.CommitMerged,
		ContentMerged: mi.ContentMerged,
		Conflicts:     mi.Conflicts,
		MergeableInto: mi.MergeableInto,
	}, nil
}
// addReviewerRequest is the JSON request body for adding a reviewer
// (Gerrit REST ReviewerInput entity).
type addReviewerRequest struct {
	Reviewer  string `json:"reviewer"`
	State     string `json:"state,omitempty"`
	Confirmed bool   `json:"confirmed,omitempty"`
	Notify    string `json:"notify,omitempty"`
}
// reviewerInfo represents the JSON wire form of a gerritpb.ReviewerInfo.
// Unlike accountInfo, it also carries the reviewer's label votes.
type reviewerInfo struct {
	Name            string   `json:"name,omitempty"`
	Email           string   `json:"email,omitempty"`
	SecondaryEmails []string `json:"secondary_emails,omitempty"`
	Username        string   `json:"username,omitempty"`
	// Approvals maps label name to a string score such as " 0", "+1" or "-2".
	Approvals map[string]string `json:"approvals,omitempty"`
	AccountID int64             `json:"_account_id,omitempty"`
}
// ToProtoReviewerInfo converts the wire-format reviewer into a
// gerritpb.ReviewerInfo. Approval scores arrive as strings such as " 0" or
// "+1"; they are whitespace-trimmed and parsed into int32 values. Returns an
// error when a score does not parse.
func (ri *reviewerInfo) ToProtoReviewerInfo() (*gerritpb.ReviewerInfo, error) {
	// Pre-size instead of make(..., 0); TrimSpace (rather than TrimLeft of
	// spaces only) also tolerates trailing/other whitespace around the score.
	approvals := make(map[string]int32, len(ri.Approvals))
	for label, score := range ri.Approvals {
		scoreInt, err := strconv.ParseInt(strings.TrimSpace(score), 10, 32)
		if err != nil {
			return nil, errors.Annotate(err, "parsing approvals").Err()
		}
		approvals[label] = int32(scoreInt)
	}
	return &gerritpb.ReviewerInfo{
		Account: &gerritpb.AccountInfo{
			Name:            ri.Name,
			Email:           ri.Email,
			SecondaryEmails: ri.SecondaryEmails,
			Username:        ri.Username,
			AccountId:       ri.AccountID,
		},
		Approvals: approvals,
	}, nil
}
// addReviewerResult is the JSON response body of an add-reviewer call
// (Gerrit REST AddReviewerResult entity).
type addReviewerResult struct {
	Input     string         `json:"input"`
	Reviewers []reviewerInfo `json:"reviewers,omitempty"`
	Ccs       []reviewerInfo `json:"ccs,omitempty"`
	Error     string         `json:"error,omitempty"`
	Confirm   bool           `json:"confirm,omitempty"`
}
// ToProto converts addReviewerResult to a gerritpb.AddReviewerResult,
// converting every nested reviewer and CC entry. Returns an error if any
// nested reviewerInfo fails to convert (e.g. a non-numeric approval score).
func (rr *addReviewerResult) ToProto() (*gerritpb.AddReviewerResult, error) {
	reviewers := make([]*gerritpb.ReviewerInfo, 0)
	for _, r := range rr.Reviewers {
		rInfo, err := r.ToProtoReviewerInfo()
		if err != nil {
			return nil, errors.Annotate(err, "converting reviewerInfo").Err()
		}
		reviewers = append(reviewers, rInfo)
	}
	ccs := make([]*gerritpb.ReviewerInfo, 0)
	for _, r := range rr.Ccs {
		rInfo, err := r.ToProtoReviewerInfo()
		if err != nil {
			return nil, errors.Annotate(err, "converting reviewerInfo").Err()
		}
		ccs = append(ccs, rInfo)
	}
	return &gerritpb.AddReviewerResult{
		Input:     rr.Input,
		Reviewers: reviewers,
		Ccs:       ccs,
		Error:     rr.Error,
		Confirm:   rr.Confirm,
	}, nil
}
// enumToString converts a proto enum value to the Gerrit REST string form by
// stripping the shared prefix, which is derived from the "<PREFIX>_UNSPECIFIED"
// entry at value 0. Value 0 itself maps to the empty string.
func enumToString(v int32, m map[int32]string) string {
	switch v {
	case 0:
		return ""
	default:
		cut := strings.LastIndex(m[0], "UNSPECIFIED")
		return m[v][cut:]
	}
}
// reviewInput is the JSON request body for posting a review
// (Gerrit REST ReviewInput entity).
type reviewInput struct {
	Message string           `json:"message,omitempty"`
	Labels  map[string]int32 `json:"labels,omitempty"`
	Tag     string           `json:"tag,omitempty"`
	// Notify and NotifyDetails control who gets notified about this review.
	Notify        string        `json:"notify,omitempty"`
	NotifyDetails notifyDetails `json:"notify_details,omitempty"`

	OnBehalfOf     int64 `json:"on_behalf_of,omitempty"`
	Ready          bool  `json:"ready,omitempty"`
	WorkInProgress bool  `json:"work_in_progress,omitempty"`

	AddToAttentionSet                []*attentionSetInput `json:"add_to_attention_set,omitempty"`
	RemoveFromAttentionSet           []*attentionSetInput `json:"remove_from_attention_set,omitempty"`
	IgnoreAutomaticAttentionSetRules bool                 `json:"ignore_automatic_attention_set_rules,omitempty"`

	Reviewers []*reviewerInput `json:"reviewers,omitempty"`
}
// notifyInfo is the JSON wire form of gerritpb.NotifyDetails_Info: the set
// of account IDs to notify.
type notifyInfo struct {
	Accounts []int64 `json:"accounts,omitempty"`
}

// notifyDetails buckets accounts to notify by recipient type, keyed by the
// gerritpb.NotifyDetails_RecipientType enum name with its prefix stripped
// (see toNotifyDetails).
type notifyDetails map[string]*notifyInfo
func toNotifyDetails(in *gerritpb.NotifyDetails) notifyDetails {
recipients := in.GetRecipients()
if len(recipients) == 0 {
return nil
}
res := make(map[string]*notifyInfo, len(recipients))
for _, recipient := range recipients {
if len(recipient.Info.GetAccounts()) == 0 {
continue
}
rt := recipient.RecipientType
if rt == gerritpb.NotifyDetails_RECIPIENT_TYPE_UNSPECIFIED {
// Must have been caught in validation.
panic(fmt.Errorf("must specify recipient type"))
}
rts := enumToString(int32(rt.Number()), gerritpb.NotifyDetails_RecipientType_name)
if ni, ok := res[rts]; !ok {
ni = ¬ifyInfo{
Accounts: make([]int64, len(recipient.Info.GetAccounts())),
}
for i, aid := range recipient.Info.GetAccounts() {
ni.Accounts[i] = aid
}
res[rts] = ni
} else {
ni.Accounts = append(ni.Accounts, recipient.Info.GetAccounts()...)
}
}
for _, ni := range res {
// Sort & dedup accounts in each notification bucket.
sort.Slice(ni.Accounts, func(i, j int) bool { return ni.Accounts[i] < ni.Accounts[j] })
n := 0
for i := 1; i < len(ni.Accounts); i++ {
if ni.Accounts[n] == ni.Accounts[i] {
continue
}
n++
ni.Accounts[n] = ni.Accounts[i]
}
ni.Accounts = ni.Accounts[:n+1]
}
return res
}
// attentionSetInput mirrors the Gerrit AttentionSetInput REST entity: one
// user to add to (or remove from) a change's attention set, with a reason.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#attention-set-input
type attentionSetInput struct {
	User   string `json:"user"`
	Reason string `json:"reason"`
	// FIX: was tagged `json:"string,omitempty"`, which serialized the
	// notify preference under the wrong key so Gerrit ignored it.
	Notify        string        `json:"notify,omitempty"`
	NotifyDetails notifyDetails `json:"notify_details,omitempty"`
}
// toAttentionSetInput converts a gerritpb.AttentionSetInput to its JSON
// wire form. Notify is rendered as the prefix-stripped enum name ("" when
// unspecified, so it is omitted from JSON).
func toAttentionSetInput(in *gerritpb.AttentionSetInput) *attentionSetInput {
	return &attentionSetInput{
		User:          in.User,
		Reason:        in.Reason,
		Notify:        enumToString(int32(in.Notify.Number()), gerritpb.Notify_name),
		NotifyDetails: toNotifyDetails(in.NotifyDetails),
	}
}
// toAttentionSetInputs converts a slice of gerritpb.AttentionSetInput
// messages to their JSON wire forms, preserving order. An empty input
// yields nil so the field is omitted from JSON.
func toAttentionSetInputs(in []*gerritpb.AttentionSetInput) []*attentionSetInput {
	if len(in) == 0 {
		return nil
	}
	converted := make([]*attentionSetInput, 0, len(in))
	for _, asi := range in {
		converted = append(converted, toAttentionSetInput(asi))
	}
	return converted
}
// reviewerInput mirrors the Gerrit ReviewerInput REST entity: a reviewer
// to add or modify as part of posting a review.
type reviewerInput struct {
	Reviewer string `json:"reviewer"`
	// State is a gerritpb.ReviewerInput_State enum rendered as its
	// prefix-stripped name; empty means Gerrit's default.
	State string `json:"state,omitempty"`
}
// toReviewerInputs converts gerritpb.ReviewerInput messages to their JSON
// wire forms, preserving order. An empty input yields nil so the field is
// omitted from JSON.
func toReviewerInputs(in []*gerritpb.ReviewerInput) []*reviewerInput {
	if len(in) == 0 {
		return nil
	}
	converted := make([]*reviewerInput, 0, len(in))
	for _, r := range in {
		ri := &reviewerInput{Reviewer: r.Reviewer}
		ri.State = enumToString(int32(r.State.Number()), gerritpb.ReviewerInput_State_name)
		converted = append(converted, ri)
	}
	return converted
}
// reviewResult mirrors the Gerrit ReviewResult REST entity returned from a
// "set review" call.
type reviewResult struct {
	// Labels echoes the label votes that were applied.
	Labels map[string]int32 `json:"labels,omitempty"`
	// Reviewers maps each reviewer input string to the outcome of adding
	// that reviewer.
	Reviewers map[string]*addReviewerResult `json:"reviewers,omitempty"`
}
// ToProto converts reviewResult to gerritpb.ReviewResult.
//
// The Reviewers map is converted entry by entry; the first nested
// conversion error aborts the whole conversion. Reviewers is left nil in
// the result when the input map is empty.
func (rr *reviewResult) ToProto() (*gerritpb.ReviewResult, error) {
	result := &gerritpb.ReviewResult{Labels: rr.Labels}
	if len(rr.Reviewers) == 0 {
		return result, nil
	}
	result.Reviewers = make(map[string]*gerritpb.AddReviewerResult, len(rr.Reviewers))
	for input, details := range rr.Reviewers {
		converted, err := details.ToProto()
		if err != nil {
			return nil, err
		}
		result.Reviewers[input] = converted
	}
	return result, nil
}
// projectInfo mirrors the Gerrit ProjectInfo REST entity.
// https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#project-info
type projectInfo struct {
	// ID is the URL-encoded project name (decoded in ToProto).
	ID          string `json:"id,omitempty"`
	Parent      string `json:"parent,omitempty"`
	Description string `json:"description,omitempty"`
	// State is an enum string; ToProto maps it via the
	// "PROJECT_STATE_"-prefixed gerritpb enum names.
	State string `json:"state,omitempty"`
	// Branches maps short branch names to commit SHA-1s.
	Branches map[string]string       `json:"branches,omitempty"`
	WebLinks []*gerritpb.WebLinkInfo `json:"web_links,omitempty"`
}
// ToProto converts projectInfo to gerritpb.ProjectInfo.
//
// The project ID arrives URL-encoded and is decoded into Name; short
// branch names are expanded to fully-qualified refs. Fails on an unknown
// state string or an undecodable ID.
func (pi *projectInfo) ToProto() (*gerritpb.ProjectInfo, error) {
	stateEnumNum, ok := gerritpb.ProjectInfo_State_value["PROJECT_STATE_"+pi.State]
	if !ok {
		return nil, errors.Reason("no State enum value for %q", pi.State).Err()
	}
	projectName, err := url.QueryUnescape(pi.ID)
	if err != nil {
		return nil, errors.Annotate(err, "decoding name").Err()
	}
	refs := make(map[string]string, len(pi.Branches))
	for branch, sha1 := range pi.Branches {
		refs[branchToRef(branch)] = sha1
	}
	ret := &gerritpb.ProjectInfo{
		Name:        projectName,
		Parent:      pi.Parent,
		Description: pi.Description,
		State:       gerritpb.ProjectInfo_State(stateEnumNum),
		Refs:        refs,
		WebLinks:    pi.WebLinks,
	}
	return ret, nil
}
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#submit-info
type submitInfo struct {
Status string `json:"status"`
}
func (si *submitInfo) ToProto() *gerritpb.SubmitInfo {
return &gerritpb.SubmitInfo{
Status: gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[si.Status]),
}
}
type submitRequirementResultInfo struct {
Name string `json:"name"`
Description string `json:"description"`
Status string `json:"status"`
IsLegacy bool `json:"is_legacy"`
ApplicabilityExpressionResult *submitRequirementExpressionInfo `json:"applicability_expression_result"`
SubmittabilityExpressionResult *submitRequirementExpressionInfo `json:"submittability_expression_result"`
OverrideExpressionResult *submitRequirementExpressionInfo `json:"override_expression_result"`
}
func (ri *submitRequirementResultInfo) ToProto() (*gerritpb.SubmitRequirementResultInfo, error) {
numVal, found := gerritpb.SubmitRequirementResultInfo_Status_value[ri.Status]
if !found {
return nil, errors.Reason("no Status enum value for %q", ri.Status).Err()
}
return &gerritpb.SubmitRequirementResultInfo{
Name: ri.Name,
Description: ri.Description,
Status: gerritpb.SubmitRequirementResultInfo_Status(numVal),
IsLegacy: ri.IsLegacy,
ApplicabilityExpressionResult: ri.ApplicabilityExpressionResult.ToProto(),
SubmittabilityExpressionResult: ri.SubmittabilityExpressionResult.ToProto(),
OverrideExpressionResult: ri.OverrideExpressionResult.ToProto(),
}, nil
}
type submitRequirementExpressionInfo struct {
Expression string `json:"expression"`
Fulfilled bool `json:"fulfilled"`
PassingAtoms []string `json:"passing_atoms"`
FailingAtoms []string `json:"failing_atoms"`
ErrorMessage string `json:"error_message"`
}
func (ei *submitRequirementExpressionInfo) ToProto() *gerritpb.SubmitRequirementExpressionInfo {
if ei == nil {
return nil
}
return &gerritpb.SubmitRequirementExpressionInfo{
Expression: ei.Expression,
Fulfilled: ei.Fulfilled,
PassingAtoms: ei.PassingAtoms,
FailingAtoms: ei.FailingAtoms,
ErrorMessage: ei.ErrorMessage,
}
}
type metaDiff struct {
Added *changeInfo `json:"added"`
Removed *changeInfo `json:"removed"`
OldChangeInfo *changeInfo `json:"old_change_info"`
NewChangeInfo *changeInfo `json:"new_change_info"`
}
func (md *metaDiff) ToProto() (*gerritpb.MetaDiff, error) {
var resp gerritpb.MetaDiff
var ci *gerritpb.ChangeInfo
var err error
if ci, err = md.Added.ToProto(); err != nil {
return nil, err
}
resp.Added = ci
if ci, err = md.Removed.ToProto(); err != nil {
return nil, err
}
resp.Removed = ci
if ci, err = md.OldChangeInfo.ToProto(); err != nil {
return nil, err
}
resp.OldChangeInfo = ci
if ci, err = md.NewChangeInfo.ToProto(); err != nil {
return nil, err
}
resp.NewChangeInfo = ci
return &resp, nil
}
| ToProto | identifier_name |
json.go | // Copyright 2020 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gerrit
import (
"fmt"
"net/url"
"sort"
"strconv"
"strings"
"google.golang.org/protobuf/types/known/timestamppb"
"go.chromium.org/luci/common/errors"
gerritpb "go.chromium.org/luci/common/proto/gerrit"
)
// This file contains code related to JSON representations of messages that are
// used for requests to the Gerrit REST API, and unmarshalling code to convert
// from the JSON representations to protos defined in `gerritpb`.
//
// Each of these structs corresponds to an entity described at
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#json-entities
// and also to a message in `gerritpb`, and each has a unmarshalling method
// called ToProto.
// timestamp implements customized JSON marshal/unmarshal behavior that matches
// the timestamp format used in Gerrit.
type accountInfo struct {
Name string `json:"name,omitempty"`
Email string `json:"email,omitempty"`
SecondaryEmails []string `json:"secondary_emails,omitempty"`
Username string `json:"username,omitempty"`
AccountID int64 `json:"_account_id,omitempty"`
}
// ToProto converts accountInfo to gerritpb.AccountInfo.
//
// A nil receiver converts to a nil proto, so callers may chain this on
// optional account fields without a preceding nil check.
func (a *accountInfo) ToProto() *gerritpb.AccountInfo {
	if a == nil {
		return nil
	}
	ret := &gerritpb.AccountInfo{}
	ret.Name = a.Name
	ret.Email = a.Email
	ret.SecondaryEmails = a.SecondaryEmails
	ret.Username = a.Username
	ret.AccountId = a.AccountID
	return ret
}
type ownerInfo struct {
Account accountInfo `json:"account,omitempty"`
}
// changeInfo represents JSON for a gerritpb.ChangeInfo on the wire.
type changeInfo struct {
Number int64 `json:"_number"`
Owner *accountInfo `json:"owner"`
Project string `json:"project"`
Branch string `json:"branch"`
ChangeID string `json:"change_id"`
Reviewers map[string][]*accountInfo `json:"reviewers"`
Hashtags []string `json:"hashtags"`
Subject string `json:"subject"`
// json.Unmarshal cannot convert enum string to value,
// so this field is handled specially in ToProto.
Status string `json:"status"`
CurrentRevision string `json:"current_revision"`
Revisions map[string]*revisionInfo `json:"revisions"`
Labels map[string]*labelInfo `json:"labels"`
Messages []changeMessageInfo `json:"messages"`
Requirements []requirement `json:"requirements"`
SubmitRequirements []*submitRequirementResultInfo `json:"submit_requirements"`
Created Timestamp `json:"created"`
Updated Timestamp `json:"updated"`
Submitted Timestamp `json:"submitted"`
Submittable bool `json:"submittable,omitempty"`
IsPrivate bool `json:"is_private,omitempty"`
MetaRevID string `json:"meta_rev_id,omitempty"`
RevertOf int64 `json:"revert_of,omitempty"`
CherryPickOfChange int64 `json:"cherry_pick_of_change,omitempty"`
// MoreChanges may be set on the last change in a response to a query for
// changes, but this is not a property of the change itself and is not
// needed in gerritpb.ChangeInfo.
MoreChanges bool `json:"_more_changes"`
}
// ToProto converts the JSON wire representation of a change to a
// gerritpb.ChangeInfo.
//
// Enum-valued fields (e.g. Status) cannot be handled by json.Unmarshal and
// are mapped here; an unknown status string maps to the zero enum value.
// Nested Requirements/SubmitRequirements conversion errors abort the whole
// conversion.
func (ci *changeInfo) ToProto() (*gerritpb.ChangeInfo, error) {
	ret := &gerritpb.ChangeInfo{
		Number:             ci.Number,
		Owner:              ci.Owner.ToProto(),
		Project:            ci.Project,
		Ref:                branchToRef(ci.Branch),
		Subject:            ci.Subject,
		Status:             gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[ci.Status]),
		Hashtags:           ci.Hashtags,
		CurrentRevision:    ci.CurrentRevision,
		Submittable:        ci.Submittable,
		IsPrivate:          ci.IsPrivate,
		MetaRevId:          ci.MetaRevID,
		Created:            timestamppb.New(ci.Created.Time),
		Updated:            timestamppb.New(ci.Updated.Time),
		Submitted:          timestamppb.New(ci.Submitted.Time),
		RevertOf:           ci.RevertOf,
		CherryPickOfChange: ci.CherryPickOfChange,
		Branch:             ci.Branch,
	}
	if ci.Revisions != nil {
		ret.Revisions = make(map[string]*gerritpb.RevisionInfo, len(ci.Revisions))
		for rev, info := range ci.Revisions {
			ret.Revisions[rev] = info.ToProto()
		}
	}
	if ci.Labels != nil {
		ret.Labels = make(map[string]*gerritpb.LabelInfo, len(ci.Labels))
		for label, info := range ci.Labels {
			ret.Labels[label] = info.ToProto()
		}
	}
	if ci.Messages != nil {
		ret.Messages = make([]*gerritpb.ChangeMessageInfo, len(ci.Messages))
		for i, msg := range ci.Messages {
			ret.Messages[i] = msg.ToProto()
		}
	}
	var err error
	if ci.Requirements != nil {
		ret.Requirements = make([]*gerritpb.Requirement, len(ci.Requirements))
		for i, r := range ci.Requirements {
			if ret.Requirements[i], err = r.ToProto(); err != nil {
				return nil, err
			}
		}
	}
	if ci.SubmitRequirements != nil {
		ret.SubmitRequirements = make([]*gerritpb.SubmitRequirementResultInfo,
			len(ci.SubmitRequirements))
		for i, r := range ci.SubmitRequirements {
			if ret.SubmitRequirements[i], err = r.ToProto(); err != nil {
				return nil, err
			}
		}
	}
	if ci.Reviewers != nil {
		ret.Reviewers = &gerritpb.ReviewerStatusMap{}
		if accs, exist := ci.Reviewers["REVIEWER"]; exist {
			ret.Reviewers.Reviewers = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				ret.Reviewers.Reviewers[i] = acc.ToProto()
			}
		}
		if accs, exist := ci.Reviewers["CC"]; exist {
			ret.Reviewers.Ccs = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				ret.Reviewers.Ccs[i] = acc.ToProto()
			}
		}
		if accs, exist := ci.Reviewers["REMOVED"]; exist {
			ret.Reviewers.Removed = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				// BUG FIX: this previously wrote into Ccs[i], which
				// panics when REMOVED is longer than CC and left
				// Removed holding only nil entries.
				ret.Reviewers.Removed[i] = acc.ToProto()
			}
		}
	}
	return ret, nil
}
type labelInfo struct {
Optional bool `json:"optional"`
Approved *accountInfo `json:"approved"`
Rejected *accountInfo `json:"rejected"`
Recommended *accountInfo `json:"recommended"`
Disliked *accountInfo `json:"disliked"`
Blocking bool `json:"blocking"`
Value int32 `json:"value"`
DefaultValue int32 `json:"default_value"`
All []*approvalInfo `json:"all"`
Values map[string]string `json:"values"`
}
// ToProto converts labelInfo to gerritpb.LabelInfo.
//
// The Values map keys arrive as strings like " 0" or "+1" and are parsed
// to int32 keys; unparseable keys are silently dropped (deliberate, see
// below).
func (li *labelInfo) ToProto() *gerritpb.LabelInfo {
	ret := &gerritpb.LabelInfo{
		Optional:     li.Optional,
		Approved:     li.Approved.ToProto(),
		Rejected:     li.Rejected.ToProto(),
		Recommended:  li.Recommended.ToProto(),
		Disliked:     li.Disliked.ToProto(),
		Blocking:     li.Blocking,
		Value:        li.Value,
		DefaultValue: li.DefaultValue,
	}
	if len(li.All) > 0 {
		ret.All = make([]*gerritpb.ApprovalInfo, len(li.All))
		for i, a := range li.All {
			ret.All[i] = a.ToProto()
		}
	}
	if li.Values != nil {
		ret.Values = make(map[int32]string, len(li.Values))
		for value, description := range li.Values {
			// Keys may carry a leading space (Gerrit pads positive
			// scores); trim before parsing.
			i, err := strconv.ParseInt(strings.TrimSpace(value), 10, 32)
			// Error is silently ignored for consistency with other parts of code.
			if err == nil {
				ret.Values[int32(i)] = description
			}
		}
	}
	return ret
}
type approvalInfo struct {
accountInfo
Value int32 `json:"value"`
PermittedVotingRange *gerritpb.VotingRangeInfo `json:"permitted_voting_range"`
Date Timestamp `json:"date"`
Tag string `json:"tag"`
PostSubmit bool `json:"post_submit"`
}
func (ai *approvalInfo) ToProto() *gerritpb.ApprovalInfo {
ret := &gerritpb.ApprovalInfo{
User: ai.accountInfo.ToProto(),
Value: ai.Value,
PermittedVotingRange: ai.PermittedVotingRange,
Date: timestamppb.New(ai.Date.Time),
Tag: ai.Tag,
PostSubmit: ai.PostSubmit,
}
return ret
}
type changeMessageInfo struct {
ID string `json:"id"`
Author *accountInfo `json:"author"`
RealAuthor *accountInfo `json:"real_author"`
Date Timestamp `json:"date"`
Message string `json:"message"`
Tag string `json:"tag"`
}
func (cmi *changeMessageInfo) ToProto() *gerritpb.ChangeMessageInfo |
type requirement struct {
Status string `json:"status"`
FallbackText string `json:"fallback_text"`
Type string `json:"type"`
}
func (r *requirement) ToProto() (*gerritpb.Requirement, error) {
stringVal := "REQUIREMENT_STATUS_" + r.Status
numVal, found := gerritpb.Requirement_Status_value[stringVal]
if !found {
return nil, errors.Reason("no Status enum value for %q", r.Status).Err()
}
return &gerritpb.Requirement{
Status: gerritpb.Requirement_Status(numVal),
FallbackText: r.FallbackText,
Type: r.Type,
}, nil
}
type fileInfo struct {
LinesInserted int32 `json:"lines_inserted"`
LinesDeleted int32 `json:"lines_deleted"`
SizeDelta int64 `json:"size_delta"`
Size int64 `json:"size"`
}
func (fi *fileInfo) ToProto() *gerritpb.FileInfo {
return &gerritpb.FileInfo{
LinesInserted: fi.LinesInserted,
LinesDeleted: fi.LinesDeleted,
SizeDelta: fi.SizeDelta,
Size: fi.Size,
}
}
type revisionInfo struct {
Kind string `json:"kind"`
Number int `json:"_number"`
Uploader *accountInfo `json:"uploader"`
Ref string `json:"ref"`
Created Timestamp `json:"created"`
Description string `json:"description"`
Files map[string]*fileInfo `json:"files"`
Commit *commitInfo `json:"commit"`
}
func (ri *revisionInfo) ToProto() *gerritpb.RevisionInfo {
ret := &gerritpb.RevisionInfo{
Number: int32(ri.Number),
Uploader: ri.Uploader.ToProto(),
Ref: ri.Ref,
Created: timestamppb.New(ri.Created.Time),
Description: ri.Description,
}
if v, ok := gerritpb.RevisionInfo_Kind_value[ri.Kind]; ok {
ret.Kind = gerritpb.RevisionInfo_Kind(v)
}
if ri.Files != nil {
ret.Files = make(map[string]*gerritpb.FileInfo, len(ri.Files))
for i, fi := range ri.Files {
ret.Files[i] = fi.ToProto()
}
}
if ri.Commit != nil {
ret.Commit = ri.Commit.ToProto()
}
return ret
}
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#git-person-info
type gitPersonInfo struct {
Name string `json:"name"`
Email string `json:"email"`
}
func (g *gitPersonInfo) ToProto() *gerritpb.GitPersonInfo {
return &gerritpb.GitPersonInfo{
Name: g.Name,
Email: g.Email,
}
}
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#commit-info
type commitInfo struct {
Commit string `json:"commit"`
Parents []*commitInfo `json:"parents"`
Author *gitPersonInfo `json:"author"`
Committer *gitPersonInfo `json:"committer"`
Subject string `json:"subject"`
Message string `json:"message"`
}
// ToProto converts commitInfo to gerritpb.CommitInfo.
//
// Only the parent commit IDs are carried over from Parents; Committer and
// Subject are not yet mapped (see TODO below).
func (c *commitInfo) ToProto() *gerritpb.CommitInfo {
	parents := make([]*gerritpb.CommitInfo_Parent, len(c.Parents))
	for i, p := range c.Parents {
		parents[i] = &gerritpb.CommitInfo_Parent{Id: p.Commit}
	}
	return &gerritpb.CommitInfo{
		Id:      c.Commit,
		Parents: parents,
		Message: c.Message,
		Author:  c.Author.ToProto(),
		// TODO(tandrii): support other fields once added.
	}
}
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#related-change-and-commit-info
type relatedChangeAndCommitInfo struct {
Project string `json:"project"`
ChangeID string `json:"change_id"`
Commit commitInfo `json:"commit"`
Number int64 `json:"_change_number"`
Patchset int64 `json:"_revision_number"`
CurrentPatchset int64 `json:"_current_revision_number"`
// json.Unmarshal cannot convert enum string to value,
// so this field is handled specially in ToProto.
Status string `json:"status"`
}
func (r *relatedChangeAndCommitInfo) ToProto() *gerritpb.GetRelatedChangesResponse_ChangeAndCommit {
return &gerritpb.GetRelatedChangesResponse_ChangeAndCommit{
Project: r.Project,
Number: r.Number,
Patchset: r.Patchset,
CurrentPatchset: r.CurrentPatchset,
Commit: r.Commit.ToProto(),
Status: gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[r.Status]),
}
}
type mergeableInfo struct {
SubmitType string `json:"submit_type"`
Strategy string `json:"strategy"`
Mergeable bool `json:"mergeable"`
CommitMerged bool `json:"commit_merged"`
ContentMerged bool `json:"content_merged"`
Conflicts []string `json:"conflicts"`
MergeableInto []string `json:"mergeable_into"`
}
// ToProto converts mergeableInfo to gerritpb.MergeableInfo.
//
// Gerrit reports the merge strategy in kebab-case (e.g.
// "simple-two-way-in-core"); it is normalized to the UPPER_SNAKE_CASE
// proto enum name before lookup. Fails on an unknown strategy or submit
// type.
func (mi *mergeableInfo) ToProto() (*gerritpb.MergeableInfo, error) {
	strategyEnumName := strings.ReplaceAll(strings.ToUpper(mi.Strategy), "-", "_")
	strategyEnumNum, ok := gerritpb.MergeableStrategy_value[strategyEnumName]
	if !ok {
		return nil, errors.Reason("no MergeableStrategy enum value for %q", strategyEnumName).Err()
	}
	submitTypeEnumNum, ok := gerritpb.MergeableInfo_SubmitType_value[mi.SubmitType]
	if !ok {
		return nil, errors.Reason("no SubmitType enum value for %q", mi.SubmitType).Err()
	}
	ret := &gerritpb.MergeableInfo{
		SubmitType:    gerritpb.MergeableInfo_SubmitType(submitTypeEnumNum),
		Strategy:      gerritpb.MergeableStrategy(strategyEnumNum),
		Mergeable:     mi.Mergeable,
		CommitMerged:  mi.CommitMerged,
		ContentMerged: mi.ContentMerged,
		Conflicts:     mi.Conflicts,
		MergeableInto: mi.MergeableInto,
	}
	return ret, nil
}
type addReviewerRequest struct {
Reviewer string `json:"reviewer"`
State string `json:"state,omitempty"`
Confirmed bool `json:"confirmed,omitempty"`
Notify string `json:"notify,omitempty"`
}
type reviewerInfo struct {
Name string `json:"name,omitempty"`
Email string `json:"email,omitempty"`
SecondaryEmails []string `json:"secondary_emails,omitempty"`
Username string `json:"username,omitempty"`
Approvals map[string]string `json:"approvals,omitempty"`
AccountID int64 `json:"_account_id,omitempty"`
}
// ToProtoReviewerInfo converts reviewerInfo to gerritpb.ReviewerInfo.
//
// Gerrit reports approval scores as strings (e.g. "+1", " 0" — positive
// and zero scores may carry a leading space); they are parsed to int32
// here. Returns an error if any score fails to parse.
func (ri *reviewerInfo) ToProtoReviewerInfo() (*gerritpb.ReviewerInfo, error) {
	approvals := make(map[string]int32, 0)
	for label, score := range ri.Approvals {
		// Strip Gerrit's leading-space padding before parsing.
		score = strings.TrimLeft(score, " ")
		scoreInt, err := strconv.ParseInt(score, 10, 32)
		if err != nil {
			return nil, errors.Annotate(err, "parsing approvals").Err()
		}
		approvals[label] = int32(scoreInt)
	}
	return &gerritpb.ReviewerInfo{
		Account: &gerritpb.AccountInfo{
			Name:            ri.Name,
			Email:           ri.Email,
			SecondaryEmails: ri.SecondaryEmails,
			Username:        ri.Username,
			AccountId:       ri.AccountID,
		},
		Approvals: approvals,
	}, nil
}
type addReviewerResult struct {
Input string `json:"input"`
Reviewers []reviewerInfo `json:"reviewers,omitempty"`
Ccs []reviewerInfo `json:"ccs,omitempty"`
Error string `json:"error,omitempty"`
Confirm bool `json:"confirm,omitempty"`
}
func (rr *addReviewerResult) ToProto() (*gerritpb.AddReviewerResult, error) {
reviewers := make([]*gerritpb.ReviewerInfo, 0)
for _, r := range rr.Reviewers {
rInfo, err := r.ToProtoReviewerInfo()
if err != nil {
return nil, errors.Annotate(err, "converting reviewerInfo").Err()
}
reviewers = append(reviewers, rInfo)
}
ccs := make([]*gerritpb.ReviewerInfo, 0)
for _, r := range rr.Ccs {
rInfo, err := r.ToProtoReviewerInfo()
if err != nil {
return nil, errors.Annotate(err, "converting reviewerInfo").Err()
}
ccs = append(ccs, rInfo)
}
return &gerritpb.AddReviewerResult{
Input: rr.Input,
Reviewers: reviewers,
Ccs: ccs,
Error: rr.Error,
Confirm: rr.Confirm,
}, nil
}
// enumToString converts a gerritpb enum number to the string Gerrit's REST
// API expects, by stripping the proto enum's shared name prefix.
//
// m maps enum numbers to proto enum names (e.g. gerritpb.Notify_name). By
// proto convention the zero value is named "<PREFIX>_UNSPECIFIED"; the
// prefix length is derived from that name. Returns "" for the zero
// (unspecified) value.
func enumToString(v int32, m map[int32]string) string {
	if v == 0 {
		return ""
	}
	prefixLen := strings.LastIndex(m[0], "UNSPECIFIED")
	if prefixLen < 0 {
		// Zero value does not follow the "..._UNSPECIFIED" convention;
		// return the full name instead of panicking on m[v][-1:].
		return m[v]
	}
	return m[v][prefixLen:]
}
type reviewInput struct {
Message string `json:"message,omitempty"`
Labels map[string]int32 `json:"labels,omitempty"`
Tag string `json:"tag,omitempty"`
Notify string `json:"notify,omitempty"`
NotifyDetails notifyDetails `json:"notify_details,omitempty"`
OnBehalfOf int64 `json:"on_behalf_of,omitempty"`
Ready bool `json:"ready,omitempty"`
WorkInProgress bool `json:"work_in_progress,omitempty"`
AddToAttentionSet []*attentionSetInput `json:"add_to_attention_set,omitempty"`
RemoveFromAttentionSet []*attentionSetInput `json:"remove_from_attention_set,omitempty"`
IgnoreAutomaticAttentionSetRules bool `json:"ignore_automatic_attention_set_rules,omitempty"`
Reviewers []*reviewerInput `json:"reviewers,omitempty"`
}
type notifyInfo struct {
Accounts []int64 `json:"accounts,omitempty"`
}
type notifyDetails map[string]*notifyInfo
func toNotifyDetails(in *gerritpb.NotifyDetails) notifyDetails {
recipients := in.GetRecipients()
if len(recipients) == 0 {
return nil
}
res := make(map[string]*notifyInfo, len(recipients))
for _, recipient := range recipients {
if len(recipient.Info.GetAccounts()) == 0 {
continue
}
rt := recipient.RecipientType
if rt == gerritpb.NotifyDetails_RECIPIENT_TYPE_UNSPECIFIED {
// Must have been caught in validation.
panic(fmt.Errorf("must specify recipient type"))
}
rts := enumToString(int32(rt.Number()), gerritpb.NotifyDetails_RecipientType_name)
if ni, ok := res[rts]; !ok {
ni = ¬ifyInfo{
Accounts: make([]int64, len(recipient.Info.GetAccounts())),
}
for i, aid := range recipient.Info.GetAccounts() {
ni.Accounts[i] = aid
}
res[rts] = ni
} else {
ni.Accounts = append(ni.Accounts, recipient.Info.GetAccounts()...)
}
}
for _, ni := range res {
// Sort & dedup accounts in each notification bucket.
sort.Slice(ni.Accounts, func(i, j int) bool { return ni.Accounts[i] < ni.Accounts[j] })
n := 0
for i := 1; i < len(ni.Accounts); i++ {
if ni.Accounts[n] == ni.Accounts[i] {
continue
}
n++
ni.Accounts[n] = ni.Accounts[i]
}
ni.Accounts = ni.Accounts[:n+1]
}
return res
}
// attentionSetInput mirrors the Gerrit AttentionSetInput REST entity: one
// user to add to (or remove from) a change's attention set, with a reason.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#attention-set-input
type attentionSetInput struct {
	User   string `json:"user"`
	Reason string `json:"reason"`
	// FIX: was tagged `json:"string,omitempty"`, which serialized the
	// notify preference under the wrong key so Gerrit ignored it.
	Notify        string        `json:"notify,omitempty"`
	NotifyDetails notifyDetails `json:"notify_details,omitempty"`
}
func toAttentionSetInput(in *gerritpb.AttentionSetInput) *attentionSetInput {
return &attentionSetInput{
User: in.User,
Reason: in.Reason,
Notify: enumToString(int32(in.Notify.Number()), gerritpb.Notify_name),
NotifyDetails: toNotifyDetails(in.NotifyDetails),
}
}
func toAttentionSetInputs(in []*gerritpb.AttentionSetInput) []*attentionSetInput {
if len(in) == 0 {
return nil
}
out := make([]*attentionSetInput, len(in))
for i, x := range in {
out[i] = toAttentionSetInput(x)
}
return out
}
type reviewerInput struct {
Reviewer string `json:"reviewer"`
State string `json:"state,omitempty"`
}
func toReviewerInputs(in []*gerritpb.ReviewerInput) []*reviewerInput {
if len(in) == 0 {
return nil
}
out := make([]*reviewerInput, len(in))
for i, x := range in {
out[i] = &reviewerInput{
Reviewer: x.Reviewer,
State: enumToString(int32(x.State.Number()), gerritpb.ReviewerInput_State_name),
}
}
return out
}
type reviewResult struct {
Labels map[string]int32 `json:"labels,omitempty"`
Reviewers map[string]*addReviewerResult `json:"reviewers,omitempty"`
}
func (rr *reviewResult) ToProto() (*gerritpb.ReviewResult, error) {
result := &gerritpb.ReviewResult{
Labels: rr.Labels,
}
if len(rr.Reviewers) == 0 {
return result, nil
}
reviewers := make(map[string]*gerritpb.AddReviewerResult, len(rr.Reviewers))
for i, x := range rr.Reviewers {
reviewerDetails, err := x.ToProto()
if err != nil {
return nil, err
}
reviewers[i] = reviewerDetails
}
result.Reviewers = reviewers
return result, nil
}
type projectInfo struct {
ID string `json:"id,omitempty"`
Parent string `json:"parent,omitempty"`
Description string `json:"description,omitempty"`
State string `json:"state,omitempty"`
Branches map[string]string `json:"branches,omitempty"`
WebLinks []*gerritpb.WebLinkInfo `json:"web_links,omitempty"`
}
func (pi *projectInfo) ToProto() (*gerritpb.ProjectInfo, error) {
stateEnumVal := "PROJECT_STATE_" + pi.State
stateEnumNum, found := gerritpb.ProjectInfo_State_value[stateEnumVal]
if !found {
return nil, errors.Reason("no State enum value for %q", pi.State).Err()
}
projectName, err := url.QueryUnescape(pi.ID)
if err != nil {
return nil, errors.Annotate(err, "decoding name").Err()
}
absoluteRefs := make(map[string]string, len(pi.Branches))
for ref, sha1 := range pi.Branches {
absoluteRefs[branchToRef(ref)] = sha1
}
return &gerritpb.ProjectInfo{
Name: projectName,
Parent: pi.Parent,
Description: pi.Description,
State: gerritpb.ProjectInfo_State(stateEnumNum),
Refs: absoluteRefs,
WebLinks: pi.WebLinks,
}, nil
}
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#submit-info
type submitInfo struct {
Status string `json:"status"`
}
func (si *submitInfo) ToProto() *gerritpb.SubmitInfo {
return &gerritpb.SubmitInfo{
Status: gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[si.Status]),
}
}
type submitRequirementResultInfo struct {
Name string `json:"name"`
Description string `json:"description"`
Status string `json:"status"`
IsLegacy bool `json:"is_legacy"`
ApplicabilityExpressionResult *submitRequirementExpressionInfo `json:"applicability_expression_result"`
SubmittabilityExpressionResult *submitRequirementExpressionInfo `json:"submittability_expression_result"`
OverrideExpressionResult *submitRequirementExpressionInfo `json:"override_expression_result"`
}
func (ri *submitRequirementResultInfo) ToProto() (*gerritpb.SubmitRequirementResultInfo, error) {
numVal, found := gerritpb.SubmitRequirementResultInfo_Status_value[ri.Status]
if !found {
return nil, errors.Reason("no Status enum value for %q", ri.Status).Err()
}
return &gerritpb.SubmitRequirementResultInfo{
Name: ri.Name,
Description: ri.Description,
Status: gerritpb.SubmitRequirementResultInfo_Status(numVal),
IsLegacy: ri.IsLegacy,
ApplicabilityExpressionResult: ri.ApplicabilityExpressionResult.ToProto(),
SubmittabilityExpressionResult: ri.SubmittabilityExpressionResult.ToProto(),
OverrideExpressionResult: ri.OverrideExpressionResult.ToProto(),
}, nil
}
type submitRequirementExpressionInfo struct {
Expression string `json:"expression"`
Fulfilled bool `json:"fulfilled"`
PassingAtoms []string `json:"passing_atoms"`
FailingAtoms []string `json:"failing_atoms"`
ErrorMessage string `json:"error_message"`
}
func (ei *submitRequirementExpressionInfo) ToProto() *gerritpb.SubmitRequirementExpressionInfo {
if ei == nil {
return nil
}
return &gerritpb.SubmitRequirementExpressionInfo{
Expression: ei.Expression,
Fulfilled: ei.Fulfilled,
PassingAtoms: ei.PassingAtoms,
FailingAtoms: ei.FailingAtoms,
ErrorMessage: ei.ErrorMessage,
}
}
type metaDiff struct {
Added *changeInfo `json:"added"`
Removed *changeInfo `json:"removed"`
OldChangeInfo *changeInfo `json:"old_change_info"`
NewChangeInfo *changeInfo `json:"new_change_info"`
}
func (md *metaDiff) ToProto() (*gerritpb.MetaDiff, error) {
var resp gerritpb.MetaDiff
var ci *gerritpb.ChangeInfo
var err error
if ci, err = md.Added.ToProto(); err != nil {
return nil, err
}
resp.Added = ci
if ci, err = md.Removed.ToProto(); err != nil {
return nil, err
}
resp.Removed = ci
if ci, err = md.OldChangeInfo.ToProto(); err != nil {
return nil, err
}
resp.OldChangeInfo = ci
if ci, err = md.NewChangeInfo.ToProto(); err != nil {
return nil, err
}
resp.NewChangeInfo = ci
return &resp, nil
}
| {
if cmi == nil {
return nil
}
return &gerritpb.ChangeMessageInfo{
Id: cmi.ID,
Author: cmi.Author.ToProto(),
RealAuthor: cmi.RealAuthor.ToProto(),
Date: timestamppb.New(cmi.Date.Time),
Message: cmi.Message,
Tag: cmi.Tag,
}
} | identifier_body |
json.go | // Copyright 2020 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gerrit
import (
"fmt"
"net/url"
"sort"
"strconv"
"strings"
"google.golang.org/protobuf/types/known/timestamppb"
"go.chromium.org/luci/common/errors"
gerritpb "go.chromium.org/luci/common/proto/gerrit"
)
// This file contains code related to JSON representations of messages that are
// used for requests to the Gerrit REST API, and unmarshalling code to convert
// from the JSON representations to protos defined in `gerritpb`.
//
// Each of these structs corresponds to an entity described at
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#json-entities
// and also to a message in `gerritpb`, and each has a unmarshalling method
// called ToProto.
// timestamp implements customized JSON marshal/unmarshal behavior that matches
// the timestamp format used in Gerrit.
type accountInfo struct {
Name string `json:"name,omitempty"`
Email string `json:"email,omitempty"`
SecondaryEmails []string `json:"secondary_emails,omitempty"`
Username string `json:"username,omitempty"`
AccountID int64 `json:"_account_id,omitempty"`
}
func (a *accountInfo) ToProto() *gerritpb.AccountInfo {
if a == nil {
return nil
}
return &gerritpb.AccountInfo{
Name: a.Name,
Email: a.Email,
SecondaryEmails: a.SecondaryEmails,
Username: a.Username,
AccountId: a.AccountID,
}
}
type ownerInfo struct {
Account accountInfo `json:"account,omitempty"`
}
// changeInfo represents JSON for a gerritpb.ChangeInfo on the wire.
// See https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-info
type changeInfo struct {
	Number   int64        `json:"_number"`
	Owner    *accountInfo `json:"owner"`
	Project  string       `json:"project"`
	Branch   string       `json:"branch"`
	ChangeID string       `json:"change_id"`
	// Reviewers groups accounts by reviewer state
	// ("REVIEWER", "CC", "REMOVED").
	Reviewers map[string][]*accountInfo `json:"reviewers"`
	Hashtags  []string                  `json:"hashtags"`
	Subject   string                    `json:"subject"`
	// json.Unmarshal cannot convert enum string to value,
	// so this field is handled specially in ToProto.
	Status             string                         `json:"status"`
	CurrentRevision    string                         `json:"current_revision"`
	Revisions          map[string]*revisionInfo       `json:"revisions"`
	Labels             map[string]*labelInfo          `json:"labels"`
	Messages           []changeMessageInfo            `json:"messages"`
	Requirements       []requirement                  `json:"requirements"`
	SubmitRequirements []*submitRequirementResultInfo `json:"submit_requirements"`
	Created            Timestamp                      `json:"created"`
	Updated            Timestamp                      `json:"updated"`
	Submitted          Timestamp                      `json:"submitted"`
	Submittable        bool                           `json:"submittable,omitempty"`
	IsPrivate          bool                           `json:"is_private,omitempty"`
	MetaRevID          string                         `json:"meta_rev_id,omitempty"`
	RevertOf           int64                          `json:"revert_of,omitempty"`
	CherryPickOfChange int64                          `json:"cherry_pick_of_change,omitempty"`
	// MoreChanges may be set on the last change in a response to a query for
	// changes, but this is not a property of the change itself and is not
	// needed in gerritpb.ChangeInfo.
	MoreChanges bool `json:"_more_changes"`
}
// ToProto converts this changeInfo to a gerritpb.ChangeInfo.
//
// The Status string is mapped through gerritpb.ChangeStatus_value; unknown
// statuses map to the zero enum value. Returns an error if any nested
// requirement or submit requirement fails to convert.
func (ci *changeInfo) ToProto() (*gerritpb.ChangeInfo, error) {
	ret := &gerritpb.ChangeInfo{
		Number:             ci.Number,
		Owner:              ci.Owner.ToProto(),
		Project:            ci.Project,
		Ref:                branchToRef(ci.Branch),
		Subject:            ci.Subject,
		Status:             gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[ci.Status]),
		Hashtags:           ci.Hashtags,
		CurrentRevision:    ci.CurrentRevision,
		Submittable:        ci.Submittable,
		IsPrivate:          ci.IsPrivate,
		MetaRevId:          ci.MetaRevID,
		Created:            timestamppb.New(ci.Created.Time),
		Updated:            timestamppb.New(ci.Updated.Time),
		Submitted:          timestamppb.New(ci.Submitted.Time),
		RevertOf:           ci.RevertOf,
		CherryPickOfChange: ci.CherryPickOfChange,
		Branch:             ci.Branch,
	}
	if ci.Revisions != nil {
		ret.Revisions = make(map[string]*gerritpb.RevisionInfo, len(ci.Revisions))
		for rev, info := range ci.Revisions {
			ret.Revisions[rev] = info.ToProto()
		}
	}
	if ci.Labels != nil {
		ret.Labels = make(map[string]*gerritpb.LabelInfo, len(ci.Labels))
		for label, info := range ci.Labels {
			ret.Labels[label] = info.ToProto()
		}
	}
	if ci.Messages != nil {
		ret.Messages = make([]*gerritpb.ChangeMessageInfo, len(ci.Messages))
		for i, msg := range ci.Messages {
			ret.Messages[i] = msg.ToProto()
		}
	}
	var err error
	if ci.Requirements != nil {
		ret.Requirements = make([]*gerritpb.Requirement, len(ci.Requirements))
		for i, r := range ci.Requirements {
			if ret.Requirements[i], err = r.ToProto(); err != nil {
				return nil, err
			}
		}
	}
	if ci.SubmitRequirements != nil {
		ret.SubmitRequirements = make([]*gerritpb.SubmitRequirementResultInfo,
			len(ci.SubmitRequirements))
		for i, r := range ci.SubmitRequirements {
			if ret.SubmitRequirements[i], err = r.ToProto(); err != nil {
				return nil, err
			}
		}
	}
	if ci.Reviewers != nil {
		ret.Reviewers = &gerritpb.ReviewerStatusMap{}
		if accs, exist := ci.Reviewers["REVIEWER"]; exist {
			ret.Reviewers.Reviewers = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				ret.Reviewers.Reviewers[i] = acc.ToProto()
			}
		}
		if accs, exist := ci.Reviewers["CC"]; exist {
			ret.Reviewers.Ccs = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				ret.Reviewers.Ccs[i] = acc.ToProto()
			}
		}
		if accs, exist := ci.Reviewers["REMOVED"]; exist {
			ret.Reviewers.Removed = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				// BUG FIX: this loop previously wrote into Ccs, leaving
				// Removed empty and potentially panicking (or clobbering
				// CC entries) when the slice lengths differed.
				ret.Reviewers.Removed[i] = acc.ToProto()
			}
		}
	}
	return ret, nil
}
// labelInfo mirrors the Gerrit REST LabelInfo JSON entity; converted to a
// gerritpb.LabelInfo via ToProto.
type labelInfo struct {
	Optional     bool            `json:"optional"`
	Approved     *accountInfo    `json:"approved"`
	Rejected     *accountInfo    `json:"rejected"`
	Recommended  *accountInfo    `json:"recommended"`
	Disliked     *accountInfo    `json:"disliked"`
	Blocking     bool            `json:"blocking"`
	Value        int32           `json:"value"`
	DefaultValue int32           `json:"default_value"`
	All          []*approvalInfo `json:"all"`
	// Values maps label scores (string keys, possibly space-padded) to their
	// descriptions; keys are parsed into int32 in ToProto.
	Values map[string]string `json:"values"`
}
// ToProto converts this labelInfo to a gerritpb.LabelInfo.
//
// Value keys that do not parse as integers are skipped silently, for
// consistency with other parts of the code.
func (li *labelInfo) ToProto() *gerritpb.LabelInfo {
	out := &gerritpb.LabelInfo{
		Optional:     li.Optional,
		Approved:     li.Approved.ToProto(),
		Rejected:     li.Rejected.ToProto(),
		Recommended:  li.Recommended.ToProto(),
		Disliked:     li.Disliked.ToProto(),
		Blocking:     li.Blocking,
		Value:        li.Value,
		DefaultValue: li.DefaultValue,
	}
	if n := len(li.All); n > 0 {
		out.All = make([]*gerritpb.ApprovalInfo, n)
		for i := range li.All {
			out.All[i] = li.All[i].ToProto()
		}
	}
	if li.Values != nil {
		out.Values = make(map[int32]string, len(li.Values))
		for raw, desc := range li.Values {
			// Error is silently ignored for consistency with other parts of code.
			if parsed, err := strconv.ParseInt(strings.TrimSpace(raw), 10, 32); err == nil {
				out.Values[int32(parsed)] = desc
			}
		}
	}
	return out
}
// approvalInfo mirrors the Gerrit REST ApprovalInfo JSON entity.
// It embeds accountInfo because on the wire the voter's account fields are
// flattened into this object.
type approvalInfo struct {
	accountInfo
	Value                int32                     `json:"value"`
	PermittedVotingRange *gerritpb.VotingRangeInfo `json:"permitted_voting_range"`
	Date                 Timestamp                 `json:"date"`
	Tag                  string                    `json:"tag"`
	PostSubmit           bool                      `json:"post_submit"`
}
// ToProto converts this approvalInfo to a gerritpb.ApprovalInfo; the
// embedded account becomes the User field.
func (ai *approvalInfo) ToProto() *gerritpb.ApprovalInfo {
	out := &gerritpb.ApprovalInfo{}
	out.User = ai.accountInfo.ToProto()
	out.Value = ai.Value
	out.PermittedVotingRange = ai.PermittedVotingRange
	out.Date = timestamppb.New(ai.Date.Time)
	out.Tag = ai.Tag
	out.PostSubmit = ai.PostSubmit
	return out
}
// changeMessageInfo mirrors the Gerrit REST ChangeMessageInfo JSON entity.
type changeMessageInfo struct {
	ID         string       `json:"id"`
	Author     *accountInfo `json:"author"`
	RealAuthor *accountInfo `json:"real_author"`
	Date       Timestamp    `json:"date"`
	Message    string       `json:"message"`
	Tag        string       `json:"tag"`
}
// ToProto converts this changeMessageInfo to a gerritpb.ChangeMessageInfo.
//
// A nil receiver yields a nil proto.
func (cmi *changeMessageInfo) ToProto() *gerritpb.ChangeMessageInfo {
	if cmi == nil {
		return nil
	}
	out := &gerritpb.ChangeMessageInfo{
		Id:      cmi.ID,
		Message: cmi.Message,
		Tag:     cmi.Tag,
	}
	out.Author = cmi.Author.ToProto()
	out.RealAuthor = cmi.RealAuthor.ToProto()
	out.Date = timestamppb.New(cmi.Date.Time)
	return out
}
// requirement mirrors the Gerrit REST Requirement JSON entity.
// Status is an enum string; it is mapped to the proto enum in ToProto.
type requirement struct {
	Status       string `json:"status"`
	FallbackText string `json:"fallback_text"`
	Type         string `json:"type"`
}
// ToProto converts this requirement to a gerritpb.Requirement.
//
// Returns an error when the status string does not correspond to a known
// Requirement_Status enum value.
func (r *requirement) ToProto() (*gerritpb.Requirement, error) {
	enumName := "REQUIREMENT_STATUS_" + r.Status
	enumNum, ok := gerritpb.Requirement_Status_value[enumName]
	if !ok {
		return nil, errors.Reason("no Status enum value for %q", r.Status).Err()
	}
	out := &gerritpb.Requirement{}
	out.Status = gerritpb.Requirement_Status(enumNum)
	out.FallbackText = r.FallbackText
	out.Type = r.Type
	return out, nil
}
// fileInfo mirrors the Gerrit REST FileInfo JSON entity (per-file diff stats).
type fileInfo struct {
	LinesInserted int32 `json:"lines_inserted"`
	LinesDeleted  int32 `json:"lines_deleted"`
	SizeDelta     int64 `json:"size_delta"`
	Size          int64 `json:"size"`
}
// ToProto converts this fileInfo to a gerritpb.FileInfo.
func (fi *fileInfo) ToProto() *gerritpb.FileInfo {
	out := new(gerritpb.FileInfo)
	out.LinesInserted = fi.LinesInserted
	out.LinesDeleted = fi.LinesDeleted
	out.SizeDelta = fi.SizeDelta
	out.Size = fi.Size
	return out
}
// revisionInfo mirrors the Gerrit REST RevisionInfo JSON entity
// (one patchset of a change).
type revisionInfo struct {
	// Kind is an enum string (e.g. "REWORK"); mapped specially in ToProto.
	Kind        string               `json:"kind"`
	Number      int                  `json:"_number"`
	Uploader    *accountInfo         `json:"uploader"`
	Ref         string               `json:"ref"`
	Created     Timestamp            `json:"created"`
	Description string               `json:"description"`
	Files       map[string]*fileInfo `json:"files"`
	Commit      *commitInfo          `json:"commit"`
}
func (ri *revisionInfo) ToProto() *gerritpb.RevisionInfo {
ret := &gerritpb.RevisionInfo{
Number: int32(ri.Number),
Uploader: ri.Uploader.ToProto(),
Ref: ri.Ref,
Created: timestamppb.New(ri.Created.Time),
Description: ri.Description,
} | if ri.Files != nil {
ret.Files = make(map[string]*gerritpb.FileInfo, len(ri.Files))
for i, fi := range ri.Files {
ret.Files[i] = fi.ToProto()
}
}
if ri.Commit != nil {
ret.Commit = ri.Commit.ToProto()
}
return ret
}
// gitPersonInfo mirrors the Gerrit REST GitPersonInfo JSON entity
// (the author/committer identity of a git commit).
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#git-person-info
type gitPersonInfo struct {
	Name  string `json:"name"`
	Email string `json:"email"`
}
// ToProto converts this gitPersonInfo to a gerritpb.GitPersonInfo.
func (g *gitPersonInfo) ToProto() *gerritpb.GitPersonInfo {
	out := new(gerritpb.GitPersonInfo)
	out.Name = g.Name
	out.Email = g.Email
	return out
}
// commitInfo mirrors the Gerrit REST CommitInfo JSON entity.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#commit-info
type commitInfo struct {
	Commit    string         `json:"commit"`
	Parents   []*commitInfo  `json:"parents"`
	Author    *gitPersonInfo `json:"author"`
	Committer *gitPersonInfo `json:"committer"`
	Subject   string         `json:"subject"`
	Message   string         `json:"message"`
}
// ToProto converts this commitInfo to a gerritpb.CommitInfo.
//
// Only the parent commit IDs are carried over into Parents.
func (c *commitInfo) ToProto() *gerritpb.CommitInfo {
	out := &gerritpb.CommitInfo{
		Id:      c.Commit,
		Message: c.Message,
		Author:  c.Author.ToProto(),
		Parents: make([]*gerritpb.CommitInfo_Parent, len(c.Parents)),
		// TODO(tandrii): support other fields once added.
	}
	for i := range c.Parents {
		out.Parents[i] = &gerritpb.CommitInfo_Parent{Id: c.Parents[i].Commit}
	}
	return out
}
// relatedChangeAndCommitInfo mirrors the Gerrit REST RelatedChangeAndCommitInfo
// JSON entity.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#related-change-and-commit-info
type relatedChangeAndCommitInfo struct {
	Project         string     `json:"project"`
	ChangeID        string     `json:"change_id"`
	Commit          commitInfo `json:"commit"`
	Number          int64      `json:"_change_number"`
	Patchset        int64      `json:"_revision_number"`
	CurrentPatchset int64      `json:"_current_revision_number"`
	// json.Unmarshal cannot convert enum string to value,
	// so this field is handled specially in ToProto.
	Status string `json:"status"`
}
// ToProto converts this entity to a GetRelatedChangesResponse_ChangeAndCommit.
// Unknown status strings map to the zero ChangeStatus.
func (r *relatedChangeAndCommitInfo) ToProto() *gerritpb.GetRelatedChangesResponse_ChangeAndCommit {
	out := &gerritpb.GetRelatedChangesResponse_ChangeAndCommit{}
	out.Project = r.Project
	out.Number = r.Number
	out.Patchset = r.Patchset
	out.CurrentPatchset = r.CurrentPatchset
	out.Commit = r.Commit.ToProto()
	out.Status = gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[r.Status])
	return out
}
// mergeableInfo mirrors the Gerrit REST MergeableInfo JSON entity.
// SubmitType and Strategy are enum strings mapped in ToProto.
type mergeableInfo struct {
	SubmitType    string   `json:"submit_type"`
	Strategy      string   `json:"strategy"`
	Mergeable     bool     `json:"mergeable"`
	CommitMerged  bool     `json:"commit_merged"`
	ContentMerged bool     `json:"content_merged"`
	Conflicts     []string `json:"conflicts"`
	MergeableInto []string `json:"mergeable_into"`
}
// ToProto converts this mergeableInfo to a gerritpb.MergeableInfo.
//
// Returns an error when the strategy or submit type is not a recognized
// enum value.
func (mi *mergeableInfo) ToProto() (*gerritpb.MergeableInfo, error) {
	// Convert something like 'simple-two-way-in-core' to 'SIMPLE_TWO_WAY_IN_CORE'.
	strategyName := strings.ReplaceAll(strings.ToUpper(mi.Strategy), "-", "_")
	strategyNum, ok := gerritpb.MergeableStrategy_value[strategyName]
	if !ok {
		return nil, errors.Reason("no MergeableStrategy enum value for %q", strategyName).Err()
	}
	submitTypeNum, ok := gerritpb.MergeableInfo_SubmitType_value[mi.SubmitType]
	if !ok {
		return nil, errors.Reason("no SubmitType enum value for %q", mi.SubmitType).Err()
	}
	return &gerritpb.MergeableInfo{
		SubmitType:    gerritpb.MergeableInfo_SubmitType(submitTypeNum),
		Strategy:      gerritpb.MergeableStrategy(strategyNum),
		Mergeable:     mi.Mergeable,
		CommitMerged:  mi.CommitMerged,
		ContentMerged: mi.ContentMerged,
		Conflicts:     mi.Conflicts,
		MergeableInto: mi.MergeableInto,
	}, nil
}
// addReviewerRequest is the JSON request body for adding a reviewer to
// a change (Gerrit ReviewerInput entity).
type addReviewerRequest struct {
	Reviewer  string `json:"reviewer"`
	State     string `json:"state,omitempty"`
	Confirmed bool   `json:"confirmed,omitempty"`
	Notify    string `json:"notify,omitempty"`
}
// reviewerInfo mirrors the Gerrit REST ReviewerInfo JSON entity.
type reviewerInfo struct {
	Name            string   `json:"name,omitempty"`
	Email           string   `json:"email,omitempty"`
	SecondaryEmails []string `json:"secondary_emails,omitempty"`
	Username        string   `json:"username,omitempty"`
	// Approvals maps label names to vote strings such as " +1";
	// parsed into integers in ToProtoReviewerInfo.
	Approvals map[string]string `json:"approvals,omitempty"`
	AccountID int64             `json:"_account_id,omitempty"`
}
// ToProtoReviewerInfo converts this reviewerInfo to a gerritpb.ReviewerInfo.
//
// Approval scores arrive as strings such as " +1"; leading spaces are
// stripped and the remainder is parsed as a 32-bit integer.
func (ri *reviewerInfo) ToProtoReviewerInfo() (*gerritpb.ReviewerInfo, error) {
	approvals := make(map[string]int32, 0)
	for label, raw := range ri.Approvals {
		parsed, err := strconv.ParseInt(strings.TrimLeft(raw, " "), 10, 32)
		if err != nil {
			return nil, errors.Annotate(err, "parsing approvals").Err()
		}
		approvals[label] = int32(parsed)
	}
	acct := &gerritpb.AccountInfo{
		Name:            ri.Name,
		Email:           ri.Email,
		SecondaryEmails: ri.SecondaryEmails,
		Username:        ri.Username,
		AccountId:       ri.AccountID,
	}
	return &gerritpb.ReviewerInfo{
		Account:   acct,
		Approvals: approvals,
	}, nil
}
// addReviewerResult mirrors the Gerrit REST AddReviewerResult JSON entity.
type addReviewerResult struct {
	Input     string         `json:"input"`
	Reviewers []reviewerInfo `json:"reviewers,omitempty"`
	Ccs       []reviewerInfo `json:"ccs,omitempty"`
	Error     string         `json:"error,omitempty"`
	Confirm   bool           `json:"confirm,omitempty"`
}
// ToProto converts this addReviewerResult to a gerritpb.AddReviewerResult.
//
// Returns the first reviewer conversion error encountered.
func (rr *addReviewerResult) ToProto() (*gerritpb.AddReviewerResult, error) {
	out := &gerritpb.AddReviewerResult{
		Input:     rr.Input,
		Error:     rr.Error,
		Confirm:   rr.Confirm,
		Reviewers: make([]*gerritpb.ReviewerInfo, 0),
		Ccs:       make([]*gerritpb.ReviewerInfo, 0),
	}
	for i := range rr.Reviewers {
		converted, err := rr.Reviewers[i].ToProtoReviewerInfo()
		if err != nil {
			return nil, errors.Annotate(err, "converting reviewerInfo").Err()
		}
		out.Reviewers = append(out.Reviewers, converted)
	}
	for i := range rr.Ccs {
		converted, err := rr.Ccs[i].ToProtoReviewerInfo()
		if err != nil {
			return nil, errors.Annotate(err, "converting reviewerInfo").Err()
		}
		out.Ccs = append(out.Ccs, converted)
	}
	return out, nil
}
// enumToString strips the enum type's common prefix from a proto enum value
// name, e.g. "NOTIFY_ALL" -> "ALL".
//
// v == 0 (the *_UNSPECIFIED value) maps to the empty string. The prefix
// length is derived from the position of "UNSPECIFIED" in the zero value's
// name.
func enumToString(v int32, m map[int32]string) string {
	if v == 0 {
		return ""
	}
	prefix := strings.LastIndex(m[0], "UNSPECIFIED")
	name := m[v]
	return name[prefix:]
}
// reviewInput is the JSON request body for the SetReview RPC
// (Gerrit ReviewInput entity).
type reviewInput struct {
	Message                          string               `json:"message,omitempty"`
	Labels                           map[string]int32     `json:"labels,omitempty"`
	Tag                              string               `json:"tag,omitempty"`
	Notify                           string               `json:"notify,omitempty"`
	NotifyDetails                    notifyDetails        `json:"notify_details,omitempty"`
	OnBehalfOf                       int64                `json:"on_behalf_of,omitempty"`
	Ready                            bool                 `json:"ready,omitempty"`
	WorkInProgress                   bool                 `json:"work_in_progress,omitempty"`
	AddToAttentionSet                []*attentionSetInput `json:"add_to_attention_set,omitempty"`
	RemoveFromAttentionSet           []*attentionSetInput `json:"remove_from_attention_set,omitempty"`
	IgnoreAutomaticAttentionSetRules bool                 `json:"ignore_automatic_attention_set_rules,omitempty"`
	Reviewers                        []*reviewerInput     `json:"reviewers,omitempty"`
}
// notifyInfo lists the account IDs to notify for one recipient type.
type notifyInfo struct {
	Accounts []int64 `json:"accounts,omitempty"`
}
// notifyDetails maps a recipient type (e.g. "TO", "CC") to the accounts to
// notify; this is the wire form of gerritpb.NotifyDetails.
type notifyDetails map[string]*notifyInfo
func toNotifyDetails(in *gerritpb.NotifyDetails) notifyDetails {
recipients := in.GetRecipients()
if len(recipients) == 0 {
return nil
}
res := make(map[string]*notifyInfo, len(recipients))
for _, recipient := range recipients {
if len(recipient.Info.GetAccounts()) == 0 {
continue
}
rt := recipient.RecipientType
if rt == gerritpb.NotifyDetails_RECIPIENT_TYPE_UNSPECIFIED {
// Must have been caught in validation.
panic(fmt.Errorf("must specify recipient type"))
}
rts := enumToString(int32(rt.Number()), gerritpb.NotifyDetails_RecipientType_name)
if ni, ok := res[rts]; !ok {
ni = ¬ifyInfo{
Accounts: make([]int64, len(recipient.Info.GetAccounts())),
}
for i, aid := range recipient.Info.GetAccounts() {
ni.Accounts[i] = aid
}
res[rts] = ni
} else {
ni.Accounts = append(ni.Accounts, recipient.Info.GetAccounts()...)
}
}
for _, ni := range res {
// Sort & dedup accounts in each notification bucket.
sort.Slice(ni.Accounts, func(i, j int) bool { return ni.Accounts[i] < ni.Accounts[j] })
n := 0
for i := 1; i < len(ni.Accounts); i++ {
if ni.Accounts[n] == ni.Accounts[i] {
continue
}
n++
ni.Accounts[n] = ni.Accounts[i]
}
ni.Accounts = ni.Accounts[:n+1]
}
return res
}
// attentionSetInput is the JSON request body for attention-set updates
// (Gerrit AttentionSetInput entity).
type attentionSetInput struct {
	User   string `json:"user"`
	Reason string `json:"reason"`
	// BUG FIX: the tag was `json:"string,omitempty"`, so the notify setting
	// was serialized under the wrong key and ignored by Gerrit.
	Notify        string        `json:"notify,omitempty"`
	NotifyDetails notifyDetails `json:"notify_details,omitempty"`
}
// toAttentionSetInput converts a gerritpb.AttentionSetInput to its JSON
// wire form.
func toAttentionSetInput(in *gerritpb.AttentionSetInput) *attentionSetInput {
	out := &attentionSetInput{
		User:   in.User,
		Reason: in.Reason,
	}
	out.Notify = enumToString(int32(in.Notify.Number()), gerritpb.Notify_name)
	out.NotifyDetails = toNotifyDetails(in.NotifyDetails)
	return out
}
// toAttentionSetInputs converts a slice of gerritpb.AttentionSetInput;
// an empty input yields nil.
func toAttentionSetInputs(in []*gerritpb.AttentionSetInput) []*attentionSetInput {
	if len(in) == 0 {
		return nil
	}
	out := make([]*attentionSetInput, 0, len(in))
	for _, x := range in {
		out = append(out, toAttentionSetInput(x))
	}
	return out
}
// reviewerInput is the JSON form of gerritpb.ReviewerInput used inside a
// review request.
type reviewerInput struct {
	Reviewer string `json:"reviewer"`
	State    string `json:"state,omitempty"`
}
// toReviewerInputs converts a slice of gerritpb.ReviewerInput to its JSON
// wire form; an empty input yields nil.
func toReviewerInputs(in []*gerritpb.ReviewerInput) []*reviewerInput {
	if len(in) == 0 {
		return nil
	}
	out := make([]*reviewerInput, 0, len(in))
	for _, x := range in {
		out = append(out, &reviewerInput{
			Reviewer: x.Reviewer,
			State:    enumToString(int32(x.State.Number()), gerritpb.ReviewerInput_State_name),
		})
	}
	return out
}
// reviewResult mirrors the Gerrit REST ReviewResult JSON entity.
type reviewResult struct {
	Labels    map[string]int32              `json:"labels,omitempty"`
	Reviewers map[string]*addReviewerResult `json:"reviewers,omitempty"`
}
// ToProto converts this reviewResult to a gerritpb.ReviewResult.
//
// Returns the first reviewer conversion error encountered.
func (rr *reviewResult) ToProto() (*gerritpb.ReviewResult, error) {
	result := &gerritpb.ReviewResult{Labels: rr.Labels}
	if len(rr.Reviewers) == 0 {
		return result, nil
	}
	result.Reviewers = make(map[string]*gerritpb.AddReviewerResult, len(rr.Reviewers))
	for name, r := range rr.Reviewers {
		converted, err := r.ToProto()
		if err != nil {
			return nil, err
		}
		result.Reviewers[name] = converted
	}
	return result, nil
}
// projectInfo mirrors the Gerrit REST ProjectInfo JSON entity.
type projectInfo struct {
	// ID is the URL-encoded project name; decoded in ToProto.
	ID          string                  `json:"id,omitempty"`
	Parent      string                  `json:"parent,omitempty"`
	Description string                  `json:"description,omitempty"`
	State       string                  `json:"state,omitempty"`
	Branches    map[string]string       `json:"branches,omitempty"`
	WebLinks    []*gerritpb.WebLinkInfo `json:"web_links,omitempty"`
}
// ToProto converts this projectInfo to a gerritpb.ProjectInfo.
//
// The project ID is URL-unescaped into Name, and branch names are expanded
// into fully-qualified refs. Returns an error on an unknown state or an
// undecodable name.
func (pi *projectInfo) ToProto() (*gerritpb.ProjectInfo, error) {
	stateNum, ok := gerritpb.ProjectInfo_State_value["PROJECT_STATE_"+pi.State]
	if !ok {
		return nil, errors.Reason("no State enum value for %q", pi.State).Err()
	}
	name, err := url.QueryUnescape(pi.ID)
	if err != nil {
		return nil, errors.Annotate(err, "decoding name").Err()
	}
	refs := make(map[string]string, len(pi.Branches))
	for branch, sha1 := range pi.Branches {
		refs[branchToRef(branch)] = sha1
	}
	return &gerritpb.ProjectInfo{
		Name:        name,
		Parent:      pi.Parent,
		Description: pi.Description,
		State:       gerritpb.ProjectInfo_State(stateNum),
		Refs:        refs,
		WebLinks:    pi.WebLinks,
	}, nil
}
// submitInfo mirrors the Gerrit REST SubmitInfo JSON entity.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#submit-info
type submitInfo struct {
	Status string `json:"status"`
}
// ToProto converts this submitInfo to a gerritpb.SubmitInfo.
// Unknown status strings map to the zero ChangeStatus.
func (si *submitInfo) ToProto() *gerritpb.SubmitInfo {
	status := gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[si.Status])
	return &gerritpb.SubmitInfo{Status: status}
}
// submitRequirementResultInfo mirrors the Gerrit REST
// SubmitRequirementResultInfo JSON entity.
type submitRequirementResultInfo struct {
	Name        string `json:"name"`
	Description string `json:"description"`
	// Status is an enum string; mapped to the proto enum in ToProto.
	Status                         string                           `json:"status"`
	IsLegacy                       bool                             `json:"is_legacy"`
	ApplicabilityExpressionResult  *submitRequirementExpressionInfo `json:"applicability_expression_result"`
	SubmittabilityExpressionResult *submitRequirementExpressionInfo `json:"submittability_expression_result"`
	OverrideExpressionResult       *submitRequirementExpressionInfo `json:"override_expression_result"`
}
// ToProto converts this submitRequirementResultInfo to its proto form.
//
// Returns an error when Status is not a known enum value.
func (ri *submitRequirementResultInfo) ToProto() (*gerritpb.SubmitRequirementResultInfo, error) {
	statusNum, ok := gerritpb.SubmitRequirementResultInfo_Status_value[ri.Status]
	if !ok {
		return nil, errors.Reason("no Status enum value for %q", ri.Status).Err()
	}
	out := &gerritpb.SubmitRequirementResultInfo{
		Name:        ri.Name,
		Description: ri.Description,
		Status:      gerritpb.SubmitRequirementResultInfo_Status(statusNum),
		IsLegacy:    ri.IsLegacy,
	}
	out.ApplicabilityExpressionResult = ri.ApplicabilityExpressionResult.ToProto()
	out.SubmittabilityExpressionResult = ri.SubmittabilityExpressionResult.ToProto()
	out.OverrideExpressionResult = ri.OverrideExpressionResult.ToProto()
	return out, nil
}
// submitRequirementExpressionInfo mirrors the Gerrit REST
// SubmitRequirementExpressionInfo JSON entity.
type submitRequirementExpressionInfo struct {
	Expression   string   `json:"expression"`
	Fulfilled    bool     `json:"fulfilled"`
	PassingAtoms []string `json:"passing_atoms"`
	FailingAtoms []string `json:"failing_atoms"`
	ErrorMessage string   `json:"error_message"`
}
// ToProto converts this expression info to its proto form; a nil receiver
// maps to a nil proto.
func (ei *submitRequirementExpressionInfo) ToProto() *gerritpb.SubmitRequirementExpressionInfo {
	if ei == nil {
		return nil
	}
	out := new(gerritpb.SubmitRequirementExpressionInfo)
	out.Expression = ei.Expression
	out.Fulfilled = ei.Fulfilled
	out.PassingAtoms = ei.PassingAtoms
	out.FailingAtoms = ei.FailingAtoms
	out.ErrorMessage = ei.ErrorMessage
	return out
}
// metaDiff mirrors the Gerrit REST MetaDiff JSON entity (the difference
// between two change meta revisions).
type metaDiff struct {
	Added         *changeInfo `json:"added"`
	Removed       *changeInfo `json:"removed"`
	OldChangeInfo *changeInfo `json:"old_change_info"`
	NewChangeInfo *changeInfo `json:"new_change_info"`
}
func (md *metaDiff) ToProto() (*gerritpb.MetaDiff, error) {
var resp gerritpb.MetaDiff
var ci *gerritpb.ChangeInfo
var err error
if ci, err = md.Added.ToProto(); err != nil {
return nil, err
}
resp.Added = ci
if ci, err = md.Removed.ToProto(); err != nil {
return nil, err
}
resp.Removed = ci
if ci, err = md.OldChangeInfo.ToProto(); err != nil {
return nil, err
}
resp.OldChangeInfo = ci
if ci, err = md.NewChangeInfo.ToProto(); err != nil {
return nil, err
}
resp.NewChangeInfo = ci
return &resp, nil
} | if v, ok := gerritpb.RevisionInfo_Kind_value[ri.Kind]; ok {
ret.Kind = gerritpb.RevisionInfo_Kind(v)
} | random_line_split |
// Copyright 2020 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gerrit
import (
"fmt"
"net/url"
"sort"
"strconv"
"strings"
"google.golang.org/protobuf/types/known/timestamppb"
"go.chromium.org/luci/common/errors"
gerritpb "go.chromium.org/luci/common/proto/gerrit"
)
// This file contains code related to JSON representations of messages that are
// used for requests to the Gerrit REST API, and unmarshalling code to convert
// from the JSON representations to protos defined in `gerritpb`.
//
// Each of these structs corresponds to an entity described at
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#json-entities
// and also to a message in `gerritpb`, and each has a unmarshalling method
// called ToProto.
// timestamp implements customized JSON marshal/unmarshal behavior that matches
// the timestamp format used in Gerrit.
// accountInfo mirrors the Gerrit REST AccountInfo JSON entity.
// It is unmarshalled from Gerrit responses and converted to a
// gerritpb.AccountInfo via ToProto.
type accountInfo struct {
	Name            string   `json:"name,omitempty"`
	Email           string   `json:"email,omitempty"`
	SecondaryEmails []string `json:"secondary_emails,omitempty"`
	Username        string   `json:"username,omitempty"`
	// Gerrit's numeric account identifier ("_account_id" on the wire).
	AccountID int64 `json:"_account_id,omitempty"`
}
// ToProto converts this accountInfo to a gerritpb.AccountInfo.
//
// A nil receiver yields a nil proto.
func (a *accountInfo) ToProto() *gerritpb.AccountInfo {
	if a == nil {
		return nil
	}
	out := &gerritpb.AccountInfo{}
	out.Name = a.Name
	out.Email = a.Email
	out.SecondaryEmails = a.SecondaryEmails
	out.Username = a.Username
	out.AccountId = a.AccountID
	return out
}
// ownerInfo wraps the account that owns a change.
type ownerInfo struct {
	Account accountInfo `json:"account,omitempty"`
}
// changeInfo represents JSON for a gerritpb.ChangeInfo on the wire.
// See https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-info
type changeInfo struct {
	Number   int64        `json:"_number"`
	Owner    *accountInfo `json:"owner"`
	Project  string       `json:"project"`
	Branch   string       `json:"branch"`
	ChangeID string       `json:"change_id"`
	// Reviewers groups accounts by reviewer state
	// ("REVIEWER", "CC", "REMOVED").
	Reviewers map[string][]*accountInfo `json:"reviewers"`
	Hashtags  []string                  `json:"hashtags"`
	Subject   string                    `json:"subject"`
	// json.Unmarshal cannot convert enum string to value,
	// so this field is handled specially in ToProto.
	Status             string                         `json:"status"`
	CurrentRevision    string                         `json:"current_revision"`
	Revisions          map[string]*revisionInfo       `json:"revisions"`
	Labels             map[string]*labelInfo          `json:"labels"`
	Messages           []changeMessageInfo            `json:"messages"`
	Requirements       []requirement                  `json:"requirements"`
	SubmitRequirements []*submitRequirementResultInfo `json:"submit_requirements"`
	Created            Timestamp                      `json:"created"`
	Updated            Timestamp                      `json:"updated"`
	Submitted          Timestamp                      `json:"submitted"`
	Submittable        bool                           `json:"submittable,omitempty"`
	IsPrivate          bool                           `json:"is_private,omitempty"`
	MetaRevID          string                         `json:"meta_rev_id,omitempty"`
	RevertOf           int64                          `json:"revert_of,omitempty"`
	CherryPickOfChange int64                          `json:"cherry_pick_of_change,omitempty"`
	// MoreChanges may be set on the last change in a response to a query for
	// changes, but this is not a property of the change itself and is not
	// needed in gerritpb.ChangeInfo.
	MoreChanges bool `json:"_more_changes"`
}
// ToProto converts this changeInfo to a gerritpb.ChangeInfo.
//
// The Status string is mapped through gerritpb.ChangeStatus_value; unknown
// statuses map to the zero enum value. Returns an error if any nested
// requirement or submit requirement fails to convert.
func (ci *changeInfo) ToProto() (*gerritpb.ChangeInfo, error) {
	ret := &gerritpb.ChangeInfo{
		Number:             ci.Number,
		Owner:              ci.Owner.ToProto(),
		Project:            ci.Project,
		Ref:                branchToRef(ci.Branch),
		Subject:            ci.Subject,
		Status:             gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[ci.Status]),
		Hashtags:           ci.Hashtags,
		CurrentRevision:    ci.CurrentRevision,
		Submittable:        ci.Submittable,
		IsPrivate:          ci.IsPrivate,
		MetaRevId:          ci.MetaRevID,
		Created:            timestamppb.New(ci.Created.Time),
		Updated:            timestamppb.New(ci.Updated.Time),
		Submitted:          timestamppb.New(ci.Submitted.Time),
		RevertOf:           ci.RevertOf,
		CherryPickOfChange: ci.CherryPickOfChange,
		Branch:             ci.Branch,
	}
	if ci.Revisions != nil {
		ret.Revisions = make(map[string]*gerritpb.RevisionInfo, len(ci.Revisions))
		for rev, info := range ci.Revisions {
			ret.Revisions[rev] = info.ToProto()
		}
	}
	if ci.Labels != nil {
		ret.Labels = make(map[string]*gerritpb.LabelInfo, len(ci.Labels))
		for label, info := range ci.Labels {
			ret.Labels[label] = info.ToProto()
		}
	}
	if ci.Messages != nil {
		ret.Messages = make([]*gerritpb.ChangeMessageInfo, len(ci.Messages))
		for i, msg := range ci.Messages {
			ret.Messages[i] = msg.ToProto()
		}
	}
	var err error
	if ci.Requirements != nil {
		ret.Requirements = make([]*gerritpb.Requirement, len(ci.Requirements))
		for i, r := range ci.Requirements {
			if ret.Requirements[i], err = r.ToProto(); err != nil {
				return nil, err
			}
		}
	}
	if ci.SubmitRequirements != nil {
		ret.SubmitRequirements = make([]*gerritpb.SubmitRequirementResultInfo,
			len(ci.SubmitRequirements))
		for i, r := range ci.SubmitRequirements {
			if ret.SubmitRequirements[i], err = r.ToProto(); err != nil {
				return nil, err
			}
		}
	}
	if ci.Reviewers != nil {
		ret.Reviewers = &gerritpb.ReviewerStatusMap{}
		if accs, exist := ci.Reviewers["REVIEWER"]; exist {
			ret.Reviewers.Reviewers = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				ret.Reviewers.Reviewers[i] = acc.ToProto()
			}
		}
		if accs, exist := ci.Reviewers["CC"]; exist {
			ret.Reviewers.Ccs = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				ret.Reviewers.Ccs[i] = acc.ToProto()
			}
		}
		if accs, exist := ci.Reviewers["REMOVED"]; exist {
			ret.Reviewers.Removed = make([]*gerritpb.AccountInfo, len(accs))
			for i, acc := range accs {
				// BUG FIX: this loop previously wrote into Ccs, leaving
				// Removed empty and potentially panicking (or clobbering
				// CC entries) when the slice lengths differed.
				ret.Reviewers.Removed[i] = acc.ToProto()
			}
		}
	}
	return ret, nil
}
// labelInfo mirrors the Gerrit REST LabelInfo JSON entity; converted to a
// gerritpb.LabelInfo via ToProto.
type labelInfo struct {
	Optional     bool            `json:"optional"`
	Approved     *accountInfo    `json:"approved"`
	Rejected     *accountInfo    `json:"rejected"`
	Recommended  *accountInfo    `json:"recommended"`
	Disliked     *accountInfo    `json:"disliked"`
	Blocking     bool            `json:"blocking"`
	Value        int32           `json:"value"`
	DefaultValue int32           `json:"default_value"`
	All          []*approvalInfo `json:"all"`
	// Values maps label scores (string keys, possibly space-padded) to their
	// descriptions; keys are parsed into int32 in ToProto.
	Values map[string]string `json:"values"`
}
// ToProto converts this labelInfo to a gerritpb.LabelInfo.
//
// Value keys that do not parse as integers are skipped silently, for
// consistency with other parts of the code.
func (li *labelInfo) ToProto() *gerritpb.LabelInfo {
	out := &gerritpb.LabelInfo{
		Optional:     li.Optional,
		Approved:     li.Approved.ToProto(),
		Rejected:     li.Rejected.ToProto(),
		Recommended:  li.Recommended.ToProto(),
		Disliked:     li.Disliked.ToProto(),
		Blocking:     li.Blocking,
		Value:        li.Value,
		DefaultValue: li.DefaultValue,
	}
	if n := len(li.All); n > 0 {
		out.All = make([]*gerritpb.ApprovalInfo, n)
		for i := range li.All {
			out.All[i] = li.All[i].ToProto()
		}
	}
	if li.Values != nil {
		out.Values = make(map[int32]string, len(li.Values))
		for raw, desc := range li.Values {
			// Error is silently ignored for consistency with other parts of code.
			if parsed, err := strconv.ParseInt(strings.TrimSpace(raw), 10, 32); err == nil {
				out.Values[int32(parsed)] = desc
			}
		}
	}
	return out
}
// approvalInfo mirrors the Gerrit REST ApprovalInfo JSON entity.
// It embeds accountInfo because on the wire the voter's account fields are
// flattened into this object.
type approvalInfo struct {
	accountInfo
	Value                int32                     `json:"value"`
	PermittedVotingRange *gerritpb.VotingRangeInfo `json:"permitted_voting_range"`
	Date                 Timestamp                 `json:"date"`
	Tag                  string                    `json:"tag"`
	PostSubmit           bool                      `json:"post_submit"`
}
// ToProto converts this approvalInfo to a gerritpb.ApprovalInfo; the
// embedded account becomes the User field.
func (ai *approvalInfo) ToProto() *gerritpb.ApprovalInfo {
	out := &gerritpb.ApprovalInfo{}
	out.User = ai.accountInfo.ToProto()
	out.Value = ai.Value
	out.PermittedVotingRange = ai.PermittedVotingRange
	out.Date = timestamppb.New(ai.Date.Time)
	out.Tag = ai.Tag
	out.PostSubmit = ai.PostSubmit
	return out
}
// changeMessageInfo mirrors the Gerrit REST ChangeMessageInfo JSON entity.
type changeMessageInfo struct {
	ID         string       `json:"id"`
	Author     *accountInfo `json:"author"`
	RealAuthor *accountInfo `json:"real_author"`
	Date       Timestamp    `json:"date"`
	Message    string       `json:"message"`
	Tag        string       `json:"tag"`
}
// ToProto converts this changeMessageInfo to a gerritpb.ChangeMessageInfo.
//
// A nil receiver yields a nil proto.
func (cmi *changeMessageInfo) ToProto() *gerritpb.ChangeMessageInfo {
	if cmi == nil {
		return nil
	}
	out := &gerritpb.ChangeMessageInfo{
		Id:      cmi.ID,
		Message: cmi.Message,
		Tag:     cmi.Tag,
	}
	out.Author = cmi.Author.ToProto()
	out.RealAuthor = cmi.RealAuthor.ToProto()
	out.Date = timestamppb.New(cmi.Date.Time)
	return out
}
// requirement mirrors the Gerrit REST Requirement JSON entity.
// Status is an enum string; it is mapped to the proto enum in ToProto.
type requirement struct {
	Status       string `json:"status"`
	FallbackText string `json:"fallback_text"`
	Type         string `json:"type"`
}
// ToProto converts this requirement to a gerritpb.Requirement.
//
// Returns an error when the status string does not correspond to a known
// Requirement_Status enum value.
func (r *requirement) ToProto() (*gerritpb.Requirement, error) {
	enumName := "REQUIREMENT_STATUS_" + r.Status
	enumNum, ok := gerritpb.Requirement_Status_value[enumName]
	if !ok {
		return nil, errors.Reason("no Status enum value for %q", r.Status).Err()
	}
	out := &gerritpb.Requirement{}
	out.Status = gerritpb.Requirement_Status(enumNum)
	out.FallbackText = r.FallbackText
	out.Type = r.Type
	return out, nil
}
// fileInfo mirrors the Gerrit REST FileInfo JSON entity (per-file diff stats).
type fileInfo struct {
	LinesInserted int32 `json:"lines_inserted"`
	LinesDeleted  int32 `json:"lines_deleted"`
	SizeDelta     int64 `json:"size_delta"`
	Size          int64 `json:"size"`
}
// ToProto converts this fileInfo to a gerritpb.FileInfo.
func (fi *fileInfo) ToProto() *gerritpb.FileInfo {
	out := new(gerritpb.FileInfo)
	out.LinesInserted = fi.LinesInserted
	out.LinesDeleted = fi.LinesDeleted
	out.SizeDelta = fi.SizeDelta
	out.Size = fi.Size
	return out
}
// revisionInfo mirrors the Gerrit REST RevisionInfo JSON entity
// (one patchset of a change).
type revisionInfo struct {
	// Kind is an enum string (e.g. "REWORK"); mapped specially in ToProto.
	Kind        string               `json:"kind"`
	Number      int                  `json:"_number"`
	Uploader    *accountInfo         `json:"uploader"`
	Ref         string               `json:"ref"`
	Created     Timestamp            `json:"created"`
	Description string               `json:"description"`
	Files       map[string]*fileInfo `json:"files"`
	Commit      *commitInfo          `json:"commit"`
}
// ToProto converts this revisionInfo to a gerritpb.RevisionInfo.
//
// Unknown Kind strings leave Kind at its zero value.
func (ri *revisionInfo) ToProto() *gerritpb.RevisionInfo {
	out := &gerritpb.RevisionInfo{
		Number:      int32(ri.Number),
		Uploader:    ri.Uploader.ToProto(),
		Ref:         ri.Ref,
		Created:     timestamppb.New(ri.Created.Time),
		Description: ri.Description,
	}
	if kind, ok := gerritpb.RevisionInfo_Kind_value[ri.Kind]; ok {
		out.Kind = gerritpb.RevisionInfo_Kind(kind)
	}
	if ri.Files != nil {
		out.Files = make(map[string]*gerritpb.FileInfo, len(ri.Files))
		for path, fi := range ri.Files {
			out.Files[path] = fi.ToProto()
		}
	}
	if ri.Commit != nil {
		out.Commit = ri.Commit.ToProto()
	}
	return out
}
// gitPersonInfo mirrors the Gerrit REST GitPersonInfo JSON entity
// (the author/committer identity of a git commit).
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#git-person-info
type gitPersonInfo struct {
	Name  string `json:"name"`
	Email string `json:"email"`
}
// ToProto converts this gitPersonInfo to a gerritpb.GitPersonInfo.
func (g *gitPersonInfo) ToProto() *gerritpb.GitPersonInfo {
	out := new(gerritpb.GitPersonInfo)
	out.Name = g.Name
	out.Email = g.Email
	return out
}
// commitInfo mirrors the Gerrit REST CommitInfo JSON entity.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#commit-info
type commitInfo struct {
	Commit    string         `json:"commit"`
	Parents   []*commitInfo  `json:"parents"`
	Author    *gitPersonInfo `json:"author"`
	Committer *gitPersonInfo `json:"committer"`
	Subject   string         `json:"subject"`
	Message   string         `json:"message"`
}
// ToProto converts this commitInfo to a gerritpb.CommitInfo.
//
// Only the parent commit IDs are carried over into Parents.
func (c *commitInfo) ToProto() *gerritpb.CommitInfo {
	out := &gerritpb.CommitInfo{
		Id:      c.Commit,
		Message: c.Message,
		Author:  c.Author.ToProto(),
		Parents: make([]*gerritpb.CommitInfo_Parent, len(c.Parents)),
		// TODO(tandrii): support other fields once added.
	}
	for i := range c.Parents {
		out.Parents[i] = &gerritpb.CommitInfo_Parent{Id: c.Parents[i].Commit}
	}
	return out
}
// relatedChangeAndCommitInfo mirrors the Gerrit REST RelatedChangeAndCommitInfo
// JSON entity.
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#related-change-and-commit-info
type relatedChangeAndCommitInfo struct {
	Project         string     `json:"project"`
	ChangeID        string     `json:"change_id"`
	Commit          commitInfo `json:"commit"`
	Number          int64      `json:"_change_number"`
	Patchset        int64      `json:"_revision_number"`
	CurrentPatchset int64      `json:"_current_revision_number"`
	// json.Unmarshal cannot convert enum string to value,
	// so this field is handled specially in ToProto.
	Status string `json:"status"`
}
// ToProto converts this entity to a GetRelatedChangesResponse_ChangeAndCommit.
// Unknown status strings map to the zero ChangeStatus.
func (r *relatedChangeAndCommitInfo) ToProto() *gerritpb.GetRelatedChangesResponse_ChangeAndCommit {
	out := &gerritpb.GetRelatedChangesResponse_ChangeAndCommit{}
	out.Project = r.Project
	out.Number = r.Number
	out.Patchset = r.Patchset
	out.CurrentPatchset = r.CurrentPatchset
	out.Commit = r.Commit.ToProto()
	out.Status = gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[r.Status])
	return out
}
// mergeableInfo mirrors the Gerrit REST MergeableInfo JSON entity.
// SubmitType and Strategy are enum strings mapped in ToProto.
type mergeableInfo struct {
	SubmitType    string   `json:"submit_type"`
	Strategy      string   `json:"strategy"`
	Mergeable     bool     `json:"mergeable"`
	CommitMerged  bool     `json:"commit_merged"`
	ContentMerged bool     `json:"content_merged"`
	Conflicts     []string `json:"conflicts"`
	MergeableInto []string `json:"mergeable_into"`
}
// ToProto converts this mergeableInfo to a gerritpb.MergeableInfo.
//
// Returns an error when the strategy or submit type is not a recognized
// enum value.
func (mi *mergeableInfo) ToProto() (*gerritpb.MergeableInfo, error) {
	// Convert something like 'simple-two-way-in-core' to 'SIMPLE_TWO_WAY_IN_CORE'.
	strategyName := strings.ReplaceAll(strings.ToUpper(mi.Strategy), "-", "_")
	strategyNum, ok := gerritpb.MergeableStrategy_value[strategyName]
	if !ok {
		return nil, errors.Reason("no MergeableStrategy enum value for %q", strategyName).Err()
	}
	submitTypeNum, ok := gerritpb.MergeableInfo_SubmitType_value[mi.SubmitType]
	if !ok {
		return nil, errors.Reason("no SubmitType enum value for %q", mi.SubmitType).Err()
	}
	return &gerritpb.MergeableInfo{
		SubmitType:    gerritpb.MergeableInfo_SubmitType(submitTypeNum),
		Strategy:      gerritpb.MergeableStrategy(strategyNum),
		Mergeable:     mi.Mergeable,
		CommitMerged:  mi.CommitMerged,
		ContentMerged: mi.ContentMerged,
		Conflicts:     mi.Conflicts,
		MergeableInto: mi.MergeableInto,
	}, nil
}
// addReviewerRequest is the JSON request body for adding a reviewer to
// a change (Gerrit ReviewerInput entity).
type addReviewerRequest struct {
	Reviewer  string `json:"reviewer"`
	State     string `json:"state,omitempty"`
	Confirmed bool   `json:"confirmed,omitempty"`
	Notify    string `json:"notify,omitempty"`
}
type reviewerInfo struct {
Name string `json:"name,omitempty"`
Email string `json:"email,omitempty"`
SecondaryEmails []string `json:"secondary_emails,omitempty"`
Username string `json:"username,omitempty"`
Approvals map[string]string `json:"approvals,omitempty"`
AccountID int64 `json:"_account_id,omitempty"`
}
func (ri *reviewerInfo) ToProtoReviewerInfo() (*gerritpb.ReviewerInfo, error) {
approvals := make(map[string]int32, 0)
for label, score := range ri.Approvals {
score = strings.TrimLeft(score, " ")
scoreInt, err := strconv.ParseInt(score, 10, 32)
if err != nil {
return nil, errors.Annotate(err, "parsing approvals").Err()
}
approvals[label] = int32(scoreInt)
}
return &gerritpb.ReviewerInfo{
Account: &gerritpb.AccountInfo{
Name: ri.Name,
Email: ri.Email,
SecondaryEmails: ri.SecondaryEmails,
Username: ri.Username,
AccountId: ri.AccountID,
},
Approvals: approvals,
}, nil
}
type addReviewerResult struct {
Input string `json:"input"`
Reviewers []reviewerInfo `json:"reviewers,omitempty"`
Ccs []reviewerInfo `json:"ccs,omitempty"`
Error string `json:"error,omitempty"`
Confirm bool `json:"confirm,omitempty"`
}
func (rr *addReviewerResult) ToProto() (*gerritpb.AddReviewerResult, error) {
reviewers := make([]*gerritpb.ReviewerInfo, 0)
for _, r := range rr.Reviewers {
rInfo, err := r.ToProtoReviewerInfo()
if err != nil {
return nil, errors.Annotate(err, "converting reviewerInfo").Err()
}
reviewers = append(reviewers, rInfo)
}
ccs := make([]*gerritpb.ReviewerInfo, 0)
for _, r := range rr.Ccs {
rInfo, err := r.ToProtoReviewerInfo()
if err != nil {
return nil, errors.Annotate(err, "converting reviewerInfo").Err()
}
ccs = append(ccs, rInfo)
}
return &gerritpb.AddReviewerResult{
Input: rr.Input,
Reviewers: reviewers,
Ccs: ccs,
Error: rr.Error,
Confirm: rr.Confirm,
}, nil
}
func enumToString(v int32, m map[int32]string) string {
if v == 0 {
return ""
}
prefixLen := strings.LastIndex(m[0], "UNSPECIFIED")
return m[v][prefixLen:]
}
type reviewInput struct {
Message string `json:"message,omitempty"`
Labels map[string]int32 `json:"labels,omitempty"`
Tag string `json:"tag,omitempty"`
Notify string `json:"notify,omitempty"`
NotifyDetails notifyDetails `json:"notify_details,omitempty"`
OnBehalfOf int64 `json:"on_behalf_of,omitempty"`
Ready bool `json:"ready,omitempty"`
WorkInProgress bool `json:"work_in_progress,omitempty"`
AddToAttentionSet []*attentionSetInput `json:"add_to_attention_set,omitempty"`
RemoveFromAttentionSet []*attentionSetInput `json:"remove_from_attention_set,omitempty"`
IgnoreAutomaticAttentionSetRules bool `json:"ignore_automatic_attention_set_rules,omitempty"`
Reviewers []*reviewerInput `json:"reviewers,omitempty"`
}
type notifyInfo struct {
Accounts []int64 `json:"accounts,omitempty"`
}
type notifyDetails map[string]*notifyInfo
func toNotifyDetails(in *gerritpb.NotifyDetails) notifyDetails {
recipients := in.GetRecipients()
if len(recipients) == 0 {
return nil
}
res := make(map[string]*notifyInfo, len(recipients))
for _, recipient := range recipients {
if len(recipient.Info.GetAccounts()) == 0 {
continue
}
rt := recipient.RecipientType
if rt == gerritpb.NotifyDetails_RECIPIENT_TYPE_UNSPECIFIED {
// Must have been caught in validation.
panic(fmt.Errorf("must specify recipient type"))
}
rts := enumToString(int32(rt.Number()), gerritpb.NotifyDetails_RecipientType_name)
if ni, ok := res[rts]; !ok {
ni = ¬ifyInfo{
Accounts: make([]int64, len(recipient.Info.GetAccounts())),
}
for i, aid := range recipient.Info.GetAccounts() {
ni.Accounts[i] = aid
}
res[rts] = ni
} else {
ni.Accounts = append(ni.Accounts, recipient.Info.GetAccounts()...)
}
}
for _, ni := range res {
// Sort & dedup accounts in each notification bucket.
sort.Slice(ni.Accounts, func(i, j int) bool { return ni.Accounts[i] < ni.Accounts[j] })
n := 0
for i := 1; i < len(ni.Accounts); i++ {
if ni.Accounts[n] == ni.Accounts[i] {
continue
}
n++
ni.Accounts[n] = ni.Accounts[i]
}
ni.Accounts = ni.Accounts[:n+1]
}
return res
}
type attentionSetInput struct {
User string `json:"user"`
Reason string `json:"reason"`
Notify string `json:"string,omitempty"`
NotifyDetails notifyDetails `json:"notify_details,omitempty"`
}
func toAttentionSetInput(in *gerritpb.AttentionSetInput) *attentionSetInput {
return &attentionSetInput{
User: in.User,
Reason: in.Reason,
Notify: enumToString(int32(in.Notify.Number()), gerritpb.Notify_name),
NotifyDetails: toNotifyDetails(in.NotifyDetails),
}
}
func toAttentionSetInputs(in []*gerritpb.AttentionSetInput) []*attentionSetInput {
if len(in) == 0 {
return nil
}
out := make([]*attentionSetInput, len(in))
for i, x := range in {
out[i] = toAttentionSetInput(x)
}
return out
}
type reviewerInput struct {
Reviewer string `json:"reviewer"`
State string `json:"state,omitempty"`
}
func toReviewerInputs(in []*gerritpb.ReviewerInput) []*reviewerInput {
if len(in) == 0 {
return nil
}
out := make([]*reviewerInput, len(in))
for i, x := range in {
out[i] = &reviewerInput{
Reviewer: x.Reviewer,
State: enumToString(int32(x.State.Number()), gerritpb.ReviewerInput_State_name),
}
}
return out
}
type reviewResult struct {
Labels map[string]int32 `json:"labels,omitempty"`
Reviewers map[string]*addReviewerResult `json:"reviewers,omitempty"`
}
func (rr *reviewResult) ToProto() (*gerritpb.ReviewResult, error) {
result := &gerritpb.ReviewResult{
Labels: rr.Labels,
}
if len(rr.Reviewers) == 0 {
return result, nil
}
reviewers := make(map[string]*gerritpb.AddReviewerResult, len(rr.Reviewers))
for i, x := range rr.Reviewers |
result.Reviewers = reviewers
return result, nil
}
type projectInfo struct {
ID string `json:"id,omitempty"`
Parent string `json:"parent,omitempty"`
Description string `json:"description,omitempty"`
State string `json:"state,omitempty"`
Branches map[string]string `json:"branches,omitempty"`
WebLinks []*gerritpb.WebLinkInfo `json:"web_links,omitempty"`
}
func (pi *projectInfo) ToProto() (*gerritpb.ProjectInfo, error) {
stateEnumVal := "PROJECT_STATE_" + pi.State
stateEnumNum, found := gerritpb.ProjectInfo_State_value[stateEnumVal]
if !found {
return nil, errors.Reason("no State enum value for %q", pi.State).Err()
}
projectName, err := url.QueryUnescape(pi.ID)
if err != nil {
return nil, errors.Annotate(err, "decoding name").Err()
}
absoluteRefs := make(map[string]string, len(pi.Branches))
for ref, sha1 := range pi.Branches {
absoluteRefs[branchToRef(ref)] = sha1
}
return &gerritpb.ProjectInfo{
Name: projectName,
Parent: pi.Parent,
Description: pi.Description,
State: gerritpb.ProjectInfo_State(stateEnumNum),
Refs: absoluteRefs,
WebLinks: pi.WebLinks,
}, nil
}
// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#submit-info
type submitInfo struct {
Status string `json:"status"`
}
func (si *submitInfo) ToProto() *gerritpb.SubmitInfo {
return &gerritpb.SubmitInfo{
Status: gerritpb.ChangeStatus(gerritpb.ChangeStatus_value[si.Status]),
}
}
type submitRequirementResultInfo struct {
Name string `json:"name"`
Description string `json:"description"`
Status string `json:"status"`
IsLegacy bool `json:"is_legacy"`
ApplicabilityExpressionResult *submitRequirementExpressionInfo `json:"applicability_expression_result"`
SubmittabilityExpressionResult *submitRequirementExpressionInfo `json:"submittability_expression_result"`
OverrideExpressionResult *submitRequirementExpressionInfo `json:"override_expression_result"`
}
func (ri *submitRequirementResultInfo) ToProto() (*gerritpb.SubmitRequirementResultInfo, error) {
numVal, found := gerritpb.SubmitRequirementResultInfo_Status_value[ri.Status]
if !found {
return nil, errors.Reason("no Status enum value for %q", ri.Status).Err()
}
return &gerritpb.SubmitRequirementResultInfo{
Name: ri.Name,
Description: ri.Description,
Status: gerritpb.SubmitRequirementResultInfo_Status(numVal),
IsLegacy: ri.IsLegacy,
ApplicabilityExpressionResult: ri.ApplicabilityExpressionResult.ToProto(),
SubmittabilityExpressionResult: ri.SubmittabilityExpressionResult.ToProto(),
OverrideExpressionResult: ri.OverrideExpressionResult.ToProto(),
}, nil
}
type submitRequirementExpressionInfo struct {
Expression string `json:"expression"`
Fulfilled bool `json:"fulfilled"`
PassingAtoms []string `json:"passing_atoms"`
FailingAtoms []string `json:"failing_atoms"`
ErrorMessage string `json:"error_message"`
}
func (ei *submitRequirementExpressionInfo) ToProto() *gerritpb.SubmitRequirementExpressionInfo {
if ei == nil {
return nil
}
return &gerritpb.SubmitRequirementExpressionInfo{
Expression: ei.Expression,
Fulfilled: ei.Fulfilled,
PassingAtoms: ei.PassingAtoms,
FailingAtoms: ei.FailingAtoms,
ErrorMessage: ei.ErrorMessage,
}
}
type metaDiff struct {
Added *changeInfo `json:"added"`
Removed *changeInfo `json:"removed"`
OldChangeInfo *changeInfo `json:"old_change_info"`
NewChangeInfo *changeInfo `json:"new_change_info"`
}
func (md *metaDiff) ToProto() (*gerritpb.MetaDiff, error) {
var resp gerritpb.MetaDiff
var ci *gerritpb.ChangeInfo
var err error
if ci, err = md.Added.ToProto(); err != nil {
return nil, err
}
resp.Added = ci
if ci, err = md.Removed.ToProto(); err != nil {
return nil, err
}
resp.Removed = ci
if ci, err = md.OldChangeInfo.ToProto(); err != nil {
return nil, err
}
resp.OldChangeInfo = ci
if ci, err = md.NewChangeInfo.ToProto(); err != nil {
return nil, err
}
resp.NewChangeInfo = ci
return &resp, nil
}
| {
reviewerDetails, err := x.ToProto()
if err != nil {
return nil, err
}
reviewers[i] = reviewerDetails
} | conditional_block |
list_conflict_files_test.go | package conflicts
import (
"bytes"
"context"
"io"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
type conflictFile struct {
Header *gitalypb.ConflictFileHeader
Content []byte
}
func TestSuccessfulListConflictFilesRequest(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, false, nil)
ourCommitOid := "1a35b5a77cf6af7edf6703f88e82f6aff613666f"
theirCommitOid := "8309e68585b28d61eb85b7e2834849dda6bf1733"
conflictContent1 := `<<<<<<< encoding/codagé
Content is not important, file name is
=======
Content can be important, but here, file name is of utmost importance
>>>>>>> encoding/codagé
`
conflictContent2 := `<<<<<<< files/ruby/feature.rb
class Feature
def foo
puts 'bar'
end
=======
# This file was changed in feature branch
# We put different code here to make merge conflict
class Conflict
>>>>>>> files/ruby/feature.rb
end
`
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("encoding/codagé"),
TheirPath: []byte("encoding/codagé"),
},
Content: []byte(conflictContent1),
},
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/feature.rb"),
TheirPath: []byte("files/ruby/feature.rb"),
},
Content: []byte(conflictContent2),
},
}
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, len(expectedFiles))
for i := 0; i < len(expectedFiles); i++ {
testhelper.ProtoEqual(t, receivedFiles[i].Header, expectedFiles[i].Header)
require.Equal(t, expectedFiles[i].Content, receivedFiles[i].Content)
}
}
func TestSuccessfulListConflictFilesRequestWithAncestor(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "824be604a34828eb682305f0d963056cfac87b2d"
theirCommitOid := "1450cd639e0bc6721eb02800169e464f212cde06"
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/popen.rb"),
TheirPath: []byte("files/ruby/popen.rb"),
AncestorPath: []byte("files/ruby/popen.rb"),
},
},
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/regex.rb"),
TheirPath: []byte("files/ruby/regex.rb"),
AncestorPath: []byte("files/ruby/regex.rb"),
},
},
}
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, len(expectedFiles))
for i := 0; i < len(expectedFiles); i++ {
testhelper.ProtoEqual(t, receivedFiles[i].Header, expectedFiles[i].Header)
}
}
func TestListConflictFilesHugeDiff(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, repoPath, client := SetupConflictsService(ctx, t, false, nil)
our := buildCommit(t, ctx, cfg, repo, repoPath, map[string][]byte{
"a": bytes.Repeat([]byte("a\n"), 128*1024),
"b": bytes.Repeat([]byte("b\n"), 128*1024),
})
their := buildCommit(t, ctx, cfg, repo, repoPath, map[string][]byte{
"a": bytes.Repeat([]byte("x\n"), 128*1024),
"b": bytes.Repeat([]byte("y\n"), 128*1024),
})
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: our,
TheirCommitOid: their,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, 2)
testhelper.ProtoEqual(t, &gitalypb.ConflictFileHeader{
CommitOid: our,
OurMode: int32(0o100644),
OurPath: []byte("a"),
TheirPath: []byte("a"),
}, receivedFiles[0].Header)
testhelper.ProtoEqual(t, &gitalypb.ConflictFileHeader{
CommitOid: our,
OurMode: int32(0o100644),
OurPath: []byte("b"),
TheirPath: []byte("b"),
}, receivedFiles[1].Header)
}
func buildCommit(t *testing.T, ctx context.Context, cfg config.Cfg, repo *gitalypb.Repository, repoPath string, files map[string][]byte) string {
t.Helper()
for file, contents := range files {
filePath := filepath.Join(repoPath, file)
require.NoError(t, os.WriteFile(filePath, contents, 0o666))
gittest.Exec(t, cfg, "-C", repoPath, "add", filePath)
}
gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "message")
oid, err := localrepo.NewTestRepo(t, cfg, repo).ResolveRevision(ctx, git.Revision("HEAD"))
require.NoError(t, err)
gittest.Exec(t, cfg, "-C", repoPath, "reset", "--hard", "HEAD~")
return oid.String()
}
func TestListConflictFilesFailedPrecondition(t *testing.T) { | ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
testCases := []struct {
desc string
ourCommitOid string
theirCommitOid string
}{
{
desc: "conflict side missing",
ourCommitOid: "eb227b3e214624708c474bdab7bde7afc17cefcc",
theirCommitOid: "824be604a34828eb682305f0d963056cfac87b2d",
},
{
// These commits have a conflict on the 'VERSION' file in the test repo.
// The conflict is expected to raise an encoding error.
desc: "encoding error",
ourCommitOid: "bd493d44ae3c4dd84ce89cb75be78c4708cbd548",
theirCommitOid: "7df99c9ad5b8c9bfc5ae4fb7a91cc87adcce02ef",
},
{
desc: "submodule object lookup error",
ourCommitOid: "de78448b0b504f3f60093727bddfda1ceee42345",
theirCommitOid: "2f61d70f862c6a4f782ef7933e020a118282db29",
},
{
desc: "invalid commit id on 'our' side",
ourCommitOid: "abcdef0000000000000000000000000000000000",
theirCommitOid: "1a35b5a77cf6af7edf6703f88e82f6aff613666f",
},
{
desc: "invalid commit id on 'their' side",
ourCommitOid: "1a35b5a77cf6af7edf6703f88e82f6aff613666f",
theirCommitOid: "abcdef0000000000000000000000000000000000",
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: tc.ourCommitOid,
TheirCommitOid: tc.theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
if err == nil {
err = drainListConflictFilesResponse(c)
}
testhelper.RequireGrpcCode(t, err, codes.FailedPrecondition)
})
}
}
func TestListConflictFilesAllowTreeConflicts(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "eb227b3e214624708c474bdab7bde7afc17cefcc"
theirCommitOid := "824be604a34828eb682305f0d963056cfac87b2d"
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
AllowTreeConflicts: true,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
conflictContent := `<<<<<<< files/ruby/version_info.rb
module Gitlab
class VersionInfo
include Comparable
attr_reader :major, :minor, :patch
def self.parse(str)
if str && m = str.match(%r{(\d+)\.(\d+)\.(\d+)})
VersionInfo.new(m[1].to_i, m[2].to_i, m[3].to_i)
else
VersionInfo.new
end
end
def initialize(major = 0, minor = 0, patch = 0)
@major = major
@minor = minor
@patch = patch
end
def <=>(other)
return unless other.is_a? VersionInfo
return unless valid? && other.valid?
if other.major < @major
1
elsif @major < other.major
-1
elsif other.minor < @minor
1
elsif @minor < other.minor
-1
elsif other.patch < @patch
1
elsif @patch < other.patch
25
else
0
end
end
def to_s
if valid?
"%d.%d.%d" % [@major, @minor, @patch]
else
"Unknown"
end
end
def valid?
@major >= 0 && @minor >= 0 && @patch >= 0 && @major + @minor + @patch > 0
end
end
end
=======
>>>>>>>
`
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
AncestorPath: []byte("files/ruby/version_info.rb"),
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/version_info.rb"),
},
Content: []byte(conflictContent),
},
}
testhelper.ProtoEqual(t, expectedFiles, getConflictFiles(t, c))
}
func TestFailedListConflictFilesRequestDueToValidation(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "0b4bc9a49b562e85de7cc9e834518ea6828729b9"
theirCommitOid := "bb5206fee213d983da88c47f9cf4cc6caf9c66dc"
testCases := []struct {
desc string
request *gitalypb.ListConflictFilesRequest
code codes.Code
}{
{
desc: "empty repo",
request: &gitalypb.ListConflictFilesRequest{
Repository: nil,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
},
code: codes.InvalidArgument,
},
{
desc: "empty OurCommitId field",
request: &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: "",
TheirCommitOid: theirCommitOid,
},
code: codes.InvalidArgument,
},
{
desc: "empty TheirCommitId field",
request: &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: "",
},
code: codes.InvalidArgument,
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
c, _ := client.ListConflictFiles(ctx, testCase.request)
testhelper.RequireGrpcCode(t, drainListConflictFilesResponse(c), testCase.code)
})
}
}
func getConflictFiles(t *testing.T, c gitalypb.ConflictsService_ListConflictFilesClient) []*conflictFile {
t.Helper()
var files []*conflictFile
var currentFile *conflictFile
for {
r, err := c.Recv()
if err == io.EOF {
break
}
require.NoError(t, err)
for _, file := range r.GetFiles() {
// If there's a header this is the beginning of a new file
if header := file.GetHeader(); header != nil {
if currentFile != nil {
files = append(files, currentFile)
}
currentFile = &conflictFile{Header: header}
} else {
// Append to current file's content
currentFile.Content = append(currentFile.Content, file.GetContent()...)
}
}
}
// Append leftover file
files = append(files, currentFile)
return files
}
func drainListConflictFilesResponse(c gitalypb.ConflictsService_ListConflictFilesClient) error {
var err error
for err == nil {
_, err = c.Recv()
}
return err
} | random_line_split | |
list_conflict_files_test.go | package conflicts
import (
"bytes"
"context"
"io"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
type conflictFile struct {
Header *gitalypb.ConflictFileHeader
Content []byte
}
func | (t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, false, nil)
ourCommitOid := "1a35b5a77cf6af7edf6703f88e82f6aff613666f"
theirCommitOid := "8309e68585b28d61eb85b7e2834849dda6bf1733"
conflictContent1 := `<<<<<<< encoding/codagé
Content is not important, file name is
=======
Content can be important, but here, file name is of utmost importance
>>>>>>> encoding/codagé
`
conflictContent2 := `<<<<<<< files/ruby/feature.rb
class Feature
def foo
puts 'bar'
end
=======
# This file was changed in feature branch
# We put different code here to make merge conflict
class Conflict
>>>>>>> files/ruby/feature.rb
end
`
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("encoding/codagé"),
TheirPath: []byte("encoding/codagé"),
},
Content: []byte(conflictContent1),
},
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/feature.rb"),
TheirPath: []byte("files/ruby/feature.rb"),
},
Content: []byte(conflictContent2),
},
}
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, len(expectedFiles))
for i := 0; i < len(expectedFiles); i++ {
testhelper.ProtoEqual(t, receivedFiles[i].Header, expectedFiles[i].Header)
require.Equal(t, expectedFiles[i].Content, receivedFiles[i].Content)
}
}
func TestSuccessfulListConflictFilesRequestWithAncestor(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "824be604a34828eb682305f0d963056cfac87b2d"
theirCommitOid := "1450cd639e0bc6721eb02800169e464f212cde06"
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/popen.rb"),
TheirPath: []byte("files/ruby/popen.rb"),
AncestorPath: []byte("files/ruby/popen.rb"),
},
},
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/regex.rb"),
TheirPath: []byte("files/ruby/regex.rb"),
AncestorPath: []byte("files/ruby/regex.rb"),
},
},
}
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, len(expectedFiles))
for i := 0; i < len(expectedFiles); i++ {
testhelper.ProtoEqual(t, receivedFiles[i].Header, expectedFiles[i].Header)
}
}
func TestListConflictFilesHugeDiff(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, repoPath, client := SetupConflictsService(ctx, t, false, nil)
our := buildCommit(t, ctx, cfg, repo, repoPath, map[string][]byte{
"a": bytes.Repeat([]byte("a\n"), 128*1024),
"b": bytes.Repeat([]byte("b\n"), 128*1024),
})
their := buildCommit(t, ctx, cfg, repo, repoPath, map[string][]byte{
"a": bytes.Repeat([]byte("x\n"), 128*1024),
"b": bytes.Repeat([]byte("y\n"), 128*1024),
})
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: our,
TheirCommitOid: their,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, 2)
testhelper.ProtoEqual(t, &gitalypb.ConflictFileHeader{
CommitOid: our,
OurMode: int32(0o100644),
OurPath: []byte("a"),
TheirPath: []byte("a"),
}, receivedFiles[0].Header)
testhelper.ProtoEqual(t, &gitalypb.ConflictFileHeader{
CommitOid: our,
OurMode: int32(0o100644),
OurPath: []byte("b"),
TheirPath: []byte("b"),
}, receivedFiles[1].Header)
}
func buildCommit(t *testing.T, ctx context.Context, cfg config.Cfg, repo *gitalypb.Repository, repoPath string, files map[string][]byte) string {
t.Helper()
for file, contents := range files {
filePath := filepath.Join(repoPath, file)
require.NoError(t, os.WriteFile(filePath, contents, 0o666))
gittest.Exec(t, cfg, "-C", repoPath, "add", filePath)
}
gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "message")
oid, err := localrepo.NewTestRepo(t, cfg, repo).ResolveRevision(ctx, git.Revision("HEAD"))
require.NoError(t, err)
gittest.Exec(t, cfg, "-C", repoPath, "reset", "--hard", "HEAD~")
return oid.String()
}
func TestListConflictFilesFailedPrecondition(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
testCases := []struct {
desc string
ourCommitOid string
theirCommitOid string
}{
{
desc: "conflict side missing",
ourCommitOid: "eb227b3e214624708c474bdab7bde7afc17cefcc",
theirCommitOid: "824be604a34828eb682305f0d963056cfac87b2d",
},
{
// These commits have a conflict on the 'VERSION' file in the test repo.
// The conflict is expected to raise an encoding error.
desc: "encoding error",
ourCommitOid: "bd493d44ae3c4dd84ce89cb75be78c4708cbd548",
theirCommitOid: "7df99c9ad5b8c9bfc5ae4fb7a91cc87adcce02ef",
},
{
desc: "submodule object lookup error",
ourCommitOid: "de78448b0b504f3f60093727bddfda1ceee42345",
theirCommitOid: "2f61d70f862c6a4f782ef7933e020a118282db29",
},
{
desc: "invalid commit id on 'our' side",
ourCommitOid: "abcdef0000000000000000000000000000000000",
theirCommitOid: "1a35b5a77cf6af7edf6703f88e82f6aff613666f",
},
{
desc: "invalid commit id on 'their' side",
ourCommitOid: "1a35b5a77cf6af7edf6703f88e82f6aff613666f",
theirCommitOid: "abcdef0000000000000000000000000000000000",
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: tc.ourCommitOid,
TheirCommitOid: tc.theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
if err == nil {
err = drainListConflictFilesResponse(c)
}
testhelper.RequireGrpcCode(t, err, codes.FailedPrecondition)
})
}
}
func TestListConflictFilesAllowTreeConflicts(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "eb227b3e214624708c474bdab7bde7afc17cefcc"
theirCommitOid := "824be604a34828eb682305f0d963056cfac87b2d"
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
AllowTreeConflicts: true,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
conflictContent := `<<<<<<< files/ruby/version_info.rb
module Gitlab
class VersionInfo
include Comparable
attr_reader :major, :minor, :patch
def self.parse(str)
if str && m = str.match(%r{(\d+)\.(\d+)\.(\d+)})
VersionInfo.new(m[1].to_i, m[2].to_i, m[3].to_i)
else
VersionInfo.new
end
end
def initialize(major = 0, minor = 0, patch = 0)
@major = major
@minor = minor
@patch = patch
end
def <=>(other)
return unless other.is_a? VersionInfo
return unless valid? && other.valid?
if other.major < @major
1
elsif @major < other.major
-1
elsif other.minor < @minor
1
elsif @minor < other.minor
-1
elsif other.patch < @patch
1
elsif @patch < other.patch
25
else
0
end
end
def to_s
if valid?
"%d.%d.%d" % [@major, @minor, @patch]
else
"Unknown"
end
end
def valid?
@major >= 0 && @minor >= 0 && @patch >= 0 && @major + @minor + @patch > 0
end
end
end
=======
>>>>>>>
`
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
AncestorPath: []byte("files/ruby/version_info.rb"),
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/version_info.rb"),
},
Content: []byte(conflictContent),
},
}
testhelper.ProtoEqual(t, expectedFiles, getConflictFiles(t, c))
}
func TestFailedListConflictFilesRequestDueToValidation(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "0b4bc9a49b562e85de7cc9e834518ea6828729b9"
theirCommitOid := "bb5206fee213d983da88c47f9cf4cc6caf9c66dc"
testCases := []struct {
desc string
request *gitalypb.ListConflictFilesRequest
code codes.Code
}{
{
desc: "empty repo",
request: &gitalypb.ListConflictFilesRequest{
Repository: nil,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
},
code: codes.InvalidArgument,
},
{
desc: "empty OurCommitId field",
request: &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: "",
TheirCommitOid: theirCommitOid,
},
code: codes.InvalidArgument,
},
{
desc: "empty TheirCommitId field",
request: &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: "",
},
code: codes.InvalidArgument,
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
c, _ := client.ListConflictFiles(ctx, testCase.request)
testhelper.RequireGrpcCode(t, drainListConflictFilesResponse(c), testCase.code)
})
}
}
func getConflictFiles(t *testing.T, c gitalypb.ConflictsService_ListConflictFilesClient) []*conflictFile {
t.Helper()
var files []*conflictFile
var currentFile *conflictFile
for {
r, err := c.Recv()
if err == io.EOF {
break
}
require.NoError(t, err)
for _, file := range r.GetFiles() {
// If there's a header this is the beginning of a new file
if header := file.GetHeader(); header != nil {
if currentFile != nil {
files = append(files, currentFile)
}
currentFile = &conflictFile{Header: header}
} else {
// Append to current file's content
currentFile.Content = append(currentFile.Content, file.GetContent()...)
}
}
}
// Append leftover file
files = append(files, currentFile)
return files
}
func drainListConflictFilesResponse(c gitalypb.ConflictsService_ListConflictFilesClient) error {
var err error
for err == nil {
_, err = c.Recv()
}
return err
}
| TestSuccessfulListConflictFilesRequest | identifier_name |
list_conflict_files_test.go | package conflicts
import (
"bytes"
"context"
"io"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
type conflictFile struct {
Header *gitalypb.ConflictFileHeader
Content []byte
}
func TestSuccessfulListConflictFilesRequest(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, false, nil)
ourCommitOid := "1a35b5a77cf6af7edf6703f88e82f6aff613666f"
theirCommitOid := "8309e68585b28d61eb85b7e2834849dda6bf1733"
conflictContent1 := `<<<<<<< encoding/codagé
Content is not important, file name is
=======
Content can be important, but here, file name is of utmost importance
>>>>>>> encoding/codagé
`
conflictContent2 := `<<<<<<< files/ruby/feature.rb
class Feature
def foo
puts 'bar'
end
=======
# This file was changed in feature branch
# We put different code here to make merge conflict
class Conflict
>>>>>>> files/ruby/feature.rb
end
`
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("encoding/codagé"),
TheirPath: []byte("encoding/codagé"),
},
Content: []byte(conflictContent1),
},
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/feature.rb"),
TheirPath: []byte("files/ruby/feature.rb"),
},
Content: []byte(conflictContent2),
},
}
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, len(expectedFiles))
for i := 0; i < len(expectedFiles); i++ {
testhelper.ProtoEqual(t, receivedFiles[i].Header, expectedFiles[i].Header)
require.Equal(t, expectedFiles[i].Content, receivedFiles[i].Content)
}
}
func TestSuccessfulListConflictFilesRequestWithAncestor(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "824be604a34828eb682305f0d963056cfac87b2d"
theirCommitOid := "1450cd639e0bc6721eb02800169e464f212cde06"
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/popen.rb"),
TheirPath: []byte("files/ruby/popen.rb"),
AncestorPath: []byte("files/ruby/popen.rb"),
},
},
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/regex.rb"),
TheirPath: []byte("files/ruby/regex.rb"),
AncestorPath: []byte("files/ruby/regex.rb"),
},
},
}
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, len(expectedFiles))
for i := 0; i < len(expectedFiles); i++ {
testhelper.ProtoEqual(t, receivedFiles[i].Header, expectedFiles[i].Header)
}
}
func TestListConflictFilesHugeDiff(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, repoPath, client := SetupConflictsService(ctx, t, false, nil)
our := buildCommit(t, ctx, cfg, repo, repoPath, map[string][]byte{
"a": bytes.Repeat([]byte("a\n"), 128*1024),
"b": bytes.Repeat([]byte("b\n"), 128*1024),
})
their := buildCommit(t, ctx, cfg, repo, repoPath, map[string][]byte{
"a": bytes.Repeat([]byte("x\n"), 128*1024),
"b": bytes.Repeat([]byte("y\n"), 128*1024),
})
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: our,
TheirCommitOid: their,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, 2)
testhelper.ProtoEqual(t, &gitalypb.ConflictFileHeader{
CommitOid: our,
OurMode: int32(0o100644),
OurPath: []byte("a"),
TheirPath: []byte("a"),
}, receivedFiles[0].Header)
testhelper.ProtoEqual(t, &gitalypb.ConflictFileHeader{
CommitOid: our,
OurMode: int32(0o100644),
OurPath: []byte("b"),
TheirPath: []byte("b"),
}, receivedFiles[1].Header)
}
func buildCommit(t *testing.T, ctx context.Context, cfg config.Cfg, repo *gitalypb.Repository, repoPath string, files map[string][]byte) string {
t.Helper()
for file, contents := range files {
filePath := filepath.Join(repoPath, file)
require.NoError(t, os.WriteFile(filePath, contents, 0o666))
gittest.Exec(t, cfg, "-C", repoPath, "add", filePath)
}
gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "message")
oid, err := localrepo.NewTestRepo(t, cfg, repo).ResolveRevision(ctx, git.Revision("HEAD"))
require.NoError(t, err)
gittest.Exec(t, cfg, "-C", repoPath, "reset", "--hard", "HEAD~")
return oid.String()
}
func TestListConflictFilesFailedPrecondition(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
testCases := []struct {
desc string
ourCommitOid string
theirCommitOid string
}{
{
desc: "conflict side missing",
ourCommitOid: "eb227b3e214624708c474bdab7bde7afc17cefcc",
theirCommitOid: "824be604a34828eb682305f0d963056cfac87b2d",
},
{
// These commits have a conflict on the 'VERSION' file in the test repo.
// The conflict is expected to raise an encoding error.
desc: "encoding error",
ourCommitOid: "bd493d44ae3c4dd84ce89cb75be78c4708cbd548",
theirCommitOid: "7df99c9ad5b8c9bfc5ae4fb7a91cc87adcce02ef",
},
{
desc: "submodule object lookup error",
ourCommitOid: "de78448b0b504f3f60093727bddfda1ceee42345",
theirCommitOid: "2f61d70f862c6a4f782ef7933e020a118282db29",
},
{
desc: "invalid commit id on 'our' side",
ourCommitOid: "abcdef0000000000000000000000000000000000",
theirCommitOid: "1a35b5a77cf6af7edf6703f88e82f6aff613666f",
},
{
desc: "invalid commit id on 'their' side",
ourCommitOid: "1a35b5a77cf6af7edf6703f88e82f6aff613666f",
theirCommitOid: "abcdef0000000000000000000000000000000000",
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: tc.ourCommitOid,
TheirCommitOid: tc.theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
if err == nil {
err = drainListConflictFilesResponse(c)
}
testhelper.RequireGrpcCode(t, err, codes.FailedPrecondition)
})
}
}
func TestListConflictFilesAllowTreeConflicts(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "eb227b3e214624708c474bdab7bde7afc17cefcc"
theirCommitOid := "824be604a34828eb682305f0d963056cfac87b2d"
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
AllowTreeConflicts: true,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
conflictContent := `<<<<<<< files/ruby/version_info.rb
module Gitlab
class VersionInfo
include Comparable
attr_reader :major, :minor, :patch
def self.parse(str)
if str && m = str.match(%r{(\d+)\.(\d+)\.(\d+)})
VersionInfo.new(m[1].to_i, m[2].to_i, m[3].to_i)
else
VersionInfo.new
end
end
def initialize(major = 0, minor = 0, patch = 0)
@major = major
@minor = minor
@patch = patch
end
def <=>(other)
return unless other.is_a? VersionInfo
return unless valid? && other.valid?
if other.major < @major
1
elsif @major < other.major
-1
elsif other.minor < @minor
1
elsif @minor < other.minor
-1
elsif other.patch < @patch
1
elsif @patch < other.patch
25
else
0
end
end
def to_s
if valid?
"%d.%d.%d" % [@major, @minor, @patch]
else
"Unknown"
end
end
def valid?
@major >= 0 && @minor >= 0 && @patch >= 0 && @major + @minor + @patch > 0
end
end
end
=======
>>>>>>>
`
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
AncestorPath: []byte("files/ruby/version_info.rb"),
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/version_info.rb"),
},
Content: []byte(conflictContent),
},
}
testhelper.ProtoEqual(t, expectedFiles, getConflictFiles(t, c))
}
func TestFailedListConflictFilesRequestDueToValidation(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "0b4bc9a49b562e85de7cc9e834518ea6828729b9"
theirCommitOid := "bb5206fee213d983da88c47f9cf4cc6caf9c66dc"
testCases := []struct {
desc string
request *gitalypb.ListConflictFilesRequest
code codes.Code
}{
{
desc: "empty repo",
request: &gitalypb.ListConflictFilesRequest{
Repository: nil,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
},
code: codes.InvalidArgument,
},
{
desc: "empty OurCommitId field",
request: &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: "",
TheirCommitOid: theirCommitOid,
},
code: codes.InvalidArgument,
},
{
desc: "empty TheirCommitId field",
request: &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: "",
},
code: codes.InvalidArgument,
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
c, _ := client.ListConflictFiles(ctx, testCase.request)
testhelper.RequireGrpcCode(t, drainListConflictFilesResponse(c), testCase.code)
})
}
}
func getConflictFiles(t *testing.T, c gitalypb.ConflictsService_ListConflictFilesClient) []*conflictFile {
t.Helper()
var files []*conflictFile
var currentFile *conflictFile
for {
| / Append leftover file
files = append(files, currentFile)
return files
}
func drainListConflictFilesResponse(c gitalypb.ConflictsService_ListConflictFilesClient) error {
var err error
for err == nil {
_, err = c.Recv()
}
return err
}
| r, err := c.Recv()
if err == io.EOF {
break
}
require.NoError(t, err)
for _, file := range r.GetFiles() {
// If there's a header this is the beginning of a new file
if header := file.GetHeader(); header != nil {
if currentFile != nil {
files = append(files, currentFile)
}
currentFile = &conflictFile{Header: header}
} else {
// Append to current file's content
currentFile.Content = append(currentFile.Content, file.GetContent()...)
}
}
}
/ | conditional_block |
list_conflict_files_test.go | package conflicts
import (
"bytes"
"context"
"io"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/localrepo"
"gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/grpc/codes"
)
type conflictFile struct {
Header *gitalypb.ConflictFileHeader
Content []byte
}
func TestSuccessfulListConflictFilesRequest(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, false, nil)
ourCommitOid := "1a35b5a77cf6af7edf6703f88e82f6aff613666f"
theirCommitOid := "8309e68585b28d61eb85b7e2834849dda6bf1733"
conflictContent1 := `<<<<<<< encoding/codagé
Content is not important, file name is
=======
Content can be important, but here, file name is of utmost importance
>>>>>>> encoding/codagé
`
conflictContent2 := `<<<<<<< files/ruby/feature.rb
class Feature
def foo
puts 'bar'
end
=======
# This file was changed in feature branch
# We put different code here to make merge conflict
class Conflict
>>>>>>> files/ruby/feature.rb
end
`
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("encoding/codagé"),
TheirPath: []byte("encoding/codagé"),
},
Content: []byte(conflictContent1),
},
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/feature.rb"),
TheirPath: []byte("files/ruby/feature.rb"),
},
Content: []byte(conflictContent2),
},
}
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, len(expectedFiles))
for i := 0; i < len(expectedFiles); i++ {
testhelper.ProtoEqual(t, receivedFiles[i].Header, expectedFiles[i].Header)
require.Equal(t, expectedFiles[i].Content, receivedFiles[i].Content)
}
}
func TestSuccessfulListConflictFilesRequestWithAncestor(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "824be604a34828eb682305f0d963056cfac87b2d"
theirCommitOid := "1450cd639e0bc6721eb02800169e464f212cde06"
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/popen.rb"),
TheirPath: []byte("files/ruby/popen.rb"),
AncestorPath: []byte("files/ruby/popen.rb"),
},
},
{
Header: &gitalypb.ConflictFileHeader{
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/regex.rb"),
TheirPath: []byte("files/ruby/regex.rb"),
AncestorPath: []byte("files/ruby/regex.rb"),
},
},
}
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, len(expectedFiles))
for i := 0; i < len(expectedFiles); i++ {
testhelper.ProtoEqual(t, receivedFiles[i].Header, expectedFiles[i].Header)
}
}
func TestListConflictFilesHugeDiff(t *testing.T) {
ctx := testhelper.Context(t)
cfg, repo, repoPath, client := SetupConflictsService(ctx, t, false, nil)
our := buildCommit(t, ctx, cfg, repo, repoPath, map[string][]byte{
"a": bytes.Repeat([]byte("a\n"), 128*1024),
"b": bytes.Repeat([]byte("b\n"), 128*1024),
})
their := buildCommit(t, ctx, cfg, repo, repoPath, map[string][]byte{
"a": bytes.Repeat([]byte("x\n"), 128*1024),
"b": bytes.Repeat([]byte("y\n"), 128*1024),
})
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: our,
TheirCommitOid: their,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
receivedFiles := getConflictFiles(t, c)
require.Len(t, receivedFiles, 2)
testhelper.ProtoEqual(t, &gitalypb.ConflictFileHeader{
CommitOid: our,
OurMode: int32(0o100644),
OurPath: []byte("a"),
TheirPath: []byte("a"),
}, receivedFiles[0].Header)
testhelper.ProtoEqual(t, &gitalypb.ConflictFileHeader{
CommitOid: our,
OurMode: int32(0o100644),
OurPath: []byte("b"),
TheirPath: []byte("b"),
}, receivedFiles[1].Header)
}
func buildCommit(t *testing.T, ctx context.Context, cfg config.Cfg, repo *gitalypb.Repository, repoPath string, files map[string][]byte) string {
t.Helper()
for file, contents := range files {
filePath := filepath.Join(repoPath, file)
require.NoError(t, os.WriteFile(filePath, contents, 0o666))
gittest.Exec(t, cfg, "-C", repoPath, "add", filePath)
}
gittest.Exec(t, cfg, "-C", repoPath, "commit", "-m", "message")
oid, err := localrepo.NewTestRepo(t, cfg, repo).ResolveRevision(ctx, git.Revision("HEAD"))
require.NoError(t, err)
gittest.Exec(t, cfg, "-C", repoPath, "reset", "--hard", "HEAD~")
return oid.String()
}
func TestListConflictFilesFailedPrecondition(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
testCases := []struct {
desc string
ourCommitOid string
theirCommitOid string
}{
{
desc: "conflict side missing",
ourCommitOid: "eb227b3e214624708c474bdab7bde7afc17cefcc",
theirCommitOid: "824be604a34828eb682305f0d963056cfac87b2d",
},
{
// These commits have a conflict on the 'VERSION' file in the test repo.
// The conflict is expected to raise an encoding error.
desc: "encoding error",
ourCommitOid: "bd493d44ae3c4dd84ce89cb75be78c4708cbd548",
theirCommitOid: "7df99c9ad5b8c9bfc5ae4fb7a91cc87adcce02ef",
},
{
desc: "submodule object lookup error",
ourCommitOid: "de78448b0b504f3f60093727bddfda1ceee42345",
theirCommitOid: "2f61d70f862c6a4f782ef7933e020a118282db29",
},
{
desc: "invalid commit id on 'our' side",
ourCommitOid: "abcdef0000000000000000000000000000000000",
theirCommitOid: "1a35b5a77cf6af7edf6703f88e82f6aff613666f",
},
{
desc: "invalid commit id on 'their' side",
ourCommitOid: "1a35b5a77cf6af7edf6703f88e82f6aff613666f",
theirCommitOid: "abcdef0000000000000000000000000000000000",
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: tc.ourCommitOid,
TheirCommitOid: tc.theirCommitOid,
}
c, err := client.ListConflictFiles(ctx, request)
if err == nil {
err = drainListConflictFilesResponse(c)
}
testhelper.RequireGrpcCode(t, err, codes.FailedPrecondition)
})
}
}
func TestListConflictFilesAllowTreeConflicts(t *testing.T) {
ctx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "eb227b3e214624708c474bdab7bde7afc17cefcc"
theirCommitOid := "824be604a34828eb682305f0d963056cfac87b2d"
request := &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
AllowTreeConflicts: true,
}
c, err := client.ListConflictFiles(ctx, request)
require.NoError(t, err)
conflictContent := `<<<<<<< files/ruby/version_info.rb
module Gitlab
class VersionInfo
include Comparable
attr_reader :major, :minor, :patch
def self.parse(str)
if str && m = str.match(%r{(\d+)\.(\d+)\.(\d+)})
VersionInfo.new(m[1].to_i, m[2].to_i, m[3].to_i)
else
VersionInfo.new
end
end
def initialize(major = 0, minor = 0, patch = 0)
@major = major
@minor = minor
@patch = patch
end
def <=>(other)
return unless other.is_a? VersionInfo
return unless valid? && other.valid?
if other.major < @major
1
elsif @major < other.major
-1
elsif other.minor < @minor
1
elsif @minor < other.minor
-1
elsif other.patch < @patch
1
elsif @patch < other.patch
25
else
0
end
end
def to_s
if valid?
"%d.%d.%d" % [@major, @minor, @patch]
else
"Unknown"
end
end
def valid?
@major >= 0 && @minor >= 0 && @patch >= 0 && @major + @minor + @patch > 0
end
end
end
=======
>>>>>>>
`
expectedFiles := []*conflictFile{
{
Header: &gitalypb.ConflictFileHeader{
AncestorPath: []byte("files/ruby/version_info.rb"),
CommitOid: ourCommitOid,
OurMode: int32(0o100644),
OurPath: []byte("files/ruby/version_info.rb"),
},
Content: []byte(conflictContent),
},
}
testhelper.ProtoEqual(t, expectedFiles, getConflictFiles(t, c))
}
func TestFailedListConflictFilesRequestDueToValidation(t *testing.T) {
c | nc getConflictFiles(t *testing.T, c gitalypb.ConflictsService_ListConflictFilesClient) []*conflictFile {
t.Helper()
var files []*conflictFile
var currentFile *conflictFile
for {
r, err := c.Recv()
if err == io.EOF {
break
}
require.NoError(t, err)
for _, file := range r.GetFiles() {
// If there's a header this is the beginning of a new file
if header := file.GetHeader(); header != nil {
if currentFile != nil {
files = append(files, currentFile)
}
currentFile = &conflictFile{Header: header}
} else {
// Append to current file's content
currentFile.Content = append(currentFile.Content, file.GetContent()...)
}
}
}
// Append leftover file
files = append(files, currentFile)
return files
}
func drainListConflictFilesResponse(c gitalypb.ConflictsService_ListConflictFilesClient) error {
var err error
for err == nil {
_, err = c.Recv()
}
return err
}
| tx := testhelper.Context(t)
_, repo, _, client := SetupConflictsService(ctx, t, true, nil)
ourCommitOid := "0b4bc9a49b562e85de7cc9e834518ea6828729b9"
theirCommitOid := "bb5206fee213d983da88c47f9cf4cc6caf9c66dc"
testCases := []struct {
desc string
request *gitalypb.ListConflictFilesRequest
code codes.Code
}{
{
desc: "empty repo",
request: &gitalypb.ListConflictFilesRequest{
Repository: nil,
OurCommitOid: ourCommitOid,
TheirCommitOid: theirCommitOid,
},
code: codes.InvalidArgument,
},
{
desc: "empty OurCommitId field",
request: &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: "",
TheirCommitOid: theirCommitOid,
},
code: codes.InvalidArgument,
},
{
desc: "empty TheirCommitId field",
request: &gitalypb.ListConflictFilesRequest{
Repository: repo,
OurCommitOid: ourCommitOid,
TheirCommitOid: "",
},
code: codes.InvalidArgument,
},
}
for _, testCase := range testCases {
t.Run(testCase.desc, func(t *testing.T) {
c, _ := client.ListConflictFiles(ctx, testCase.request)
testhelper.RequireGrpcCode(t, drainListConflictFilesResponse(c), testCase.code)
})
}
}
fu | identifier_body |
reconcile.go | //
// Copyright (c) 2018 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: EPL-2.0
//
package reconcile
import (
"context"
"fmt"
"strings"
"time"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/util/wait"
"github.com/golang/glog"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/crd"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/resource"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/states"
)
// Reconciler periodically checks the status of subresources and takes
// various self-healing and convergence actions. These include updating
// the top-level custom resource status, re-creating missing subresources,
// deleting orphaned subresources, et cetera.
//
// See the docs/reconciliation.md file for a detailed description of the
// reconciliation policy.
type Reconciler struct {
namespace string
gvk schema.GroupVersionKind
crdHandle *crd.Handle
crdClient crd.Client
resourceClients []resource.Client
}
// New returns a new Reconciler.
func New(namespace string, gvk schema.GroupVersionKind, crdHandle *crd.Handle, crdClient crd.Client, resourceClients []resource.Client) *Reconciler {
return &Reconciler{
namespace: namespace,
gvk: gvk,
crdHandle: crdHandle,
crdClient: crdClient,
resourceClients: resourceClients,
}
}
// Run starts the reconciliation loop and blocks until the context is done, or
// there is an unrecoverable error. Reconciliation actions are done at the
// supplied interval.
func (r *Reconciler) Run(ctx context.Context, interval time.Duration) error {
glog.V(4).Infof("Starting reconciler for %v.%v.%v", r.gvk.Group, r.gvk.Version, r.gvk.Kind)
go wait.Until(r.run, interval, ctx.Done())
<-ctx.Done()
return ctx.Err()
}
type subresource struct {
client resource.Client
object runtime.Object
lifecycle lifecycle
}
type subresources []*subresource
// Contains subresources grouped by their controlling resource.
type subresourceMap map[string]subresources
type action struct {
newCRState states.State
newCRReason string
subresourcesToCreate subresources
subresourcesToDelete subresources
}
func (a action) String() string {
var sCreateNames []string
for _, s := range a.subresourcesToCreate {
sCreateNames = append(sCreateNames, s.client.Plural())
}
var sDeleteNames []string
for _, s := range a.subresourcesToDelete {
sDeleteNames = append(sDeleteNames, s.client.Plural())
}
return fmt.Sprintf(
`{
newCRState: "%s",
newCRReason: "%s",
subresourcesToCreate: "%s",
subresourcesToDelete: "%s"
}`,
a.newCRState,
a.newCRReason,
strings.Join(sCreateNames, ", "),
strings.Join(sDeleteNames, ", "))
}
func (r *Reconciler) run() {
subresourcesByCR := r.groupSubresourcesByCustomResource()
for crName, subs := range subresourcesByCR {
a, cr, err := r.planAction(crName, subs)
if err != nil {
glog.Errorf(`failed to plan action for custom resource: [%s] subresources: [%v] error: [%s]`, crName, subresourcesByCR, err.Error())
continue
}
glog.Infof("planned action: %s", a.String())
errs := r.executeAction(crName, cr, a)
if len(errs) > 0 {
glog.Errorf(`failed to execute action for custom resource: [%s] subresources: %v errors: %v`, crName, subresourcesByCR, errs)
}
}
}
// TODO(CD): groupSubresourcesByCustomResource() doesn't work for a custom
// resource with no sub-resource(s) or the sub-resource have been deleted.
// As resourceClient.List() will not have any sub-resource belonging to the
// custom resource, result will not have the controller name as one of its
// keys.
//
// To fix the problem, we could do a List from the CR client and then iterate
// over those names instead of keys from the intermediate result map we built
// based on the subresources.
func (r *Reconciler) groupSubresourcesByCustomResource() subresourceMap {
result := subresourceMap{}
// Get the list of crs.
crListObj, err := r.crdClient.List(r.namespace, map[string]string{})
if err != nil || crListObj == nil {
glog.Warningf("[reconcile] could not list custom resources. Got error %v %v", err, crListObj)
return result
}
customResourceList := crListObj.(crd.CustomResourceList)
// Get the list of custom resources
crList := customResourceList.GetItems()
// Return if the list is empty
if len(crList) == 0 {
glog.Warningf("[reconcile] custom resources list is empty")
return result
}
for _, resourceClient := range r.resourceClients {
objects, err := resourceClient.List(r.namespace, map[string]string{})
if err != nil {
glog.Warningf(`[reconcile] failed to list "%s" subresources`, resourceClient.Plural())
continue
}
for _, obj := range objects {
controllerRef := metav1.GetControllerOf(obj)
if controllerRef == nil {
glog.V(4).Infof("[reconcile] ignoring sub-resource %v, %v as it doesn not have a controller reference", obj.GetName(), r.namespace)
continue
}
// Only manipulate controller-created subresources.
if controllerRef.APIVersion != r.gvk.GroupVersion().String() || controllerRef.Kind != r.gvk.Kind {
glog.V(4).Infof("[reconcile] ignoring sub-resource %v, %v as controlling custom resource is from a different group, version and kind", obj.GetName(), r.namespace)
continue
}
subLifecycle := exists
objMeta, err := meta.Accessor(obj)
if err != nil {
glog.Warningf("[reconcile] error getting meta accessor for subresource: %v", err)
continue
}
if objMeta.GetDeletionTimestamp() != nil {
subLifecycle = deleting
}
runtimeObj, ok := obj.(runtime.Object)
if !ok {
glog.Warningf("[reconcile] error asserting metav1.Object as runtime.Object: %v", err)
continue
}
controllerName := controllerRef.Name
objList := result[controllerName]
result[controllerName] = append(objList, &subresource{resourceClient, runtimeObj, subLifecycle})
}
}
// Iterate over the crs to get the list of missing sub resources
// ASSUMPTION: There is at most one subresource of each kind per
// custom resource. We use the plural form as a key
for _, item := range crList {
cr, ok := item.(crd.CustomResource)
if !ok {
glog.Warningf("[reconcile] failed to assert item %v to type CustomResource", item)
continue
}
subs, ok := result[cr.Name()]
if !ok {
glog.Warningf("[reconcile] no sub-resources found for cr %v", cr.Name())
}
// Find non-existing subresources based on the expected subresource clients.
existingSubs := map[string]struct{}{}
for _, sub := range subs {
existingSubs[sub.client.Plural()] = struct{}{}
}
for _, subClient := range r.resourceClients {
_, exists := existingSubs[subClient.Plural()]
if !exists {
result[cr.Name()] = append(subs, &subresource{subClient, nil, doesNotExist})
}
}
}
return result
}
func (subs subresources) filter(predicate func(s *subresource) bool) subresources |
func (subs subresources) any(predicate func(s *subresource) bool) bool {
return len(subs.filter(predicate)) > 0
}
func (subs subresources) all(predicate func(s *subresource) bool) bool {
return len(subs.filter(predicate)) == len(subs)
}
func (r *Reconciler) planAction(controllerName string, subs subresources) (*action, crd.CustomResource, error) {
// If the controller name is empty, these are not our subresources;
// do nothing.
if controllerName == "" {
return &action{}, nil, nil
}
// Compute the current lifecycle phase of the custom resource.
customResourceLifecycle := exists
crObj, err := r.crdClient.Get(r.namespace, controllerName)
if err != nil && apierrors.IsNotFound(err) {
customResourceLifecycle = doesNotExist
}
crMeta, err := meta.Accessor(crObj)
if err != nil {
glog.Warningf("[reconcile] error getting meta accessor for controlling custom resource: %v", err)
} else if crMeta.GetDeletionTimestamp() != nil {
customResourceLifecycle = deleting
}
// If the custom resource is deleting or does not exist, clean up all
// subresources.
if customResourceLifecycle.isOneOf(doesNotExist, deleting) {
return &action{subresourcesToDelete: subs}, nil, nil
}
cr, ok := crObj.(crd.CustomResource)
if !ok {
return &action{}, nil, fmt.Errorf("object retrieved from CRD client not an instance of crd.CustomResource: [%v]", crObj)
}
customResourceSpecState := cr.GetSpecState()
customResourceStatusState := cr.GetStatusState()
// If the desired custom resource state is running or completed AND
// the custom resource is in a terminal state, then delete all subresources.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Completed, states.Failed) {
return &action{subresourcesToDelete: subs}, nil, nil
}
// If the desired custom resource state is running or completed AND
// the current custom resource status is non-terminal, ANY non-ephemeral
// subresource that is failed, does not exist or has been deleted causes
// the custom resource current state to move to failed.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Pending, states.Running) {
if subs.any(func(s *subresource) bool {
return !s.client.IsEphemeral() &&
s.lifecycle.isOneOf(doesNotExist, deleting) ||
s.client.GetStatusState(s.object) == states.Failed
}) {
// Set CR to failed
return &action{
newCRState: states.Failed,
}, cr, nil
}
}
// If the desired custom resource state is completed AND
// the current custom resource status is pending or running, then if ANY
// subresource is completed, set the current custom resource state to
// completed.
if customResourceSpecState == states.Completed && customResourceStatusState.IsOneOf(states.Pending, states.Running) {
if subs.any(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Completed
}) {
// Set CR as completed
return &action{
newCRState: states.Completed,
}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is pending or running, then
// re-create any nonexisting ephemeral subresources.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Pending, states.Running) {
toRecreate := subs.filter(func(s *subresource) bool {
return s.client.IsEphemeral() &&
(s.lifecycle == exists && s.client.GetStatusState(s.object) == states.Failed ||
s.lifecycle == doesNotExist)
})
if len(toRecreate) > 0 {
// Recreate
return &action{subresourcesToCreate: toRecreate}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is running AND
// ANY subresource is pending, then set the current custom resource state
// to pending.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState == states.Running {
if subs.any(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Pending
}) {
// Set CR as pending
return &action{
newCRState: states.Pending,
}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is pending AND
// ALL subresources are running, then set the current custom resource state
// to running.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState == states.Pending {
// All resources must be running for us to consider the custom resource as running.
if subs.all(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Running
}) {
// Set CR as running
return &action{
newCRState: states.Running,
}, cr, nil
}
}
// Default case: do nothing.
return &action{}, cr, nil
}
func (r *Reconciler) executeAction(controllerName string, cr crd.CustomResource, a *action) []error {
errors := []error{}
glog.V(4).Infof(`executing reconcile action for "%s" resource "%s" in namespace "%s"`, r.crdHandle.Plural, controllerName, r.namespace)
if a.newCRState != "" {
glog.Infof(`updating "%s" custom resource for controller "%s" in namespace "%s"`, r.crdHandle.Plural, controllerName, r.namespace)
cr.SetStatusStateWithMessage(a.newCRState, a.newCRReason)
_, err := r.crdClient.Update(cr)
if err != nil {
glog.Errorf(`error updating custom resource state for "%s" in namespace "%s"`, controllerName, r.namespace)
errors = append(errors, err)
}
}
for _, s := range a.subresourcesToCreate {
glog.Infof(`creating "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
err := s.client.Create(r.namespace, cr)
if err != nil {
glog.Errorf(`error creating "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
errors = append(errors, err)
}
}
for _, s := range a.subresourcesToDelete {
glog.Infof(`deleting "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
err := s.client.Delete(r.namespace, controllerName)
if err != nil {
glog.Errorf(`error deleting "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
errors = append(errors, err)
}
}
return errors
}
| {
var result subresources
for _, sub := range subs {
if predicate(sub) {
result = append(result, sub)
}
}
return result
} | identifier_body |
reconcile.go | //
// Copyright (c) 2018 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: EPL-2.0
//
package reconcile
import (
"context"
"fmt"
"strings"
"time"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/util/wait"
"github.com/golang/glog"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/crd"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/resource"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/states"
)
// Reconciler periodically checks the status of subresources and takes
// various self-healing and convergence actions. These include updating
// the top-level custom resource status, re-creating missing subresources,
// deleting orphaned subresources, et cetera.
//
// See the docs/reconciliation.md file for a detailed description of the
// reconciliation policy.
type Reconciler struct {
namespace string
gvk schema.GroupVersionKind
crdHandle *crd.Handle
crdClient crd.Client
resourceClients []resource.Client
}
// New returns a new Reconciler.
func New(namespace string, gvk schema.GroupVersionKind, crdHandle *crd.Handle, crdClient crd.Client, resourceClients []resource.Client) *Reconciler {
return &Reconciler{
namespace: namespace,
gvk: gvk,
crdHandle: crdHandle,
crdClient: crdClient,
resourceClients: resourceClients,
}
}
// Run starts the reconciliation loop and blocks until the context is done, or
// there is an unrecoverable error. Reconciliation actions are done at the
// supplied interval.
func (r *Reconciler) Run(ctx context.Context, interval time.Duration) error {
glog.V(4).Infof("Starting reconciler for %v.%v.%v", r.gvk.Group, r.gvk.Version, r.gvk.Kind)
go wait.Until(r.run, interval, ctx.Done())
<-ctx.Done()
return ctx.Err()
}
type subresource struct {
client resource.Client
object runtime.Object
lifecycle lifecycle
}
type subresources []*subresource
// Contains subresources grouped by their controlling resource.
type subresourceMap map[string]subresources
type action struct {
newCRState states.State
newCRReason string
subresourcesToCreate subresources
subresourcesToDelete subresources
}
func (a action) String() string {
var sCreateNames []string
for _, s := range a.subresourcesToCreate {
sCreateNames = append(sCreateNames, s.client.Plural())
}
var sDeleteNames []string
for _, s := range a.subresourcesToDelete {
sDeleteNames = append(sDeleteNames, s.client.Plural())
}
return fmt.Sprintf(
`{
newCRState: "%s",
newCRReason: "%s",
subresourcesToCreate: "%s",
subresourcesToDelete: "%s"
}`,
a.newCRState,
a.newCRReason,
strings.Join(sCreateNames, ", "),
strings.Join(sDeleteNames, ", "))
}
func (r *Reconciler) run() {
subresourcesByCR := r.groupSubresourcesByCustomResource()
for crName, subs := range subresourcesByCR {
a, cr, err := r.planAction(crName, subs)
if err != nil {
glog.Errorf(`failed to plan action for custom resource: [%s] subresources: [%v] error: [%s]`, crName, subresourcesByCR, err.Error())
continue
}
glog.Infof("planned action: %s", a.String())
errs := r.executeAction(crName, cr, a)
if len(errs) > 0 {
glog.Errorf(`failed to execute action for custom resource: [%s] subresources: %v errors: %v`, crName, subresourcesByCR, errs)
}
}
}
// TODO(CD): groupSubresourcesByCustomResource() doesn't work for a custom
// resource with no sub-resource(s) or the sub-resource have been deleted.
// As resourceClient.List() will not have any sub-resource belonging to the
// custom resource, result will not have the controller name as one of its
// keys.
//
// To fix the problem, we could do a List from the CR client and then iterate
// over those names instead of keys from the intermediate result map we built
// based on the subresources.
func (r *Reconciler) groupSubresourcesByCustomResource() subresourceMap {
result := subresourceMap{}
// Get the list of crs.
crListObj, err := r.crdClient.List(r.namespace, map[string]string{})
if err != nil || crListObj == nil {
glog.Warningf("[reconcile] could not list custom resources. Got error %v %v", err, crListObj)
return result
}
customResourceList := crListObj.(crd.CustomResourceList)
// Get the list of custom resources
crList := customResourceList.GetItems()
// Return if the list is empty
if len(crList) == 0 {
glog.Warningf("[reconcile] custom resources list is empty")
return result
}
for _, resourceClient := range r.resourceClients {
objects, err := resourceClient.List(r.namespace, map[string]string{})
if err != nil {
glog.Warningf(`[reconcile] failed to list "%s" subresources`, resourceClient.Plural())
continue
}
for _, obj := range objects {
controllerRef := metav1.GetControllerOf(obj)
if controllerRef == nil {
glog.V(4).Infof("[reconcile] ignoring sub-resource %v, %v as it doesn not have a controller reference", obj.GetName(), r.namespace)
continue
}
// Only manipulate controller-created subresources.
if controllerRef.APIVersion != r.gvk.GroupVersion().String() || controllerRef.Kind != r.gvk.Kind {
glog.V(4).Infof("[reconcile] ignoring sub-resource %v, %v as controlling custom resource is from a different group, version and kind", obj.GetName(), r.namespace)
continue
}
subLifecycle := exists
objMeta, err := meta.Accessor(obj)
if err != nil {
glog.Warningf("[reconcile] error getting meta accessor for subresource: %v", err)
continue
}
if objMeta.GetDeletionTimestamp() != nil {
subLifecycle = deleting
}
runtimeObj, ok := obj.(runtime.Object)
if !ok {
glog.Warningf("[reconcile] error asserting metav1.Object as runtime.Object: %v", err)
continue
}
controllerName := controllerRef.Name
objList := result[controllerName]
result[controllerName] = append(objList, &subresource{resourceClient, runtimeObj, subLifecycle})
}
}
// Iterate over the crs to get the list of missing sub resources
// ASSUMPTION: There is at most one subresource of each kind per
// custom resource. We use the plural form as a key
for _, item := range crList {
cr, ok := item.(crd.CustomResource)
if !ok {
glog.Warningf("[reconcile] failed to assert item %v to type CustomResource", item)
continue
}
subs, ok := result[cr.Name()]
if !ok {
glog.Warningf("[reconcile] no sub-resources found for cr %v", cr.Name())
}
// Find non-existing subresources based on the expected subresource clients.
existingSubs := map[string]struct{}{}
for _, sub := range subs {
existingSubs[sub.client.Plural()] = struct{}{}
}
for _, subClient := range r.resourceClients {
_, exists := existingSubs[subClient.Plural()]
if !exists {
result[cr.Name()] = append(subs, &subresource{subClient, nil, doesNotExist})
}
}
}
return result
}
func (subs subresources) filter(predicate func(s *subresource) bool) subresources {
var result subresources
for _, sub := range subs {
if predicate(sub) {
result = append(result, sub)
}
}
return result
}
func (subs subresources) any(predicate func(s *subresource) bool) bool {
return len(subs.filter(predicate)) > 0
}
func (subs subresources) | (predicate func(s *subresource) bool) bool {
return len(subs.filter(predicate)) == len(subs)
}
func (r *Reconciler) planAction(controllerName string, subs subresources) (*action, crd.CustomResource, error) {
// If the controller name is empty, these are not our subresources;
// do nothing.
if controllerName == "" {
return &action{}, nil, nil
}
// Compute the current lifecycle phase of the custom resource.
customResourceLifecycle := exists
crObj, err := r.crdClient.Get(r.namespace, controllerName)
if err != nil && apierrors.IsNotFound(err) {
customResourceLifecycle = doesNotExist
}
crMeta, err := meta.Accessor(crObj)
if err != nil {
glog.Warningf("[reconcile] error getting meta accessor for controlling custom resource: %v", err)
} else if crMeta.GetDeletionTimestamp() != nil {
customResourceLifecycle = deleting
}
// If the custom resource is deleting or does not exist, clean up all
// subresources.
if customResourceLifecycle.isOneOf(doesNotExist, deleting) {
return &action{subresourcesToDelete: subs}, nil, nil
}
cr, ok := crObj.(crd.CustomResource)
if !ok {
return &action{}, nil, fmt.Errorf("object retrieved from CRD client not an instance of crd.CustomResource: [%v]", crObj)
}
customResourceSpecState := cr.GetSpecState()
customResourceStatusState := cr.GetStatusState()
// If the desired custom resource state is running or completed AND
// the custom resource is in a terminal state, then delete all subresources.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Completed, states.Failed) {
return &action{subresourcesToDelete: subs}, nil, nil
}
// If the desired custom resource state is running or completed AND
// the current custom resource status is non-terminal, ANY non-ephemeral
// subresource that is failed, does not exist or has been deleted causes
// the custom resource current state to move to failed.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Pending, states.Running) {
if subs.any(func(s *subresource) bool {
return !s.client.IsEphemeral() &&
s.lifecycle.isOneOf(doesNotExist, deleting) ||
s.client.GetStatusState(s.object) == states.Failed
}) {
// Set CR to failed
return &action{
newCRState: states.Failed,
}, cr, nil
}
}
// If the desired custom resource state is completed AND
// the current custom resource status is pending or running, then if ANY
// subresource is completed, set the current custom resource state to
// completed.
if customResourceSpecState == states.Completed && customResourceStatusState.IsOneOf(states.Pending, states.Running) {
if subs.any(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Completed
}) {
// Set CR as completed
return &action{
newCRState: states.Completed,
}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is pending or running, then
// re-create any nonexisting ephemeral subresources.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Pending, states.Running) {
toRecreate := subs.filter(func(s *subresource) bool {
return s.client.IsEphemeral() &&
(s.lifecycle == exists && s.client.GetStatusState(s.object) == states.Failed ||
s.lifecycle == doesNotExist)
})
if len(toRecreate) > 0 {
// Recreate
return &action{subresourcesToCreate: toRecreate}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is running AND
// ANY subresource is pending, then set the current custom resource state
// to pending.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState == states.Running {
if subs.any(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Pending
}) {
// Set CR as pending
return &action{
newCRState: states.Pending,
}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is pending AND
// ALL subresources are running, then set the current custom resource state
// to running.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState == states.Pending {
// All resources must be running for us to consider the custom resource as running.
if subs.all(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Running
}) {
// Set CR as running
return &action{
newCRState: states.Running,
}, cr, nil
}
}
// Default case: do nothing.
return &action{}, cr, nil
}
func (r *Reconciler) executeAction(controllerName string, cr crd.CustomResource, a *action) []error {
errors := []error{}
glog.V(4).Infof(`executing reconcile action for "%s" resource "%s" in namespace "%s"`, r.crdHandle.Plural, controllerName, r.namespace)
if a.newCRState != "" {
glog.Infof(`updating "%s" custom resource for controller "%s" in namespace "%s"`, r.crdHandle.Plural, controllerName, r.namespace)
cr.SetStatusStateWithMessage(a.newCRState, a.newCRReason)
_, err := r.crdClient.Update(cr)
if err != nil {
glog.Errorf(`error updating custom resource state for "%s" in namespace "%s"`, controllerName, r.namespace)
errors = append(errors, err)
}
}
for _, s := range a.subresourcesToCreate {
glog.Infof(`creating "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
err := s.client.Create(r.namespace, cr)
if err != nil {
glog.Errorf(`error creating "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
errors = append(errors, err)
}
}
for _, s := range a.subresourcesToDelete {
glog.Infof(`deleting "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
err := s.client.Delete(r.namespace, controllerName)
if err != nil {
glog.Errorf(`error deleting "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
errors = append(errors, err)
}
}
return errors
}
| all | identifier_name |
reconcile.go | //
// Copyright (c) 2018 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: EPL-2.0
//
package reconcile
import (
"context"
"fmt"
"strings"
"time"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/util/wait"
"github.com/golang/glog"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/crd"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/resource"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/states"
)
// Reconciler periodically checks the status of subresources and takes
// various self-healing and convergence actions. These include updating
// the top-level custom resource status, re-creating missing subresources,
// deleting orphaned subresources, et cetera.
//
// See the docs/reconciliation.md file for a detailed description of the
// reconciliation policy.
type Reconciler struct {
namespace string
gvk schema.GroupVersionKind
crdHandle *crd.Handle
crdClient crd.Client
resourceClients []resource.Client
}
// New returns a new Reconciler.
func New(namespace string, gvk schema.GroupVersionKind, crdHandle *crd.Handle, crdClient crd.Client, resourceClients []resource.Client) *Reconciler {
return &Reconciler{
namespace: namespace,
gvk: gvk,
crdHandle: crdHandle,
crdClient: crdClient,
resourceClients: resourceClients,
}
}
// Run starts the reconciliation loop and blocks until the context is done, or
// there is an unrecoverable error. Reconciliation actions are done at the
// supplied interval.
func (r *Reconciler) Run(ctx context.Context, interval time.Duration) error {
glog.V(4).Infof("Starting reconciler for %v.%v.%v", r.gvk.Group, r.gvk.Version, r.gvk.Kind)
go wait.Until(r.run, interval, ctx.Done())
<-ctx.Done()
return ctx.Err()
}
type subresource struct {
client resource.Client
object runtime.Object
lifecycle lifecycle
}
type subresources []*subresource
// Contains subresources grouped by their controlling resource.
type subresourceMap map[string]subresources
type action struct {
newCRState states.State
newCRReason string
subresourcesToCreate subresources
subresourcesToDelete subresources
}
func (a action) String() string {
var sCreateNames []string
for _, s := range a.subresourcesToCreate {
sCreateNames = append(sCreateNames, s.client.Plural())
}
var sDeleteNames []string
for _, s := range a.subresourcesToDelete {
sDeleteNames = append(sDeleteNames, s.client.Plural())
}
return fmt.Sprintf(
`{
newCRState: "%s",
newCRReason: "%s",
subresourcesToCreate: "%s",
subresourcesToDelete: "%s"
}`,
a.newCRState,
a.newCRReason,
strings.Join(sCreateNames, ", "),
strings.Join(sDeleteNames, ", "))
}
func (r *Reconciler) run() {
subresourcesByCR := r.groupSubresourcesByCustomResource()
for crName, subs := range subresourcesByCR {
a, cr, err := r.planAction(crName, subs)
if err != nil {
glog.Errorf(`failed to plan action for custom resource: [%s] subresources: [%v] error: [%s]`, crName, subresourcesByCR, err.Error())
continue
}
glog.Infof("planned action: %s", a.String())
errs := r.executeAction(crName, cr, a)
if len(errs) > 0 {
glog.Errorf(`failed to execute action for custom resource: [%s] subresources: %v errors: %v`, crName, subresourcesByCR, errs)
}
}
}
// TODO(CD): groupSubresourcesByCustomResource() doesn't work for a custom
// resource with no sub-resource(s) or the sub-resource have been deleted.
// As resourceClient.List() will not have any sub-resource belonging to the
// custom resource, result will not have the controller name as one of its
// keys.
//
// To fix the problem, we could do a List from the CR client and then iterate
// over those names instead of keys from the intermediate result map we built
// based on the subresources.
func (r *Reconciler) groupSubresourcesByCustomResource() subresourceMap {
result := subresourceMap{}
// Get the list of crs.
crListObj, err := r.crdClient.List(r.namespace, map[string]string{})
if err != nil || crListObj == nil {
glog.Warningf("[reconcile] could not list custom resources. Got error %v %v", err, crListObj)
return result
}
customResourceList := crListObj.(crd.CustomResourceList)
// Get the list of custom resources
crList := customResourceList.GetItems()
// Return if the list is empty
if len(crList) == 0 {
glog.Warningf("[reconcile] custom resources list is empty")
return result
}
for _, resourceClient := range r.resourceClients {
objects, err := resourceClient.List(r.namespace, map[string]string{})
if err != nil {
glog.Warningf(`[reconcile] failed to list "%s" subresources`, resourceClient.Plural())
continue
}
for _, obj := range objects {
controllerRef := metav1.GetControllerOf(obj)
if controllerRef == nil {
glog.V(4).Infof("[reconcile] ignoring sub-resource %v, %v as it doesn not have a controller reference", obj.GetName(), r.namespace)
continue
}
// Only manipulate controller-created subresources.
if controllerRef.APIVersion != r.gvk.GroupVersion().String() || controllerRef.Kind != r.gvk.Kind {
glog.V(4).Infof("[reconcile] ignoring sub-resource %v, %v as controlling custom resource is from a different group, version and kind", obj.GetName(), r.namespace)
continue
}
subLifecycle := exists
objMeta, err := meta.Accessor(obj)
if err != nil {
glog.Warningf("[reconcile] error getting meta accessor for subresource: %v", err)
continue
}
if objMeta.GetDeletionTimestamp() != nil {
subLifecycle = deleting
}
runtimeObj, ok := obj.(runtime.Object)
if !ok {
glog.Warningf("[reconcile] error asserting metav1.Object as runtime.Object: %v", err)
continue
}
controllerName := controllerRef.Name
objList := result[controllerName]
result[controllerName] = append(objList, &subresource{resourceClient, runtimeObj, subLifecycle})
}
}
// Iterate over the crs to get the list of missing sub resources
// ASSUMPTION: There is at most one subresource of each kind per
// custom resource. We use the plural form as a key
for _, item := range crList {
cr, ok := item.(crd.CustomResource)
if !ok |
subs, ok := result[cr.Name()]
if !ok {
glog.Warningf("[reconcile] no sub-resources found for cr %v", cr.Name())
}
// Find non-existing subresources based on the expected subresource clients.
existingSubs := map[string]struct{}{}
for _, sub := range subs {
existingSubs[sub.client.Plural()] = struct{}{}
}
for _, subClient := range r.resourceClients {
_, exists := existingSubs[subClient.Plural()]
if !exists {
result[cr.Name()] = append(subs, &subresource{subClient, nil, doesNotExist})
}
}
}
return result
}
func (subs subresources) filter(predicate func(s *subresource) bool) subresources {
var result subresources
for _, sub := range subs {
if predicate(sub) {
result = append(result, sub)
}
}
return result
}
func (subs subresources) any(predicate func(s *subresource) bool) bool {
return len(subs.filter(predicate)) > 0
}
func (subs subresources) all(predicate func(s *subresource) bool) bool {
return len(subs.filter(predicate)) == len(subs)
}
func (r *Reconciler) planAction(controllerName string, subs subresources) (*action, crd.CustomResource, error) {
// If the controller name is empty, these are not our subresources;
// do nothing.
if controllerName == "" {
return &action{}, nil, nil
}
// Compute the current lifecycle phase of the custom resource.
customResourceLifecycle := exists
crObj, err := r.crdClient.Get(r.namespace, controllerName)
if err != nil && apierrors.IsNotFound(err) {
customResourceLifecycle = doesNotExist
}
crMeta, err := meta.Accessor(crObj)
if err != nil {
glog.Warningf("[reconcile] error getting meta accessor for controlling custom resource: %v", err)
} else if crMeta.GetDeletionTimestamp() != nil {
customResourceLifecycle = deleting
}
// If the custom resource is deleting or does not exist, clean up all
// subresources.
if customResourceLifecycle.isOneOf(doesNotExist, deleting) {
return &action{subresourcesToDelete: subs}, nil, nil
}
cr, ok := crObj.(crd.CustomResource)
if !ok {
return &action{}, nil, fmt.Errorf("object retrieved from CRD client not an instance of crd.CustomResource: [%v]", crObj)
}
customResourceSpecState := cr.GetSpecState()
customResourceStatusState := cr.GetStatusState()
// If the desired custom resource state is running or completed AND
// the custom resource is in a terminal state, then delete all subresources.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Completed, states.Failed) {
return &action{subresourcesToDelete: subs}, nil, nil
}
// If the desired custom resource state is running or completed AND
// the current custom resource status is non-terminal, ANY non-ephemeral
// subresource that is failed, does not exist or has been deleted causes
// the custom resource current state to move to failed.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Pending, states.Running) {
if subs.any(func(s *subresource) bool {
return !s.client.IsEphemeral() &&
s.lifecycle.isOneOf(doesNotExist, deleting) ||
s.client.GetStatusState(s.object) == states.Failed
}) {
// Set CR to failed
return &action{
newCRState: states.Failed,
}, cr, nil
}
}
// If the desired custom resource state is completed AND
// the current custom resource status is pending or running, then if ANY
// subresource is completed, set the current custom resource state to
// completed.
if customResourceSpecState == states.Completed && customResourceStatusState.IsOneOf(states.Pending, states.Running) {
if subs.any(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Completed
}) {
// Set CR as completed
return &action{
newCRState: states.Completed,
}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is pending or running, then
// re-create any nonexisting ephemeral subresources.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Pending, states.Running) {
toRecreate := subs.filter(func(s *subresource) bool {
return s.client.IsEphemeral() &&
(s.lifecycle == exists && s.client.GetStatusState(s.object) == states.Failed ||
s.lifecycle == doesNotExist)
})
if len(toRecreate) > 0 {
// Recreate
return &action{subresourcesToCreate: toRecreate}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is running AND
// ANY subresource is pending, then set the current custom resource state
// to pending.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState == states.Running {
if subs.any(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Pending
}) {
// Set CR as pending
return &action{
newCRState: states.Pending,
}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is pending AND
// ALL subresources are running, then set the current custom resource state
// to running.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState == states.Pending {
// All resources must be running for us to consider the custom resource as running.
if subs.all(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Running
}) {
// Set CR as running
return &action{
newCRState: states.Running,
}, cr, nil
}
}
// Default case: do nothing.
return &action{}, cr, nil
}
func (r *Reconciler) executeAction(controllerName string, cr crd.CustomResource, a *action) []error {
errors := []error{}
glog.V(4).Infof(`executing reconcile action for "%s" resource "%s" in namespace "%s"`, r.crdHandle.Plural, controllerName, r.namespace)
if a.newCRState != "" {
glog.Infof(`updating "%s" custom resource for controller "%s" in namespace "%s"`, r.crdHandle.Plural, controllerName, r.namespace)
cr.SetStatusStateWithMessage(a.newCRState, a.newCRReason)
_, err := r.crdClient.Update(cr)
if err != nil {
glog.Errorf(`error updating custom resource state for "%s" in namespace "%s"`, controllerName, r.namespace)
errors = append(errors, err)
}
}
for _, s := range a.subresourcesToCreate {
glog.Infof(`creating "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
err := s.client.Create(r.namespace, cr)
if err != nil {
glog.Errorf(`error creating "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
errors = append(errors, err)
}
}
for _, s := range a.subresourcesToDelete {
glog.Infof(`deleting "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
err := s.client.Delete(r.namespace, controllerName)
if err != nil {
glog.Errorf(`error deleting "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
errors = append(errors, err)
}
}
return errors
}
| {
glog.Warningf("[reconcile] failed to assert item %v to type CustomResource", item)
continue
} | conditional_block |
reconcile.go | //
// Copyright (c) 2018 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: EPL-2.0
//
package reconcile
import (
"context"
"fmt"
"strings"
"time"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/util/wait"
"github.com/golang/glog"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/crd"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/resource"
"github.com/intel/crd-reconciler-for-kubernetes/pkg/states"
)
// Reconciler periodically checks the status of subresources and takes
// various self-healing and convergence actions. These include updating
// the top-level custom resource status, re-creating missing subresources,
// deleting orphaned subresources, et cetera.
//
// See the docs/reconciliation.md file for a detailed description of the
// reconciliation policy.
type Reconciler struct {
namespace string
gvk schema.GroupVersionKind
crdHandle *crd.Handle
crdClient crd.Client
resourceClients []resource.Client
}
// New returns a new Reconciler.
func New(namespace string, gvk schema.GroupVersionKind, crdHandle *crd.Handle, crdClient crd.Client, resourceClients []resource.Client) *Reconciler {
return &Reconciler{
namespace: namespace,
gvk: gvk,
crdHandle: crdHandle,
crdClient: crdClient,
resourceClients: resourceClients,
}
}
// Run starts the reconciliation loop and blocks until the context is done, or
// there is an unrecoverable error. Reconciliation actions are done at the
// supplied interval.
func (r *Reconciler) Run(ctx context.Context, interval time.Duration) error {
glog.V(4).Infof("Starting reconciler for %v.%v.%v", r.gvk.Group, r.gvk.Version, r.gvk.Kind)
go wait.Until(r.run, interval, ctx.Done())
<-ctx.Done()
return ctx.Err()
}
type subresource struct {
client resource.Client
object runtime.Object
lifecycle lifecycle
}
type subresources []*subresource
// Contains subresources grouped by their controlling resource.
type subresourceMap map[string]subresources
type action struct {
newCRState states.State
newCRReason string
subresourcesToCreate subresources
subresourcesToDelete subresources
}
func (a action) String() string {
var sCreateNames []string
for _, s := range a.subresourcesToCreate {
sCreateNames = append(sCreateNames, s.client.Plural())
}
var sDeleteNames []string
for _, s := range a.subresourcesToDelete {
sDeleteNames = append(sDeleteNames, s.client.Plural())
}
return fmt.Sprintf(
`{
newCRState: "%s",
newCRReason: "%s",
subresourcesToCreate: "%s",
subresourcesToDelete: "%s"
}`,
a.newCRState,
a.newCRReason,
strings.Join(sCreateNames, ", "),
strings.Join(sDeleteNames, ", "))
}
func (r *Reconciler) run() {
subresourcesByCR := r.groupSubresourcesByCustomResource()
for crName, subs := range subresourcesByCR {
a, cr, err := r.planAction(crName, subs)
if err != nil {
glog.Errorf(`failed to plan action for custom resource: [%s] subresources: [%v] error: [%s]`, crName, subresourcesByCR, err.Error())
continue
}
glog.Infof("planned action: %s", a.String())
errs := r.executeAction(crName, cr, a)
if len(errs) > 0 {
glog.Errorf(`failed to execute action for custom resource: [%s] subresources: %v errors: %v`, crName, subresourcesByCR, errs)
}
}
}
// TODO(CD): groupSubresourcesByCustomResource() doesn't work for a custom
// resource with no sub-resource(s) or the sub-resource have been deleted.
// As resourceClient.List() will not have any sub-resource belonging to the | // To fix the problem, we could do a List from the CR client and then iterate
// over those names instead of keys from the intermediate result map we built
// based on the subresources.
func (r *Reconciler) groupSubresourcesByCustomResource() subresourceMap {
result := subresourceMap{}
// Get the list of crs.
crListObj, err := r.crdClient.List(r.namespace, map[string]string{})
if err != nil || crListObj == nil {
glog.Warningf("[reconcile] could not list custom resources. Got error %v %v", err, crListObj)
return result
}
customResourceList := crListObj.(crd.CustomResourceList)
// Get the list of custom resources
crList := customResourceList.GetItems()
// Return if the list is empty
if len(crList) == 0 {
glog.Warningf("[reconcile] custom resources list is empty")
return result
}
for _, resourceClient := range r.resourceClients {
objects, err := resourceClient.List(r.namespace, map[string]string{})
if err != nil {
glog.Warningf(`[reconcile] failed to list "%s" subresources`, resourceClient.Plural())
continue
}
for _, obj := range objects {
controllerRef := metav1.GetControllerOf(obj)
if controllerRef == nil {
glog.V(4).Infof("[reconcile] ignoring sub-resource %v, %v as it doesn not have a controller reference", obj.GetName(), r.namespace)
continue
}
// Only manipulate controller-created subresources.
if controllerRef.APIVersion != r.gvk.GroupVersion().String() || controllerRef.Kind != r.gvk.Kind {
glog.V(4).Infof("[reconcile] ignoring sub-resource %v, %v as controlling custom resource is from a different group, version and kind", obj.GetName(), r.namespace)
continue
}
subLifecycle := exists
objMeta, err := meta.Accessor(obj)
if err != nil {
glog.Warningf("[reconcile] error getting meta accessor for subresource: %v", err)
continue
}
if objMeta.GetDeletionTimestamp() != nil {
subLifecycle = deleting
}
runtimeObj, ok := obj.(runtime.Object)
if !ok {
glog.Warningf("[reconcile] error asserting metav1.Object as runtime.Object: %v", err)
continue
}
controllerName := controllerRef.Name
objList := result[controllerName]
result[controllerName] = append(objList, &subresource{resourceClient, runtimeObj, subLifecycle})
}
}
// Iterate over the crs to get the list of missing sub resources
// ASSUMPTION: There is at most one subresource of each kind per
// custom resource. We use the plural form as a key
for _, item := range crList {
cr, ok := item.(crd.CustomResource)
if !ok {
glog.Warningf("[reconcile] failed to assert item %v to type CustomResource", item)
continue
}
subs, ok := result[cr.Name()]
if !ok {
glog.Warningf("[reconcile] no sub-resources found for cr %v", cr.Name())
}
// Find non-existing subresources based on the expected subresource clients.
existingSubs := map[string]struct{}{}
for _, sub := range subs {
existingSubs[sub.client.Plural()] = struct{}{}
}
for _, subClient := range r.resourceClients {
_, exists := existingSubs[subClient.Plural()]
if !exists {
result[cr.Name()] = append(subs, &subresource{subClient, nil, doesNotExist})
}
}
}
return result
}
func (subs subresources) filter(predicate func(s *subresource) bool) subresources {
var result subresources
for _, sub := range subs {
if predicate(sub) {
result = append(result, sub)
}
}
return result
}
func (subs subresources) any(predicate func(s *subresource) bool) bool {
return len(subs.filter(predicate)) > 0
}
func (subs subresources) all(predicate func(s *subresource) bool) bool {
return len(subs.filter(predicate)) == len(subs)
}
func (r *Reconciler) planAction(controllerName string, subs subresources) (*action, crd.CustomResource, error) {
// If the controller name is empty, these are not our subresources;
// do nothing.
if controllerName == "" {
return &action{}, nil, nil
}
// Compute the current lifecycle phase of the custom resource.
customResourceLifecycle := exists
crObj, err := r.crdClient.Get(r.namespace, controllerName)
if err != nil && apierrors.IsNotFound(err) {
customResourceLifecycle = doesNotExist
}
crMeta, err := meta.Accessor(crObj)
if err != nil {
glog.Warningf("[reconcile] error getting meta accessor for controlling custom resource: %v", err)
} else if crMeta.GetDeletionTimestamp() != nil {
customResourceLifecycle = deleting
}
// If the custom resource is deleting or does not exist, clean up all
// subresources.
if customResourceLifecycle.isOneOf(doesNotExist, deleting) {
return &action{subresourcesToDelete: subs}, nil, nil
}
cr, ok := crObj.(crd.CustomResource)
if !ok {
return &action{}, nil, fmt.Errorf("object retrieved from CRD client not an instance of crd.CustomResource: [%v]", crObj)
}
customResourceSpecState := cr.GetSpecState()
customResourceStatusState := cr.GetStatusState()
// If the desired custom resource state is running or completed AND
// the custom resource is in a terminal state, then delete all subresources.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Completed, states.Failed) {
return &action{subresourcesToDelete: subs}, nil, nil
}
// If the desired custom resource state is running or completed AND
// the current custom resource status is non-terminal, ANY non-ephemeral
// subresource that is failed, does not exist or has been deleted causes
// the custom resource current state to move to failed.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Pending, states.Running) {
if subs.any(func(s *subresource) bool {
return !s.client.IsEphemeral() &&
s.lifecycle.isOneOf(doesNotExist, deleting) ||
s.client.GetStatusState(s.object) == states.Failed
}) {
// Set CR to failed
return &action{
newCRState: states.Failed,
}, cr, nil
}
}
// If the desired custom resource state is completed AND
// the current custom resource status is pending or running, then if ANY
// subresource is completed, set the current custom resource state to
// completed.
if customResourceSpecState == states.Completed && customResourceStatusState.IsOneOf(states.Pending, states.Running) {
if subs.any(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Completed
}) {
// Set CR as completed
return &action{
newCRState: states.Completed,
}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is pending or running, then
// re-create any nonexisting ephemeral subresources.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState.IsOneOf(states.Pending, states.Running) {
toRecreate := subs.filter(func(s *subresource) bool {
return s.client.IsEphemeral() &&
(s.lifecycle == exists && s.client.GetStatusState(s.object) == states.Failed ||
s.lifecycle == doesNotExist)
})
if len(toRecreate) > 0 {
// Recreate
return &action{subresourcesToCreate: toRecreate}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is running AND
// ANY subresource is pending, then set the current custom resource state
// to pending.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState == states.Running {
if subs.any(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Pending
}) {
// Set CR as pending
return &action{
newCRState: states.Pending,
}, cr, nil
}
}
// If the desired custom resource state is running or completed AND
// the current custom resource state is pending AND
// ALL subresources are running, then set the current custom resource state
// to running.
if customResourceSpecState.IsOneOf(states.Running, states.Completed) &&
customResourceStatusState == states.Pending {
// All resources must be running for us to consider the custom resource as running.
if subs.all(func(s *subresource) bool {
return s.client.GetStatusState(s.object) == states.Running
}) {
// Set CR as running
return &action{
newCRState: states.Running,
}, cr, nil
}
}
// Default case: do nothing.
return &action{}, cr, nil
}
func (r *Reconciler) executeAction(controllerName string, cr crd.CustomResource, a *action) []error {
errors := []error{}
glog.V(4).Infof(`executing reconcile action for "%s" resource "%s" in namespace "%s"`, r.crdHandle.Plural, controllerName, r.namespace)
if a.newCRState != "" {
glog.Infof(`updating "%s" custom resource for controller "%s" in namespace "%s"`, r.crdHandle.Plural, controllerName, r.namespace)
cr.SetStatusStateWithMessage(a.newCRState, a.newCRReason)
_, err := r.crdClient.Update(cr)
if err != nil {
glog.Errorf(`error updating custom resource state for "%s" in namespace "%s"`, controllerName, r.namespace)
errors = append(errors, err)
}
}
for _, s := range a.subresourcesToCreate {
glog.Infof(`creating "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
err := s.client.Create(r.namespace, cr)
if err != nil {
glog.Errorf(`error creating "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
errors = append(errors, err)
}
}
for _, s := range a.subresourcesToDelete {
glog.Infof(`deleting "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
err := s.client.Delete(r.namespace, controllerName)
if err != nil {
glog.Errorf(`error deleting "%s" subresource for controller "%s" in namespace "%s"`, s.client.Plural(), controllerName, r.namespace)
errors = append(errors, err)
}
}
return errors
} | // custom resource, result will not have the controller name as one of its
// keys.
// | random_line_split |
GPR_stan2.py | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 29 09:35:12 2016
@author: nigul
"""
import sys
sys.path.append('../')
import matplotlib as mpl
mpl.use('Agg')
mpl.rcParams['figure.figsize'] = (20, 30)
import pickle
import numpy as np
import pylab as plt
from filelock import FileLock
import mw_utils
import GPR_QP
import GPR_QP2
import pandas as pd
import os
import os.path | #import pandas as pd
#num_groups = 1
#group_no = 0
#if len(sys.argv) > 1:
# num_groups = int(sys.argv[1])
#if len(sys.argv) > 2:
# group_no = int(sys.argv[2])
star = sys.argv[1]
peak_no = int(sys.argv[2])
peak_no_str = ""
if peak_no > 0:
peak_no_str = str(peak_no) + "/"
num_iters = 50
num_chains = 4
down_sample_factor = 1
if len(sys.argv) > 3:
num_iters = int(sys.argv[3])
if len(sys.argv) > 4:
num_chains = int(sys.argv[4])
if len(sys.argv) > 5:
down_sample_factor = int(sys.argv[5])
dynamic_downsample = False
n_jobs = num_chains
n_tries = 1
downsample_iters = 1
print star, peak_no, num_iters, num_chains, down_sample_factor
data_dir = "../GP_input"
if data_dir == "../cleaned":
skiprows = 1
else:
skiprows = 0
files = []
data_found = False
for root, dirs, dir_files in os.walk(data_dir):
for file in dir_files:
if file[-4:] == ".dat":
file_star = file[:-4]
file_star = file_star.upper()
if (file_star[-3:] == '.CL'):
file_star = file_star[0:-3]
if (file_star[0:2] == 'HD'):
file_star = file_star[2:]
while star[0] == '0': # remove leading zeros
file_star = file_star[1:]
if star == file_star:
dat = np.loadtxt(data_dir+"/"+file, usecols=(0,1), skiprows=skiprows)
data_found = True
break
if not data_found:
print "Cannot find data for " + star
sys.exit(1)
offset = 1979.3452
model = pickle.load(open('model.pkl', 'rb'))
model_null = pickle.load(open('model_null.pkl', 'rb'))
t_orig = dat[:,0]
y_orig = dat[:,1]
n_orig = len(t_orig)
if dynamic_downsample:
down_sample_factor = max(1, n_orig / 500)
downsample_iters = down_sample_factor
for downsample_iter in np.arange(0, downsample_iters):
if downsample_iters > 1:
downsample_iter_str = '_' + str(downsample_iter)
else:
downsample_iter_str = ''
if down_sample_factor >= 2:
#indices = np.random.choice(len(t), len(t)/down_sample_factor, replace=False, p=None)
#indices = np.sort(indices)
#t = t[indices]
#y = y[indices]
t = t_orig[downsample_iter::down_sample_factor]
y = y_orig[downsample_iter::down_sample_factor]
else:
t = t_orig
y = y_orig
#(t, y, noise_var_prop) = mw_utils.daily_averages(t, y, mw_utils.get_seasonal_noise_var(t/365.25, y))
#noise_var_prop = mw_utils.get_seasonal_noise_var(t/365.25, y)
#np.savetxt("GPR_stan/" + star + ".dat", np.column_stack((t_daily, y_daily)), fmt='%f')
t /= 365.25
t += offset
seasonal_noise = mw_utils.get_seasonal_noise_var(t, y, per_point=False)
noise_var_prop = mw_utils.get_seasonal_noise_var(t, y)
seasonal_means_var =np.var(mw_utils.get_seasonal_means(t, y)[:,1])
n = len(t)
print "Downsample factor", float(n_orig)/n
duration = max(t) - min(t)
orig_mean = np.mean(y)
#y -= orig_mean
orig_std = np.std(y)
n = len(t)
t -= np.mean(t)
t, y, noise_var_prop = mw_utils.downsample(t, y, noise_var_prop, 15.0/365.25)
n = len(t)
var = np.var(y)
###########################################################################
# Quasiperiodic model
prior_freq_mean = 0.0
prior_freq_std = 0.167
print "prior_freq_mean, prior_freq_std: ", prior_freq_mean, prior_freq_std
initial_param_values = []
for i in np.arange(0, num_chains):
#initial_freq = np.random.uniform(0.25*i/num_chains,0.25*(i+1)/num_chains)
initial_freq = 0.5*float(i+0.5)/num_chains#np.random.uniform(0, 0.5)
#initial_freq = max(0, np.random.normal(prior_freq_mean, prior_freq_std))
initial_m = orig_mean
initial_trend_var = var / duration
#initial_inv_length_scale = 0.0001#abs(np.random.normal(0, prior_freq_mean))
#initial_param_values.append(dict(freq=initial_freq, trend_var=initial_trend_var, m=initial_m, noise_var=initial_noise_var, inv_lengh_scale=initial_inv_length_scale))
initial_param_values.append(dict(freq=initial_freq, trend_var=initial_trend_var, m=initial_m))
fit = model.sampling(data=dict(x=t,N=n,y=y,noise_var=noise_var_prop, var_y=var,
var_seasonal_means=seasonal_means_var, prior_freq_mean=prior_freq_mean, prior_freq_std=prior_freq_std),
init=initial_param_values,
iter=num_iters, chains=num_chains, n_jobs=n_jobs)
with open("results/"+peak_no_str+star + downsample_iter_str + "_results.txt", "w") as output:
output.write(str(fit))
fit.plot()
plt.savefig("results/"+peak_no_str+star + downsample_iter_str + "_results.png")
plt.close()
results = fit.extract()
loglik_samples = results['lp__']
loglik = np.mean(loglik_samples)
length_scale_samples = results['length_scale'];
(length_scale, length_scale_se) = mw_utils.mean_with_se(length_scale_samples)
length_scale2_samples = results['length_scale2'];
(length_scale2, length_scale2_se) = mw_utils.mean_with_se(length_scale2_samples)
sig_var_samples = results['sig_var']
sig_var = np.mean(sig_var_samples)
sig_var2_samples = results['sig_var2']
sig_var2 = np.mean(sig_var2_samples)
m_samples = results['m'];
m = np.mean(m_samples)
trend_var_samples = results['trend_var'];
(trend_var, trend_var_se) = mw_utils.mean_with_se(trend_var_samples)
###########################################################################
# Find optimum freq 1
fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)
fig.set_size_inches(18, 12)
freq_samples = results['freq'];
freq_freqs = gaussian_kde(freq_samples)
freqs = np.linspace(min(freq_samples), max(freq_samples), 1000)
(freq, freq_se) = mw_utils.mode_with_se(freq_samples)
local_maxima_inds = mw_utils.find_local_maxima(freq_freqs(freqs))
freq_kmeans = KMeans(n_clusters=len(local_maxima_inds)).fit(freq_samples.reshape((-1, 1)))
opt_freq_label = freq_kmeans.predict(np.array([freq]).reshape((-1, 1)))
freq_samples_ = np.sort(freq_samples[np.where(freq_kmeans.labels_ == opt_freq_label)])
inds = np.searchsorted(freqs, freq_samples_)
freqs_ = freqs[inds]
ax1.plot(freqs, freq_freqs(freqs), "b-", freqs_, freq_freqs(freqs_), 'k--')
###########################################################################
freq2_samples = results['freq2'];
freq2_freqs = gaussian_kde(freq2_samples)
freqs2 = np.linspace(min(freq2_samples), max(freq2_samples), 1000)
(freq2, freq2_se) = mw_utils.mode_with_se(freq2_samples)
local_maxima_inds = mw_utils.find_local_maxima(freq2_freqs(freqs2))
freq2_kmeans = KMeans(n_clusters=len(local_maxima_inds)).fit(freq2_samples.reshape((-1, 1)))
opt_freq2_label = freq_kmeans.predict(np.array([freq2]).reshape((-1, 1)))
freq2_samples_ = np.sort(freq_samples[np.where(freq_kmeans.labels_ == opt_freq_label)])
inds = np.searchsorted(freqs2, freq2_samples_)
freqs2_ = freqs2[inds]
ax1.plot(freqs2, freq2_freqs(freqs2), "g-", freqs2_, freq2_freqs(freqs2_), 'k--')
###########################################################################
print "var=", var
print "sig_var=", sig_var
print "sig_var2=", sig_var2
print "length_scale", length_scale
print "length_scale2", length_scale2
print "freq, freq_se", freq, freq_se
print "freq2, freq2_se", freq2, freq2_se
print "trend_var", trend_var
print "m", m
gpr_gp = GPR_QP2.GPR_QP2(sig_vars=[sig_var, sig_var2], length_scales=[length_scale, length_scale2], freqs=[freq, freq2], noise_var=noise_var_prop, trend_var=trend_var, c=0.0)
t_test = np.linspace(min(t), max(t), 500)
gpr_gp.init(t, y-m)
(f_mean, pred_var, loglik) = gpr_gp.fit(t_test)
(f_t, _, _) = gpr_gp.fit(t)
f_mean += m
fvu = np.sum((f_t + m - y)**2) / n / var
print "FVU", fvu
print "loglik", loglik #(loglik + 0.5 * n * np.log(2.0 * np.pi))
###########################################################################
#Squared-exponential GP for model comparison
initial_param_values = []
for i in np.arange(0, num_chains):
initial_m = orig_mean
initial_trend_var = var / duration
initial_param_values.append(dict(trend_var=initial_trend_var, m=initial_m))
fit_null = model_null.sampling(data=dict(x=t,N=n,y=y,noise_var=noise_var_prop, var_y=var,
var_seasonal_means=seasonal_means_var, prior_freq_mean=prior_freq_mean, prior_freq_std=prior_freq_std),
init=initial_param_values,
iter=num_iters, chains=num_chains, n_jobs=n_jobs)
with open("results/"+peak_no_str+star + downsample_iter_str + "_results_null.txt", "w") as output:
output.write(str(fit))
fit_null.plot()
plt.savefig("results/"+peak_no_str+star + downsample_iter_str + "_results_null.png")
plt.close()
results_null = fit_null.extract()
loglik_samples_null = results_null['lp__']
loglik_null = np.mean(loglik_samples_null)
length_scale_samples_null = results_null['length_scale'];
(length_scale_null, length_scale_se_null) = mw_utils.mean_with_se(length_scale_samples_null)
sig_var_samples_null = results_null['sig_var']
sig_var_null = np.mean(sig_var_samples_null)
trend_var_samples_null = results_null['trend_var'];
(trend_var_null, trend_var_se_null) = mw_utils.mean_with_se(trend_var_samples_null)
m_samples_null = results_null['m'];
m_null = np.mean(m_samples_null)
print "length_scale_null", length_scale_null
print "trend_var_null", trend_var_null
print "m_null", m_null
gpr_gp_null = GPR_QP.GPR_QP(sig_var=sig_var_null, length_scale=length_scale_null, freq=0.0, noise_var=noise_var_prop, rot_freq=0.0, rot_amplitude=0.0, trend_var=trend_var_null, c=0.0)
t_test_null = np.linspace(min(t), max(t), 500)
gpr_gp_null.init(t, y-m_null)
(f_mean_null, pred_var_null, loglik_null) = gpr_gp_null.fit(t_test_null)
(f_t_null, _, _) = gpr_gp_null.fit(t)
f_mean_null += m_null
fvu_null = np.sum((f_t_null + m_null - y)**2) / n / var
print "FVU_null", fvu_null
print "loglik_null", loglik #(loglik + 0.5 * n * np.log(2.0 * np.pi))
###########################################################################
ax2.plot(t, y, 'b+')
#ax2.plot(t, y_wo_rot, 'r+')
ax2.plot(t_test, f_mean, 'k-')
ax2.fill_between(t_test, f_mean + 2.0 * np.sqrt(pred_var), f_mean - 2.0 * np.sqrt(pred_var), alpha=0.1, facecolor='lightgray', interpolate=True)
ax2.plot(t_test_null, f_mean_null, 'g-')
###########################################################################
# LOO-CV
seasons = mw_utils.get_seasons(zip(t, y), 1.0, True)
l_loo = 0.0
l_loo_null = 0.0
dat = np.column_stack((t, y))
season_index = 0
for season in seasons:
season_start = min(season[:,0])
season_end = max(season[:,0])
print "cv for season: ", season_index, season_start, season_end
dat_test = seasons[season_index]
if season_index == len(seasons) - 1:
indices = np.where(dat[:,0] < season_start)[0]
dat_train = dat[indices,:]
noise_train = noise_var_prop[indices]
#dat_test = dat[np.where(dat[:,0] >= season_start)[0],:]
else:
dat_season = dat[np.where(dat[:,0] < season_end)[0],:]
indices_after = np.where(dat[:,0] >= season_end)[0]
dat_after = dat[indices_after,:]
indices_before = np.where(dat_season[:,0] < season_start)[0]
dat_before = dat_season[indices_before,:]
#dat_test = seasonal_means[season_index]# dat_season[np.where(dat_season[:,0] >= season_start)[0],:]
dat_train = np.concatenate((dat_before, dat_after), axis=0)
noise_before = noise_var_prop[indices_before]
noise_after = noise_var_prop[indices_after]
noise_train = np.concatenate((noise_before, noise_after), axis=0)
#test_mat = np.array([[1.16490151e-08, 1.16493677e-08], [1.16493677e-08, 1.16497061e-08]])
#test_mat = np.array([[1.16490151e-08, 1.16e-08], [1.16e-08, 1.16497061e-08]])
#test_mat *= 1e8
#print test_mat
#L_test_covar = la.cholesky(test_mat)
#print indices_before, indices_after, noise_train
gpr_gp_cv = GPR_QP.GPR_QP(sig_var=sig_var, length_scale=length_scale, freq=freq, noise_var=noise_train, rot_freq=0, rot_amplitude=0, trend_var=trend_var, c=0.0)
gpr_gp_cv_null = GPR_QP.GPR_QP(sig_var=0.0, length_scale=length_scale, freq=0.0, noise_var=noise_train, rot_freq=0.0, rot_amplitude=0.0, trend_var=trend_var_null, c=0.0)
gpr_gp_cv.init(dat_train[:,0], dat_train[:,1]-m)
print seasonal_noise
print dat_test
print m
print m_null
(_, _, loglik_test) = gpr_gp_cv.cv(dat_test[:,0], dat_test[:,1]-m, np.repeat(seasonal_noise[season_index], np.shape(dat_test)[0]))
l_loo += loglik_test
gpr_gp_cv_null.init(dat_train[:,0], dat_train[:,1]-m_null)
(_, _, loglik_test_null) = gpr_gp_null.cv(dat_test[:,0], dat_test[:,1]-m_null, np.repeat(seasonal_noise[season_index], np.shape(dat_test)[0]))
l_loo_null += loglik_test_null
season_index += 1
print "l_loo, l_loo_null", l_loo, l_loo_null
###########################################################################
fig.savefig("results/"+peak_no_str+star + downsample_iter_str + '_fit.png')
plt.close()
###########################################################################
period = 1.0/freq
period_samples = np.ones(len(freq1_samples)) / freq1_samples;
period_se = freq_se/freq/freq
with FileLock("GPRLock"):
with open("results/"+peak_no_str+"results.txt", "a") as output:
#output.write(star + ' ' + str(period/duration < 2.0/3.0 and period > 2) + ' ' + str(period) + ' ' + str(np.std(period_samples)) + " " + str(length_scale) + " " + str(np.std(length_scale_samples)) + " " + str(rot_amplitude) + " " + str(rot_amplitude_std) + " " + str(bic - bic_null) + "\n")
output.write(star + " " + str(downsample_iter) + " " + str(period/duration < 2.0/3.0 and period > 2.0) + " " + str(period) + " " + str(period_se) + ' ' + str(np.std(period_samples)) + " " + str(length_scale) + " " + str(length_scale_se) + " " + str(np.std(length_scale_samples)) + " " + str(trend_var) + " " + str(trend_var_se)+ " " + str(np.std(trend_var_samples)) + " " + str(m) + " " + str(sig_var) + " " + str(fvu) + " " + str(l_loo - l_loo_null) + " " + "\n") | from scipy.stats import gaussian_kde
from sklearn.cluster import KMeans | random_line_split |
GPR_stan2.py | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 29 09:35:12 2016
@author: nigul
"""
import sys
sys.path.append('../')
import matplotlib as mpl
mpl.use('Agg')
mpl.rcParams['figure.figsize'] = (20, 30)
import pickle
import numpy as np
import pylab as plt
from filelock import FileLock
import mw_utils
import GPR_QP
import GPR_QP2
import pandas as pd
import os
import os.path
from scipy.stats import gaussian_kde
from sklearn.cluster import KMeans
#import pandas as pd
#num_groups = 1
#group_no = 0
#if len(sys.argv) > 1:
# num_groups = int(sys.argv[1])
#if len(sys.argv) > 2:
# group_no = int(sys.argv[2])
star = sys.argv[1]
peak_no = int(sys.argv[2])
peak_no_str = ""
if peak_no > 0:
peak_no_str = str(peak_no) + "/"
num_iters = 50
num_chains = 4
down_sample_factor = 1
if len(sys.argv) > 3:
num_iters = int(sys.argv[3])
if len(sys.argv) > 4:
num_chains = int(sys.argv[4])
if len(sys.argv) > 5:
down_sample_factor = int(sys.argv[5])
dynamic_downsample = False
n_jobs = num_chains
n_tries = 1
downsample_iters = 1
print star, peak_no, num_iters, num_chains, down_sample_factor
data_dir = "../GP_input"
if data_dir == "../cleaned":
skiprows = 1
else:
skiprows = 0
files = []
data_found = False
for root, dirs, dir_files in os.walk(data_dir):
for file in dir_files:
if file[-4:] == ".dat":
file_star = file[:-4]
file_star = file_star.upper()
if (file_star[-3:] == '.CL'):
file_star = file_star[0:-3]
if (file_star[0:2] == 'HD'):
file_star = file_star[2:]
while star[0] == '0': # remove leading zeros
file_star = file_star[1:]
if star == file_star:
dat = np.loadtxt(data_dir+"/"+file, usecols=(0,1), skiprows=skiprows)
data_found = True
break
if not data_found:
print "Cannot find data for " + star
sys.exit(1)
offset = 1979.3452
model = pickle.load(open('model.pkl', 'rb'))
model_null = pickle.load(open('model_null.pkl', 'rb'))
t_orig = dat[:,0]
y_orig = dat[:,1]
n_orig = len(t_orig)
if dynamic_downsample:
|
for downsample_iter in np.arange(0, downsample_iters):
if downsample_iters > 1:
downsample_iter_str = '_' + str(downsample_iter)
else:
downsample_iter_str = ''
if down_sample_factor >= 2:
#indices = np.random.choice(len(t), len(t)/down_sample_factor, replace=False, p=None)
#indices = np.sort(indices)
#t = t[indices]
#y = y[indices]
t = t_orig[downsample_iter::down_sample_factor]
y = y_orig[downsample_iter::down_sample_factor]
else:
t = t_orig
y = y_orig
#(t, y, noise_var_prop) = mw_utils.daily_averages(t, y, mw_utils.get_seasonal_noise_var(t/365.25, y))
#noise_var_prop = mw_utils.get_seasonal_noise_var(t/365.25, y)
#np.savetxt("GPR_stan/" + star + ".dat", np.column_stack((t_daily, y_daily)), fmt='%f')
t /= 365.25
t += offset
seasonal_noise = mw_utils.get_seasonal_noise_var(t, y, per_point=False)
noise_var_prop = mw_utils.get_seasonal_noise_var(t, y)
seasonal_means_var =np.var(mw_utils.get_seasonal_means(t, y)[:,1])
n = len(t)
print "Downsample factor", float(n_orig)/n
duration = max(t) - min(t)
orig_mean = np.mean(y)
#y -= orig_mean
orig_std = np.std(y)
n = len(t)
t -= np.mean(t)
t, y, noise_var_prop = mw_utils.downsample(t, y, noise_var_prop, 15.0/365.25)
n = len(t)
var = np.var(y)
###########################################################################
# Quasiperiodic model
prior_freq_mean = 0.0
prior_freq_std = 0.167
print "prior_freq_mean, prior_freq_std: ", prior_freq_mean, prior_freq_std
initial_param_values = []
for i in np.arange(0, num_chains):
#initial_freq = np.random.uniform(0.25*i/num_chains,0.25*(i+1)/num_chains)
initial_freq = 0.5*float(i+0.5)/num_chains#np.random.uniform(0, 0.5)
#initial_freq = max(0, np.random.normal(prior_freq_mean, prior_freq_std))
initial_m = orig_mean
initial_trend_var = var / duration
#initial_inv_length_scale = 0.0001#abs(np.random.normal(0, prior_freq_mean))
#initial_param_values.append(dict(freq=initial_freq, trend_var=initial_trend_var, m=initial_m, noise_var=initial_noise_var, inv_lengh_scale=initial_inv_length_scale))
initial_param_values.append(dict(freq=initial_freq, trend_var=initial_trend_var, m=initial_m))
fit = model.sampling(data=dict(x=t,N=n,y=y,noise_var=noise_var_prop, var_y=var,
var_seasonal_means=seasonal_means_var, prior_freq_mean=prior_freq_mean, prior_freq_std=prior_freq_std),
init=initial_param_values,
iter=num_iters, chains=num_chains, n_jobs=n_jobs)
with open("results/"+peak_no_str+star + downsample_iter_str + "_results.txt", "w") as output:
output.write(str(fit))
fit.plot()
plt.savefig("results/"+peak_no_str+star + downsample_iter_str + "_results.png")
plt.close()
results = fit.extract()
loglik_samples = results['lp__']
loglik = np.mean(loglik_samples)
length_scale_samples = results['length_scale'];
(length_scale, length_scale_se) = mw_utils.mean_with_se(length_scale_samples)
length_scale2_samples = results['length_scale2'];
(length_scale2, length_scale2_se) = mw_utils.mean_with_se(length_scale2_samples)
sig_var_samples = results['sig_var']
sig_var = np.mean(sig_var_samples)
sig_var2_samples = results['sig_var2']
sig_var2 = np.mean(sig_var2_samples)
m_samples = results['m'];
m = np.mean(m_samples)
trend_var_samples = results['trend_var'];
(trend_var, trend_var_se) = mw_utils.mean_with_se(trend_var_samples)
###########################################################################
# Find optimum freq 1
fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)
fig.set_size_inches(18, 12)
freq_samples = results['freq'];
freq_freqs = gaussian_kde(freq_samples)
freqs = np.linspace(min(freq_samples), max(freq_samples), 1000)
(freq, freq_se) = mw_utils.mode_with_se(freq_samples)
local_maxima_inds = mw_utils.find_local_maxima(freq_freqs(freqs))
freq_kmeans = KMeans(n_clusters=len(local_maxima_inds)).fit(freq_samples.reshape((-1, 1)))
opt_freq_label = freq_kmeans.predict(np.array([freq]).reshape((-1, 1)))
freq_samples_ = np.sort(freq_samples[np.where(freq_kmeans.labels_ == opt_freq_label)])
inds = np.searchsorted(freqs, freq_samples_)
freqs_ = freqs[inds]
ax1.plot(freqs, freq_freqs(freqs), "b-", freqs_, freq_freqs(freqs_), 'k--')
###########################################################################
freq2_samples = results['freq2'];
freq2_freqs = gaussian_kde(freq2_samples)
freqs2 = np.linspace(min(freq2_samples), max(freq2_samples), 1000)
(freq2, freq2_se) = mw_utils.mode_with_se(freq2_samples)
local_maxima_inds = mw_utils.find_local_maxima(freq2_freqs(freqs2))
freq2_kmeans = KMeans(n_clusters=len(local_maxima_inds)).fit(freq2_samples.reshape((-1, 1)))
opt_freq2_label = freq_kmeans.predict(np.array([freq2]).reshape((-1, 1)))
freq2_samples_ = np.sort(freq_samples[np.where(freq_kmeans.labels_ == opt_freq_label)])
inds = np.searchsorted(freqs2, freq2_samples_)
freqs2_ = freqs2[inds]
ax1.plot(freqs2, freq2_freqs(freqs2), "g-", freqs2_, freq2_freqs(freqs2_), 'k--')
###########################################################################
print "var=", var
print "sig_var=", sig_var
print "sig_var2=", sig_var2
print "length_scale", length_scale
print "length_scale2", length_scale2
print "freq, freq_se", freq, freq_se
print "freq2, freq2_se", freq2, freq2_se
print "trend_var", trend_var
print "m", m
gpr_gp = GPR_QP2.GPR_QP2(sig_vars=[sig_var, sig_var2], length_scales=[length_scale, length_scale2], freqs=[freq, freq2], noise_var=noise_var_prop, trend_var=trend_var, c=0.0)
t_test = np.linspace(min(t), max(t), 500)
gpr_gp.init(t, y-m)
(f_mean, pred_var, loglik) = gpr_gp.fit(t_test)
(f_t, _, _) = gpr_gp.fit(t)
f_mean += m
fvu = np.sum((f_t + m - y)**2) / n / var
print "FVU", fvu
print "loglik", loglik #(loglik + 0.5 * n * np.log(2.0 * np.pi))
###########################################################################
#Squared-exponential GP for model comparison
initial_param_values = []
for i in np.arange(0, num_chains):
initial_m = orig_mean
initial_trend_var = var / duration
initial_param_values.append(dict(trend_var=initial_trend_var, m=initial_m))
fit_null = model_null.sampling(data=dict(x=t,N=n,y=y,noise_var=noise_var_prop, var_y=var,
var_seasonal_means=seasonal_means_var, prior_freq_mean=prior_freq_mean, prior_freq_std=prior_freq_std),
init=initial_param_values,
iter=num_iters, chains=num_chains, n_jobs=n_jobs)
with open("results/"+peak_no_str+star + downsample_iter_str + "_results_null.txt", "w") as output:
output.write(str(fit))
fit_null.plot()
plt.savefig("results/"+peak_no_str+star + downsample_iter_str + "_results_null.png")
plt.close()
results_null = fit_null.extract()
loglik_samples_null = results_null['lp__']
loglik_null = np.mean(loglik_samples_null)
length_scale_samples_null = results_null['length_scale'];
(length_scale_null, length_scale_se_null) = mw_utils.mean_with_se(length_scale_samples_null)
sig_var_samples_null = results_null['sig_var']
sig_var_null = np.mean(sig_var_samples_null)
trend_var_samples_null = results_null['trend_var'];
(trend_var_null, trend_var_se_null) = mw_utils.mean_with_se(trend_var_samples_null)
m_samples_null = results_null['m'];
m_null = np.mean(m_samples_null)
print "length_scale_null", length_scale_null
print "trend_var_null", trend_var_null
print "m_null", m_null
gpr_gp_null = GPR_QP.GPR_QP(sig_var=sig_var_null, length_scale=length_scale_null, freq=0.0, noise_var=noise_var_prop, rot_freq=0.0, rot_amplitude=0.0, trend_var=trend_var_null, c=0.0)
t_test_null = np.linspace(min(t), max(t), 500)
gpr_gp_null.init(t, y-m_null)
(f_mean_null, pred_var_null, loglik_null) = gpr_gp_null.fit(t_test_null)
(f_t_null, _, _) = gpr_gp_null.fit(t)
f_mean_null += m_null
fvu_null = np.sum((f_t_null + m_null - y)**2) / n / var
print "FVU_null", fvu_null
print "loglik_null", loglik #(loglik + 0.5 * n * np.log(2.0 * np.pi))
###########################################################################
ax2.plot(t, y, 'b+')
#ax2.plot(t, y_wo_rot, 'r+')
ax2.plot(t_test, f_mean, 'k-')
ax2.fill_between(t_test, f_mean + 2.0 * np.sqrt(pred_var), f_mean - 2.0 * np.sqrt(pred_var), alpha=0.1, facecolor='lightgray', interpolate=True)
ax2.plot(t_test_null, f_mean_null, 'g-')
###########################################################################
# LOO-CV
seasons = mw_utils.get_seasons(zip(t, y), 1.0, True)
l_loo = 0.0
l_loo_null = 0.0
dat = np.column_stack((t, y))
season_index = 0
for season in seasons:
season_start = min(season[:,0])
season_end = max(season[:,0])
print "cv for season: ", season_index, season_start, season_end
dat_test = seasons[season_index]
if season_index == len(seasons) - 1:
indices = np.where(dat[:,0] < season_start)[0]
dat_train = dat[indices,:]
noise_train = noise_var_prop[indices]
#dat_test = dat[np.where(dat[:,0] >= season_start)[0],:]
else:
dat_season = dat[np.where(dat[:,0] < season_end)[0],:]
indices_after = np.where(dat[:,0] >= season_end)[0]
dat_after = dat[indices_after,:]
indices_before = np.where(dat_season[:,0] < season_start)[0]
dat_before = dat_season[indices_before,:]
#dat_test = seasonal_means[season_index]# dat_season[np.where(dat_season[:,0] >= season_start)[0],:]
dat_train = np.concatenate((dat_before, dat_after), axis=0)
noise_before = noise_var_prop[indices_before]
noise_after = noise_var_prop[indices_after]
noise_train = np.concatenate((noise_before, noise_after), axis=0)
#test_mat = np.array([[1.16490151e-08, 1.16493677e-08], [1.16493677e-08, 1.16497061e-08]])
#test_mat = np.array([[1.16490151e-08, 1.16e-08], [1.16e-08, 1.16497061e-08]])
#test_mat *= 1e8
#print test_mat
#L_test_covar = la.cholesky(test_mat)
#print indices_before, indices_after, noise_train
gpr_gp_cv = GPR_QP.GPR_QP(sig_var=sig_var, length_scale=length_scale, freq=freq, noise_var=noise_train, rot_freq=0, rot_amplitude=0, trend_var=trend_var, c=0.0)
gpr_gp_cv_null = GPR_QP.GPR_QP(sig_var=0.0, length_scale=length_scale, freq=0.0, noise_var=noise_train, rot_freq=0.0, rot_amplitude=0.0, trend_var=trend_var_null, c=0.0)
gpr_gp_cv.init(dat_train[:,0], dat_train[:,1]-m)
print seasonal_noise
print dat_test
print m
print m_null
(_, _, loglik_test) = gpr_gp_cv.cv(dat_test[:,0], dat_test[:,1]-m, np.repeat(seasonal_noise[season_index], np.shape(dat_test)[0]))
l_loo += loglik_test
gpr_gp_cv_null.init(dat_train[:,0], dat_train[:,1]-m_null)
(_, _, loglik_test_null) = gpr_gp_null.cv(dat_test[:,0], dat_test[:,1]-m_null, np.repeat(seasonal_noise[season_index], np.shape(dat_test)[0]))
l_loo_null += loglik_test_null
season_index += 1
print "l_loo, l_loo_null", l_loo, l_loo_null
###########################################################################
fig.savefig("results/"+peak_no_str+star + downsample_iter_str + '_fit.png')
plt.close()
###########################################################################
period = 1.0/freq
period_samples = np.ones(len(freq1_samples)) / freq1_samples;
period_se = freq_se/freq/freq
with FileLock("GPRLock"):
with open("results/"+peak_no_str+"results.txt", "a") as output:
#output.write(star + ' ' + str(period/duration < 2.0/3.0 and period > 2) + ' ' + str(period) + ' ' + str(np.std(period_samples)) + " " + str(length_scale) + " " + str(np.std(length_scale_samples)) + " " + str(rot_amplitude) + " " + str(rot_amplitude_std) + " " + str(bic - bic_null) + "\n")
output.write(star + " " + str(downsample_iter) + " " + str(period/duration < 2.0/3.0 and period > 2.0) + " " + str(period) + " " + str(period_se) + ' ' + str(np.std(period_samples)) + " " + str(length_scale) + " " + str(length_scale_se) + " " + str(np.std(length_scale_samples)) + " " + str(trend_var) + " " + str(trend_var_se)+ " " + str(np.std(trend_var_samples)) + " " + str(m) + " " + str(sig_var) + " " + str(fvu) + " " + str(l_loo - l_loo_null) + " " + "\n")
| down_sample_factor = max(1, n_orig / 500)
downsample_iters = down_sample_factor | conditional_block |
index.js | import Head from 'next/head';
import Portfolio from '../components/Portfolio';
import Link from 'next/link';
export default function | () {
return (
<div>
<Head>
<title>PublicTrades</title>
<link rel="icon" href="/favicon.ico" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/gsap/3.6.0/gsap.min.js"></script>
</Head>
<main>
<>
{/* This example requires Tailwind CSS v2.0+ */}
<div className="relative bg-white overflow-hidden">
<div className="max-w-7xl mx-auto">
<div className="relative z-10 pb-8 bg-white sm:pb-16 md:pb-20 lg:max-w-2xl lg:w-full lg:pb-28 xl:pb-32">
<svg className="hidden lg:block absolute right-0 inset-y-0 h-full w-48 text-white transform translate-x-1/2" fill="currentColor" viewBox="0 0 100 100" preserveAspectRatio="none" aria-hidden="true">
<polygon points="50,0 100,0 50,100 0,100" />
</svg>
<div className="relative pt-6 px-4 sm:px-6 lg:px-8">
<nav className="relative flex items-center justify-between sm:h-10 lg:justify-start" aria-label="Global">
<div className="flex items-center flex-grow flex-shrink-0 lg:flex-grow-0">
<div className="flex items-center justify-between w-full md:w-auto">
<a href="#">
{/* <span className="sr-only">Workflow</span> */}
<img className="h-8 w-auto sm:h-10" src="images/logo.png"/>
</a>
<div className="-mr-2 flex items-center md:hidden">
<button type="button" className="bg-white rounded-md p-2 inline-flex items-center justify-center text-gray-400 hover:text-gray-500 hover:bg-gray-100 focus:outline-none focus:ring-2 focus:ring-inset focus:ring-indigo-500" id="main-menu" aria-haspopup="true">
<span className="sr-only">Open main menu</span>
<svg className="h-6 w-6" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor" aria-hidden="true">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 6h16M4 12h16M4 18h16" />
</svg>
</button>
</div>
</div>
</div>
<div className="hidden md:block md:ml-10 md:pr-4 md:space-x-8">
{/* <a href="#" className="font-medium text-gray-500 hover:text-gray-900">Services</a>
<a href="#" className="font-medium text-gray-500 hover:text-gray-900">Products</a> */}
<Link href="login">
<a className="font-medium text-gray-500 hover:text-gray-900">Developers Login</a>
</Link>
</div>
</nav>
</div>
{/*
based on menu open state.
Entering: "duration-150 ease-out"
From: "opacity-0 scale-95"
To: "opacity-100 scale-100"
Leaving: "duration-100 ease-in"
From: "opacity-100 scale-100"
To: "opacity-0 scale-95"
*/}
<div className="absolute top-0 inset-x-0 p-2 transition transform origin-top-right md:hidden">
<div className="rounded-lg shadow-md bg-white ring-1 ring-black ring-opacity-5 overflow-hidden">
{/* <div role="menu" aria-orientation="vertical" aria-labelledby="main-menu">
<div className="px-2 pt-2 pb-3 space-y-1" role="none">
<a href="#" className="block px-3 py-2 rounded-md text-base font-medium text-gray-700 hover:text-gray-900 hover:bg-gray-50" role="menuitem">Services</a>
<a href="#" className="block px-3 py-2 rounded-md text-base font-medium text-gray-700 hover:text-gray-900 hover:bg-gray-50" role="menuitem">Products</a>
<a href="#" className="block px-3 py-2 rounded-md text-base font-medium text-gray-700 hover:text-gray-900 hover:bg-gray-50" role="menuitem">Developers</a>
</div>
<div role="none">
</div>
</div> */}
</div>
</div>
<main className="mt-10 mx-auto max-w-7xl px-4 sm:mt-12 sm:px-6 md:mt-16 lg:mt-20 lg:px-8 xl:mt-28">
<div className="sm:text-center lg:text-left">
<h1 className="text-4xl tracking-tight font-extrabold text-gray-900 sm:text-5xl md:text-6xl">
<span className="block xl:inline"><p>Developing overlooked ideas into</p></span>
<span className="block text-red-500 xl:inline">Useable products</span>
</h1>
<p className="mt-3 text-center text-gray-500 sm:mt-5 sm:text-lg sm:max-w-xl sm:mx-auto md:mt-5 md:text-xl lg:mx-0">
Ever had a need for an app service that doesn't exist and wished someone had it built already - We do too.
</p>
<div className="mt-5 sm:mt-8 sm:flex sm:justify-center lg:justify-start">
<div className="rounded-md shadow">
<a href="#" className="w-full flex items-center justify-center px-8 py-3 border border-transparent text-base font-medium rounded-md text-white bg-red-700 hover:bg-red-400 md:py-4 md:text-lg md:px-10">
Test an idea
</a>
</div>
<div className="mt-3 sm:mt-0 sm:ml-3">
<a href="#" className="w-full flex items-center justify-center px-8 py-3 border border-transparent text-base font-medium rounded-md text-white bg-green-700 hover:bg-green-400 md:py-4 md:text-lg md:px-10">
<span>Let's build together</span>
</a>
</div>
</div>
</div>
</main>
</div>
</div>
<div className="lg:absolute lg:inset-y-0 lg:right-0 lg:w-1/2">
<img className="h-56 w-full object-cover sm:h-72 md:h-96 lg:w-full lg:h-full" src="images/lastbulb2.jpg" alt="hero-image" />
</div>
</div>
<div className="relative bg-gray-50 pt-16 pb-20 px-4 sm:px-6 lg:pt-24 lg:pb-28 lg:px-8">
<div className="absolute inset-0">
<div className="bg-white h-1/3 sm:h-2/3" />
</div>
<div className="relative max-w-7xl mx-auto">
<div className="text-center">
<h2 className="text-3xl tracking-tight font-extrabold text-gray-900 sm:text-4xl">
Portfolio
</h2>
<p className="mt-3 mb-4 max-w-2xl mx-auto text-xl text-gray-500 sm:mt-4">
Growing list of web and mobile apps that once were just ideas, and now launched into actual product/service apps.
</p>
</div>
{/* Starts the Mid section */}
<div className="portfoliolist grid grid-cols-1 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4 justify-center items-center">
<Portfolio />
</div>
</div>
</div>
<footer className="bg-gray-800" aria-labelledby="footerHeading">
<h2 id="footerHeading" className="sr-only">Footer</h2>
<div className="max-w-7xl mx-auto py-12 px-4 sm:px-6 lg:py-16 lg:px-8">
<div className="xl:grid xl:grid-cols-3 xl:gap-8">
<div className="grid grid-cols-2 gap-8 xl:col-span-2">
<div className="md:grid md:grid-cols-2 md:gap-8">
<div>
<h3 className="text-sm font-semibold text-gray-400 tracking-wider uppercase">
Solutions
</h3>
<ul className="mt-4 space-y-4">
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
Marketing
</a>
</li>
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
Analytics
</a>
</li>
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
Commerce
</a>
</li>
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
Insights
</a>
</li>
</ul>
</div>
<div className="mt-12 md:mt-0">
<h3 className="text-sm font-semibold text-gray-400 tracking-wider uppercase">
Support
</h3>
<ul className="mt-4 space-y-4">
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
API
</a>
</li>
</ul>
</div>
</div>
<div className="md:grid md:grid-cols-2 md:gap-8">
</div>
</div>
<div className="mt-8 xl:mt-0">
<h3 className="text-sm font-semibold text-gray-400 tracking-wider uppercase">
Subscribe to our newsletter
</h3>
<p className="mt-4 text-base text-gray-300">
The latest news, articles, and resources, sent to your inbox weekly.
</p>
<form className="mt-4 sm:flex sm:max-w-md">
<label htmlFor="emailAddress" className="sr-only">Email address</label>
<input type="email" name="emailAddress" id="emailAddress" autoComplete="email" required className="appearance-none min-w-0 w-full bg-white border border-transparent rounded-md py-2 px-4 text-base text-gray-900 placeholder-gray-500 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-offset-gray-800 focus:ring-white focus:border-white focus:placeholder-gray-400" placeholder="Enter your email" />
<div className="mt-3 rounded-md sm:mt-0 sm:ml-3 sm:flex-shrink-0">
<button type="submit" className="w-full bg-indigo-500 border border-transparent rounded-md py-2 px-4 flex items-center justify-center text-base font-medium text-white hover:bg-indigo-600 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-offset-gray-800 focus:ring-indigo-500">
Subscribe
</button>
</div>
</form>
</div>
</div>
<div className="mt-8 border-t border-gray-700 pt-8 md:flex md:items-center md:justify-between">
<p className="mt-8 text-base text-gray-400 md:mt-0 md:order-1">
© 2021 PublicTrades, LLC. All rights reserved.
</p>
</div>
</div>
</footer>
</>
</main>
<footer></footer>
</div>
)
}
| Home | identifier_name |
index.js | import Head from 'next/head';
import Portfolio from '../components/Portfolio';
import Link from 'next/link';
export default function Home() | {
return (
<div>
<Head>
<title>PublicTrades</title>
<link rel="icon" href="/favicon.ico" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/gsap/3.6.0/gsap.min.js"></script>
</Head>
<main>
<>
{/* This example requires Tailwind CSS v2.0+ */}
<div className="relative bg-white overflow-hidden">
<div className="max-w-7xl mx-auto">
<div className="relative z-10 pb-8 bg-white sm:pb-16 md:pb-20 lg:max-w-2xl lg:w-full lg:pb-28 xl:pb-32">
<svg className="hidden lg:block absolute right-0 inset-y-0 h-full w-48 text-white transform translate-x-1/2" fill="currentColor" viewBox="0 0 100 100" preserveAspectRatio="none" aria-hidden="true">
<polygon points="50,0 100,0 50,100 0,100" />
</svg>
<div className="relative pt-6 px-4 sm:px-6 lg:px-8">
<nav className="relative flex items-center justify-between sm:h-10 lg:justify-start" aria-label="Global">
<div className="flex items-center flex-grow flex-shrink-0 lg:flex-grow-0">
<div className="flex items-center justify-between w-full md:w-auto">
<a href="#">
{/* <span className="sr-only">Workflow</span> */}
<img className="h-8 w-auto sm:h-10" src="images/logo.png"/>
</a>
<div className="-mr-2 flex items-center md:hidden">
<button type="button" className="bg-white rounded-md p-2 inline-flex items-center justify-center text-gray-400 hover:text-gray-500 hover:bg-gray-100 focus:outline-none focus:ring-2 focus:ring-inset focus:ring-indigo-500" id="main-menu" aria-haspopup="true">
<span className="sr-only">Open main menu</span>
<svg className="h-6 w-6" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor" aria-hidden="true">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 6h16M4 12h16M4 18h16" />
</svg>
</button>
</div>
</div>
</div>
<div className="hidden md:block md:ml-10 md:pr-4 md:space-x-8">
{/* <a href="#" className="font-medium text-gray-500 hover:text-gray-900">Services</a>
<a href="#" className="font-medium text-gray-500 hover:text-gray-900">Products</a> */}
<Link href="login">
<a className="font-medium text-gray-500 hover:text-gray-900">Developers Login</a>
</Link>
</div>
</nav>
</div>
{/*
based on menu open state.
Entering: "duration-150 ease-out"
From: "opacity-0 scale-95"
To: "opacity-100 scale-100"
Leaving: "duration-100 ease-in"
From: "opacity-100 scale-100"
To: "opacity-0 scale-95"
*/}
<div className="absolute top-0 inset-x-0 p-2 transition transform origin-top-right md:hidden">
<div className="rounded-lg shadow-md bg-white ring-1 ring-black ring-opacity-5 overflow-hidden">
{/* <div role="menu" aria-orientation="vertical" aria-labelledby="main-menu">
<div className="px-2 pt-2 pb-3 space-y-1" role="none">
<a href="#" className="block px-3 py-2 rounded-md text-base font-medium text-gray-700 hover:text-gray-900 hover:bg-gray-50" role="menuitem">Services</a>
<a href="#" className="block px-3 py-2 rounded-md text-base font-medium text-gray-700 hover:text-gray-900 hover:bg-gray-50" role="menuitem">Products</a>
<a href="#" className="block px-3 py-2 rounded-md text-base font-medium text-gray-700 hover:text-gray-900 hover:bg-gray-50" role="menuitem">Developers</a>
</div>
<div role="none">
</div>
</div> */}
</div>
</div>
<main className="mt-10 mx-auto max-w-7xl px-4 sm:mt-12 sm:px-6 md:mt-16 lg:mt-20 lg:px-8 xl:mt-28">
<div className="sm:text-center lg:text-left">
<h1 className="text-4xl tracking-tight font-extrabold text-gray-900 sm:text-5xl md:text-6xl">
<span className="block xl:inline"><p>Developing overlooked ideas into</p></span>
<span className="block text-red-500 xl:inline">Useable products</span>
</h1>
<p className="mt-3 text-center text-gray-500 sm:mt-5 sm:text-lg sm:max-w-xl sm:mx-auto md:mt-5 md:text-xl lg:mx-0">
Ever had a need for an app service that doesn't exist and wished someone had it built already - We do too.
</p>
<div className="mt-5 sm:mt-8 sm:flex sm:justify-center lg:justify-start">
<div className="rounded-md shadow">
<a href="#" className="w-full flex items-center justify-center px-8 py-3 border border-transparent text-base font-medium rounded-md text-white bg-red-700 hover:bg-red-400 md:py-4 md:text-lg md:px-10">
Test an idea
</a>
</div>
<div className="mt-3 sm:mt-0 sm:ml-3">
<a href="#" className="w-full flex items-center justify-center px-8 py-3 border border-transparent text-base font-medium rounded-md text-white bg-green-700 hover:bg-green-400 md:py-4 md:text-lg md:px-10">
<span>Let's build together</span>
</a>
</div>
</div>
</div>
</main>
</div>
</div>
<div className="lg:absolute lg:inset-y-0 lg:right-0 lg:w-1/2">
<img className="h-56 w-full object-cover sm:h-72 md:h-96 lg:w-full lg:h-full" src="images/lastbulb2.jpg" alt="hero-image" />
</div>
</div>
<div className="relative bg-gray-50 pt-16 pb-20 px-4 sm:px-6 lg:pt-24 lg:pb-28 lg:px-8">
<div className="absolute inset-0">
<div className="bg-white h-1/3 sm:h-2/3" />
</div>
<div className="relative max-w-7xl mx-auto">
<div className="text-center">
<h2 className="text-3xl tracking-tight font-extrabold text-gray-900 sm:text-4xl">
Portfolio
</h2>
<p className="mt-3 mb-4 max-w-2xl mx-auto text-xl text-gray-500 sm:mt-4">
Growing list of web and mobile apps that once were just ideas, and now launched into actual product/service apps.
</p>
</div>
{/* Starts the Mid section */}
<div className="portfoliolist grid grid-cols-1 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4 justify-center items-center">
<Portfolio />
</div>
</div>
</div>
<footer className="bg-gray-800" aria-labelledby="footerHeading">
<h2 id="footerHeading" className="sr-only">Footer</h2>
<div className="max-w-7xl mx-auto py-12 px-4 sm:px-6 lg:py-16 lg:px-8">
<div className="xl:grid xl:grid-cols-3 xl:gap-8">
<div className="grid grid-cols-2 gap-8 xl:col-span-2">
<div className="md:grid md:grid-cols-2 md:gap-8">
<div>
<h3 className="text-sm font-semibold text-gray-400 tracking-wider uppercase">
Solutions
</h3>
<ul className="mt-4 space-y-4">
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
Marketing
</a>
</li>
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
Analytics
</a>
</li>
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
Commerce
</a>
</li>
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
Insights
</a>
</li>
</ul>
</div>
<div className="mt-12 md:mt-0">
<h3 className="text-sm font-semibold text-gray-400 tracking-wider uppercase">
Support
</h3>
<ul className="mt-4 space-y-4">
<li>
<a href="#" className="text-base text-gray-300 hover:text-white">
API
</a>
</li>
</ul>
</div>
</div>
<div className="md:grid md:grid-cols-2 md:gap-8">
</div>
</div>
<div className="mt-8 xl:mt-0">
<h3 className="text-sm font-semibold text-gray-400 tracking-wider uppercase">
Subscribe to our newsletter
</h3>
<p className="mt-4 text-base text-gray-300">
The latest news, articles, and resources, sent to your inbox weekly.
</p>
<form className="mt-4 sm:flex sm:max-w-md">
<label htmlFor="emailAddress" className="sr-only">Email address</label>
<input type="email" name="emailAddress" id="emailAddress" autoComplete="email" required className="appearance-none min-w-0 w-full bg-white border border-transparent rounded-md py-2 px-4 text-base text-gray-900 placeholder-gray-500 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-offset-gray-800 focus:ring-white focus:border-white focus:placeholder-gray-400" placeholder="Enter your email" />
<div className="mt-3 rounded-md sm:mt-0 sm:ml-3 sm:flex-shrink-0">
<button type="submit" className="w-full bg-indigo-500 border border-transparent rounded-md py-2 px-4 flex items-center justify-center text-base font-medium text-white hover:bg-indigo-600 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-offset-gray-800 focus:ring-indigo-500">
Subscribe
</button>
</div>
</form>
</div>
</div>
<div className="mt-8 border-t border-gray-700 pt-8 md:flex md:items-center md:justify-between">
<p className="mt-8 text-base text-gray-400 md:mt-0 md:order-1">
© 2021 PublicTrades, LLC. All rights reserved.
</p>
</div>
</div>
</footer>
</>
</main>
<footer></footer>
</div>
)
}
| identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.