file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
mole.go | package mole
import (
"context"
"fmt"
"io/ioutil"
"os"
"os/signal"
"path/filepath"
"syscall"
"time"
"github.com/davrodpin/mole/alias"
"github.com/davrodpin/mole/fsutils"
"github.com/davrodpin/mole/rpc"
"github.com/davrodpin/mole/tunnel"
"github.com/awnumar/memguard"
"github.com/gofrs/uuid"
"github.com/mitchellh/mapstructure"
daemon "github.com/sevlyar/go-daemon"
log "github.com/sirupsen/logrus"
"golang.org/x/crypto/ssh/terminal"
)
const (
// IdFlagName is the name of the flag that carries the unique idenfier for a
// mole instance.
IdFlagName = "id"
)
// cli keeps a reference to the latest Client object created.
// This is mostly needed to introspect client states during runtime (e.g. a
// remote procedure call that needs to check certain runtime information)
var cli *Client
type Configuration struct {
Id string `json:"id" mapstructure:"id" toml:"id"`
TunnelType string `json:"tunnel-type" mapstructure:"tunnel-type" toml:"tunnel-type"`
Verbose bool `json:"verbose" mapstructure:"verbose" toml:"verbose"`
Insecure bool `json:"insecure" mapstructure:"insecure" toml:"insecure"`
Detach bool `json:"detach" mapstructure:"detach" toml:"detach"`
Source AddressInputList `json:"source" mapstructure:"source" toml:"source"`
Destination AddressInputList `json:"destination" mapstructure:"destination" toml:"destination"`
Server AddressInput `json:"server" mapstructure:"server" toml:"server"`
Key string `json:"key" mapstructure:"key" toml:"key"`
KeepAliveInterval time.Duration `json:"keep-alive-interval" mapstructure:"keep-alive-interva" toml:"keep-alive-interval"`
ConnectionRetries int `json:"connection-retries" mapstructure:"connection-retries" toml:"connection-retries"`
WaitAndRetry time.Duration `json:"wait-and-retry" mapstructure:"wait-and-retry" toml:"wait-and-retry"`
SshAgent string `json:"ssh-agent" mapstructure:"ssh-agent" toml:"ssh-agent"`
Timeout time.Duration `json:"timeout" mapstructure:"timeout" toml:"timeout"`
SshConfig string `json:"ssh-config" mapstructure:"ssh-config" toml:"ssh-config"`
Rpc bool `json:"rpc" mapstructure:"rpc" toml:"rpc"`
RpcAddress string `json:"rpc-address" mapstructure:"rpc-address" toml:"rpc-address"`
}
// ParseAlias translates a Configuration object to an Alias object.
func (c Configuration) ParseAlias(name string) *alias.Alias {
return &alias.Alias{
Name: name,
TunnelType: c.TunnelType,
Verbose: c.Verbose,
Insecure: c.Insecure,
Detach: c.Detach,
Source: c.Source.List(),
Destination: c.Destination.List(),
Server: c.Server.String(),
Key: c.Key,
KeepAliveInterval: c.KeepAliveInterval.String(),
ConnectionRetries: c.ConnectionRetries,
WaitAndRetry: c.WaitAndRetry.String(),
SshAgent: c.SshAgent,
Timeout: c.Timeout.String(),
SshConfig: c.SshConfig,
Rpc: c.Rpc,
RpcAddress: c.RpcAddress,
}
}
// Client manages the overall state of the application based on its configuration.
type Client struct {
Conf *Configuration
Tunnel *tunnel.Tunnel
sigs chan os.Signal
}
// New initializes a new mole's client.
func New(conf *Configuration) *Client {
cli = &Client{
Conf: conf,
sigs: make(chan os.Signal, 1),
}
return cli
}
// Start kicks off mole's client, establishing the tunnel and its channels
// based on the client configuration attributes.
func (c *Client) Start() error {
// memguard is used to securely keep sensitive information in memory.
// This call makes sure all data will be destroy when the program exits.
defer memguard.Purge()
if c.Conf.Id == "" {
u, err := uuid.NewV4()
if err != nil {
return fmt.Errorf("could not auto generate app instance id: %v", err)
}
c.Conf.Id = u.String()[:8]
}
r, err := c.Running()
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error while checking for another instance using the same id")
return err
}
if r {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).Error("can't start. Another instance is already using the same id")
return fmt.Errorf("can't start. Another instance is already using the same id %s", c.Conf.Id)
}
log.Infof("instance identifier is %s", c.Conf.Id)
if c.Conf.Detach {
var err error
ic, err := NewDetachedInstance(c.Conf.Id)
if err != nil {
log.WithError(err).Errorf("error while creating directory to store mole instance related files")
return err
}
err = startDaemonProcess(ic)
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error starting ssh tunnel")
return err
}
} else {
go c.handleSignals()
}
if c.Conf.Verbose {
log.SetLevel(log.DebugLevel)
}
d, err := fsutils.CreateInstanceDir(c.Conf.Id)
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error creating directory for mole instance")
return err
}
if c.Conf.Rpc {
addr, err := rpc.Start(c.Conf.RpcAddress)
if err != nil {
return err
}
rd := filepath.Join(d.Dir, "rpc")
err = ioutil.WriteFile(rd, []byte(addr.String()), 0644)
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error creating file with rpc address")
return err
}
c.Conf.RpcAddress = addr.String()
log.Infof("rpc server address saved on %s", rd)
}
t, err := createTunnel(c.Conf)
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error creating tunnel")
return err
}
c.Tunnel = t
if err = c.Tunnel.Start(); err != nil {
log.WithFields(log.Fields{
"tunnel": c.Tunnel.String(),
}).WithError(err).Error("error while starting tunnel")
return err
}
return nil
}
// Stop shuts down a detached mole's application instance.
func (c *Client) Stop() error {
pfp, err := fsutils.GetPidFileLocation(c.Conf.Id)
if err != nil {
return fmt.Errorf("error getting information about aliases directory: %v", err)
}
if _, err := os.Stat(pfp); os.IsNotExist(err) |
cntxt := &daemon.Context{
PidFileName: pfp,
}
d, err := cntxt.Search()
if err != nil {
return err
}
if c.Conf.Detach {
err = os.RemoveAll(pfp)
if err != nil {
return err
}
} else {
d, err := fsutils.InstanceDir(c.Conf.Id)
if err != nil {
return err
}
err = os.RemoveAll(d.Dir)
if err != nil {
return err
}
}
err = d.Kill()
if err != nil {
return err
}
return nil
}
func (c *Client) handleSignals() {
signal.Notify(c.sigs, syscall.SIGINT, syscall.SIGTERM, os.Interrupt)
sig := <-c.sigs
log.Debugf("process signal %s received", sig)
err := c.Stop()
if err != nil {
log.WithError(err).Error("instance not properly stopped")
}
}
// Merge overwrites Configuration from the given Alias.
//
// Certain attributes like Verbose, Insecure and Detach will be overwritten
// only if they are found on the givenFlags which should contain the name of
// all flags given by the user through UI (e.g. CLI).
func (c *Configuration) Merge(al *alias.Alias, givenFlags []string) error {
var fl flags = givenFlags
if !fl.lookup("verbose") {
c.Verbose = al.Verbose
}
if !fl.lookup("insecure") {
c.Insecure = al.Insecure
}
if !fl.lookup("detach") {
c.Detach = al.Detach
}
c.Id = al.Name
c.TunnelType = al.TunnelType
srcl := AddressInputList{}
for _, src := range al.Source {
err := srcl.Set(src)
if err != nil {
return err
}
}
c.Source = srcl
dstl := AddressInputList{}
for _, dst := range al.Destination {
err := dstl.Set(dst)
if err != nil {
return err
}
}
c.Destination = dstl
srv := AddressInput{}
err := srv.Set(al.Server)
if err != nil {
return err
}
c.Server = srv
c.Key = al.Key
kai, err := time.ParseDuration(al.KeepAliveInterval)
if err != nil {
return err
}
c.KeepAliveInterval = kai
c.ConnectionRetries = al.ConnectionRetries
war, err := time.ParseDuration(al.WaitAndRetry)
if err != nil {
return err
}
c.WaitAndRetry = war
c.SshAgent = al.SshAgent
tim, err := time.ParseDuration(al.Timeout)
if err != nil {
return err
}
c.Timeout = tim
c.SshConfig = al.SshConfig
c.Rpc = al.Rpc
c.RpcAddress = al.RpcAddress
return nil
}
// ShowInstances returns the runtime information about all instances of mole
// running on the system with rpc enabled.
func ShowInstances() (*InstancesRuntime, error) {
ctx := context.Background()
data, err := rpc.ShowAll(ctx)
if err != nil {
return nil, err
}
var instances []Runtime
err = mapstructure.Decode(data, &instances)
if err != nil {
return nil, err
}
runtime := InstancesRuntime(instances)
if len(runtime) == 0 {
return nil, fmt.Errorf("no instances were found.")
}
return &runtime, nil
}
// ShowInstance returns the runtime information about an application instance
// from the given id or alias.
func ShowInstance(id string) (*Runtime, error) {
ctx := context.Background()
info, err := rpc.Show(ctx, id)
if err != nil {
return nil, err
}
var r Runtime
err = mapstructure.Decode(info, &r)
if err != nil {
return nil, err
}
return &r, nil
}
func startDaemonProcess(instanceConf *DetachedInstance) error {
args := appendIdArg(instanceConf.Id, os.Args)
cntxt := &daemon.Context{
PidFileName: instanceConf.PidFile,
PidFilePerm: 0644,
LogFileName: instanceConf.LogFile,
LogFilePerm: 0640,
Umask: 027,
Args: args,
}
d, err := cntxt.Reborn()
if err != nil {
return err
}
if d != nil {
err = os.Rename(instanceConf.PidFile, instanceConf.PidFile)
if err != nil {
return err
}
err = os.Rename(instanceConf.LogFile, instanceConf.LogFile)
if err != nil {
return err
}
log.Infof("execute \"mole stop %s\" if you like to stop it at any time", instanceConf.Id)
os.Exit(0)
}
defer func() {
err := cntxt.Release()
if err != nil {
log.WithFields(log.Fields{
"id": instanceConf.Id,
}).WithError(err).Error("error detaching the mole instance")
}
}()
return nil
}
type flags []string
func (fs flags) lookup(flag string) bool {
for _, f := range fs {
if flag == f {
return true
}
}
return false
}
func createTunnel(conf *Configuration) (*tunnel.Tunnel, error) {
s, err := tunnel.NewServer(conf.Server.User, conf.Server.Address(), conf.Key, conf.SshAgent, conf.SshConfig)
if err != nil {
log.Errorf("error processing server options: %v\n", err)
return nil, err
}
s.Insecure = conf.Insecure
s.Timeout = conf.Timeout
err = s.Key.HandlePassphrase(func() ([]byte, error) {
fmt.Printf("The key provided is secured by a password. Please provide it below:\n")
fmt.Printf("Password: ")
p, err := terminal.ReadPassword(int(syscall.Stdin))
fmt.Printf("\n")
return p, err
})
if err != nil {
log.WithError(err).Error("error setting up password handling function")
return nil, err
}
log.Debugf("server: %s", s)
source := make([]string, len(conf.Source))
for i, r := range conf.Source {
source[i] = r.String()
}
destination := make([]string, len(conf.Destination))
for i, r := range conf.Destination {
if r.Port == "" {
log.WithError(err).Errorf("missing port in destination address: %s", r.String())
return nil, err
}
destination[i] = r.String()
}
t, err := tunnel.New(conf.TunnelType, s, source, destination, conf.SshConfig)
if err != nil {
log.Error(err)
return nil, err
}
//TODO need to find a way to require the attributes below to be always set
// since they are not optional (functionality will break if they are not
// set and CLI parsing is the one setting the default values).
// That could be done by make them required in the constructor's signature or
// by creating a configuration struct for a tunnel object.
t.ConnectionRetries = conf.ConnectionRetries
t.WaitAndRetry = conf.WaitAndRetry
t.KeepAliveInterval = conf.KeepAliveInterval
return t, nil
}
// appendIdArg adds the id argument to the list of arguments passed by the user.
// This is helpful for scenarios where the process will be detached from the
// parent process and the new child process needs context about the instance.
func appendIdArg(id string, args []string) (newArgs []string) {
for _, arg := range args {
if arg == "--id" {
return args
}
}
newArgs = make([]string, len(args)+2)
copy(newArgs, args)
newArgs[len(args)-2] = fmt.Sprintf("--%s", IdFlagName)
newArgs[len(args)-1] = id
return
}
| {
return fmt.Errorf("no instance of mole with id %s is running", c.Conf.Id)
} | conditional_block |
mole.go | package mole
import (
"context"
"fmt"
"io/ioutil"
"os"
"os/signal"
"path/filepath"
"syscall"
"time"
"github.com/davrodpin/mole/alias"
"github.com/davrodpin/mole/fsutils"
"github.com/davrodpin/mole/rpc"
"github.com/davrodpin/mole/tunnel"
"github.com/awnumar/memguard"
"github.com/gofrs/uuid"
"github.com/mitchellh/mapstructure"
daemon "github.com/sevlyar/go-daemon"
log "github.com/sirupsen/logrus"
"golang.org/x/crypto/ssh/terminal"
)
const (
// IdFlagName is the name of the flag that carries the unique idenfier for a
// mole instance.
IdFlagName = "id"
)
// cli keeps a reference to the latest Client object created.
// This is mostly needed to introspect client states during runtime (e.g. a
// remote procedure call that needs to check certain runtime information)
var cli *Client
type Configuration struct {
Id string `json:"id" mapstructure:"id" toml:"id"`
TunnelType string `json:"tunnel-type" mapstructure:"tunnel-type" toml:"tunnel-type"`
Verbose bool `json:"verbose" mapstructure:"verbose" toml:"verbose"`
Insecure bool `json:"insecure" mapstructure:"insecure" toml:"insecure"`
Detach bool `json:"detach" mapstructure:"detach" toml:"detach"`
Source AddressInputList `json:"source" mapstructure:"source" toml:"source"`
Destination AddressInputList `json:"destination" mapstructure:"destination" toml:"destination"`
Server AddressInput `json:"server" mapstructure:"server" toml:"server"`
Key string `json:"key" mapstructure:"key" toml:"key"`
KeepAliveInterval time.Duration `json:"keep-alive-interval" mapstructure:"keep-alive-interva" toml:"keep-alive-interval"`
ConnectionRetries int `json:"connection-retries" mapstructure:"connection-retries" toml:"connection-retries"`
WaitAndRetry time.Duration `json:"wait-and-retry" mapstructure:"wait-and-retry" toml:"wait-and-retry"`
SshAgent string `json:"ssh-agent" mapstructure:"ssh-agent" toml:"ssh-agent"`
Timeout time.Duration `json:"timeout" mapstructure:"timeout" toml:"timeout"`
SshConfig string `json:"ssh-config" mapstructure:"ssh-config" toml:"ssh-config"`
Rpc bool `json:"rpc" mapstructure:"rpc" toml:"rpc"`
RpcAddress string `json:"rpc-address" mapstructure:"rpc-address" toml:"rpc-address"`
}
// ParseAlias translates a Configuration object to an Alias object.
func (c Configuration) ParseAlias(name string) *alias.Alias {
return &alias.Alias{
Name: name,
TunnelType: c.TunnelType,
Verbose: c.Verbose,
Insecure: c.Insecure,
Detach: c.Detach,
Source: c.Source.List(),
Destination: c.Destination.List(),
Server: c.Server.String(),
Key: c.Key,
KeepAliveInterval: c.KeepAliveInterval.String(),
ConnectionRetries: c.ConnectionRetries,
WaitAndRetry: c.WaitAndRetry.String(),
SshAgent: c.SshAgent,
Timeout: c.Timeout.String(),
SshConfig: c.SshConfig,
Rpc: c.Rpc,
RpcAddress: c.RpcAddress,
}
}
// Client manages the overall state of the application based on its configuration.
type Client struct {
Conf *Configuration
Tunnel *tunnel.Tunnel
sigs chan os.Signal
}
// New initializes a new mole's client.
func New(conf *Configuration) *Client {
cli = &Client{
Conf: conf,
sigs: make(chan os.Signal, 1),
}
return cli
}
// Start kicks off mole's client, establishing the tunnel and its channels
// based on the client configuration attributes.
func (c *Client) Start() error {
// memguard is used to securely keep sensitive information in memory.
// This call makes sure all data will be destroy when the program exits.
defer memguard.Purge()
if c.Conf.Id == "" {
u, err := uuid.NewV4()
if err != nil {
return fmt.Errorf("could not auto generate app instance id: %v", err)
}
c.Conf.Id = u.String()[:8]
}
r, err := c.Running()
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error while checking for another instance using the same id")
return err
}
if r {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).Error("can't start. Another instance is already using the same id")
return fmt.Errorf("can't start. Another instance is already using the same id %s", c.Conf.Id)
}
log.Infof("instance identifier is %s", c.Conf.Id)
if c.Conf.Detach {
var err error
ic, err := NewDetachedInstance(c.Conf.Id)
if err != nil {
log.WithError(err).Errorf("error while creating directory to store mole instance related files")
return err
}
err = startDaemonProcess(ic)
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error starting ssh tunnel")
return err
}
} else {
go c.handleSignals()
}
if c.Conf.Verbose {
log.SetLevel(log.DebugLevel)
}
d, err := fsutils.CreateInstanceDir(c.Conf.Id)
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error creating directory for mole instance")
return err
}
if c.Conf.Rpc {
addr, err := rpc.Start(c.Conf.RpcAddress)
if err != nil {
return err
}
rd := filepath.Join(d.Dir, "rpc")
err = ioutil.WriteFile(rd, []byte(addr.String()), 0644)
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error creating file with rpc address")
return err
}
c.Conf.RpcAddress = addr.String()
log.Infof("rpc server address saved on %s", rd)
} | t, err := createTunnel(c.Conf)
if err != nil {
log.WithFields(log.Fields{
"id": c.Conf.Id,
}).WithError(err).Error("error creating tunnel")
return err
}
c.Tunnel = t
if err = c.Tunnel.Start(); err != nil {
log.WithFields(log.Fields{
"tunnel": c.Tunnel.String(),
}).WithError(err).Error("error while starting tunnel")
return err
}
return nil
}
// Stop shuts down a detached mole's application instance.
func (c *Client) Stop() error {
pfp, err := fsutils.GetPidFileLocation(c.Conf.Id)
if err != nil {
return fmt.Errorf("error getting information about aliases directory: %v", err)
}
if _, err := os.Stat(pfp); os.IsNotExist(err) {
return fmt.Errorf("no instance of mole with id %s is running", c.Conf.Id)
}
cntxt := &daemon.Context{
PidFileName: pfp,
}
d, err := cntxt.Search()
if err != nil {
return err
}
if c.Conf.Detach {
err = os.RemoveAll(pfp)
if err != nil {
return err
}
} else {
d, err := fsutils.InstanceDir(c.Conf.Id)
if err != nil {
return err
}
err = os.RemoveAll(d.Dir)
if err != nil {
return err
}
}
err = d.Kill()
if err != nil {
return err
}
return nil
}
func (c *Client) handleSignals() {
signal.Notify(c.sigs, syscall.SIGINT, syscall.SIGTERM, os.Interrupt)
sig := <-c.sigs
log.Debugf("process signal %s received", sig)
err := c.Stop()
if err != nil {
log.WithError(err).Error("instance not properly stopped")
}
}
// Merge overwrites Configuration from the given Alias.
//
// Certain attributes like Verbose, Insecure and Detach will be overwritten
// only if they are found on the givenFlags which should contain the name of
// all flags given by the user through UI (e.g. CLI).
func (c *Configuration) Merge(al *alias.Alias, givenFlags []string) error {
var fl flags = givenFlags
if !fl.lookup("verbose") {
c.Verbose = al.Verbose
}
if !fl.lookup("insecure") {
c.Insecure = al.Insecure
}
if !fl.lookup("detach") {
c.Detach = al.Detach
}
c.Id = al.Name
c.TunnelType = al.TunnelType
srcl := AddressInputList{}
for _, src := range al.Source {
err := srcl.Set(src)
if err != nil {
return err
}
}
c.Source = srcl
dstl := AddressInputList{}
for _, dst := range al.Destination {
err := dstl.Set(dst)
if err != nil {
return err
}
}
c.Destination = dstl
srv := AddressInput{}
err := srv.Set(al.Server)
if err != nil {
return err
}
c.Server = srv
c.Key = al.Key
kai, err := time.ParseDuration(al.KeepAliveInterval)
if err != nil {
return err
}
c.KeepAliveInterval = kai
c.ConnectionRetries = al.ConnectionRetries
war, err := time.ParseDuration(al.WaitAndRetry)
if err != nil {
return err
}
c.WaitAndRetry = war
c.SshAgent = al.SshAgent
tim, err := time.ParseDuration(al.Timeout)
if err != nil {
return err
}
c.Timeout = tim
c.SshConfig = al.SshConfig
c.Rpc = al.Rpc
c.RpcAddress = al.RpcAddress
return nil
}
// ShowInstances returns the runtime information about all instances of mole
// running on the system with rpc enabled.
func ShowInstances() (*InstancesRuntime, error) {
ctx := context.Background()
data, err := rpc.ShowAll(ctx)
if err != nil {
return nil, err
}
var instances []Runtime
err = mapstructure.Decode(data, &instances)
if err != nil {
return nil, err
}
runtime := InstancesRuntime(instances)
if len(runtime) == 0 {
return nil, fmt.Errorf("no instances were found.")
}
return &runtime, nil
}
// ShowInstance returns the runtime information about an application instance
// from the given id or alias.
func ShowInstance(id string) (*Runtime, error) {
ctx := context.Background()
info, err := rpc.Show(ctx, id)
if err != nil {
return nil, err
}
var r Runtime
err = mapstructure.Decode(info, &r)
if err != nil {
return nil, err
}
return &r, nil
}
func startDaemonProcess(instanceConf *DetachedInstance) error {
args := appendIdArg(instanceConf.Id, os.Args)
cntxt := &daemon.Context{
PidFileName: instanceConf.PidFile,
PidFilePerm: 0644,
LogFileName: instanceConf.LogFile,
LogFilePerm: 0640,
Umask: 027,
Args: args,
}
d, err := cntxt.Reborn()
if err != nil {
return err
}
if d != nil {
err = os.Rename(instanceConf.PidFile, instanceConf.PidFile)
if err != nil {
return err
}
err = os.Rename(instanceConf.LogFile, instanceConf.LogFile)
if err != nil {
return err
}
log.Infof("execute \"mole stop %s\" if you like to stop it at any time", instanceConf.Id)
os.Exit(0)
}
defer func() {
err := cntxt.Release()
if err != nil {
log.WithFields(log.Fields{
"id": instanceConf.Id,
}).WithError(err).Error("error detaching the mole instance")
}
}()
return nil
}
type flags []string
func (fs flags) lookup(flag string) bool {
for _, f := range fs {
if flag == f {
return true
}
}
return false
}
func createTunnel(conf *Configuration) (*tunnel.Tunnel, error) {
s, err := tunnel.NewServer(conf.Server.User, conf.Server.Address(), conf.Key, conf.SshAgent, conf.SshConfig)
if err != nil {
log.Errorf("error processing server options: %v\n", err)
return nil, err
}
s.Insecure = conf.Insecure
s.Timeout = conf.Timeout
err = s.Key.HandlePassphrase(func() ([]byte, error) {
fmt.Printf("The key provided is secured by a password. Please provide it below:\n")
fmt.Printf("Password: ")
p, err := terminal.ReadPassword(int(syscall.Stdin))
fmt.Printf("\n")
return p, err
})
if err != nil {
log.WithError(err).Error("error setting up password handling function")
return nil, err
}
log.Debugf("server: %s", s)
source := make([]string, len(conf.Source))
for i, r := range conf.Source {
source[i] = r.String()
}
destination := make([]string, len(conf.Destination))
for i, r := range conf.Destination {
if r.Port == "" {
log.WithError(err).Errorf("missing port in destination address: %s", r.String())
return nil, err
}
destination[i] = r.String()
}
t, err := tunnel.New(conf.TunnelType, s, source, destination, conf.SshConfig)
if err != nil {
log.Error(err)
return nil, err
}
//TODO need to find a way to require the attributes below to be always set
// since they are not optional (functionality will break if they are not
// set and CLI parsing is the one setting the default values).
// That could be done by make them required in the constructor's signature or
// by creating a configuration struct for a tunnel object.
t.ConnectionRetries = conf.ConnectionRetries
t.WaitAndRetry = conf.WaitAndRetry
t.KeepAliveInterval = conf.KeepAliveInterval
return t, nil
}
// appendIdArg adds the id argument to the list of arguments passed by the user.
// This is helpful for scenarios where the process will be detached from the
// parent process and the new child process needs context about the instance.
func appendIdArg(id string, args []string) (newArgs []string) {
for _, arg := range args {
if arg == "--id" {
return args
}
}
newArgs = make([]string, len(args)+2)
copy(newArgs, args)
newArgs[len(args)-2] = fmt.Sprintf("--%s", IdFlagName)
newArgs[len(args)-1] = id
return
} | random_line_split | |
cliches.go | package main
// NEEDCAPS
func | (match string) *BadTerm {
return &BadTerm{match, "'%s' is a cliche. Avoid it like the plague."}
}
// ShouldNotCliche returns a slice of BadTerm's, none of which should be in the
// a text (case insensitive). See existence_checks.go for details of BadTerms.
func ShouldNotCliche() []TextCheck {
return []TextCheck{
cliche("all hell broke loose"),
cliche("american as apple pie"),
cliche("hobson's choice"),
cliche("i beg to differ"),
cliche("jack of all trades"),
cliche("a chip off the old block"),
cliche("a clean slate"),
cliche("a dark and stormy night"),
cliche("a far cry"),
cliche("a fate worse than death"),
cliche("a fine kettle of fish"),
cliche("a loose cannon"),
cliche("a matter of concern"),
cliche("a penny saved is a penny earned"),
cliche("a tough row to hoe"),
cliche("a word to the wise"),
cliche("ace in the hole"),
cliche("acid test"),
cliche("add insult to injury"),
cliche("against all odds"),
cliche("air your dirty laundry"),
cliche("alas and alack"),
cliche("all fun and games"),
cliche("all in a day's work"),
cliche("all talk, no action"),
cliche("all things being equal"),
cliche("all thumbs"),
cliche("all your eggs in one basket"),
cliche("all's fair in love and war"),
cliche("all's well that ends well"),
cliche("almighty dollar"),
cliche("an axe? to grind"),
cliche("another day, another dollar"),
cliche("armed to the teeth"),
cliche("as a last resort"),
cliche("as luck would have it"),
cliche("as old as time"),
cliche("as the crow flies"),
cliche("at loose ends"),
cliche("at my wits end"),
cliche("at the end of the day"),
cliche("attached hereto"),
cliche("avoid like the plague"),
cliche("babe in the woods"),
cliche("back against the wall"),
cliche("back in the saddle"),
cliche("back to square one"),
cliche("back to the drawing board"),
cliche("bad to the bone"),
cliche("badge of honor"),
cliche("bald faced liar"),
cliche("bald-faced lie"),
cliche("ballpark figure"),
cliche("banging your head against a brick wall"),
cliche("baptism by fire"),
cliche("barking up the wrong tree"),
cliche("bat out of hell"),
cliche("be all and end all"),
cliche("beat a dead horse"),
cliche("beat around the bush"),
cliche("been there, done that"),
cliche("beggars can't be choosers"),
cliche("behind the eight ball"),
cliche("bend over backwards"),
cliche("benefit of the doubt"),
cliche("bent out of shape"),
cliche("best thing since sliced bread"),
cliche("bet your bottom dollar"),
cliche("better half"),
cliche("better late than never"),
cliche("better mousetrap"),
cliche("better safe than sorry"),
cliche("between scylla and charybdis"),
cliche("between a rock and a hard place"),
cliche("between a rock and a hard place"),
cliche("between the devil and the deep blue sea"),
cliche("betwixt and between"),
cliche("beyond the pale"),
cliche("bide your time"),
cliche("big as life"),
cliche("big cheese"),
cliche("big fish in a small pond"),
cliche("big man on campus"),
cliche("bigger they are the harder they fall"),
cliche("bird in the hand"),
cliche("bird's eye view"),
cliche("birds and the bees"),
cliche("birds of a feather flock together"),
cliche("bit the hand that feeds you"),
cliche("bite the bullet"),
cliche("bite the dust"),
cliche("bitten off more than he can chew"),
cliche("black as coal"),
cliche("black as pitch"),
cliche("black as the ace of spades"),
cliche("blast from the past"),
cliche("bleeding heart"),
cliche("blessing in disguise"),
cliche("blind ambition"),
cliche("blind as a bat"),
cliche("blind leading the blind"),
cliche("blissful ignorance"),
cliche("blood is thicker than water"),
cliche("blood sweat and tears"),
cliche("blow a fuse"),
cliche("blow off steam"),
cliche("blow your own horn"),
cliche("blushing bride"),
cliche("boils down to"),
cliche("bolt from the blue"),
cliche("bone to pick"),
cliche("bored stiff"),
cliche("bored to tears"),
cliche("bottomless pit"),
cliche("boys will be boys"),
cliche("bright and early"),
cliche("brings home the bacon"),
cliche("broad across the beam"),
cliche("broken record"),
cliche("brought back to reality"),
cliche("bulk large"),
cliche("bull by the horns"),
cliche("bull in a china shop"),
cliche("burn the midnight oil"),
cliche("burning question"),
cliche("burning the candle at both ends"),
cliche("burst your bubble"),
cliche("bury the hatchet"),
cliche("busy as a bee"),
cliche("but that's another story"),
cliche("by hook or by crook"),
cliche("by no means"),
cliche("call a spade a spade"),
cliche("called onto the carpet"),
cliche("calm before the storm"),
cliche("can of worms"),
cliche("can't cut the mustard"),
cliche("can't hold a candle to"),
cliche("case of mistaken identity"),
cliche("cast aspersions"),
cliche("cat got your tongue"),
cliche("cat's meow"),
cliche("caught in the crossfire"),
cliche("caught red-handed"),
cliche("chase a red herring"),
cliche("checkered past"),
cliche("chomping at the bit"),
cliche("cleanliness is next to godliness"),
cliche("clear as a bell"),
cliche("clear as mud"),
cliche("close to the vest"),
cliche("cock and bull story"),
cliche("cold shoulder"),
cliche("come hell or high water"),
cliche("comparing apples and oranges"),
cliche("conspicuous by its absence"),
cliche("conspicuous by its absence"),
cliche("cool as a cucumber"),
cliche("cool, calm, and collected"),
cliche("cost a king's ransom"),
cliche("count your blessings"),
cliche("crack of dawn"),
cliche("crash course"),
cliche("creature comforts"),
cliche("cross that bridge when you come to it"),
cliche("crushing blow"),
cliche("cry like a baby"),
cliche("cry me a river"),
cliche("cry over spilt milk"),
cliche("crystal clear"),
cliche("crystal clear"),
cliche("curiosity killed the cat"),
cliche("cut and dried"),
cliche("cut through the red tape"),
cliche("cut to the chase"),
cliche("cute as a bugs ear"),
cliche("cute as a button"),
cliche("cute as a puppy"),
cliche("cuts to the quick"),
cliche("cutting edge"),
cliche("dark before the dawn"),
cliche("day in, day out"),
cliche("dead as a doornail"),
cliche("decision-making process"),
cliche("devil is in the details"),
cliche("dime a dozen"),
cliche("divide and conquer"),
cliche("dog and pony show"),
cliche("dog days"),
cliche("dog eat dog"),
cliche("dog tired"),
cliche("don't burn your bridges"),
cliche("don't count your chickens"),
cliche("don't look a gift horse in the mouth"),
cliche("don't rock the boat"),
cliche("don't step on anyone's toes"),
cliche("don't take any wooden nickels"),
cliche("down and out"),
cliche("down at the heels"),
cliche("down in the dumps"),
cliche("down the hatch"),
cliche("down to earth"),
cliche("draw the line"),
cliche("dressed to kill"),
cliche("dressed to the nines"),
cliche("drives me up the wall"),
cliche("dubious distinction"),
cliche("dull as dishwater"),
cliche("duly authorized"),
cliche("dyed in the wool"),
cliche("eagle eye"),
cliche("ear to the ground"),
cliche("early bird catches the worm"),
cliche("easier said than done"),
cliche("easier said than done"),
cliche("easy as pie"),
cliche("eat your heart out"),
cliche("eat your words"),
cliche("eleventh hour"),
cliche("enclosed herewith"),
cliche("even the playing field"),
cliche("every dog has its day"),
cliche("every fiber of my being"),
cliche("everything but the kitchen sink"),
cliche("eye for an eye"),
cliche("eyes peeled"),
cliche("face the music"),
cliche("facts of life"),
cliche("fair weather friend"),
cliche("fall by the wayside"),
cliche("fan the flames"),
cliche("far be it from me"),
cliche("fast and loose"),
cliche("feast or famine"),
cliche("feather your nest"),
cliche("feathered friends"),
cliche("few and far between"),
cliche("fifteen minutes of fame"),
cliche("fills the bill"),
cliche("filthy vermin"),
cliche("fine kettle of fish"),
cliche("first and foremost"),
cliche("fish out of water"),
cliche("fishing for a compliment"),
cliche("fit as a fiddle"),
cliche("fit the bill"),
cliche("fit to be tied"),
cliche("flash in the pan"),
cliche("flat as a pancake"),
cliche("flip your lid"),
cliche("flog a dead horse"),
cliche("fly by night"),
cliche("fly the coop"),
cliche("follow your heart"),
cliche("for all intents and purposes"),
cliche("for free"),
cliche("for the birds"),
cliche("for what it's worth"),
cliche("force of nature"),
cliche("force to be reckoned with"),
cliche("forgive and forget"),
cliche("fox in the henhouse"),
cliche("free and easy"),
cliche("free as a bird"),
cliche("fresh as a daisy"),
cliche("full steam ahead"),
cliche("fun in the sun"),
cliche("garbage in, garbage out"),
cliche("gentle as a lamb"),
cliche("get a kick out of"),
cliche("get a leg up"),
cliche("get down and dirty"),
cliche("get the lead out"),
cliche("get to the bottom of"),
cliche("get with the program"),
cliche("get your feet wet"),
cliche("gets my goat"),
cliche("gilding the lily"),
cliche("gilding the lily"),
cliche("give and take"),
cliche("go against the grain"),
cliche("go at it tooth and nail"),
cliche("go for broke"),
cliche("go him one better"),
cliche("go the extra mile"),
cliche("go with the flow"),
cliche("goes without saying"),
cliche("good as gold"),
cliche("good deed for the day"),
cliche("good things come to those who wait"),
cliche("good time was had by all"),
cliche("good times were had by all"),
cliche("greased lightning"),
cliche("greek to me"),
cliche("green thumb"),
cliche("green-eyed monster"),
cliche("grist for the mill"),
cliche("growing like a weed"),
cliche("hair of the dog"),
cliche("hand to mouth"),
cliche("happy as a clam"),
cliche("happy as a lark"),
cliche("hasn't a clue"),
cliche("have a nice day"),
cliche("have a short fuse"),
cliche("have high hopes"),
cliche("have the last laugh"),
cliche("haven't got a row to hoe"),
cliche("he's got his hands full"),
cliche("head honcho"),
cliche("head over heels"),
cliche("hear a pin drop"),
cliche("heard it through the grapevine"),
cliche("heart's content"),
cliche("heavy as lead"),
cliche("hem and haw"),
cliche("high and dry"),
cliche("high and mighty"),
cliche("high as a kite"),
cliche("his own worst enemy"),
cliche("his work cut out for him"),
cliche("hit paydirt"),
cliche("hither and yon"),
cliche("hold your head up high"),
cliche("hold your horses"),
cliche("hold your own"),
cliche("hold your tongue"),
cliche("honest as the day is long"),
cliche("horns of a dilemma"),
cliche("horns of a dilemma"),
cliche("horse of a different color"),
cliche("hot under the collar"),
cliche("hour of need"),
cliche("icing on the cake"),
cliche("if and when"),
cliche("if the shoe fits"),
cliche("if the shoe were on the other foot"),
cliche("if you catch my drift"),
cliche("in a jam"),
cliche("in a jiffy"),
cliche("in a nutshell"),
cliche("in a pig's eye"),
cliche("in a pinch"),
cliche("in a word"),
cliche("in hot water"),
cliche("in light of"),
cliche("in reference to"),
cliche("in short supply"),
cliche("in the final analysis"),
cliche("in the foreseeable future"),
cliche("in the gutter"),
cliche("in the last analysis"),
cliche("in the long run"),
cliche("in the matter of"),
cliche("in the nick of time"),
cliche("in the thick of it"),
cliche("in your dreams"),
cliche("innocent bystander"),
cliche("it ain't over till the fat lady sings"),
cliche("it goes without saying"),
cliche("it stands to reason"),
cliche("it takes all kinds"),
cliche("it takes one to know one"),
cliche("it's a small world"),
cliche("it's not what you know, it's who you know"),
cliche("it's only a matter of time"),
cliche("ivory tower"),
cliche("jockey for position"),
cliche("jog your memory"),
cliche("joined at the hip"),
cliche("judge a book by its cover"),
cliche("jump down your throat"),
cliche("jump in with both feet"),
cliche("jump on the bandwagon"),
cliche("jump the gun"),
cliche("jump to conclusions"),
cliche("just a hop, skip, and a jump"),
cliche("just the ticket"),
cliche("justice is blind"),
cliche("keep a stiff upper lip"),
cliche("keep an eye on"),
cliche("keep it simple, stupid"),
cliche("keep the home fires burning"),
cliche("keep up with the joneses"),
cliche("keep your chin up"),
cliche("keep your fingers crossed"),
cliche("kick the bucket"),
cliche("kick up your heels"),
cliche("kick your feet up"),
cliche("kid in a candy store"),
cliche("kill two birds with one stone"),
cliche("kiss of death"),
cliche("knock it out of the park"),
cliche("knock on wood"),
cliche("knock your socks off"),
cliche("know him from adam"),
cliche("know the ropes"),
cliche("know the score"),
cliche("knuckle down"),
cliche("knuckle sandwich"),
cliche("knuckle under"),
cliche("labor of love"),
cliche("ladder of success"),
cliche("land on your feet"),
cliche("lap of luxury"),
cliche("last but not least"),
cliche("last but not least"),
cliche("last hurrah"),
cliche("last-ditch effort"),
cliche("law of the jungle"),
cliche("law of the land"),
cliche("lay down the law"),
cliche("leaps and bounds"),
cliche("let sleeping dogs lie"),
cliche("let the cat out of the bag"),
cliche("let the good times roll"),
cliche("let your hair down"),
cliche("let's talk turkey"),
cliche("letter perfect"),
cliche("lick your wounds"),
cliche("lies like a rug"),
cliche("life's a bitch"),
cliche("life's a grind"),
cliche("light at the end of the tunnel"),
cliche("lighter than a feather"),
cliche("lighter than air"),
cliche("like clockwork"),
cliche("like father like son"),
cliche("like taking candy from a baby"),
cliche("like there's no tomorrow"),
cliche("lion's share"),
cliche("live and learn"),
cliche("live and let live"),
cliche("long and short of it"),
cliche("long lost love"),
cliche("look before you leap"),
cliche("look down your nose"),
cliche("look what the cat dragged in"),
cliche("looking a gift horse in the mouth"),
cliche("looks like death warmed over"),
cliche("loose cannon"),
cliche("lose your head"),
cliche("lose your temper"),
cliche("loud as a horn"),
cliche("lounge lizard"),
cliche("loved and lost"),
cliche("low man on the totem pole"),
cliche("luck of the irish"),
cliche("luck of the draw"),
cliche("make a mockery of"),
cliche("make hay while the sun shines"),
cliche("make money hand over fist"),
cliche("make my day"),
cliche("make the best of a bad situation"),
cliche("make the best of it"),
cliche("make your blood boil"),
cliche("male chauvinism"),
cliche("man of few words"),
cliche("man's best friend"),
cliche("many and diverse"),
cliche("mark my words"),
cliche("meaningful dialogue"),
cliche("missed the boat on that one"),
cliche("moment in the sun"),
cliche("moment of glory"),
cliche("moment of truth"),
cliche("moment of truth"),
cliche("money to burn"),
cliche("more in sorrow than in anger"),
cliche("more power to you"),
cliche("more sinned against than sinning"),
cliche("more than one way to skin a cat"),
cliche("movers and shakers"),
cliche("moving experience"),
cliche("my better half"),
cliche("naked as a jaybird"),
cliche("naked truth"),
cliche("neat as a pin"),
cliche("needle in a haystack"),
cliche("needless to say"),
cliche("neither here nor there"),
cliche("never look back"),
cliche("never say never"),
cliche("nip and tuck"),
cliche("nip in the bud"),
cliche("nip it in the bud"),
cliche("no guts, no glory"),
cliche("no love lost"),
cliche("no pain, no gain"),
cliche("no skin off my back"),
cliche("no stone unturned"),
cliche("no time like the present"),
cliche("no use crying over spilled milk"),
cliche("nose to the grindstone"),
cliche("not a hope in hell"),
cliche("not a minute's peace"),
cliche("not in my backyard"),
cliche("not playing with a full deck"),
cliche("not the end of the world"),
cliche("not written in stone"),
cliche("nothing to sneeze at"),
cliche("nothing ventured nothing gained"),
cliche("now we're cooking"),
cliche("off the top of my head"),
cliche("off the wagon"),
cliche("off the wall"),
cliche("old hat"),
cliche("olden days"),
cliche("older and wiser"),
cliche("older than methuselah"),
cliche("older than dirt"),
cliche("on a roll"),
cliche("on cloud nine"),
cliche("on pins and needles"),
cliche("on the bandwagon"),
cliche("on the money"),
cliche("on the nose"),
cliche("on the right track"),
cliche("on the rocks"),
cliche("on the same page"),
cliche("on the spot"),
cliche("on the tip of my tongue"),
cliche("on the wagon"),
cliche("on thin ice"),
cliche("once bitten, twice shy"),
cliche("one bad apple doesn't spoil the bushel"),
cliche("one born every minute"),
cliche("one brick short"),
cliche("one foot in the grave"),
cliche("one in a million"),
cliche("one red cent"),
cliche("only game in town"),
cliche("open a can of worms"),
cliche("open and shut case"),
cliche("open the flood gates"),
cliche("opportunity doesn't knock twice"),
cliche("out of pocket"),
cliche("out of sight, out of mind"),
cliche("out of the frying pan into the fire"),
cliche("out of the woods"),
cliche("out on a limb"),
cliche("over a barrel"),
cliche("over the hump"),
cliche("pain and suffering"),
cliche("pain in the"),
cliche("panic button"),
cliche("par for the course"),
cliche("par for the course"),
cliche("part and parcel"),
cliche("party pooper"),
cliche("pass the buck"),
cliche("patience is a virtue"),
cliche("pay through the nose"),
cliche("penny pincher"),
cliche("perfect storm"),
cliche("pig in a poke"),
cliche("pile it on"),
cliche("pillar of the community"),
cliche("pin your hopes on"),
cliche("pitter patter of little feet"),
cliche("plain as day"),
cliche("plain as the nose on your face"),
cliche("play by the rules"),
cliche("play your cards right"),
cliche("playing the field"),
cliche("playing with fire"),
cliche("please feel free to"),
cliche("pleased as punch"),
cliche("plenty of fish in the sea"),
cliche("point with pride"),
cliche("poor as a church mouse"),
cliche("pot calling the kettle black"),
cliche("presidential timber"),
cliche("pretty as a picture"),
cliche("pull a fast one"),
cliche("pull your punches"),
cliche("pulled no punches"),
cliche("pulling your leg"),
cliche("pure as the driven snow"),
cliche("pursuant to your request"),
cliche("put it in a nutshell"),
cliche("put one over on you"),
cliche("put the cart before the horse"),
cliche("put the pedal to the metal"),
cliche("put your best foot forward"),
cliche("put your foot down"),
cliche("quantum jump"),
cliche("quantum leap"),
cliche("quick as a bunny"),
cliche("quick as a lick"),
cliche("quick as a wink"),
cliche("quick as lightning"),
cliche("quiet as a dormouse"),
cliche("rags to riches"),
cliche("raining buckets"),
cliche("raining cats and dogs"),
cliche("rank and file"),
cliche("rat race"),
cliche("reap what you sow"),
cliche("red as a beet"),
cliche("red herring"),
cliche("redound to one's credit"),
cliche("redound to the benefit of"),
cliche("regarding the matter of"),
cliche("reinvent the wheel"),
cliche("rich and famous"),
cliche("rings a bell"),
cliche("ripe old age"),
cliche("ripped me off"),
cliche("rise and shine"),
cliche("road to hell is paved with good intentions"),
cliche("rob peter to pay paul"),
cliche("roll over in the grave"),
cliche("rub the wrong way"),
cliche("ruled the roost"),
cliche("running in circles"),
cliche("sad but true"),
cliche("sadder but wiser"),
cliche("salt of the earth"),
cliche("scared stiff"),
cliche("scared to death"),
cliche("sea change"),
cliche("sealed with a kiss"),
cliche("second to none"),
cliche("see eye to eye"),
cliche("seen the light"),
cliche("seize the day"),
cliche("set the record straight"),
cliche("set the world on fire"),
cliche("set your teeth on edge"),
cliche("sharp as a tack"),
cliche("shirked his duties"),
cliche("shoot for the moon"),
cliche("shoot the breeze"),
cliche("shot in the dark"),
cliche("shoulder to the wheel"),
cliche("sick as a dog"),
cliche("sigh of relief"),
cliche("signed, sealed, and delivered"),
cliche("sink or swim"),
cliche("six of one, half a dozen of another"),
cliche("six of one, half a dozen of the other"),
cliche("skating on thin ice"),
cliche("slept like a log"),
cliche("slinging mud"),
cliche("slippery as an eel"),
cliche("slow as molasses"),
cliche("slowly but surely"),
cliche("smart as a whip"),
cliche("smooth as a baby's bottom"),
cliche("sneaking suspicion"),
cliche("snug as a bug in a rug"),
cliche("sow wild oats"),
cliche("spare the rod, spoil the child"),
cliche("speak of the devil"),
cliche("spilled the beans"),
cliche("spinning your wheels"),
cliche("spitting image of"),
cliche("spoke with relish"),
cliche("spread like wildfire"),
cliche("spring to life"),
cliche("squeaky wheel gets the grease"),
cliche("stands out like a sore thumb"),
cliche("start from scratch"),
cliche("stick in the mud"),
cliche("still waters run deep"),
cliche("stitch in time"),
cliche("stop and smell the roses"),
cliche("straight as an arrow"),
cliche("straw that broke the camel's back"),
cliche("stretched to the breaking point"),
cliche("strong as an ox"),
cliche("stubborn as a mule"),
cliche("stuff that dreams are made of"),
cliche("stuffed shirt"),
cliche("sweating blood"),
cliche("sweating bullets"),
cliche("take a load off"),
cliche("take one for the team"),
cliche("take the bait"),
cliche("take the bull by the horns"),
cliche("take the plunge"),
cliche("takes one to know one"),
cliche("takes two to tango"),
cliche("than you can shake a stick at"),
cliche("the cream of the crop"),
cliche("the cream rises to the top"),
cliche("the more the merrier"),
cliche("the real mccoy"),
cliche("the real deal"),
cliche("the red carpet treatment"),
cliche("the same old story"),
cliche("the straw that broke the camel's back"),
cliche("there is no accounting for taste"),
cliche("thick as a brick"),
cliche("thick as thieves"),
cliche("thick as thieves"),
cliche("thin as a rail"),
cliche("think outside of the box"),
cliche("thinking outside the box"),
cliche("third time's the charm"),
cliche("this day and age"),
cliche("this hurts me worse than it hurts you"),
cliche("this point in time"),
cliche("this will acknowledge"),
cliche("thought leaders?"),
cliche("three sheets to the wind"),
cliche("through thick and thin"),
cliche("throw in the towel"),
cliche("throw the baby out with the bathwater"),
cliche("tie one on"),
cliche("tighter than a drum"),
cliche("time and time again"),
cliche("time is of the essence"),
cliche("tip of the iceberg"),
cliche("tired but happy"),
cliche("to coin a phrase"),
cliche("to each his own"),
cliche("to make a long story short"),
cliche("to the best of my knowledge"),
cliche("toe the line"),
cliche("tongue in cheek"),
cliche("too good to be true"),
cliche("too hot to handle"),
cliche("too numerous to mention"),
cliche("touch with a ten foot pole"),
cliche("tough as nails"),
cliche("trial and error"),
cliche("trials and tribulations"),
cliche("tried and true"),
cliche("trip down memory lane"),
cliche("twist of fate"),
cliche("two cents worth"),
cliche("two peas in a pod"),
cliche("ugly as sin"),
cliche("under the counter"),
cliche("under the gun"),
cliche("under the same roof"),
cliche("under the weather"),
cliche("until the cows come home"),
cliche("unvarnished truth"),
cliche("up the creek"),
cliche("uphill battle"),
cliche("upper crust"),
cliche("upset the applecart"),
cliche("vain attempt"),
cliche("vain effort"),
cliche("vanquish the enemy"),
cliche("various and sundry"),
cliche("vested interest"),
cliche("viable alternative"),
cliche("waiting for the other shoe to drop"),
cliche("wakeup call"),
cliche("warm welcome"),
cliche("watch your p's and q's"),
cliche("watch your tongue"),
cliche("watching the clock"),
cliche("water under the bridge"),
cliche("wax eloquent"),
cliche("wax poetic"),
cliche("we are pleased to advice"),
cliche("we regret to inform you"),
cliche("we wish to state"),
cliche("we've got a situation here"),
cliche("weather the storm"),
cliche("weed them out"),
cliche("week of sundays"),
cliche("went belly up"),
cliche("wet behind the ears"),
cliche("what goes around comes around"),
cliche("what you see is what you get"),
cliche("when it rains, it pours"),
cliche("when push comes to shove"),
cliche("when the cat's away"),
cliche("when the going gets tough, the tough get going"),
cliche("whet (?:the|your) appetite"),
cliche("white as a sheet"),
cliche("whole ball of wax"),
cliche("whole hog"),
cliche("whole nine yards"),
cliche("wild goose chase"),
cliche("will wonders never cease?"),
cliche("wisdom of the ages"),
cliche("wise as an owl"),
cliche("wolf at the door"),
cliche("wool pulled over our eyes"),
cliche("words fail me"),
cliche("work like a dog"),
cliche("world weary"),
cliche("worst nightmare"),
cliche("worth its weight in gold"),
cliche("writ large"),
cliche("wrong side of the bed"),
cliche("yanking your chain"),
cliche("yappy as a dog"),
cliche("years young"),
cliche("you are hereby advised that"),
cliche("you are what you eat"),
cliche("you can run but you can't hide"),
cliche("you only live once"),
cliche("you're the boss "),
cliche("young and foolish"),
cliche("young and vibrant"),
}
}
| cliche | identifier_name |
cliches.go | package main
// NEEDCAPS
func cliche(match string) *BadTerm |
// ShouldNotCliche returns a slice of BadTerm's, none of which should be in the
// a text (case insensitive). See existence_checks.go for details of BadTerms.
func ShouldNotCliche() []TextCheck {
return []TextCheck{
cliche("all hell broke loose"),
cliche("american as apple pie"),
cliche("hobson's choice"),
cliche("i beg to differ"),
cliche("jack of all trades"),
cliche("a chip off the old block"),
cliche("a clean slate"),
cliche("a dark and stormy night"),
cliche("a far cry"),
cliche("a fate worse than death"),
cliche("a fine kettle of fish"),
cliche("a loose cannon"),
cliche("a matter of concern"),
cliche("a penny saved is a penny earned"),
cliche("a tough row to hoe"),
cliche("a word to the wise"),
cliche("ace in the hole"),
cliche("acid test"),
cliche("add insult to injury"),
cliche("against all odds"),
cliche("air your dirty laundry"),
cliche("alas and alack"),
cliche("all fun and games"),
cliche("all in a day's work"),
cliche("all talk, no action"),
cliche("all things being equal"),
cliche("all thumbs"),
cliche("all your eggs in one basket"),
cliche("all's fair in love and war"),
cliche("all's well that ends well"),
cliche("almighty dollar"),
cliche("an axe? to grind"),
cliche("another day, another dollar"),
cliche("armed to the teeth"),
cliche("as a last resort"),
cliche("as luck would have it"),
cliche("as old as time"),
cliche("as the crow flies"),
cliche("at loose ends"),
cliche("at my wits end"),
cliche("at the end of the day"),
cliche("attached hereto"),
cliche("avoid like the plague"),
cliche("babe in the woods"),
cliche("back against the wall"),
cliche("back in the saddle"),
cliche("back to square one"),
cliche("back to the drawing board"),
cliche("bad to the bone"),
cliche("badge of honor"),
cliche("bald faced liar"),
cliche("bald-faced lie"),
cliche("ballpark figure"),
cliche("banging your head against a brick wall"),
cliche("baptism by fire"),
cliche("barking up the wrong tree"),
cliche("bat out of hell"),
cliche("be all and end all"),
cliche("beat a dead horse"),
cliche("beat around the bush"),
cliche("been there, done that"),
cliche("beggars can't be choosers"),
cliche("behind the eight ball"),
cliche("bend over backwards"),
cliche("benefit of the doubt"),
cliche("bent out of shape"),
cliche("best thing since sliced bread"),
cliche("bet your bottom dollar"),
cliche("better half"),
cliche("better late than never"),
cliche("better mousetrap"),
cliche("better safe than sorry"),
cliche("between scylla and charybdis"),
cliche("between a rock and a hard place"),
cliche("between a rock and a hard place"),
cliche("between the devil and the deep blue sea"),
cliche("betwixt and between"),
cliche("beyond the pale"),
cliche("bide your time"),
cliche("big as life"),
cliche("big cheese"),
cliche("big fish in a small pond"),
cliche("big man on campus"),
cliche("bigger they are the harder they fall"),
cliche("bird in the hand"),
cliche("bird's eye view"),
cliche("birds and the bees"),
cliche("birds of a feather flock together"),
cliche("bit the hand that feeds you"),
cliche("bite the bullet"),
cliche("bite the dust"),
cliche("bitten off more than he can chew"),
cliche("black as coal"),
cliche("black as pitch"),
cliche("black as the ace of spades"),
cliche("blast from the past"),
cliche("bleeding heart"),
cliche("blessing in disguise"),
cliche("blind ambition"),
cliche("blind as a bat"),
cliche("blind leading the blind"),
cliche("blissful ignorance"),
cliche("blood is thicker than water"),
cliche("blood sweat and tears"),
cliche("blow a fuse"),
cliche("blow off steam"),
cliche("blow your own horn"),
cliche("blushing bride"),
cliche("boils down to"),
cliche("bolt from the blue"),
cliche("bone to pick"),
cliche("bored stiff"),
cliche("bored to tears"),
cliche("bottomless pit"),
cliche("boys will be boys"),
cliche("bright and early"),
cliche("brings home the bacon"),
cliche("broad across the beam"),
cliche("broken record"),
cliche("brought back to reality"),
cliche("bulk large"),
cliche("bull by the horns"),
cliche("bull in a china shop"),
cliche("burn the midnight oil"),
cliche("burning question"),
cliche("burning the candle at both ends"),
cliche("burst your bubble"),
cliche("bury the hatchet"),
cliche("busy as a bee"),
cliche("but that's another story"),
cliche("by hook or by crook"),
cliche("by no means"),
cliche("call a spade a spade"),
cliche("called onto the carpet"),
cliche("calm before the storm"),
cliche("can of worms"),
cliche("can't cut the mustard"),
cliche("can't hold a candle to"),
cliche("case of mistaken identity"),
cliche("cast aspersions"),
cliche("cat got your tongue"),
cliche("cat's meow"),
cliche("caught in the crossfire"),
cliche("caught red-handed"),
cliche("chase a red herring"),
cliche("checkered past"),
cliche("chomping at the bit"),
cliche("cleanliness is next to godliness"),
cliche("clear as a bell"),
cliche("clear as mud"),
cliche("close to the vest"),
cliche("cock and bull story"),
cliche("cold shoulder"),
cliche("come hell or high water"),
cliche("comparing apples and oranges"),
cliche("conspicuous by its absence"),
cliche("conspicuous by its absence"),
cliche("cool as a cucumber"),
cliche("cool, calm, and collected"),
cliche("cost a king's ransom"),
cliche("count your blessings"),
cliche("crack of dawn"),
cliche("crash course"),
cliche("creature comforts"),
cliche("cross that bridge when you come to it"),
cliche("crushing blow"),
cliche("cry like a baby"),
cliche("cry me a river"),
cliche("cry over spilt milk"),
cliche("crystal clear"),
cliche("crystal clear"),
cliche("curiosity killed the cat"),
cliche("cut and dried"),
cliche("cut through the red tape"),
cliche("cut to the chase"),
cliche("cute as a bugs ear"),
cliche("cute as a button"),
cliche("cute as a puppy"),
cliche("cuts to the quick"),
cliche("cutting edge"),
cliche("dark before the dawn"),
cliche("day in, day out"),
cliche("dead as a doornail"),
cliche("decision-making process"),
cliche("devil is in the details"),
cliche("dime a dozen"),
cliche("divide and conquer"),
cliche("dog and pony show"),
cliche("dog days"),
cliche("dog eat dog"),
cliche("dog tired"),
cliche("don't burn your bridges"),
cliche("don't count your chickens"),
cliche("don't look a gift horse in the mouth"),
cliche("don't rock the boat"),
cliche("don't step on anyone's toes"),
cliche("don't take any wooden nickels"),
cliche("down and out"),
cliche("down at the heels"),
cliche("down in the dumps"),
cliche("down the hatch"),
cliche("down to earth"),
cliche("draw the line"),
cliche("dressed to kill"),
cliche("dressed to the nines"),
cliche("drives me up the wall"),
cliche("dubious distinction"),
cliche("dull as dishwater"),
cliche("duly authorized"),
cliche("dyed in the wool"),
cliche("eagle eye"),
cliche("ear to the ground"),
cliche("early bird catches the worm"),
cliche("easier said than done"),
cliche("easier said than done"),
cliche("easy as pie"),
cliche("eat your heart out"),
cliche("eat your words"),
cliche("eleventh hour"),
cliche("enclosed herewith"),
cliche("even the playing field"),
cliche("every dog has its day"),
cliche("every fiber of my being"),
cliche("everything but the kitchen sink"),
cliche("eye for an eye"),
cliche("eyes peeled"),
cliche("face the music"),
cliche("facts of life"),
cliche("fair weather friend"),
cliche("fall by the wayside"),
cliche("fan the flames"),
cliche("far be it from me"),
cliche("fast and loose"),
cliche("feast or famine"),
cliche("feather your nest"),
cliche("feathered friends"),
cliche("few and far between"),
cliche("fifteen minutes of fame"),
cliche("fills the bill"),
cliche("filthy vermin"),
cliche("fine kettle of fish"),
cliche("first and foremost"),
cliche("fish out of water"),
cliche("fishing for a compliment"),
cliche("fit as a fiddle"),
cliche("fit the bill"),
cliche("fit to be tied"),
cliche("flash in the pan"),
cliche("flat as a pancake"),
cliche("flip your lid"),
cliche("flog a dead horse"),
cliche("fly by night"),
cliche("fly the coop"),
cliche("follow your heart"),
cliche("for all intents and purposes"),
cliche("for free"),
cliche("for the birds"),
cliche("for what it's worth"),
cliche("force of nature"),
cliche("force to be reckoned with"),
cliche("forgive and forget"),
cliche("fox in the henhouse"),
cliche("free and easy"),
cliche("free as a bird"),
cliche("fresh as a daisy"),
cliche("full steam ahead"),
cliche("fun in the sun"),
cliche("garbage in, garbage out"),
cliche("gentle as a lamb"),
cliche("get a kick out of"),
cliche("get a leg up"),
cliche("get down and dirty"),
cliche("get the lead out"),
cliche("get to the bottom of"),
cliche("get with the program"),
cliche("get your feet wet"),
cliche("gets my goat"),
cliche("gilding the lily"),
cliche("gilding the lily"),
cliche("give and take"),
cliche("go against the grain"),
cliche("go at it tooth and nail"),
cliche("go for broke"),
cliche("go him one better"),
cliche("go the extra mile"),
cliche("go with the flow"),
cliche("goes without saying"),
cliche("good as gold"),
cliche("good deed for the day"),
cliche("good things come to those who wait"),
cliche("good time was had by all"),
cliche("good times were had by all"),
cliche("greased lightning"),
cliche("greek to me"),
cliche("green thumb"),
cliche("green-eyed monster"),
cliche("grist for the mill"),
cliche("growing like a weed"),
cliche("hair of the dog"),
cliche("hand to mouth"),
cliche("happy as a clam"),
cliche("happy as a lark"),
cliche("hasn't a clue"),
cliche("have a nice day"),
cliche("have a short fuse"),
cliche("have high hopes"),
cliche("have the last laugh"),
cliche("haven't got a row to hoe"),
cliche("he's got his hands full"),
cliche("head honcho"),
cliche("head over heels"),
cliche("hear a pin drop"),
cliche("heard it through the grapevine"),
cliche("heart's content"),
cliche("heavy as lead"),
cliche("hem and haw"),
cliche("high and dry"),
cliche("high and mighty"),
cliche("high as a kite"),
cliche("his own worst enemy"),
cliche("his work cut out for him"),
cliche("hit paydirt"),
cliche("hither and yon"),
cliche("hold your head up high"),
cliche("hold your horses"),
cliche("hold your own"),
cliche("hold your tongue"),
cliche("honest as the day is long"),
cliche("horns of a dilemma"),
cliche("horns of a dilemma"),
cliche("horse of a different color"),
cliche("hot under the collar"),
cliche("hour of need"),
cliche("icing on the cake"),
cliche("if and when"),
cliche("if the shoe fits"),
cliche("if the shoe were on the other foot"),
cliche("if you catch my drift"),
cliche("in a jam"),
cliche("in a jiffy"),
cliche("in a nutshell"),
cliche("in a pig's eye"),
cliche("in a pinch"),
cliche("in a word"),
cliche("in hot water"),
cliche("in light of"),
cliche("in reference to"),
cliche("in short supply"),
cliche("in the final analysis"),
cliche("in the foreseeable future"),
cliche("in the gutter"),
cliche("in the last analysis"),
cliche("in the long run"),
cliche("in the matter of"),
cliche("in the nick of time"),
cliche("in the thick of it"),
cliche("in your dreams"),
cliche("innocent bystander"),
cliche("it ain't over till the fat lady sings"),
cliche("it goes without saying"),
cliche("it stands to reason"),
cliche("it takes all kinds"),
cliche("it takes one to know one"),
cliche("it's a small world"),
cliche("it's not what you know, it's who you know"),
cliche("it's only a matter of time"),
cliche("ivory tower"),
cliche("jockey for position"),
cliche("jog your memory"),
cliche("joined at the hip"),
cliche("judge a book by its cover"),
cliche("jump down your throat"),
cliche("jump in with both feet"),
cliche("jump on the bandwagon"),
cliche("jump the gun"),
cliche("jump to conclusions"),
cliche("just a hop, skip, and a jump"),
cliche("just the ticket"),
cliche("justice is blind"),
cliche("keep a stiff upper lip"),
cliche("keep an eye on"),
cliche("keep it simple, stupid"),
cliche("keep the home fires burning"),
cliche("keep up with the joneses"),
cliche("keep your chin up"),
cliche("keep your fingers crossed"),
cliche("kick the bucket"),
cliche("kick up your heels"),
cliche("kick your feet up"),
cliche("kid in a candy store"),
cliche("kill two birds with one stone"),
cliche("kiss of death"),
cliche("knock it out of the park"),
cliche("knock on wood"),
cliche("knock your socks off"),
cliche("know him from adam"),
cliche("know the ropes"),
cliche("know the score"),
cliche("knuckle down"),
cliche("knuckle sandwich"),
cliche("knuckle under"),
cliche("labor of love"),
cliche("ladder of success"),
cliche("land on your feet"),
cliche("lap of luxury"),
cliche("last but not least"),
cliche("last but not least"),
cliche("last hurrah"),
cliche("last-ditch effort"),
cliche("law of the jungle"),
cliche("law of the land"),
cliche("lay down the law"),
cliche("leaps and bounds"),
cliche("let sleeping dogs lie"),
cliche("let the cat out of the bag"),
cliche("let the good times roll"),
cliche("let your hair down"),
cliche("let's talk turkey"),
cliche("letter perfect"),
cliche("lick your wounds"),
cliche("lies like a rug"),
cliche("life's a bitch"),
cliche("life's a grind"),
cliche("light at the end of the tunnel"),
cliche("lighter than a feather"),
cliche("lighter than air"),
cliche("like clockwork"),
cliche("like father like son"),
cliche("like taking candy from a baby"),
cliche("like there's no tomorrow"),
cliche("lion's share"),
cliche("live and learn"),
cliche("live and let live"),
cliche("long and short of it"),
cliche("long lost love"),
cliche("look before you leap"),
cliche("look down your nose"),
cliche("look what the cat dragged in"),
cliche("looking a gift horse in the mouth"),
cliche("looks like death warmed over"),
cliche("loose cannon"),
cliche("lose your head"),
cliche("lose your temper"),
cliche("loud as a horn"),
cliche("lounge lizard"),
cliche("loved and lost"),
cliche("low man on the totem pole"),
cliche("luck of the irish"),
cliche("luck of the draw"),
cliche("make a mockery of"),
cliche("make hay while the sun shines"),
cliche("make money hand over fist"),
cliche("make my day"),
cliche("make the best of a bad situation"),
cliche("make the best of it"),
cliche("make your blood boil"),
cliche("male chauvinism"),
cliche("man of few words"),
cliche("man's best friend"),
cliche("many and diverse"),
cliche("mark my words"),
cliche("meaningful dialogue"),
cliche("missed the boat on that one"),
cliche("moment in the sun"),
cliche("moment of glory"),
cliche("moment of truth"),
cliche("moment of truth"),
cliche("money to burn"),
cliche("more in sorrow than in anger"),
cliche("more power to you"),
cliche("more sinned against than sinning"),
cliche("more than one way to skin a cat"),
cliche("movers and shakers"),
cliche("moving experience"),
cliche("my better half"),
cliche("naked as a jaybird"),
cliche("naked truth"),
cliche("neat as a pin"),
cliche("needle in a haystack"),
cliche("needless to say"),
cliche("neither here nor there"),
cliche("never look back"),
cliche("never say never"),
cliche("nip and tuck"),
cliche("nip in the bud"),
cliche("nip it in the bud"),
cliche("no guts, no glory"),
cliche("no love lost"),
cliche("no pain, no gain"),
cliche("no skin off my back"),
cliche("no stone unturned"),
cliche("no time like the present"),
cliche("no use crying over spilled milk"),
cliche("nose to the grindstone"),
cliche("not a hope in hell"),
cliche("not a minute's peace"),
cliche("not in my backyard"),
cliche("not playing with a full deck"),
cliche("not the end of the world"),
cliche("not written in stone"),
cliche("nothing to sneeze at"),
cliche("nothing ventured nothing gained"),
cliche("now we're cooking"),
cliche("off the top of my head"),
cliche("off the wagon"),
cliche("off the wall"),
cliche("old hat"),
cliche("olden days"),
cliche("older and wiser"),
cliche("older than methuselah"),
cliche("older than dirt"),
cliche("on a roll"),
cliche("on cloud nine"),
cliche("on pins and needles"),
cliche("on the bandwagon"),
cliche("on the money"),
cliche("on the nose"),
cliche("on the right track"),
cliche("on the rocks"),
cliche("on the same page"),
cliche("on the spot"),
cliche("on the tip of my tongue"),
cliche("on the wagon"),
cliche("on thin ice"),
cliche("once bitten, twice shy"),
cliche("one bad apple doesn't spoil the bushel"),
cliche("one born every minute"),
cliche("one brick short"),
cliche("one foot in the grave"),
cliche("one in a million"),
cliche("one red cent"),
cliche("only game in town"),
cliche("open a can of worms"),
cliche("open and shut case"),
cliche("open the flood gates"),
cliche("opportunity doesn't knock twice"),
cliche("out of pocket"),
cliche("out of sight, out of mind"),
cliche("out of the frying pan into the fire"),
cliche("out of the woods"),
cliche("out on a limb"),
cliche("over a barrel"),
cliche("over the hump"),
cliche("pain and suffering"),
cliche("pain in the"),
cliche("panic button"),
cliche("par for the course"),
cliche("par for the course"),
cliche("part and parcel"),
cliche("party pooper"),
cliche("pass the buck"),
cliche("patience is a virtue"),
cliche("pay through the nose"),
cliche("penny pincher"),
cliche("perfect storm"),
cliche("pig in a poke"),
cliche("pile it on"),
cliche("pillar of the community"),
cliche("pin your hopes on"),
cliche("pitter patter of little feet"),
cliche("plain as day"),
cliche("plain as the nose on your face"),
cliche("play by the rules"),
cliche("play your cards right"),
cliche("playing the field"),
cliche("playing with fire"),
cliche("please feel free to"),
cliche("pleased as punch"),
cliche("plenty of fish in the sea"),
cliche("point with pride"),
cliche("poor as a church mouse"),
cliche("pot calling the kettle black"),
cliche("presidential timber"),
cliche("pretty as a picture"),
cliche("pull a fast one"),
cliche("pull your punches"),
cliche("pulled no punches"),
cliche("pulling your leg"),
cliche("pure as the driven snow"),
cliche("pursuant to your request"),
cliche("put it in a nutshell"),
cliche("put one over on you"),
cliche("put the cart before the horse"),
cliche("put the pedal to the metal"),
cliche("put your best foot forward"),
cliche("put your foot down"),
cliche("quantum jump"),
cliche("quantum leap"),
cliche("quick as a bunny"),
cliche("quick as a lick"),
cliche("quick as a wink"),
cliche("quick as lightning"),
cliche("quiet as a dormouse"),
cliche("rags to riches"),
cliche("raining buckets"),
cliche("raining cats and dogs"),
cliche("rank and file"),
cliche("rat race"),
cliche("reap what you sow"),
cliche("red as a beet"),
cliche("red herring"),
cliche("redound to one's credit"),
cliche("redound to the benefit of"),
cliche("regarding the matter of"),
cliche("reinvent the wheel"),
cliche("rich and famous"),
cliche("rings a bell"),
cliche("ripe old age"),
cliche("ripped me off"),
cliche("rise and shine"),
cliche("road to hell is paved with good intentions"),
cliche("rob peter to pay paul"),
cliche("roll over in the grave"),
cliche("rub the wrong way"),
cliche("ruled the roost"),
cliche("running in circles"),
cliche("sad but true"),
cliche("sadder but wiser"),
cliche("salt of the earth"),
cliche("scared stiff"),
cliche("scared to death"),
cliche("sea change"),
cliche("sealed with a kiss"),
cliche("second to none"),
cliche("see eye to eye"),
cliche("seen the light"),
cliche("seize the day"),
cliche("set the record straight"),
cliche("set the world on fire"),
cliche("set your teeth on edge"),
cliche("sharp as a tack"),
cliche("shirked his duties"),
cliche("shoot for the moon"),
cliche("shoot the breeze"),
cliche("shot in the dark"),
cliche("shoulder to the wheel"),
cliche("sick as a dog"),
cliche("sigh of relief"),
cliche("signed, sealed, and delivered"),
cliche("sink or swim"),
cliche("six of one, half a dozen of another"),
cliche("six of one, half a dozen of the other"),
cliche("skating on thin ice"),
cliche("slept like a log"),
cliche("slinging mud"),
cliche("slippery as an eel"),
cliche("slow as molasses"),
cliche("slowly but surely"),
cliche("smart as a whip"),
cliche("smooth as a baby's bottom"),
cliche("sneaking suspicion"),
cliche("snug as a bug in a rug"),
cliche("sow wild oats"),
cliche("spare the rod, spoil the child"),
cliche("speak of the devil"),
cliche("spilled the beans"),
cliche("spinning your wheels"),
cliche("spitting image of"),
cliche("spoke with relish"),
cliche("spread like wildfire"),
cliche("spring to life"),
cliche("squeaky wheel gets the grease"),
cliche("stands out like a sore thumb"),
cliche("start from scratch"),
cliche("stick in the mud"),
cliche("still waters run deep"),
cliche("stitch in time"),
cliche("stop and smell the roses"),
cliche("straight as an arrow"),
cliche("straw that broke the camel's back"),
cliche("stretched to the breaking point"),
cliche("strong as an ox"),
cliche("stubborn as a mule"),
cliche("stuff that dreams are made of"),
cliche("stuffed shirt"),
cliche("sweating blood"),
cliche("sweating bullets"),
cliche("take a load off"),
cliche("take one for the team"),
cliche("take the bait"),
cliche("take the bull by the horns"),
cliche("take the plunge"),
cliche("takes one to know one"),
cliche("takes two to tango"),
cliche("than you can shake a stick at"),
cliche("the cream of the crop"),
cliche("the cream rises to the top"),
cliche("the more the merrier"),
cliche("the real mccoy"),
cliche("the real deal"),
cliche("the red carpet treatment"),
cliche("the same old story"),
cliche("the straw that broke the camel's back"),
cliche("there is no accounting for taste"),
cliche("thick as a brick"),
cliche("thick as thieves"),
cliche("thick as thieves"),
cliche("thin as a rail"),
cliche("think outside of the box"),
cliche("thinking outside the box"),
cliche("third time's the charm"),
cliche("this day and age"),
cliche("this hurts me worse than it hurts you"),
cliche("this point in time"),
cliche("this will acknowledge"),
cliche("thought leaders?"),
cliche("three sheets to the wind"),
cliche("through thick and thin"),
cliche("throw in the towel"),
cliche("throw the baby out with the bathwater"),
cliche("tie one on"),
cliche("tighter than a drum"),
cliche("time and time again"),
cliche("time is of the essence"),
cliche("tip of the iceberg"),
cliche("tired but happy"),
cliche("to coin a phrase"),
cliche("to each his own"),
cliche("to make a long story short"),
cliche("to the best of my knowledge"),
cliche("toe the line"),
cliche("tongue in cheek"),
cliche("too good to be true"),
cliche("too hot to handle"),
cliche("too numerous to mention"),
cliche("touch with a ten foot pole"),
cliche("tough as nails"),
cliche("trial and error"),
cliche("trials and tribulations"),
cliche("tried and true"),
cliche("trip down memory lane"),
cliche("twist of fate"),
cliche("two cents worth"),
cliche("two peas in a pod"),
cliche("ugly as sin"),
cliche("under the counter"),
cliche("under the gun"),
cliche("under the same roof"),
cliche("under the weather"),
cliche("until the cows come home"),
cliche("unvarnished truth"),
cliche("up the creek"),
cliche("uphill battle"),
cliche("upper crust"),
cliche("upset the applecart"),
cliche("vain attempt"),
cliche("vain effort"),
cliche("vanquish the enemy"),
cliche("various and sundry"),
cliche("vested interest"),
cliche("viable alternative"),
cliche("waiting for the other shoe to drop"),
cliche("wakeup call"),
cliche("warm welcome"),
cliche("watch your p's and q's"),
cliche("watch your tongue"),
cliche("watching the clock"),
cliche("water under the bridge"),
cliche("wax eloquent"),
cliche("wax poetic"),
cliche("we are pleased to advice"),
cliche("we regret to inform you"),
cliche("we wish to state"),
cliche("we've got a situation here"),
cliche("weather the storm"),
cliche("weed them out"),
cliche("week of sundays"),
cliche("went belly up"),
cliche("wet behind the ears"),
cliche("what goes around comes around"),
cliche("what you see is what you get"),
cliche("when it rains, it pours"),
cliche("when push comes to shove"),
cliche("when the cat's away"),
cliche("when the going gets tough, the tough get going"),
cliche("whet (?:the|your) appetite"),
cliche("white as a sheet"),
cliche("whole ball of wax"),
cliche("whole hog"),
cliche("whole nine yards"),
cliche("wild goose chase"),
cliche("will wonders never cease?"),
cliche("wisdom of the ages"),
cliche("wise as an owl"),
cliche("wolf at the door"),
cliche("wool pulled over our eyes"),
cliche("words fail me"),
cliche("work like a dog"),
cliche("world weary"),
cliche("worst nightmare"),
cliche("worth its weight in gold"),
cliche("writ large"),
cliche("wrong side of the bed"),
cliche("yanking your chain"),
cliche("yappy as a dog"),
cliche("years young"),
cliche("you are hereby advised that"),
cliche("you are what you eat"),
cliche("you can run but you can't hide"),
cliche("you only live once"),
cliche("you're the boss "),
cliche("young and foolish"),
cliche("young and vibrant"),
}
}
| {
return &BadTerm{match, "'%s' is a cliche. Avoid it like the plague."}
} | identifier_body |
cliches.go | package main
// NEEDCAPS
func cliche(match string) *BadTerm {
return &BadTerm{match, "'%s' is a cliche. Avoid it like the plague."}
}
// ShouldNotCliche returns a slice of BadTerm's, none of which should be in the
// a text (case insensitive). See existence_checks.go for details of BadTerms.
func ShouldNotCliche() []TextCheck {
return []TextCheck{
cliche("all hell broke loose"),
cliche("american as apple pie"),
cliche("hobson's choice"),
cliche("i beg to differ"),
cliche("jack of all trades"),
cliche("a chip off the old block"),
cliche("a clean slate"),
cliche("a dark and stormy night"),
cliche("a far cry"),
cliche("a fate worse than death"),
cliche("a fine kettle of fish"),
cliche("a loose cannon"),
cliche("a matter of concern"),
cliche("a penny saved is a penny earned"),
cliche("a tough row to hoe"),
cliche("a word to the wise"),
cliche("ace in the hole"),
cliche("acid test"),
cliche("add insult to injury"),
cliche("against all odds"),
cliche("air your dirty laundry"),
cliche("alas and alack"),
cliche("all fun and games"),
cliche("all in a day's work"),
cliche("all talk, no action"),
cliche("all things being equal"),
cliche("all thumbs"),
cliche("all your eggs in one basket"),
cliche("all's fair in love and war"),
cliche("all's well that ends well"),
cliche("almighty dollar"),
cliche("an axe? to grind"),
cliche("another day, another dollar"),
cliche("armed to the teeth"),
cliche("as a last resort"),
cliche("as luck would have it"),
cliche("as old as time"),
cliche("as the crow flies"),
cliche("at loose ends"),
cliche("at my wits end"),
cliche("at the end of the day"),
cliche("attached hereto"),
cliche("avoid like the plague"),
cliche("babe in the woods"),
cliche("back against the wall"),
cliche("back in the saddle"),
cliche("back to square one"),
cliche("back to the drawing board"),
cliche("bad to the bone"),
cliche("badge of honor"),
cliche("bald faced liar"),
cliche("bald-faced lie"),
cliche("ballpark figure"),
cliche("banging your head against a brick wall"),
cliche("baptism by fire"),
cliche("barking up the wrong tree"),
cliche("bat out of hell"),
cliche("be all and end all"),
cliche("beat a dead horse"),
cliche("beat around the bush"),
cliche("been there, done that"),
cliche("beggars can't be choosers"),
cliche("behind the eight ball"),
cliche("bend over backwards"),
cliche("benefit of the doubt"),
cliche("bent out of shape"),
cliche("best thing since sliced bread"),
cliche("bet your bottom dollar"),
cliche("better half"),
cliche("better late than never"),
cliche("better mousetrap"),
cliche("better safe than sorry"),
cliche("between scylla and charybdis"),
cliche("between a rock and a hard place"),
cliche("between a rock and a hard place"),
cliche("between the devil and the deep blue sea"),
cliche("betwixt and between"),
cliche("beyond the pale"),
cliche("bide your time"),
cliche("big as life"),
cliche("big cheese"),
cliche("big fish in a small pond"),
cliche("big man on campus"),
cliche("bigger they are the harder they fall"),
cliche("bird in the hand"),
cliche("bird's eye view"),
cliche("birds and the bees"),
cliche("birds of a feather flock together"),
cliche("bit the hand that feeds you"),
cliche("bite the bullet"),
cliche("bite the dust"),
cliche("bitten off more than he can chew"),
cliche("black as coal"),
cliche("black as pitch"),
cliche("black as the ace of spades"),
cliche("blast from the past"),
cliche("bleeding heart"),
cliche("blessing in disguise"),
cliche("blind ambition"),
cliche("blind as a bat"),
cliche("blind leading the blind"),
cliche("blissful ignorance"),
cliche("blood is thicker than water"),
cliche("blood sweat and tears"),
cliche("blow a fuse"),
cliche("blow off steam"),
cliche("blow your own horn"),
cliche("blushing bride"),
cliche("boils down to"),
cliche("bolt from the blue"),
cliche("bone to pick"),
cliche("bored stiff"),
cliche("bored to tears"),
cliche("bottomless pit"),
cliche("boys will be boys"),
cliche("bright and early"),
cliche("brings home the bacon"),
cliche("broad across the beam"),
cliche("broken record"),
cliche("brought back to reality"),
cliche("bulk large"),
cliche("bull by the horns"),
cliche("bull in a china shop"),
cliche("burn the midnight oil"),
cliche("burning question"),
cliche("burning the candle at both ends"),
cliche("burst your bubble"),
cliche("bury the hatchet"),
cliche("busy as a bee"),
cliche("but that's another story"),
cliche("by hook or by crook"),
cliche("by no means"),
cliche("call a spade a spade"),
cliche("called onto the carpet"),
cliche("calm before the storm"),
cliche("can of worms"),
cliche("can't cut the mustard"),
cliche("can't hold a candle to"),
cliche("case of mistaken identity"),
cliche("cast aspersions"),
cliche("cat got your tongue"),
cliche("cat's meow"),
cliche("caught in the crossfire"),
cliche("caught red-handed"),
cliche("chase a red herring"),
cliche("checkered past"),
cliche("chomping at the bit"),
cliche("cleanliness is next to godliness"),
cliche("clear as a bell"),
cliche("clear as mud"),
cliche("close to the vest"),
cliche("cock and bull story"),
cliche("cold shoulder"),
cliche("come hell or high water"),
cliche("comparing apples and oranges"),
cliche("conspicuous by its absence"),
cliche("conspicuous by its absence"),
cliche("cool as a cucumber"),
cliche("cool, calm, and collected"),
cliche("cost a king's ransom"),
cliche("count your blessings"),
cliche("crack of dawn"),
cliche("crash course"),
cliche("creature comforts"),
cliche("cross that bridge when you come to it"),
cliche("crushing blow"),
cliche("cry like a baby"),
cliche("cry me a river"),
cliche("cry over spilt milk"),
cliche("crystal clear"),
cliche("crystal clear"),
cliche("curiosity killed the cat"),
cliche("cut and dried"),
cliche("cut through the red tape"),
cliche("cut to the chase"),
cliche("cute as a bugs ear"),
cliche("cute as a button"),
cliche("cute as a puppy"),
cliche("cuts to the quick"),
cliche("cutting edge"),
cliche("dark before the dawn"), | cliche("dime a dozen"),
cliche("divide and conquer"),
cliche("dog and pony show"),
cliche("dog days"),
cliche("dog eat dog"),
cliche("dog tired"),
cliche("don't burn your bridges"),
cliche("don't count your chickens"),
cliche("don't look a gift horse in the mouth"),
cliche("don't rock the boat"),
cliche("don't step on anyone's toes"),
cliche("don't take any wooden nickels"),
cliche("down and out"),
cliche("down at the heels"),
cliche("down in the dumps"),
cliche("down the hatch"),
cliche("down to earth"),
cliche("draw the line"),
cliche("dressed to kill"),
cliche("dressed to the nines"),
cliche("drives me up the wall"),
cliche("dubious distinction"),
cliche("dull as dishwater"),
cliche("duly authorized"),
cliche("dyed in the wool"),
cliche("eagle eye"),
cliche("ear to the ground"),
cliche("early bird catches the worm"),
cliche("easier said than done"),
cliche("easier said than done"),
cliche("easy as pie"),
cliche("eat your heart out"),
cliche("eat your words"),
cliche("eleventh hour"),
cliche("enclosed herewith"),
cliche("even the playing field"),
cliche("every dog has its day"),
cliche("every fiber of my being"),
cliche("everything but the kitchen sink"),
cliche("eye for an eye"),
cliche("eyes peeled"),
cliche("face the music"),
cliche("facts of life"),
cliche("fair weather friend"),
cliche("fall by the wayside"),
cliche("fan the flames"),
cliche("far be it from me"),
cliche("fast and loose"),
cliche("feast or famine"),
cliche("feather your nest"),
cliche("feathered friends"),
cliche("few and far between"),
cliche("fifteen minutes of fame"),
cliche("fills the bill"),
cliche("filthy vermin"),
cliche("fine kettle of fish"),
cliche("first and foremost"),
cliche("fish out of water"),
cliche("fishing for a compliment"),
cliche("fit as a fiddle"),
cliche("fit the bill"),
cliche("fit to be tied"),
cliche("flash in the pan"),
cliche("flat as a pancake"),
cliche("flip your lid"),
cliche("flog a dead horse"),
cliche("fly by night"),
cliche("fly the coop"),
cliche("follow your heart"),
cliche("for all intents and purposes"),
cliche("for free"),
cliche("for the birds"),
cliche("for what it's worth"),
cliche("force of nature"),
cliche("force to be reckoned with"),
cliche("forgive and forget"),
cliche("fox in the henhouse"),
cliche("free and easy"),
cliche("free as a bird"),
cliche("fresh as a daisy"),
cliche("full steam ahead"),
cliche("fun in the sun"),
cliche("garbage in, garbage out"),
cliche("gentle as a lamb"),
cliche("get a kick out of"),
cliche("get a leg up"),
cliche("get down and dirty"),
cliche("get the lead out"),
cliche("get to the bottom of"),
cliche("get with the program"),
cliche("get your feet wet"),
cliche("gets my goat"),
cliche("gilding the lily"),
cliche("gilding the lily"),
cliche("give and take"),
cliche("go against the grain"),
cliche("go at it tooth and nail"),
cliche("go for broke"),
cliche("go him one better"),
cliche("go the extra mile"),
cliche("go with the flow"),
cliche("goes without saying"),
cliche("good as gold"),
cliche("good deed for the day"),
cliche("good things come to those who wait"),
cliche("good time was had by all"),
cliche("good times were had by all"),
cliche("greased lightning"),
cliche("greek to me"),
cliche("green thumb"),
cliche("green-eyed monster"),
cliche("grist for the mill"),
cliche("growing like a weed"),
cliche("hair of the dog"),
cliche("hand to mouth"),
cliche("happy as a clam"),
cliche("happy as a lark"),
cliche("hasn't a clue"),
cliche("have a nice day"),
cliche("have a short fuse"),
cliche("have high hopes"),
cliche("have the last laugh"),
cliche("haven't got a row to hoe"),
cliche("he's got his hands full"),
cliche("head honcho"),
cliche("head over heels"),
cliche("hear a pin drop"),
cliche("heard it through the grapevine"),
cliche("heart's content"),
cliche("heavy as lead"),
cliche("hem and haw"),
cliche("high and dry"),
cliche("high and mighty"),
cliche("high as a kite"),
cliche("his own worst enemy"),
cliche("his work cut out for him"),
cliche("hit paydirt"),
cliche("hither and yon"),
cliche("hold your head up high"),
cliche("hold your horses"),
cliche("hold your own"),
cliche("hold your tongue"),
cliche("honest as the day is long"),
cliche("horns of a dilemma"),
cliche("horns of a dilemma"),
cliche("horse of a different color"),
cliche("hot under the collar"),
cliche("hour of need"),
cliche("icing on the cake"),
cliche("if and when"),
cliche("if the shoe fits"),
cliche("if the shoe were on the other foot"),
cliche("if you catch my drift"),
cliche("in a jam"),
cliche("in a jiffy"),
cliche("in a nutshell"),
cliche("in a pig's eye"),
cliche("in a pinch"),
cliche("in a word"),
cliche("in hot water"),
cliche("in light of"),
cliche("in reference to"),
cliche("in short supply"),
cliche("in the final analysis"),
cliche("in the foreseeable future"),
cliche("in the gutter"),
cliche("in the last analysis"),
cliche("in the long run"),
cliche("in the matter of"),
cliche("in the nick of time"),
cliche("in the thick of it"),
cliche("in your dreams"),
cliche("innocent bystander"),
cliche("it ain't over till the fat lady sings"),
cliche("it goes without saying"),
cliche("it stands to reason"),
cliche("it takes all kinds"),
cliche("it takes one to know one"),
cliche("it's a small world"),
cliche("it's not what you know, it's who you know"),
cliche("it's only a matter of time"),
cliche("ivory tower"),
cliche("jockey for position"),
cliche("jog your memory"),
cliche("joined at the hip"),
cliche("judge a book by its cover"),
cliche("jump down your throat"),
cliche("jump in with both feet"),
cliche("jump on the bandwagon"),
cliche("jump the gun"),
cliche("jump to conclusions"),
cliche("just a hop, skip, and a jump"),
cliche("just the ticket"),
cliche("justice is blind"),
cliche("keep a stiff upper lip"),
cliche("keep an eye on"),
cliche("keep it simple, stupid"),
cliche("keep the home fires burning"),
cliche("keep up with the joneses"),
cliche("keep your chin up"),
cliche("keep your fingers crossed"),
cliche("kick the bucket"),
cliche("kick up your heels"),
cliche("kick your feet up"),
cliche("kid in a candy store"),
cliche("kill two birds with one stone"),
cliche("kiss of death"),
cliche("knock it out of the park"),
cliche("knock on wood"),
cliche("knock your socks off"),
cliche("know him from adam"),
cliche("know the ropes"),
cliche("know the score"),
cliche("knuckle down"),
cliche("knuckle sandwich"),
cliche("knuckle under"),
cliche("labor of love"),
cliche("ladder of success"),
cliche("land on your feet"),
cliche("lap of luxury"),
cliche("last but not least"),
cliche("last but not least"),
cliche("last hurrah"),
cliche("last-ditch effort"),
cliche("law of the jungle"),
cliche("law of the land"),
cliche("lay down the law"),
cliche("leaps and bounds"),
cliche("let sleeping dogs lie"),
cliche("let the cat out of the bag"),
cliche("let the good times roll"),
cliche("let your hair down"),
cliche("let's talk turkey"),
cliche("letter perfect"),
cliche("lick your wounds"),
cliche("lies like a rug"),
cliche("life's a bitch"),
cliche("life's a grind"),
cliche("light at the end of the tunnel"),
cliche("lighter than a feather"),
cliche("lighter than air"),
cliche("like clockwork"),
cliche("like father like son"),
cliche("like taking candy from a baby"),
cliche("like there's no tomorrow"),
cliche("lion's share"),
cliche("live and learn"),
cliche("live and let live"),
cliche("long and short of it"),
cliche("long lost love"),
cliche("look before you leap"),
cliche("look down your nose"),
cliche("look what the cat dragged in"),
cliche("looking a gift horse in the mouth"),
cliche("looks like death warmed over"),
cliche("loose cannon"),
cliche("lose your head"),
cliche("lose your temper"),
cliche("loud as a horn"),
cliche("lounge lizard"),
cliche("loved and lost"),
cliche("low man on the totem pole"),
cliche("luck of the irish"),
cliche("luck of the draw"),
cliche("make a mockery of"),
cliche("make hay while the sun shines"),
cliche("make money hand over fist"),
cliche("make my day"),
cliche("make the best of a bad situation"),
cliche("make the best of it"),
cliche("make your blood boil"),
cliche("male chauvinism"),
cliche("man of few words"),
cliche("man's best friend"),
cliche("many and diverse"),
cliche("mark my words"),
cliche("meaningful dialogue"),
cliche("missed the boat on that one"),
cliche("moment in the sun"),
cliche("moment of glory"),
cliche("moment of truth"),
cliche("moment of truth"),
cliche("money to burn"),
cliche("more in sorrow than in anger"),
cliche("more power to you"),
cliche("more sinned against than sinning"),
cliche("more than one way to skin a cat"),
cliche("movers and shakers"),
cliche("moving experience"),
cliche("my better half"),
cliche("naked as a jaybird"),
cliche("naked truth"),
cliche("neat as a pin"),
cliche("needle in a haystack"),
cliche("needless to say"),
cliche("neither here nor there"),
cliche("never look back"),
cliche("never say never"),
cliche("nip and tuck"),
cliche("nip in the bud"),
cliche("nip it in the bud"),
cliche("no guts, no glory"),
cliche("no love lost"),
cliche("no pain, no gain"),
cliche("no skin off my back"),
cliche("no stone unturned"),
cliche("no time like the present"),
cliche("no use crying over spilled milk"),
cliche("nose to the grindstone"),
cliche("not a hope in hell"),
cliche("not a minute's peace"),
cliche("not in my backyard"),
cliche("not playing with a full deck"),
cliche("not the end of the world"),
cliche("not written in stone"),
cliche("nothing to sneeze at"),
cliche("nothing ventured nothing gained"),
cliche("now we're cooking"),
cliche("off the top of my head"),
cliche("off the wagon"),
cliche("off the wall"),
cliche("old hat"),
cliche("olden days"),
cliche("older and wiser"),
cliche("older than methuselah"),
cliche("older than dirt"),
cliche("on a roll"),
cliche("on cloud nine"),
cliche("on pins and needles"),
cliche("on the bandwagon"),
cliche("on the money"),
cliche("on the nose"),
cliche("on the right track"),
cliche("on the rocks"),
cliche("on the same page"),
cliche("on the spot"),
cliche("on the tip of my tongue"),
cliche("on the wagon"),
cliche("on thin ice"),
cliche("once bitten, twice shy"),
cliche("one bad apple doesn't spoil the bushel"),
cliche("one born every minute"),
cliche("one brick short"),
cliche("one foot in the grave"),
cliche("one in a million"),
cliche("one red cent"),
cliche("only game in town"),
cliche("open a can of worms"),
cliche("open and shut case"),
cliche("open the flood gates"),
cliche("opportunity doesn't knock twice"),
cliche("out of pocket"),
cliche("out of sight, out of mind"),
cliche("out of the frying pan into the fire"),
cliche("out of the woods"),
cliche("out on a limb"),
cliche("over a barrel"),
cliche("over the hump"),
cliche("pain and suffering"),
cliche("pain in the"),
cliche("panic button"),
cliche("par for the course"),
cliche("par for the course"),
cliche("part and parcel"),
cliche("party pooper"),
cliche("pass the buck"),
cliche("patience is a virtue"),
cliche("pay through the nose"),
cliche("penny pincher"),
cliche("perfect storm"),
cliche("pig in a poke"),
cliche("pile it on"),
cliche("pillar of the community"),
cliche("pin your hopes on"),
cliche("pitter patter of little feet"),
cliche("plain as day"),
cliche("plain as the nose on your face"),
cliche("play by the rules"),
cliche("play your cards right"),
cliche("playing the field"),
cliche("playing with fire"),
cliche("please feel free to"),
cliche("pleased as punch"),
cliche("plenty of fish in the sea"),
cliche("point with pride"),
cliche("poor as a church mouse"),
cliche("pot calling the kettle black"),
cliche("presidential timber"),
cliche("pretty as a picture"),
cliche("pull a fast one"),
cliche("pull your punches"),
cliche("pulled no punches"),
cliche("pulling your leg"),
cliche("pure as the driven snow"),
cliche("pursuant to your request"),
cliche("put it in a nutshell"),
cliche("put one over on you"),
cliche("put the cart before the horse"),
cliche("put the pedal to the metal"),
cliche("put your best foot forward"),
cliche("put your foot down"),
cliche("quantum jump"),
cliche("quantum leap"),
cliche("quick as a bunny"),
cliche("quick as a lick"),
cliche("quick as a wink"),
cliche("quick as lightning"),
cliche("quiet as a dormouse"),
cliche("rags to riches"),
cliche("raining buckets"),
cliche("raining cats and dogs"),
cliche("rank and file"),
cliche("rat race"),
cliche("reap what you sow"),
cliche("red as a beet"),
cliche("red herring"),
cliche("redound to one's credit"),
cliche("redound to the benefit of"),
cliche("regarding the matter of"),
cliche("reinvent the wheel"),
cliche("rich and famous"),
cliche("rings a bell"),
cliche("ripe old age"),
cliche("ripped me off"),
cliche("rise and shine"),
cliche("road to hell is paved with good intentions"),
cliche("rob peter to pay paul"),
cliche("roll over in the grave"),
cliche("rub the wrong way"),
cliche("ruled the roost"),
cliche("running in circles"),
cliche("sad but true"),
cliche("sadder but wiser"),
cliche("salt of the earth"),
cliche("scared stiff"),
cliche("scared to death"),
cliche("sea change"),
cliche("sealed with a kiss"),
cliche("second to none"),
cliche("see eye to eye"),
cliche("seen the light"),
cliche("seize the day"),
cliche("set the record straight"),
cliche("set the world on fire"),
cliche("set your teeth on edge"),
cliche("sharp as a tack"),
cliche("shirked his duties"),
cliche("shoot for the moon"),
cliche("shoot the breeze"),
cliche("shot in the dark"),
cliche("shoulder to the wheel"),
cliche("sick as a dog"),
cliche("sigh of relief"),
cliche("signed, sealed, and delivered"),
cliche("sink or swim"),
cliche("six of one, half a dozen of another"),
cliche("six of one, half a dozen of the other"),
cliche("skating on thin ice"),
cliche("slept like a log"),
cliche("slinging mud"),
cliche("slippery as an eel"),
cliche("slow as molasses"),
cliche("slowly but surely"),
cliche("smart as a whip"),
cliche("smooth as a baby's bottom"),
cliche("sneaking suspicion"),
cliche("snug as a bug in a rug"),
cliche("sow wild oats"),
cliche("spare the rod, spoil the child"),
cliche("speak of the devil"),
cliche("spilled the beans"),
cliche("spinning your wheels"),
cliche("spitting image of"),
cliche("spoke with relish"),
cliche("spread like wildfire"),
cliche("spring to life"),
cliche("squeaky wheel gets the grease"),
cliche("stands out like a sore thumb"),
cliche("start from scratch"),
cliche("stick in the mud"),
cliche("still waters run deep"),
cliche("stitch in time"),
cliche("stop and smell the roses"),
cliche("straight as an arrow"),
cliche("straw that broke the camel's back"),
cliche("stretched to the breaking point"),
cliche("strong as an ox"),
cliche("stubborn as a mule"),
cliche("stuff that dreams are made of"),
cliche("stuffed shirt"),
cliche("sweating blood"),
cliche("sweating bullets"),
cliche("take a load off"),
cliche("take one for the team"),
cliche("take the bait"),
cliche("take the bull by the horns"),
cliche("take the plunge"),
cliche("takes one to know one"),
cliche("takes two to tango"),
cliche("than you can shake a stick at"),
cliche("the cream of the crop"),
cliche("the cream rises to the top"),
cliche("the more the merrier"),
cliche("the real mccoy"),
cliche("the real deal"),
cliche("the red carpet treatment"),
cliche("the same old story"),
cliche("the straw that broke the camel's back"),
cliche("there is no accounting for taste"),
cliche("thick as a brick"),
cliche("thick as thieves"),
cliche("thick as thieves"),
cliche("thin as a rail"),
cliche("think outside of the box"),
cliche("thinking outside the box"),
cliche("third time's the charm"),
cliche("this day and age"),
cliche("this hurts me worse than it hurts you"),
cliche("this point in time"),
cliche("this will acknowledge"),
cliche("thought leaders?"),
cliche("three sheets to the wind"),
cliche("through thick and thin"),
cliche("throw in the towel"),
cliche("throw the baby out with the bathwater"),
cliche("tie one on"),
cliche("tighter than a drum"),
cliche("time and time again"),
cliche("time is of the essence"),
cliche("tip of the iceberg"),
cliche("tired but happy"),
cliche("to coin a phrase"),
cliche("to each his own"),
cliche("to make a long story short"),
cliche("to the best of my knowledge"),
cliche("toe the line"),
cliche("tongue in cheek"),
cliche("too good to be true"),
cliche("too hot to handle"),
cliche("too numerous to mention"),
cliche("touch with a ten foot pole"),
cliche("tough as nails"),
cliche("trial and error"),
cliche("trials and tribulations"),
cliche("tried and true"),
cliche("trip down memory lane"),
cliche("twist of fate"),
cliche("two cents worth"),
cliche("two peas in a pod"),
cliche("ugly as sin"),
cliche("under the counter"),
cliche("under the gun"),
cliche("under the same roof"),
cliche("under the weather"),
cliche("until the cows come home"),
cliche("unvarnished truth"),
cliche("up the creek"),
cliche("uphill battle"),
cliche("upper crust"),
cliche("upset the applecart"),
cliche("vain attempt"),
cliche("vain effort"),
cliche("vanquish the enemy"),
cliche("various and sundry"),
cliche("vested interest"),
cliche("viable alternative"),
cliche("waiting for the other shoe to drop"),
cliche("wakeup call"),
cliche("warm welcome"),
cliche("watch your p's and q's"),
cliche("watch your tongue"),
cliche("watching the clock"),
cliche("water under the bridge"),
cliche("wax eloquent"),
cliche("wax poetic"),
cliche("we are pleased to advice"),
cliche("we regret to inform you"),
cliche("we wish to state"),
cliche("we've got a situation here"),
cliche("weather the storm"),
cliche("weed them out"),
cliche("week of sundays"),
cliche("went belly up"),
cliche("wet behind the ears"),
cliche("what goes around comes around"),
cliche("what you see is what you get"),
cliche("when it rains, it pours"),
cliche("when push comes to shove"),
cliche("when the cat's away"),
cliche("when the going gets tough, the tough get going"),
cliche("whet (?:the|your) appetite"),
cliche("white as a sheet"),
cliche("whole ball of wax"),
cliche("whole hog"),
cliche("whole nine yards"),
cliche("wild goose chase"),
cliche("will wonders never cease?"),
cliche("wisdom of the ages"),
cliche("wise as an owl"),
cliche("wolf at the door"),
cliche("wool pulled over our eyes"),
cliche("words fail me"),
cliche("work like a dog"),
cliche("world weary"),
cliche("worst nightmare"),
cliche("worth its weight in gold"),
cliche("writ large"),
cliche("wrong side of the bed"),
cliche("yanking your chain"),
cliche("yappy as a dog"),
cliche("years young"),
cliche("you are hereby advised that"),
cliche("you are what you eat"),
cliche("you can run but you can't hide"),
cliche("you only live once"),
cliche("you're the boss "),
cliche("young and foolish"),
cliche("young and vibrant"),
}
} | cliche("day in, day out"),
cliche("dead as a doornail"),
cliche("decision-making process"),
cliche("devil is in the details"), | random_line_split |
spi_host.rs | use crate::hil::spi_host::SpiHost;
use core::cell::Cell;
use core::cmp::min;
use kernel::common::cells::{OptionalCell, TakeCell};
use kernel::common::registers::{register_bitfields, register_structs, ReadOnly, ReadWrite, WriteOnly};
use kernel::common::StaticRef;
use kernel::hil::spi::{ClockPolarity, ClockPhase, SpiMaster, SpiMasterClient};
use kernel::ReturnCode;
// The TX and RX FIFOs both have the same length. We write and read at the same
// time.
// Registers for the SPI host controller
register_structs! {
Registers {
(0x0000 => ctrl: ReadWrite<u32, CTRL::Register>),
(0x0004 => xact: ReadWrite<u32, XACT::Register>),
(0x0008 => ictrl: ReadWrite<u32, ICTRL::Register>),
(0x000c => istate: ReadOnly<u32, ISTATE::Register>),
(0x0010 => istate_clr: ReadWrite<u32, ISTATE_CLR::Register>),
(0x0014 => _reserved),
(0x1000 => tx_fifo: [WriteOnly<u8>; 128]),
(0x1080 => rx_fifo: [ReadOnly<u8>; 128]),
(0x1100 => @END),
}
}
register_bitfields![u32,
CTRL [ | CSBSU OFFSET(2) NUMBITS(4) [],
/// CSB from SCK hold time in SCK cycles + 1 (defined with respect to
/// the last SCK edge)
CSBHLD OFFSET(6) NUMBITS(4) [],
/// SPI Clk Divider. Actual divider is IDIV+1. A value of 0 gives divide
/// by 1 clock, 1 gives divide by 2 etc.
IDIV OFFSET(10) NUMBITS(12) [],
/// Polarity of CSB signal. 0:active low 1:active high
CSBPOL OFFSET(22) NUMBITS(1) [],
/// Order in which bits of byte are sent. 0: send bit 0 first. 1: send
/// bit 7 first
TXBITOR OFFSET(23) NUMBITS(1) [],
/// Order in which bytes of buffer word are sent.
/// 0: send byte 0 first. 1: send byte 3 first
TXBYTOR OFFSET(24) NUMBITS(1) [],
/// Order in which received bits are packed into byte.
/// 0: first bit received is bit0 1: last bit received is bit 0
RXBITOR OFFSET(25) NUMBITS(1) [],
/// Order in which received bytes are packed into word.
/// 0: first byte received is byte 0 1: first byte received is byte 3
RXBYTOR OFFSET(26) NUMBITS(1) [],
/// SPI Passthrough Mode. 0: Disable, 1: Enable. This is the host side
/// control of whether passthrough is allowed. In order for full
/// passthrough functionality, both the host and device passthrough
/// functionality have to be enabled
ENPASSTHRU OFFSET(27) NUMBITS(1) []
],
XACT [
/// Initiate transaction in buffer
START OFFSET(0) NUMBITS(1) [],
/// Bits-1 in last byte transferred. The default assumes last byte will
/// have 8 bits, this should be sufficient for most usage.
BCNT OFFSET(1) NUMBITS(3) [],
/// Total number of transactions in bytes-1. If 64 bytes are to be
/// transferred, this should be programmed as 63.
SIZE OFFSET(4) NUMBITS(7) [],
/// Poll for ready
RDY_POLL OFFSET(11) NUMBITS(1) [],
/// Delay before polling in PCLK cycles + 1
RDY_POLL_DLY OFFSET(12) NUMBITS(5) []
],
ICTRL [
/// TX interrupt enable
TXDONE OFFSET(0) NUMBITS(1) []
],
ISTATE [
/// TX done interrupt
TXDONE OFFSET(0) NUMBITS(1) []
],
ISTATE_CLR [
/// TX done interrupt clear
TXDONE OFFSET(0) NUMBITS(1) []
]
];
const SPI_HOST0_BASE_ADDR: u32 = 0x4070_0000;
const SPI_HOST1_BASE_ADDR: u32 = 0x4071_0000;
const SPI_HOST0_REGISTERS: StaticRef<Registers> =
unsafe { StaticRef::new(SPI_HOST0_BASE_ADDR as *const Registers) };
const SPI_HOST1_REGISTERS: StaticRef<Registers> =
unsafe { StaticRef::new(SPI_HOST1_BASE_ADDR as *const Registers) };
pub static mut SPI_HOST0: SpiHostHardware = SpiHostHardware::new(SPI_HOST0_REGISTERS);
pub static mut SPI_HOST1: SpiHostHardware = SpiHostHardware::new(SPI_HOST1_REGISTERS);
/// A SPI Host
pub struct SpiHostHardware {
registers: StaticRef<Registers>,
transaction_len: Cell<usize>,
tx_buffer: TakeCell<'static, [u8]>,
rx_buffer: TakeCell<'static, [u8]>,
client: OptionalCell<&'static dyn SpiMasterClient>,
}
impl SpiHostHardware {
const fn new(base_addr: StaticRef<Registers>) -> SpiHostHardware {
SpiHostHardware {
registers: base_addr,
transaction_len: Cell::new(0),
tx_buffer: TakeCell::empty(),
rx_buffer: TakeCell::empty(),
client: OptionalCell::empty(),
}
}
pub fn init(&self) {
self.registers.ctrl.write(
CTRL::CPOL::CLEAR +
CTRL::CPHA::CLEAR +
CTRL::CSBSU::CLEAR +
CTRL::CSBHLD::CLEAR +
CTRL::IDIV.val(2) +
CTRL::CSBPOL::CLEAR +
CTRL::TXBITOR::SET +
CTRL::TXBYTOR::CLEAR +
CTRL::RXBITOR::SET +
CTRL::RXBYTOR::CLEAR +
CTRL::ENPASSTHRU::CLEAR);
self.registers.xact.write(
XACT::START::CLEAR +
XACT::BCNT.val(7) +
XACT::SIZE.val(0) +
XACT::RDY_POLL::CLEAR +
XACT::RDY_POLL_DLY.val(0));
}
fn enable_tx_interrupt(&self) {
self.registers.ictrl.modify(ICTRL::TXDONE::SET);
}
fn disable_tx_interrupt(&self) {
self.registers.ictrl.modify(ICTRL::TXDONE::CLEAR);
}
pub fn handle_interrupt(&self) {
//debug!("SpiHostHardware::handle_interrupt: ISTATE = {:08x}", self.registers.istate.get());
if self.registers.istate.is_set(ISTATE::TXDONE) {
self.registers.istate_clr.write(ISTATE_CLR::TXDONE::SET);
self.client.map(|client| {
self.tx_buffer.take()
.map(|tx_buf| {
self.rx_buffer
.map(|rx_buf| {
self.read_data(rx_buf);
});
client.read_write_done(
tx_buf,
self.rx_buffer.take(),
self.transaction_len.get())
});
});
}
self.disable_tx_interrupt();
}
fn start_transaction(
&self,
write_buffer: Option<&'static mut [u8]>,
read_buffer: Option<&'static mut [u8]>,
transaction_len: usize) -> ReturnCode {
//debug!("SpiHostHardware::start_transaction: transaction_len={}", transaction_len);
// The transaction needs at least one byte.
// It also cannot have more bytes than tx_fifo or rx_fifo is long.
if (transaction_len == 0) ||
(transaction_len > self.registers.tx_fifo.len()) ||
(transaction_len > self.registers.rx_fifo.len()) {
//debug!("SpiHostHardware::start_transaction: Invalid transaction_len={}", transaction_len);
return ReturnCode::ESIZE;
}
self.registers.xact.modify(XACT::BCNT.val(7));
self.registers.xact.modify(XACT::SIZE.val((transaction_len - 1) as u32));
let mut tx_buf_len = 0;
write_buffer.as_ref().map(|tx_buf| {
tx_buf_len = min(tx_buf.len(), transaction_len);
for idx in 0..tx_buf_len {
self.registers.tx_fifo[idx].set(tx_buf[idx]);
}
});
// Clear the TX FIFO for additional bytes not supplied by write_buffer.
// Since we have no control over how many bytes the SPI host reads, we
// want to make sure to not accidentally leak information that made it
// into the TX FIFO beyond the length of the `write_buffer`.
for idx in tx_buf_len..transaction_len {
self.registers.tx_fifo[idx].set(0xff);
}
write_buffer.map(|buf| {
self.tx_buffer.replace(buf);
});
read_buffer.map(|buf| {
self.rx_buffer.replace(buf);
});
self.transaction_len.set(transaction_len);
self.registers.istate_clr.write(ISTATE_CLR::TXDONE::SET);
self.enable_tx_interrupt();
self.registers.xact.modify(XACT::START::SET);
ReturnCode::SUCCESS
}
fn read_data(&self, read_buffer: &mut [u8]) {
let read_len = min(read_buffer.len(), self.transaction_len.get());
for idx in 0..read_len {
let val = self.registers.rx_fifo[idx].get();
read_buffer[idx] = val;
}
}
}
impl SpiHost for SpiHostHardware {
fn spi_device_spi_host_passthrough(&self, enabled: bool) {
self.registers.ctrl.modify(
if enabled { CTRL::ENPASSTHRU::SET } else { CTRL::ENPASSTHRU::CLEAR });
}
fn wait_busy_clear_in_transactions(&self, enabled: bool) {
self.registers.xact.modify(
if enabled { XACT::RDY_POLL::SET } else { XACT::RDY_POLL::CLEAR });
}
}
impl SpiMaster for SpiHostHardware {
type ChipSelect = bool;
fn set_client(&self, client: &'static dyn kernel::hil::spi::SpiMasterClient) {
self.client.set(client);
}
fn init(&self) {}
fn is_busy(&self) -> bool {
self.registers.istate.is_set(ISTATE::TXDONE)
}
fn read_write_bytes(
&self,
write_buffer: &'static mut [u8],
read_buffer: Option<&'static mut [u8]>,
len: usize,
) -> ReturnCode {
// If busy, don't start
if self.is_busy() {
return ReturnCode::EBUSY;
}
self.start_transaction(Some(write_buffer), read_buffer, len)
}
fn write_byte(&self, _val: u8) {
panic!("write_byte is not implemented");
}
fn read_byte(&self) -> u8 {
panic!("read_byte is not implemented");
}
fn read_write_byte(&self, _val: u8) -> u8 {
panic!("read_write_byte is not implemented");
}
fn specify_chip_select(&self, _cs: Self::ChipSelect) {
// Nothing to be done
}
/// Returns the actual rate set
fn set_rate(&self, _rate: u32) -> u32 {
panic!("set_rate is not implemented");
}
fn get_rate(&self) -> u32 {
panic!("get_rate is not implemented");
}
fn set_clock(&self, _polarity: ClockPolarity) {
panic!("set_clock is not implemented");
}
fn get_clock(&self) -> ClockPolarity {
panic!("get_clock is not implemented");
}
fn set_phase(&self, _phase: ClockPhase) {
panic!("set_phase is not implemented");
}
fn get_phase(&self) -> ClockPhase {
panic!("get_phase is not implemented");
}
fn hold_low(&self) {
panic!("hold_low is not implemented");
}
fn release_low(&self) {
// Nothing to do, since this is the only mode supported.
}
} | /// CPOL setting
CPOL OFFSET(0) NUMBITS(1) [],
/// CPHA setting
CPHA OFFSET(1) NUMBITS(1) [],
/// CSB to SCK setup time in SCK cycles + 1.5 | random_line_split |
spi_host.rs | use crate::hil::spi_host::SpiHost;
use core::cell::Cell;
use core::cmp::min;
use kernel::common::cells::{OptionalCell, TakeCell};
use kernel::common::registers::{register_bitfields, register_structs, ReadOnly, ReadWrite, WriteOnly};
use kernel::common::StaticRef;
use kernel::hil::spi::{ClockPolarity, ClockPhase, SpiMaster, SpiMasterClient};
use kernel::ReturnCode;
// The TX and RX FIFOs both have the same length. We write and read at the same
// time.
// Registers for the SPI host controller
register_structs! {
Registers {
(0x0000 => ctrl: ReadWrite<u32, CTRL::Register>),
(0x0004 => xact: ReadWrite<u32, XACT::Register>),
(0x0008 => ictrl: ReadWrite<u32, ICTRL::Register>),
(0x000c => istate: ReadOnly<u32, ISTATE::Register>),
(0x0010 => istate_clr: ReadWrite<u32, ISTATE_CLR::Register>),
(0x0014 => _reserved),
(0x1000 => tx_fifo: [WriteOnly<u8>; 128]),
(0x1080 => rx_fifo: [ReadOnly<u8>; 128]),
(0x1100 => @END),
}
}
register_bitfields![u32,
CTRL [
/// CPOL setting
CPOL OFFSET(0) NUMBITS(1) [],
/// CPHA setting
CPHA OFFSET(1) NUMBITS(1) [],
/// CSB to SCK setup time in SCK cycles + 1.5
CSBSU OFFSET(2) NUMBITS(4) [],
/// CSB from SCK hold time in SCK cycles + 1 (defined with respect to
/// the last SCK edge)
CSBHLD OFFSET(6) NUMBITS(4) [],
/// SPI Clk Divider. Actual divider is IDIV+1. A value of 0 gives divide
/// by 1 clock, 1 gives divide by 2 etc.
IDIV OFFSET(10) NUMBITS(12) [],
/// Polarity of CSB signal. 0:active low 1:active high
CSBPOL OFFSET(22) NUMBITS(1) [],
/// Order in which bits of byte are sent. 0: send bit 0 first. 1: send
/// bit 7 first
TXBITOR OFFSET(23) NUMBITS(1) [],
/// Order in which bytes of buffer word are sent.
/// 0: send byte 0 first. 1: send byte 3 first
TXBYTOR OFFSET(24) NUMBITS(1) [],
/// Order in which received bits are packed into byte.
/// 0: first bit received is bit0 1: last bit received is bit 0
RXBITOR OFFSET(25) NUMBITS(1) [],
/// Order in which received bytes are packed into word.
/// 0: first byte received is byte 0 1: first byte received is byte 3
RXBYTOR OFFSET(26) NUMBITS(1) [],
/// SPI Passthrough Mode. 0: Disable, 1: Enable. This is the host side
/// control of whether passthrough is allowed. In order for full
/// passthrough functionality, both the host and device passthrough
/// functionality have to be enabled
ENPASSTHRU OFFSET(27) NUMBITS(1) []
],
XACT [
/// Initiate transaction in buffer
START OFFSET(0) NUMBITS(1) [],
/// Bits-1 in last byte transferred. The default assumes last byte will
/// have 8 bits, this should be sufficient for most usage.
BCNT OFFSET(1) NUMBITS(3) [],
/// Total number of transactions in bytes-1. If 64 bytes are to be
/// transferred, this should be programmed as 63.
SIZE OFFSET(4) NUMBITS(7) [],
/// Poll for ready
RDY_POLL OFFSET(11) NUMBITS(1) [],
/// Delay before polling in PCLK cycles + 1
RDY_POLL_DLY OFFSET(12) NUMBITS(5) []
],
ICTRL [
/// TX interrupt enable
TXDONE OFFSET(0) NUMBITS(1) []
],
ISTATE [
/// TX done interrupt
TXDONE OFFSET(0) NUMBITS(1) []
],
ISTATE_CLR [
/// TX done interrupt clear
TXDONE OFFSET(0) NUMBITS(1) []
]
];
const SPI_HOST0_BASE_ADDR: u32 = 0x4070_0000;
const SPI_HOST1_BASE_ADDR: u32 = 0x4071_0000;
const SPI_HOST0_REGISTERS: StaticRef<Registers> =
unsafe { StaticRef::new(SPI_HOST0_BASE_ADDR as *const Registers) };
const SPI_HOST1_REGISTERS: StaticRef<Registers> =
unsafe { StaticRef::new(SPI_HOST1_BASE_ADDR as *const Registers) };
pub static mut SPI_HOST0: SpiHostHardware = SpiHostHardware::new(SPI_HOST0_REGISTERS);
pub static mut SPI_HOST1: SpiHostHardware = SpiHostHardware::new(SPI_HOST1_REGISTERS);
/// A SPI Host
pub struct SpiHostHardware {
registers: StaticRef<Registers>,
transaction_len: Cell<usize>,
tx_buffer: TakeCell<'static, [u8]>,
rx_buffer: TakeCell<'static, [u8]>,
client: OptionalCell<&'static dyn SpiMasterClient>,
}
impl SpiHostHardware {
const fn new(base_addr: StaticRef<Registers>) -> SpiHostHardware {
SpiHostHardware {
registers: base_addr,
transaction_len: Cell::new(0),
tx_buffer: TakeCell::empty(),
rx_buffer: TakeCell::empty(),
client: OptionalCell::empty(),
}
}
pub fn init(&self) {
self.registers.ctrl.write(
CTRL::CPOL::CLEAR +
CTRL::CPHA::CLEAR +
CTRL::CSBSU::CLEAR +
CTRL::CSBHLD::CLEAR +
CTRL::IDIV.val(2) +
CTRL::CSBPOL::CLEAR +
CTRL::TXBITOR::SET +
CTRL::TXBYTOR::CLEAR +
CTRL::RXBITOR::SET +
CTRL::RXBYTOR::CLEAR +
CTRL::ENPASSTHRU::CLEAR);
self.registers.xact.write(
XACT::START::CLEAR +
XACT::BCNT.val(7) +
XACT::SIZE.val(0) +
XACT::RDY_POLL::CLEAR +
XACT::RDY_POLL_DLY.val(0));
}
fn enable_tx_interrupt(&self) {
self.registers.ictrl.modify(ICTRL::TXDONE::SET);
}
fn disable_tx_interrupt(&self) {
self.registers.ictrl.modify(ICTRL::TXDONE::CLEAR);
}
pub fn handle_interrupt(&self) {
//debug!("SpiHostHardware::handle_interrupt: ISTATE = {:08x}", self.registers.istate.get());
if self.registers.istate.is_set(ISTATE::TXDONE) {
self.registers.istate_clr.write(ISTATE_CLR::TXDONE::SET);
self.client.map(|client| {
self.tx_buffer.take()
.map(|tx_buf| {
self.rx_buffer
.map(|rx_buf| {
self.read_data(rx_buf);
});
client.read_write_done(
tx_buf,
self.rx_buffer.take(),
self.transaction_len.get())
});
});
}
self.disable_tx_interrupt();
}
fn start_transaction(
&self,
write_buffer: Option<&'static mut [u8]>,
read_buffer: Option<&'static mut [u8]>,
transaction_len: usize) -> ReturnCode {
//debug!("SpiHostHardware::start_transaction: transaction_len={}", transaction_len);
// The transaction needs at least one byte.
// It also cannot have more bytes than tx_fifo or rx_fifo is long.
if (transaction_len == 0) ||
(transaction_len > self.registers.tx_fifo.len()) ||
(transaction_len > self.registers.rx_fifo.len()) {
//debug!("SpiHostHardware::start_transaction: Invalid transaction_len={}", transaction_len);
return ReturnCode::ESIZE;
}
self.registers.xact.modify(XACT::BCNT.val(7));
self.registers.xact.modify(XACT::SIZE.val((transaction_len - 1) as u32));
let mut tx_buf_len = 0;
write_buffer.as_ref().map(|tx_buf| {
tx_buf_len = min(tx_buf.len(), transaction_len);
for idx in 0..tx_buf_len {
self.registers.tx_fifo[idx].set(tx_buf[idx]);
}
});
// Clear the TX FIFO for additional bytes not supplied by write_buffer.
// Since we have no control over how many bytes the SPI host reads, we
// want to make sure to not accidentally leak information that made it
// into the TX FIFO beyond the length of the `write_buffer`.
for idx in tx_buf_len..transaction_len {
self.registers.tx_fifo[idx].set(0xff);
}
write_buffer.map(|buf| {
self.tx_buffer.replace(buf);
});
read_buffer.map(|buf| {
self.rx_buffer.replace(buf);
});
self.transaction_len.set(transaction_len);
self.registers.istate_clr.write(ISTATE_CLR::TXDONE::SET);
self.enable_tx_interrupt();
self.registers.xact.modify(XACT::START::SET);
ReturnCode::SUCCESS
}
fn read_data(&self, read_buffer: &mut [u8]) {
let read_len = min(read_buffer.len(), self.transaction_len.get());
for idx in 0..read_len {
let val = self.registers.rx_fifo[idx].get();
read_buffer[idx] = val;
}
}
}
impl SpiHost for SpiHostHardware {
fn spi_device_spi_host_passthrough(&self, enabled: bool) {
self.registers.ctrl.modify(
if enabled { CTRL::ENPASSTHRU::SET } else { CTRL::ENPASSTHRU::CLEAR });
}
fn wait_busy_clear_in_transactions(&self, enabled: bool) {
self.registers.xact.modify(
if enabled { XACT::RDY_POLL::SET } else { XACT::RDY_POLL::CLEAR });
}
}
impl SpiMaster for SpiHostHardware {
type ChipSelect = bool;
fn set_client(&self, client: &'static dyn kernel::hil::spi::SpiMasterClient) {
self.client.set(client);
}
fn init(&self) {}
fn is_busy(&self) -> bool {
self.registers.istate.is_set(ISTATE::TXDONE)
}
fn read_write_bytes(
&self,
write_buffer: &'static mut [u8],
read_buffer: Option<&'static mut [u8]>,
len: usize,
) -> ReturnCode {
// If busy, don't start
if self.is_busy() {
return ReturnCode::EBUSY;
}
self.start_transaction(Some(write_buffer), read_buffer, len)
}
fn write_byte(&self, _val: u8) {
panic!("write_byte is not implemented");
}
fn read_byte(&self) -> u8 {
panic!("read_byte is not implemented");
}
fn read_write_byte(&self, _val: u8) -> u8 {
panic!("read_write_byte is not implemented");
}
fn specify_chip_select(&self, _cs: Self::ChipSelect) {
// Nothing to be done
}
/// Returns the actual rate set
fn set_rate(&self, _rate: u32) -> u32 |
fn get_rate(&self) -> u32 {
panic!("get_rate is not implemented");
}
fn set_clock(&self, _polarity: ClockPolarity) {
panic!("set_clock is not implemented");
}
fn get_clock(&self) -> ClockPolarity {
panic!("get_clock is not implemented");
}
fn set_phase(&self, _phase: ClockPhase) {
panic!("set_phase is not implemented");
}
fn get_phase(&self) -> ClockPhase {
panic!("get_phase is not implemented");
}
fn hold_low(&self) {
panic!("hold_low is not implemented");
}
fn release_low(&self) {
// Nothing to do, since this is the only mode supported.
}
}
| {
panic!("set_rate is not implemented");
} | identifier_body |
spi_host.rs | use crate::hil::spi_host::SpiHost;
use core::cell::Cell;
use core::cmp::min;
use kernel::common::cells::{OptionalCell, TakeCell};
use kernel::common::registers::{register_bitfields, register_structs, ReadOnly, ReadWrite, WriteOnly};
use kernel::common::StaticRef;
use kernel::hil::spi::{ClockPolarity, ClockPhase, SpiMaster, SpiMasterClient};
use kernel::ReturnCode;
// The TX and RX FIFOs both have the same length. We write and read at the same
// time.
// Registers for the SPI host controller
register_structs! {
Registers {
(0x0000 => ctrl: ReadWrite<u32, CTRL::Register>),
(0x0004 => xact: ReadWrite<u32, XACT::Register>),
(0x0008 => ictrl: ReadWrite<u32, ICTRL::Register>),
(0x000c => istate: ReadOnly<u32, ISTATE::Register>),
(0x0010 => istate_clr: ReadWrite<u32, ISTATE_CLR::Register>),
(0x0014 => _reserved),
(0x1000 => tx_fifo: [WriteOnly<u8>; 128]),
(0x1080 => rx_fifo: [ReadOnly<u8>; 128]),
(0x1100 => @END),
}
}
register_bitfields![u32,
CTRL [
/// CPOL setting
CPOL OFFSET(0) NUMBITS(1) [],
/// CPHA setting
CPHA OFFSET(1) NUMBITS(1) [],
/// CSB to SCK setup time in SCK cycles + 1.5
CSBSU OFFSET(2) NUMBITS(4) [],
/// CSB from SCK hold time in SCK cycles + 1 (defined with respect to
/// the last SCK edge)
CSBHLD OFFSET(6) NUMBITS(4) [],
/// SPI Clk Divider. Actual divider is IDIV+1. A value of 0 gives divide
/// by 1 clock, 1 gives divide by 2 etc.
IDIV OFFSET(10) NUMBITS(12) [],
/// Polarity of CSB signal. 0:active low 1:active high
CSBPOL OFFSET(22) NUMBITS(1) [],
/// Order in which bits of byte are sent. 0: send bit 0 first. 1: send
/// bit 7 first
TXBITOR OFFSET(23) NUMBITS(1) [],
/// Order in which bytes of buffer word are sent.
/// 0: send byte 0 first. 1: send byte 3 first
TXBYTOR OFFSET(24) NUMBITS(1) [],
/// Order in which received bits are packed into byte.
/// 0: first bit received is bit0 1: last bit received is bit 0
RXBITOR OFFSET(25) NUMBITS(1) [],
/// Order in which received bytes are packed into word.
/// 0: first byte received is byte 0 1: first byte received is byte 3
RXBYTOR OFFSET(26) NUMBITS(1) [],
/// SPI Passthrough Mode. 0: Disable, 1: Enable. This is the host side
/// control of whether passthrough is allowed. In order for full
/// passthrough functionality, both the host and device passthrough
/// functionality have to be enabled
ENPASSTHRU OFFSET(27) NUMBITS(1) []
],
XACT [
/// Initiate transaction in buffer
START OFFSET(0) NUMBITS(1) [],
/// Bits-1 in last byte transferred. The default assumes last byte will
/// have 8 bits, this should be sufficient for most usage.
BCNT OFFSET(1) NUMBITS(3) [],
/// Total number of transactions in bytes-1. If 64 bytes are to be
/// transferred, this should be programmed as 63.
SIZE OFFSET(4) NUMBITS(7) [],
/// Poll for ready
RDY_POLL OFFSET(11) NUMBITS(1) [],
/// Delay before polling in PCLK cycles + 1
RDY_POLL_DLY OFFSET(12) NUMBITS(5) []
],
ICTRL [
/// TX interrupt enable
TXDONE OFFSET(0) NUMBITS(1) []
],
ISTATE [
/// TX done interrupt
TXDONE OFFSET(0) NUMBITS(1) []
],
ISTATE_CLR [
/// TX done interrupt clear
TXDONE OFFSET(0) NUMBITS(1) []
]
];
const SPI_HOST0_BASE_ADDR: u32 = 0x4070_0000;
const SPI_HOST1_BASE_ADDR: u32 = 0x4071_0000;
const SPI_HOST0_REGISTERS: StaticRef<Registers> =
unsafe { StaticRef::new(SPI_HOST0_BASE_ADDR as *const Registers) };
const SPI_HOST1_REGISTERS: StaticRef<Registers> =
unsafe { StaticRef::new(SPI_HOST1_BASE_ADDR as *const Registers) };
pub static mut SPI_HOST0: SpiHostHardware = SpiHostHardware::new(SPI_HOST0_REGISTERS);
pub static mut SPI_HOST1: SpiHostHardware = SpiHostHardware::new(SPI_HOST1_REGISTERS);
/// A SPI Host
pub struct SpiHostHardware {
registers: StaticRef<Registers>,
transaction_len: Cell<usize>,
tx_buffer: TakeCell<'static, [u8]>,
rx_buffer: TakeCell<'static, [u8]>,
client: OptionalCell<&'static dyn SpiMasterClient>,
}
impl SpiHostHardware {
const fn new(base_addr: StaticRef<Registers>) -> SpiHostHardware {
SpiHostHardware {
registers: base_addr,
transaction_len: Cell::new(0),
tx_buffer: TakeCell::empty(),
rx_buffer: TakeCell::empty(),
client: OptionalCell::empty(),
}
}
pub fn init(&self) {
self.registers.ctrl.write(
CTRL::CPOL::CLEAR +
CTRL::CPHA::CLEAR +
CTRL::CSBSU::CLEAR +
CTRL::CSBHLD::CLEAR +
CTRL::IDIV.val(2) +
CTRL::CSBPOL::CLEAR +
CTRL::TXBITOR::SET +
CTRL::TXBYTOR::CLEAR +
CTRL::RXBITOR::SET +
CTRL::RXBYTOR::CLEAR +
CTRL::ENPASSTHRU::CLEAR);
self.registers.xact.write(
XACT::START::CLEAR +
XACT::BCNT.val(7) +
XACT::SIZE.val(0) +
XACT::RDY_POLL::CLEAR +
XACT::RDY_POLL_DLY.val(0));
}
fn enable_tx_interrupt(&self) {
self.registers.ictrl.modify(ICTRL::TXDONE::SET);
}
fn disable_tx_interrupt(&self) {
self.registers.ictrl.modify(ICTRL::TXDONE::CLEAR);
}
pub fn handle_interrupt(&self) {
//debug!("SpiHostHardware::handle_interrupt: ISTATE = {:08x}", self.registers.istate.get());
if self.registers.istate.is_set(ISTATE::TXDONE) {
self.registers.istate_clr.write(ISTATE_CLR::TXDONE::SET);
self.client.map(|client| {
self.tx_buffer.take()
.map(|tx_buf| {
self.rx_buffer
.map(|rx_buf| {
self.read_data(rx_buf);
});
client.read_write_done(
tx_buf,
self.rx_buffer.take(),
self.transaction_len.get())
});
});
}
self.disable_tx_interrupt();
}
fn start_transaction(
&self,
write_buffer: Option<&'static mut [u8]>,
read_buffer: Option<&'static mut [u8]>,
transaction_len: usize) -> ReturnCode {
//debug!("SpiHostHardware::start_transaction: transaction_len={}", transaction_len);
// The transaction needs at least one byte.
// It also cannot have more bytes than tx_fifo or rx_fifo is long.
if (transaction_len == 0) ||
(transaction_len > self.registers.tx_fifo.len()) ||
(transaction_len > self.registers.rx_fifo.len()) {
//debug!("SpiHostHardware::start_transaction: Invalid transaction_len={}", transaction_len);
return ReturnCode::ESIZE;
}
self.registers.xact.modify(XACT::BCNT.val(7));
self.registers.xact.modify(XACT::SIZE.val((transaction_len - 1) as u32));
let mut tx_buf_len = 0;
write_buffer.as_ref().map(|tx_buf| {
tx_buf_len = min(tx_buf.len(), transaction_len);
for idx in 0..tx_buf_len {
self.registers.tx_fifo[idx].set(tx_buf[idx]);
}
});
// Clear the TX FIFO for additional bytes not supplied by write_buffer.
// Since we have no control over how many bytes the SPI host reads, we
// want to make sure to not accidentally leak information that made it
// into the TX FIFO beyond the length of the `write_buffer`.
for idx in tx_buf_len..transaction_len {
self.registers.tx_fifo[idx].set(0xff);
}
write_buffer.map(|buf| {
self.tx_buffer.replace(buf);
});
read_buffer.map(|buf| {
self.rx_buffer.replace(buf);
});
self.transaction_len.set(transaction_len);
self.registers.istate_clr.write(ISTATE_CLR::TXDONE::SET);
self.enable_tx_interrupt();
self.registers.xact.modify(XACT::START::SET);
ReturnCode::SUCCESS
}
fn read_data(&self, read_buffer: &mut [u8]) {
let read_len = min(read_buffer.len(), self.transaction_len.get());
for idx in 0..read_len {
let val = self.registers.rx_fifo[idx].get();
read_buffer[idx] = val;
}
}
}
impl SpiHost for SpiHostHardware {
fn spi_device_spi_host_passthrough(&self, enabled: bool) {
self.registers.ctrl.modify(
if enabled { CTRL::ENPASSTHRU::SET } else { CTRL::ENPASSTHRU::CLEAR });
}
fn wait_busy_clear_in_transactions(&self, enabled: bool) {
self.registers.xact.modify(
if enabled { XACT::RDY_POLL::SET } else { XACT::RDY_POLL::CLEAR });
}
}
impl SpiMaster for SpiHostHardware {
type ChipSelect = bool;
fn set_client(&self, client: &'static dyn kernel::hil::spi::SpiMasterClient) {
self.client.set(client);
}
fn init(&self) {}
fn is_busy(&self) -> bool {
self.registers.istate.is_set(ISTATE::TXDONE)
}
fn read_write_bytes(
&self,
write_buffer: &'static mut [u8],
read_buffer: Option<&'static mut [u8]>,
len: usize,
) -> ReturnCode {
// If busy, don't start
if self.is_busy() {
return ReturnCode::EBUSY;
}
self.start_transaction(Some(write_buffer), read_buffer, len)
}
fn write_byte(&self, _val: u8) {
panic!("write_byte is not implemented");
}
fn read_byte(&self) -> u8 {
panic!("read_byte is not implemented");
}
fn read_write_byte(&self, _val: u8) -> u8 {
panic!("read_write_byte is not implemented");
}
fn specify_chip_select(&self, _cs: Self::ChipSelect) {
// Nothing to be done
}
/// Returns the actual rate set
fn set_rate(&self, _rate: u32) -> u32 {
panic!("set_rate is not implemented");
}
fn get_rate(&self) -> u32 {
panic!("get_rate is not implemented");
}
fn set_clock(&self, _polarity: ClockPolarity) {
panic!("set_clock is not implemented");
}
fn get_clock(&self) -> ClockPolarity {
panic!("get_clock is not implemented");
}
fn | (&self, _phase: ClockPhase) {
panic!("set_phase is not implemented");
}
fn get_phase(&self) -> ClockPhase {
panic!("get_phase is not implemented");
}
fn hold_low(&self) {
panic!("hold_low is not implemented");
}
fn release_low(&self) {
// Nothing to do, since this is the only mode supported.
}
}
| set_phase | identifier_name |
spi_host.rs | use crate::hil::spi_host::SpiHost;
use core::cell::Cell;
use core::cmp::min;
use kernel::common::cells::{OptionalCell, TakeCell};
use kernel::common::registers::{register_bitfields, register_structs, ReadOnly, ReadWrite, WriteOnly};
use kernel::common::StaticRef;
use kernel::hil::spi::{ClockPolarity, ClockPhase, SpiMaster, SpiMasterClient};
use kernel::ReturnCode;
// The TX and RX FIFOs both have the same length. We write and read at the same
// time.
// Registers for the SPI host controller
register_structs! {
Registers {
(0x0000 => ctrl: ReadWrite<u32, CTRL::Register>),
(0x0004 => xact: ReadWrite<u32, XACT::Register>),
(0x0008 => ictrl: ReadWrite<u32, ICTRL::Register>),
(0x000c => istate: ReadOnly<u32, ISTATE::Register>),
(0x0010 => istate_clr: ReadWrite<u32, ISTATE_CLR::Register>),
(0x0014 => _reserved),
(0x1000 => tx_fifo: [WriteOnly<u8>; 128]),
(0x1080 => rx_fifo: [ReadOnly<u8>; 128]),
(0x1100 => @END),
}
}
register_bitfields![u32,
CTRL [
/// CPOL setting
CPOL OFFSET(0) NUMBITS(1) [],
/// CPHA setting
CPHA OFFSET(1) NUMBITS(1) [],
/// CSB to SCK setup time in SCK cycles + 1.5
CSBSU OFFSET(2) NUMBITS(4) [],
/// CSB from SCK hold time in SCK cycles + 1 (defined with respect to
/// the last SCK edge)
CSBHLD OFFSET(6) NUMBITS(4) [],
/// SPI Clk Divider. Actual divider is IDIV+1. A value of 0 gives divide
/// by 1 clock, 1 gives divide by 2 etc.
IDIV OFFSET(10) NUMBITS(12) [],
/// Polarity of CSB signal. 0:active low 1:active high
CSBPOL OFFSET(22) NUMBITS(1) [],
/// Order in which bits of byte are sent. 0: send bit 0 first. 1: send
/// bit 7 first
TXBITOR OFFSET(23) NUMBITS(1) [],
/// Order in which bytes of buffer word are sent.
/// 0: send byte 0 first. 1: send byte 3 first
TXBYTOR OFFSET(24) NUMBITS(1) [],
/// Order in which received bits are packed into byte.
/// 0: first bit received is bit0 1: last bit received is bit 0
RXBITOR OFFSET(25) NUMBITS(1) [],
/// Order in which received bytes are packed into word.
/// 0: first byte received is byte 0 1: first byte received is byte 3
RXBYTOR OFFSET(26) NUMBITS(1) [],
/// SPI Passthrough Mode. 0: Disable, 1: Enable. This is the host side
/// control of whether passthrough is allowed. In order for full
/// passthrough functionality, both the host and device passthrough
/// functionality have to be enabled
ENPASSTHRU OFFSET(27) NUMBITS(1) []
],
XACT [
/// Initiate transaction in buffer
START OFFSET(0) NUMBITS(1) [],
/// Bits-1 in last byte transferred. The default assumes last byte will
/// have 8 bits, this should be sufficient for most usage.
BCNT OFFSET(1) NUMBITS(3) [],
/// Total number of transactions in bytes-1. If 64 bytes are to be
/// transferred, this should be programmed as 63.
SIZE OFFSET(4) NUMBITS(7) [],
/// Poll for ready
RDY_POLL OFFSET(11) NUMBITS(1) [],
/// Delay before polling in PCLK cycles + 1
RDY_POLL_DLY OFFSET(12) NUMBITS(5) []
],
ICTRL [
/// TX interrupt enable
TXDONE OFFSET(0) NUMBITS(1) []
],
ISTATE [
/// TX done interrupt
TXDONE OFFSET(0) NUMBITS(1) []
],
ISTATE_CLR [
/// TX done interrupt clear
TXDONE OFFSET(0) NUMBITS(1) []
]
];
const SPI_HOST0_BASE_ADDR: u32 = 0x4070_0000;
const SPI_HOST1_BASE_ADDR: u32 = 0x4071_0000;
const SPI_HOST0_REGISTERS: StaticRef<Registers> =
unsafe { StaticRef::new(SPI_HOST0_BASE_ADDR as *const Registers) };
const SPI_HOST1_REGISTERS: StaticRef<Registers> =
unsafe { StaticRef::new(SPI_HOST1_BASE_ADDR as *const Registers) };
pub static mut SPI_HOST0: SpiHostHardware = SpiHostHardware::new(SPI_HOST0_REGISTERS);
pub static mut SPI_HOST1: SpiHostHardware = SpiHostHardware::new(SPI_HOST1_REGISTERS);
/// A SPI Host
pub struct SpiHostHardware {
registers: StaticRef<Registers>,
transaction_len: Cell<usize>,
tx_buffer: TakeCell<'static, [u8]>,
rx_buffer: TakeCell<'static, [u8]>,
client: OptionalCell<&'static dyn SpiMasterClient>,
}
impl SpiHostHardware {
const fn new(base_addr: StaticRef<Registers>) -> SpiHostHardware {
SpiHostHardware {
registers: base_addr,
transaction_len: Cell::new(0),
tx_buffer: TakeCell::empty(),
rx_buffer: TakeCell::empty(),
client: OptionalCell::empty(),
}
}
pub fn init(&self) {
self.registers.ctrl.write(
CTRL::CPOL::CLEAR +
CTRL::CPHA::CLEAR +
CTRL::CSBSU::CLEAR +
CTRL::CSBHLD::CLEAR +
CTRL::IDIV.val(2) +
CTRL::CSBPOL::CLEAR +
CTRL::TXBITOR::SET +
CTRL::TXBYTOR::CLEAR +
CTRL::RXBITOR::SET +
CTRL::RXBYTOR::CLEAR +
CTRL::ENPASSTHRU::CLEAR);
self.registers.xact.write(
XACT::START::CLEAR +
XACT::BCNT.val(7) +
XACT::SIZE.val(0) +
XACT::RDY_POLL::CLEAR +
XACT::RDY_POLL_DLY.val(0));
}
fn enable_tx_interrupt(&self) {
self.registers.ictrl.modify(ICTRL::TXDONE::SET);
}
fn disable_tx_interrupt(&self) {
self.registers.ictrl.modify(ICTRL::TXDONE::CLEAR);
}
pub fn handle_interrupt(&self) {
//debug!("SpiHostHardware::handle_interrupt: ISTATE = {:08x}", self.registers.istate.get());
if self.registers.istate.is_set(ISTATE::TXDONE) {
self.registers.istate_clr.write(ISTATE_CLR::TXDONE::SET);
self.client.map(|client| {
self.tx_buffer.take()
.map(|tx_buf| {
self.rx_buffer
.map(|rx_buf| {
self.read_data(rx_buf);
});
client.read_write_done(
tx_buf,
self.rx_buffer.take(),
self.transaction_len.get())
});
});
}
self.disable_tx_interrupt();
}
fn start_transaction(
&self,
write_buffer: Option<&'static mut [u8]>,
read_buffer: Option<&'static mut [u8]>,
transaction_len: usize) -> ReturnCode {
//debug!("SpiHostHardware::start_transaction: transaction_len={}", transaction_len);
// The transaction needs at least one byte.
// It also cannot have more bytes than tx_fifo or rx_fifo is long.
if (transaction_len == 0) ||
(transaction_len > self.registers.tx_fifo.len()) ||
(transaction_len > self.registers.rx_fifo.len()) {
//debug!("SpiHostHardware::start_transaction: Invalid transaction_len={}", transaction_len);
return ReturnCode::ESIZE;
}
self.registers.xact.modify(XACT::BCNT.val(7));
self.registers.xact.modify(XACT::SIZE.val((transaction_len - 1) as u32));
let mut tx_buf_len = 0;
write_buffer.as_ref().map(|tx_buf| {
tx_buf_len = min(tx_buf.len(), transaction_len);
for idx in 0..tx_buf_len {
self.registers.tx_fifo[idx].set(tx_buf[idx]);
}
});
// Clear the TX FIFO for additional bytes not supplied by write_buffer.
// Since we have no control over how many bytes the SPI host reads, we
// want to make sure to not accidentally leak information that made it
// into the TX FIFO beyond the length of the `write_buffer`.
for idx in tx_buf_len..transaction_len {
self.registers.tx_fifo[idx].set(0xff);
}
write_buffer.map(|buf| {
self.tx_buffer.replace(buf);
});
read_buffer.map(|buf| {
self.rx_buffer.replace(buf);
});
self.transaction_len.set(transaction_len);
self.registers.istate_clr.write(ISTATE_CLR::TXDONE::SET);
self.enable_tx_interrupt();
self.registers.xact.modify(XACT::START::SET);
ReturnCode::SUCCESS
}
fn read_data(&self, read_buffer: &mut [u8]) {
let read_len = min(read_buffer.len(), self.transaction_len.get());
for idx in 0..read_len {
let val = self.registers.rx_fifo[idx].get();
read_buffer[idx] = val;
}
}
}
impl SpiHost for SpiHostHardware {
fn spi_device_spi_host_passthrough(&self, enabled: bool) {
self.registers.ctrl.modify(
if enabled { CTRL::ENPASSTHRU::SET } else | );
}
fn wait_busy_clear_in_transactions(&self, enabled: bool) {
self.registers.xact.modify(
if enabled { XACT::RDY_POLL::SET } else { XACT::RDY_POLL::CLEAR });
}
}
impl SpiMaster for SpiHostHardware {
type ChipSelect = bool;
fn set_client(&self, client: &'static dyn kernel::hil::spi::SpiMasterClient) {
self.client.set(client);
}
fn init(&self) {}
fn is_busy(&self) -> bool {
self.registers.istate.is_set(ISTATE::TXDONE)
}
fn read_write_bytes(
&self,
write_buffer: &'static mut [u8],
read_buffer: Option<&'static mut [u8]>,
len: usize,
) -> ReturnCode {
// If busy, don't start
if self.is_busy() {
return ReturnCode::EBUSY;
}
self.start_transaction(Some(write_buffer), read_buffer, len)
}
fn write_byte(&self, _val: u8) {
panic!("write_byte is not implemented");
}
fn read_byte(&self) -> u8 {
panic!("read_byte is not implemented");
}
fn read_write_byte(&self, _val: u8) -> u8 {
panic!("read_write_byte is not implemented");
}
fn specify_chip_select(&self, _cs: Self::ChipSelect) {
// Nothing to be done
}
/// Returns the actual rate set
fn set_rate(&self, _rate: u32) -> u32 {
panic!("set_rate is not implemented");
}
fn get_rate(&self) -> u32 {
panic!("get_rate is not implemented");
}
fn set_clock(&self, _polarity: ClockPolarity) {
panic!("set_clock is not implemented");
}
fn get_clock(&self) -> ClockPolarity {
panic!("get_clock is not implemented");
}
fn set_phase(&self, _phase: ClockPhase) {
panic!("set_phase is not implemented");
}
fn get_phase(&self) -> ClockPhase {
panic!("get_phase is not implemented");
}
fn hold_low(&self) {
panic!("hold_low is not implemented");
}
fn release_low(&self) {
// Nothing to do, since this is the only mode supported.
}
}
| { CTRL::ENPASSTHRU::CLEAR } | conditional_block |
campaign-gant-chart.js | var taskStyle = ['gtaskblue', 'gtaskred', 'gtaskpurple', 'gtaskgreen', 'gtaskpink'];
var element; // global variable
var getCanvas;
function populateGantChart(campaignList, g, campaignIds){
if (g.getDivId() != null) {
g.setCaptionType('Complete'); // Set to Show Caption (None,Caption,Resource,Duration,Complete)
g.setQuarterColWidth(36);
g.setDateTaskDisplayFormat('day dd month yyyy'); // Shown in tool tip box
g.setDayMajorDateDisplayFormat('mon yyyy - Week ww') // Set format to display dates in the "Major" header of the "Day" view
g.setWeekMinorDateDisplayFormat('dd mon') // Set format to display dates in the "Minor" header of the "Week" view
g.setShowTaskInfoLink(1); // Show link in tool tip (0/1)
g.setShowEndWeekDate(0); // Show/Hide the date for the last day of the week in header for daily view (1/0)
g.setUseSingleCell(10000); // Set the threshold at which we will only use one cell per table row (0 disables). Helps with rendering performance for large charts.
g.setShowComp(0);
g.setShowTaskInfoLink(0);
g.setShowTaskInfoRes(0);
g.setShowTaskInfoComp(0);
g.setFormatArr('Day', 'Week', 'Month', 'Quarter'); // Even with setUseSingleCell using Hour format on such a large chart can cause issues in some browsers
//(pID, pName, pStart, pEnd, pStyle, pLink, pMile, pRes, pComp, pGroup, pParent, pOpen, pDepend, pCaption, pNotes, pGantt)
for (var i = 0; i < campaignList.length; i++) {
var campaign = campaignList[i];
if(campaignIds.length > 0){
if((campaignIds.indexOf(campaign.id+"") == -1)) continue;
}
//populate campaigns
var startDate = new Date(parseFloat(campaign.startDate));
var startDateMM = startDate.getMonth() + 1;
var startDateDD = startDate.getDate();
var startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
var endDate = new Date(parseFloat(campaign.endDate));
var endDateMM = endDate.getMonth() + 1;
var endDateDD = endDate.getDate();
var endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var campaignId = campaign.id;
g.AddTaskItem(new JSGantt.TaskItem(campaignId,
campaign.campaignName, startDateString, endDateString,
'ggroupblack', '#', 0, '-', 0, 1, 0, 0, '', '', '', g));
//populate campaignStages
for (var j = 0; j < campaign.campaignStagesSet.length; j++) {
var campaignStage = campaign.campaignStagesSet[j];
startDate = new Date(parseFloat(campaignStage.startDate));
startDateMM = startDate.getMonth() + 1;
startDateDD = startDate.getDate();
startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
endDate = new Date(parseFloat(campaignStage.endDate));
endDateMM = endDate.getMonth() + 1;
endDateDD = endDate.getDate();
endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var stageId = "1111" + campaignStage.id;
g.AddTaskItem(new JSGantt.TaskItem(stageId,
campaignStage.stageName, startDateString, endDateString,
'gtaskyellow', '#', 0, '-', 0, 1, campaignId, 0, '', '', '', g));
//populate campaignSubstages
for (var k = 0; k < campaignStage.campaignSubstagesSet.length; k++) {
var table = "";
var campaignSubstage = campaignStage.campaignSubstagesSet[k];
startDate = new Date(parseFloat(campaignSubstage.startDate));
startDateMM = startDate.getMonth() + 1;
startDateDD = startDate.getDate();
startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
endDate = new Date(parseFloat(campaignSubstage.endDate));
endDateMM = endDate.getMonth() + 1;
endDateDD = endDate.getDate();
endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var substageId = "9999" + campaignSubstage.id;
var passDependency = "", failDependency = "";
if((campaignSubstage.ssIdForPass != '-1') && (campaignSubstage.ssIdForPass != '0'))
passDependency = "9999" + campaignSubstage.ssIdForPass + "SS";
if((campaignSubstage.ssIdForFail != '-1') && (campaignSubstage.ssIdForFail != '0'))
failDependency = "9999" + campaignSubstage.ssIdForFail + "FF";
var dependency = passDependency + "," + failDependency;
var substageStatusList = campaignSubstage.campaignSubstageStatusList;
var onEnterCount = 0, sentCount = 0, viewedCount = 0, passCount = 0, failCount = 0, noShowCount = 0;
for(var l = 0; l < substageStatusList.length; l++){
var status = substageStatusList[l];
if(status.onEnter == true) onEnterCount = onEnterCount + 1;
if(status.sent == true) sentCount = sentCount + 1;
if(status.pass == true) passCount = passCount + 1;
if(status.fail == true) failCount = failCount + 1;
if(status.viewed == true) viewedCount = viewedCount + 1;
if(status.noShow == true) noShowCount = noShowCount + 1;
}
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No. of Users:</span><span class='gTaskText'>" + onEnterCount + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Remainders:</span><span class='gTaskText'>" + campaignSubstage.remainders + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Pass Substage:</span><span class='gTaskText'>" + (((campaignSubstage.ssIdForPass == 0) || (campaignSubstage.ssIdForPass == -1)) ? "Not Assigned" : substageList[campaignSubstage.ssIdForPass]) + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Fail Substage:</span><span class='gTaskText'>" + (((campaignSubstage.ssIdForFail == 0) || (campaignSubstage.ssIdForFail == -1)) ? "Not Assigned" : substageList[campaignSubstage.ssIdForFail]) + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No-Show :</span><span class='gTaskText'>" + noShowList[campaignSubstage.noShow] + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Sent: </span><span class='gTaskText'>"+sentCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Viewed: </span><span class='gTaskText'>"+viewedCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No-Show: </span><span class='gTaskText'>"+noShowCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Pass: </span><span class='gTaskText'>"+passCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Fail: </span><span class='gTaskText'>"+failCount+"</span></div>";
| "gtaskpurple", '#', 0, content, 0, 0, stageId, 0, dependency, '', table, g));
}
}
}
g.Draw();
} else {
alert("Error, unable to create Gantt Chart");
}
}
function previewChart(divName){
element = $("#"+divName);
/*$("#downloadChart").show();*/
$("#chartPreviewContainer").show();
$("#printChart").show();
html2canvas(element, {
onrendered: function (canvas) {
var imgsrc = canvas.toDataURL("image/png");
$("#chartPreviewImage").attr('src',imgsrc);
getCanvas = canvas;
}
});
}
function showLoader(){
$("#form-loader").css("display","inline-block");
$(".wrapper").css("pointer-events", "none");
$(".wrapper").css("opacity", "0.5");
}
function hideLoader(){
$("#form-loader").css("display","none");
$(".wrapper").css("pointer-events", "");
$(".wrapper").css("opacity", "1");
}
function print(divId){
var contents = $("#"+divId).html();
var frame1 = $('<iframe />');
frame1[0].name = "frame1";
frame1.css({ "position": "absolute", "top": "-1000000px" });
$("body").append(frame1);
var frameDoc = frame1[0].contentWindow ? frame1[0].contentWindow : frame1[0].contentDocument.document ? frame1[0].contentDocument.document : frame1[0].contentDocument;
frameDoc.document.open();
//Create a new HTML document.
var currDate = new Date();
var title = "GanttChart-" + (currDate.getMonth() + 1) + "-" + currDate.getDate() + "-" + currDate.getFullYear();
frameDoc.document.write('<html><head><title>'+title+'</title>');
frameDoc.document.write('</head><body>');
//Append the external CSS file.
//frameDoc.document.write('<link href="style.css" rel="stylesheet" type="text/css" />');
//Append the DIV contents.
frameDoc.document.write(contents);
frameDoc.document.write('</body></html>');
frameDoc.document.close();
setTimeout(function () {
window.frames["frame1"].focus();
window.frames["frame1"].print();
frame1.remove();
}, 500);
}
function expandAllFolders(){
$(".gfoldercollapse").each(function( index ){
var pID = $(this).attr("id").split("_")[1];
console.log(index + ": " + $(this).text() + "\t id : " + pID);
JSGantt.folder(pID, {"vTool":{"vToolCont":{},"moveInterval":20,"fadeInterval":23,"delayTimeout":22}});
});
}
function printChart(divName){
console.log("Loading");
showLoader();
previewChart(divName);
setTimeout(function () {
print("chartPreviewContainer");
hideLoader();
}, 3500);
$("#chartPreviewContainer").hide();
}
/*function downloadChartAsImage(divName){
console.log("getCanvas : " + getCanvas);
var imgageData = getCanvas.toDataURL("image/png");
console.log("imgageData : " + imgageData);
// Now browser starts downloading it instead of just showing it
var newData = imgageData.replace(/^data:image\/png/, "data:application/octet-stream");
console.log("newData : " + newData);
$("#btn-Convert-Html2Image").attr("download", "gant-chart.png").attr("href", newData);
}*/ |
var taskStyleIndex = Math.floor(Math.random() * (taskStyle.length - 1)) + 0;
var content = campaignSubstage.connectType == 0 ? campaignSubstage.contentId : campaignSubstage.connectUrl;
g.AddTaskItem(new JSGantt.TaskItem(substageId,
campaignSubstage.campaignSubStageName, startDateString, endDateString,
| random_line_split |
campaign-gant-chart.js | var taskStyle = ['gtaskblue', 'gtaskred', 'gtaskpurple', 'gtaskgreen', 'gtaskpink'];
var element; // global variable
var getCanvas;
function populateGantChart(campaignList, g, campaignIds){
if (g.getDivId() != null) {
g.setCaptionType('Complete'); // Set to Show Caption (None,Caption,Resource,Duration,Complete)
g.setQuarterColWidth(36);
g.setDateTaskDisplayFormat('day dd month yyyy'); // Shown in tool tip box
g.setDayMajorDateDisplayFormat('mon yyyy - Week ww') // Set format to display dates in the "Major" header of the "Day" view
g.setWeekMinorDateDisplayFormat('dd mon') // Set format to display dates in the "Minor" header of the "Week" view
g.setShowTaskInfoLink(1); // Show link in tool tip (0/1)
g.setShowEndWeekDate(0); // Show/Hide the date for the last day of the week in header for daily view (1/0)
g.setUseSingleCell(10000); // Set the threshold at which we will only use one cell per table row (0 disables). Helps with rendering performance for large charts.
g.setShowComp(0);
g.setShowTaskInfoLink(0);
g.setShowTaskInfoRes(0);
g.setShowTaskInfoComp(0);
g.setFormatArr('Day', 'Week', 'Month', 'Quarter'); // Even with setUseSingleCell using Hour format on such a large chart can cause issues in some browsers
//(pID, pName, pStart, pEnd, pStyle, pLink, pMile, pRes, pComp, pGroup, pParent, pOpen, pDepend, pCaption, pNotes, pGantt)
for (var i = 0; i < campaignList.length; i++) {
var campaign = campaignList[i];
if(campaignIds.length > 0){
if((campaignIds.indexOf(campaign.id+"") == -1)) continue;
}
//populate campaigns
var startDate = new Date(parseFloat(campaign.startDate));
var startDateMM = startDate.getMonth() + 1;
var startDateDD = startDate.getDate();
var startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
var endDate = new Date(parseFloat(campaign.endDate));
var endDateMM = endDate.getMonth() + 1;
var endDateDD = endDate.getDate();
var endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var campaignId = campaign.id;
g.AddTaskItem(new JSGantt.TaskItem(campaignId,
campaign.campaignName, startDateString, endDateString,
'ggroupblack', '#', 0, '-', 0, 1, 0, 0, '', '', '', g));
//populate campaignStages
for (var j = 0; j < campaign.campaignStagesSet.length; j++) {
var campaignStage = campaign.campaignStagesSet[j];
startDate = new Date(parseFloat(campaignStage.startDate));
startDateMM = startDate.getMonth() + 1;
startDateDD = startDate.getDate();
startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
endDate = new Date(parseFloat(campaignStage.endDate));
endDateMM = endDate.getMonth() + 1;
endDateDD = endDate.getDate();
endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var stageId = "1111" + campaignStage.id;
g.AddTaskItem(new JSGantt.TaskItem(stageId,
campaignStage.stageName, startDateString, endDateString,
'gtaskyellow', '#', 0, '-', 0, 1, campaignId, 0, '', '', '', g));
//populate campaignSubstages
for (var k = 0; k < campaignStage.campaignSubstagesSet.length; k++) {
var table = "";
var campaignSubstage = campaignStage.campaignSubstagesSet[k];
startDate = new Date(parseFloat(campaignSubstage.startDate));
startDateMM = startDate.getMonth() + 1;
startDateDD = startDate.getDate();
startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
endDate = new Date(parseFloat(campaignSubstage.endDate));
endDateMM = endDate.getMonth() + 1;
endDateDD = endDate.getDate();
endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var substageId = "9999" + campaignSubstage.id;
var passDependency = "", failDependency = "";
if((campaignSubstage.ssIdForPass != '-1') && (campaignSubstage.ssIdForPass != '0'))
passDependency = "9999" + campaignSubstage.ssIdForPass + "SS";
if((campaignSubstage.ssIdForFail != '-1') && (campaignSubstage.ssIdForFail != '0'))
failDependency = "9999" + campaignSubstage.ssIdForFail + "FF";
var dependency = passDependency + "," + failDependency;
var substageStatusList = campaignSubstage.campaignSubstageStatusList;
var onEnterCount = 0, sentCount = 0, viewedCount = 0, passCount = 0, failCount = 0, noShowCount = 0;
for(var l = 0; l < substageStatusList.length; l++){
var status = substageStatusList[l];
if(status.onEnter == true) onEnterCount = onEnterCount + 1;
if(status.sent == true) sentCount = sentCount + 1;
if(status.pass == true) passCount = passCount + 1;
if(status.fail == true) failCount = failCount + 1;
if(status.viewed == true) viewedCount = viewedCount + 1;
if(status.noShow == true) noShowCount = noShowCount + 1;
}
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No. of Users:</span><span class='gTaskText'>" + onEnterCount + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Remainders:</span><span class='gTaskText'>" + campaignSubstage.remainders + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Pass Substage:</span><span class='gTaskText'>" + (((campaignSubstage.ssIdForPass == 0) || (campaignSubstage.ssIdForPass == -1)) ? "Not Assigned" : substageList[campaignSubstage.ssIdForPass]) + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Fail Substage:</span><span class='gTaskText'>" + (((campaignSubstage.ssIdForFail == 0) || (campaignSubstage.ssIdForFail == -1)) ? "Not Assigned" : substageList[campaignSubstage.ssIdForFail]) + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No-Show :</span><span class='gTaskText'>" + noShowList[campaignSubstage.noShow] + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Sent: </span><span class='gTaskText'>"+sentCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Viewed: </span><span class='gTaskText'>"+viewedCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No-Show: </span><span class='gTaskText'>"+noShowCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Pass: </span><span class='gTaskText'>"+passCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Fail: </span><span class='gTaskText'>"+failCount+"</span></div>";
var taskStyleIndex = Math.floor(Math.random() * (taskStyle.length - 1)) + 0;
var content = campaignSubstage.connectType == 0 ? campaignSubstage.contentId : campaignSubstage.connectUrl;
g.AddTaskItem(new JSGantt.TaskItem(substageId,
campaignSubstage.campaignSubStageName, startDateString, endDateString,
"gtaskpurple", '#', 0, content, 0, 0, stageId, 0, dependency, '', table, g));
}
}
}
g.Draw();
} else {
alert("Error, unable to create Gantt Chart");
}
}
function previewChart(divName){
element = $("#"+divName);
/*$("#downloadChart").show();*/
$("#chartPreviewContainer").show();
$("#printChart").show();
html2canvas(element, {
onrendered: function (canvas) {
var imgsrc = canvas.toDataURL("image/png");
$("#chartPreviewImage").attr('src',imgsrc);
getCanvas = canvas;
}
});
}
function showLoader(){
$("#form-loader").css("display","inline-block");
$(".wrapper").css("pointer-events", "none");
$(".wrapper").css("opacity", "0.5");
}
function hideLoader(){
$("#form-loader").css("display","none");
$(".wrapper").css("pointer-events", "");
$(".wrapper").css("opacity", "1");
}
function print(divId){
var contents = $("#"+divId).html();
var frame1 = $('<iframe />');
frame1[0].name = "frame1";
frame1.css({ "position": "absolute", "top": "-1000000px" });
$("body").append(frame1);
var frameDoc = frame1[0].contentWindow ? frame1[0].contentWindow : frame1[0].contentDocument.document ? frame1[0].contentDocument.document : frame1[0].contentDocument;
frameDoc.document.open();
//Create a new HTML document.
var currDate = new Date();
var title = "GanttChart-" + (currDate.getMonth() + 1) + "-" + currDate.getDate() + "-" + currDate.getFullYear();
frameDoc.document.write('<html><head><title>'+title+'</title>');
frameDoc.document.write('</head><body>');
//Append the external CSS file.
//frameDoc.document.write('<link href="style.css" rel="stylesheet" type="text/css" />');
//Append the DIV contents.
frameDoc.document.write(contents);
frameDoc.document.write('</body></html>');
frameDoc.document.close();
setTimeout(function () {
window.frames["frame1"].focus();
window.frames["frame1"].print();
frame1.remove();
}, 500);
}
function expandAllFolders(){
$(".gfoldercollapse").each(function( index ){
var pID = $(this).attr("id").split("_")[1];
console.log(index + ": " + $(this).text() + "\t id : " + pID);
JSGantt.folder(pID, {"vTool":{"vToolCont":{},"moveInterval":20,"fadeInterval":23,"delayTimeout":22}});
});
}
function printChart(divName) |
/*function downloadChartAsImage(divName){
console.log("getCanvas : " + getCanvas);
var imgageData = getCanvas.toDataURL("image/png");
console.log("imgageData : " + imgageData);
// Now browser starts downloading it instead of just showing it
var newData = imgageData.replace(/^data:image\/png/, "data:application/octet-stream");
console.log("newData : " + newData);
$("#btn-Convert-Html2Image").attr("download", "gant-chart.png").attr("href", newData);
}*/ | {
console.log("Loading");
showLoader();
previewChart(divName);
setTimeout(function () {
print("chartPreviewContainer");
hideLoader();
}, 3500);
$("#chartPreviewContainer").hide();
} | identifier_body |
campaign-gant-chart.js | var taskStyle = ['gtaskblue', 'gtaskred', 'gtaskpurple', 'gtaskgreen', 'gtaskpink'];
var element; // global variable
var getCanvas;
function populateGantChart(campaignList, g, campaignIds){
if (g.getDivId() != null) {
g.setCaptionType('Complete'); // Set to Show Caption (None,Caption,Resource,Duration,Complete)
g.setQuarterColWidth(36);
g.setDateTaskDisplayFormat('day dd month yyyy'); // Shown in tool tip box
g.setDayMajorDateDisplayFormat('mon yyyy - Week ww') // Set format to display dates in the "Major" header of the "Day" view
g.setWeekMinorDateDisplayFormat('dd mon') // Set format to display dates in the "Minor" header of the "Week" view
g.setShowTaskInfoLink(1); // Show link in tool tip (0/1)
g.setShowEndWeekDate(0); // Show/Hide the date for the last day of the week in header for daily view (1/0)
g.setUseSingleCell(10000); // Set the threshold at which we will only use one cell per table row (0 disables). Helps with rendering performance for large charts.
g.setShowComp(0);
g.setShowTaskInfoLink(0);
g.setShowTaskInfoRes(0);
g.setShowTaskInfoComp(0);
g.setFormatArr('Day', 'Week', 'Month', 'Quarter'); // Even with setUseSingleCell using Hour format on such a large chart can cause issues in some browsers
//(pID, pName, pStart, pEnd, pStyle, pLink, pMile, pRes, pComp, pGroup, pParent, pOpen, pDepend, pCaption, pNotes, pGantt)
for (var i = 0; i < campaignList.length; i++) {
var campaign = campaignList[i];
if(campaignIds.length > 0){
if((campaignIds.indexOf(campaign.id+"") == -1)) continue;
}
//populate campaigns
var startDate = new Date(parseFloat(campaign.startDate));
var startDateMM = startDate.getMonth() + 1;
var startDateDD = startDate.getDate();
var startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
var endDate = new Date(parseFloat(campaign.endDate));
var endDateMM = endDate.getMonth() + 1;
var endDateDD = endDate.getDate();
var endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var campaignId = campaign.id;
g.AddTaskItem(new JSGantt.TaskItem(campaignId,
campaign.campaignName, startDateString, endDateString,
'ggroupblack', '#', 0, '-', 0, 1, 0, 0, '', '', '', g));
//populate campaignStages
for (var j = 0; j < campaign.campaignStagesSet.length; j++) {
var campaignStage = campaign.campaignStagesSet[j];
startDate = new Date(parseFloat(campaignStage.startDate));
startDateMM = startDate.getMonth() + 1;
startDateDD = startDate.getDate();
startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
endDate = new Date(parseFloat(campaignStage.endDate));
endDateMM = endDate.getMonth() + 1;
endDateDD = endDate.getDate();
endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var stageId = "1111" + campaignStage.id;
g.AddTaskItem(new JSGantt.TaskItem(stageId,
campaignStage.stageName, startDateString, endDateString,
'gtaskyellow', '#', 0, '-', 0, 1, campaignId, 0, '', '', '', g));
//populate campaignSubstages
for (var k = 0; k < campaignStage.campaignSubstagesSet.length; k++) {
var table = "";
var campaignSubstage = campaignStage.campaignSubstagesSet[k];
startDate = new Date(parseFloat(campaignSubstage.startDate));
startDateMM = startDate.getMonth() + 1;
startDateDD = startDate.getDate();
startDateString = startDate.getFullYear() + "-" + (startDateMM < 10 ? ("0" + startDateMM) : startDateMM) + "-" + (startDateDD < 10 ? ("0" + startDateDD) : startDateDD);
endDate = new Date(parseFloat(campaignSubstage.endDate));
endDateMM = endDate.getMonth() + 1;
endDateDD = endDate.getDate();
endDateString = endDate.getFullYear() + "-" + (endDateMM < 10 ? ("0" + endDateMM) : endDateMM) + "-" + (endDateDD < 10 ? ("0" + endDateDD) : endDateDD);
var substageId = "9999" + campaignSubstage.id;
var passDependency = "", failDependency = "";
if((campaignSubstage.ssIdForPass != '-1') && (campaignSubstage.ssIdForPass != '0'))
passDependency = "9999" + campaignSubstage.ssIdForPass + "SS";
if((campaignSubstage.ssIdForFail != '-1') && (campaignSubstage.ssIdForFail != '0'))
failDependency = "9999" + campaignSubstage.ssIdForFail + "FF";
var dependency = passDependency + "," + failDependency;
var substageStatusList = campaignSubstage.campaignSubstageStatusList;
var onEnterCount = 0, sentCount = 0, viewedCount = 0, passCount = 0, failCount = 0, noShowCount = 0;
for(var l = 0; l < substageStatusList.length; l++){
var status = substageStatusList[l];
if(status.onEnter == true) onEnterCount = onEnterCount + 1;
if(status.sent == true) sentCount = sentCount + 1;
if(status.pass == true) passCount = passCount + 1;
if(status.fail == true) failCount = failCount + 1;
if(status.viewed == true) viewedCount = viewedCount + 1;
if(status.noShow == true) noShowCount = noShowCount + 1;
}
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No. of Users:</span><span class='gTaskText'>" + onEnterCount + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Remainders:</span><span class='gTaskText'>" + campaignSubstage.remainders + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Pass Substage:</span><span class='gTaskText'>" + (((campaignSubstage.ssIdForPass == 0) || (campaignSubstage.ssIdForPass == -1)) ? "Not Assigned" : substageList[campaignSubstage.ssIdForPass]) + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Fail Substage:</span><span class='gTaskText'>" + (((campaignSubstage.ssIdForFail == 0) || (campaignSubstage.ssIdForFail == -1)) ? "Not Assigned" : substageList[campaignSubstage.ssIdForFail]) + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No-Show :</span><span class='gTaskText'>" + noShowList[campaignSubstage.noShow] + "</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Sent: </span><span class='gTaskText'>"+sentCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Viewed: </span><span class='gTaskText'>"+viewedCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>No-Show: </span><span class='gTaskText'>"+noShowCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Pass: </span><span class='gTaskText'>"+passCount+"</span></div>";
table += "<div class='gTILine gTIsd'><span class='gTaskLabel'>Fail: </span><span class='gTaskText'>"+failCount+"</span></div>";
var taskStyleIndex = Math.floor(Math.random() * (taskStyle.length - 1)) + 0;
var content = campaignSubstage.connectType == 0 ? campaignSubstage.contentId : campaignSubstage.connectUrl;
g.AddTaskItem(new JSGantt.TaskItem(substageId,
campaignSubstage.campaignSubStageName, startDateString, endDateString,
"gtaskpurple", '#', 0, content, 0, 0, stageId, 0, dependency, '', table, g));
}
}
}
g.Draw();
} else {
alert("Error, unable to create Gantt Chart");
}
}
function previewChart(divName){
element = $("#"+divName);
/*$("#downloadChart").show();*/
$("#chartPreviewContainer").show();
$("#printChart").show();
html2canvas(element, {
onrendered: function (canvas) {
var imgsrc = canvas.toDataURL("image/png");
$("#chartPreviewImage").attr('src',imgsrc);
getCanvas = canvas;
}
});
}
function showLoader(){
$("#form-loader").css("display","inline-block");
$(".wrapper").css("pointer-events", "none");
$(".wrapper").css("opacity", "0.5");
}
function hideLoader(){
$("#form-loader").css("display","none");
$(".wrapper").css("pointer-events", "");
$(".wrapper").css("opacity", "1");
}
function print(divId){
var contents = $("#"+divId).html();
var frame1 = $('<iframe />');
frame1[0].name = "frame1";
frame1.css({ "position": "absolute", "top": "-1000000px" });
$("body").append(frame1);
var frameDoc = frame1[0].contentWindow ? frame1[0].contentWindow : frame1[0].contentDocument.document ? frame1[0].contentDocument.document : frame1[0].contentDocument;
frameDoc.document.open();
//Create a new HTML document.
var currDate = new Date();
var title = "GanttChart-" + (currDate.getMonth() + 1) + "-" + currDate.getDate() + "-" + currDate.getFullYear();
frameDoc.document.write('<html><head><title>'+title+'</title>');
frameDoc.document.write('</head><body>');
//Append the external CSS file.
//frameDoc.document.write('<link href="style.css" rel="stylesheet" type="text/css" />');
//Append the DIV contents.
frameDoc.document.write(contents);
frameDoc.document.write('</body></html>');
frameDoc.document.close();
setTimeout(function () {
window.frames["frame1"].focus();
window.frames["frame1"].print();
frame1.remove();
}, 500);
}
function | (){
$(".gfoldercollapse").each(function( index ){
var pID = $(this).attr("id").split("_")[1];
console.log(index + ": " + $(this).text() + "\t id : " + pID);
JSGantt.folder(pID, {"vTool":{"vToolCont":{},"moveInterval":20,"fadeInterval":23,"delayTimeout":22}});
});
}
function printChart(divName){
console.log("Loading");
showLoader();
previewChart(divName);
setTimeout(function () {
print("chartPreviewContainer");
hideLoader();
}, 3500);
$("#chartPreviewContainer").hide();
}
/*function downloadChartAsImage(divName){
console.log("getCanvas : " + getCanvas);
var imgageData = getCanvas.toDataURL("image/png");
console.log("imgageData : " + imgageData);
// Now browser starts downloading it instead of just showing it
var newData = imgageData.replace(/^data:image\/png/, "data:application/octet-stream");
console.log("newData : " + newData);
$("#btn-Convert-Html2Image").attr("download", "gant-chart.png").attr("href", newData);
}*/ | expandAllFolders | identifier_name |
adc.rs | //! The ADC Interface
//!
//! The ADC is disabled at startup and must be enabled (by calling
//! [Adc<Disabled>::enable]) before any of its registers can be accessed
//! (read or write). Attempts to access these registers will trigger a hardware
//! generated HardFault, which by default resets the microcontroller.
//!
//! The ADC can be polled for conversion completion with [Adc::is_done].
//! Completion will trigger an ADC Interrupt if enabled. See
//! [Adc::into_interrupt]
//!
//! ## Input Modes
//!
//! The Adc peripheral can operate in either single input or FIFO modes. Single
//! input mode is the mode most commonly thought of when using an ADC. A
//! multiplexer (via Adc::set_channel) is used to connect a single channel to
//! the ADC, and when the conversion is complete the hardware makes the results
//! available in the results register. The software must call
//! [Adc::set_channel] again to either select a new channel or to restart the
//! conversion on the same channel.
//!
//! The FIFO mode sets up a hardware buffer of selectable depth (2-8 channels).
//! Once the buffer is filled the Adc peripheral shoves the buffer contents
//! into the multiplexer channel by channel. Likewise, as each conversion is
//! completed the results are buffered into the result register in the same
//! order as the channel select buffer.
//!
//! Note: FIFO mode is not yet implemented in this HAL
//!
//! ## Conversion Modes
//!
//! The Adc peripheral offers 2 conversion modes, OneShot and Continuous. In
//! OneShot mode, the conversion is started when the channel is selected (or
//! when the channel select buffer is filled in FIFO mode). After completion no
//! new conversion is started until the channel is set again, even if the same
//! channel is used.
//!
//! In Continuous mode a new conversion is started immediately
//! after the previous one is completed. Changing the channel interrupts the
//! conversion and immediately begins conversion on the new channel (unless the
//! new channel is [DummyDisable], then the conversion is allowed to complete,
//! but no new conversion is started). In FIFO mode the input FIFO is reloaded
//! after completion, in other words the same N values are converted on a loop.
//!
//! Note: Continuous mode is not yet implemented in this HAL
//!
//! ## Comparison Mode
//!
//! Note: Comparison mode is not yet implemented in this HAL
//!
//! Comparison mode is a hardware feature of the Adc Peripheral. If set, the
//! conversion result is compared to the comparison value. If the result
//! is greater than or less than (depending on configuration) the comparison
//! value the result is moved into the result register. Otherwise, the result
//! is discarded \[Note: Unsure if the conversion is restarted in OneShot
//! mode\].
//!
//! A common use case for comparison mode is to enter a low power state with
//! the Adc configured to use the asynchronous clock source and to generate an
//! interrupt on completion. When the input channel crosses the comparison
//! threshold the interrupt is triggered, waking the MCU.
//!
//! ## Clocking
//!
//! The ADC requires a clock signal (ADCK), which is generated from the bus
//! clock, the bus clock divided by 2, the output of the OSC peripheral
//! (OSC_OUT), or an internal asynchronous clock, which, when selected,
//! operates in wait and stop modes. With any of these clock sources a
//! multi-value divider is provided to further divide the incoming clock by 1
//! (i.e. 1:1), 2, 4, or 8.
//!
//! The clock frequency must fall within 400kHz to 8MHz (4MHz in low power
//! mode), This is the same for all KEA MCUs. Ideally, the HAL will only
//! present valid options, but that is not yet implemented (pending clocks
//! improvements to output frequencies). For now you are trusted to input the
//! correct frequency.
//!
//! *Note:* When using the FIFO mode with FIFO scan mode disabled, the bus
//! clock must be faster than half the ADC clock (ADCK). Bus clock >= ADCK / 2.
//!
//! ## Pin Control
//!
//! This functionality is implemented in the GPIO module. See [Analog]
//! for details.
//!
//! ## Conversion Width
//!
//! The ADC can be run in 8, 10, or 12 bit modes. These modes are enumerated in
//! [AdcResolution].
//!
//! ## Hardware Trigger
//!
//! The ADC conversions can be started by a hardware trigger. This is not
//! implemented in all KEA chips, so implementation here will be Delayed. Use
//! the PAC. Enable is ADC_SC2\[ADTRG\] = 1, and trigger is the ADHWT source.
//!
//! ## Usage
//!
//! ### AdcConfig struct
//!
//! [AdcConfig] offers public fields to allow for creation in-place. The
//! [AdcConfig::calculate_divisor] method allows the user to specify the
//! desired Adc Clock frequency (given the clock source frequency). The clock
//! divider which gets the closest to that frequency is chosen.
//!
//! The AdcConfig structure also implements the [Default] trait.
//!
//! ```rust
//! let config: AdcConfig = Default::default();
//!
//! config.calculate_divisor(20_u32.MHz(), 2_u32.MHz());
//! assert!(matches!(config.clock_divisor, ClockDivisor::_8));
//! ```
use crate::hal::adc::{Channel, OneShot};
use crate::{pac::ADC, HALExt};
use core::{convert::Infallible, marker::PhantomData};
use embedded_time::rate::*;
/// Error Enumeration for this module
#[derive(Debug)]
pub enum Error {
/// The Channel has already been moved
Moved,
}
/// Analog type state for a GPIO pin.
///
/// This mode "gives" the pin to the ADC hardware peripheral.
/// The ADC Peripheral can take the GPIO pins in any state. The Peripheral will
/// reconfigure the pin to turn off any output drivers, disable input buffers
/// (reading the pin after configuring as analog will return a zero), and
/// disable the pullup. Electrically, an Analog pin that is not currently under
/// conversion is effectively HighImpedence.
///
/// Once a pin is released from the ADC, it will return to its previous state.
/// The previous state includes output enabled, input enabled, pullup enabled,
/// and level (for outputs). Note to accomplish this the pin implements the
/// outof_analog method, which is semantically different from the other type
/// states.
///
/// For example, [crate::gpio::gpioa::PTA0] is configured to be a Output that is set high is
/// converted into the analog mode with the [crate::gpio::gpioa::PTA0::into_analog] method.
/// Once measurements from that pin are completed it will be returned to an
/// Output that is set high by calling the [Analog::outof_analog] method.
///
/// ```rust
/// let pta0 = gpioa.pta0.into_push_pull_output();
/// pta0.set_high();
/// let mut pta0 = pta0.into_analog(); // pta0 is hi-Z
/// let value = adc.read(&mut pta0).unwrap_or(0);
/// let pta0 = pta0.outof_analog(); // pta0 is push-pull output, set high.
/// ```
///
/// Note: This is a hardware feature that requires effectively no clock cycles
/// to complete. "Manually" reconfiguring the pins to HighImpedence before
/// calling into_analog() is discouraged, but it would not hurt anything.
pub struct Analog<Pin> {
pin: Pin,
}
/// Interface for ADC Peripheral.
///
/// Returned by calling [HALExt::split] on the pac [ADC] structure. Holds state
/// of peripheral.
pub struct Adc<State> {
peripheral: ADC,
_state: PhantomData<State>,
/// Contains the On-Chip ADC Channels, like the MCU's temperature sensor.
pub onchip_channels: OnChipChannels,
}
impl HALExt for ADC {
type T = Adc<Disabled>;
fn split(self) -> Adc<Disabled> {
Adc {
peripheral: self,
_state: PhantomData,
onchip_channels: OnChipChannels {
vss: Some(Analog {
pin: Vss::<Input> { _mode: PhantomData },
}),
temp_sense: Some(Analog {
pin: TempSense::<Input> { _mode: PhantomData },
}),
bandgap: Some(Analog {
pin: Bandgap::<Input> { _mode: PhantomData },
}),
vref_h: Some(Analog {
pin: VrefH::<Input> { _mode: PhantomData },
}),
vref_l: Some(Analog {
pin: VrefL::<Input> { _mode: PhantomData },
}),
},
}
}
}
/// Configuration struct for Adc peripheral.
pub struct AdcConfig {
/// Determines the clock source for the ADC peripheral
///
/// Default is [AdcClocks::Bus]
pub clock_source: AdcClocks,
/// Divides the clock source to get the ADC clock into it's usable range of
/// 400kHz - 8MHz (4MHz in low power mode).
///
/// Default is [ClockDivisor::_1] (no divison)
pub clock_divisor: ClockDivisor,
/// Set the resolution of ADC conversion
///
/// Default is [AdcResolution::_8bit]
pub resolution: AdcResolution,
/// Set ADC sample time.
///
/// Default is [AdcSampleTime::Short]
pub sample_time: AdcSampleTime,
/// Set low power mode
///
/// Default is false.
pub low_power: bool,
}
impl AdcConfig {
/// Calculate the ADC clock divisor
///
/// Uses the current clock source and clock frequency to determine
/// the best divisor to use in order to have minimal error between
/// the ADC clock rate and the desired ADC clock rate.
///
/// Note: This relies on trustworthy values for source_freq and valid
/// values for req_adc_freq. In the future this should know or
/// determine what the current clock frequency is instead of relying
/// on the user to provide it.
pub fn calculate_divisor(&mut self, source_freq: Hertz, req_adc_freq: Hertz) {
let denom: u8 = (source_freq.integer() / req_adc_freq.integer()) as u8;
let mut output: u8 = 1;
let mut err: i8 = (denom - output) as i8;
let mut err_old: i8 = err;
let max_divisor = match self.clock_source {
AdcClocks::Bus => 16,
_ => 8,
};
while output < max_divisor {
err = (denom - (output << 1)) as i8;
if err.is_negative() {
err = err.abs();
}
if err <= err_old {
output <<= 1;
err_old = err;
} else {
break;
}
}
// I am of the mind that this assert is okay, at least until the input
// clock can be known at compile time.
let ad_clock = source_freq.integer() / output as u32;
assert!(400_000 <= ad_clock);
assert!(
ad_clock
<= match self.low_power {
false => 8_000_000,
true => 4_000_000,
}
);
self.clock_divisor = match output {
1 => ClockDivisor::_1,
2 => ClockDivisor::_2,
4 => ClockDivisor::_4,
8 => ClockDivisor::_8,
_ => ClockDivisor::_16,
}
}
/// Set the divisor directly. panics if divisor isn't supported by the
/// clock source.
///
/// TODO: Refactor to remove assert. Add Clock Source as a type state
pub fn set_divisor(&mut self, divisor: ClockDivisor) {
// divisor can't be 16 unless using the Bus clock
assert!(
!(!matches!(self.clock_source, AdcClocks::Bus) && matches!(divisor, ClockDivisor::_16))
);
self.clock_divisor = divisor;
}
/// Sets the clock source, panics if divisor isn't supported
///
/// TODO: Refactor to remove assert. Add Clock Source as a type state
pub fn set_clock_source(&mut self, clock: AdcClocks) {
// Panic if setting the clock to anything other than Bus if the divisor
// is set to 16
assert!(
!matches!(clock, AdcClocks::Bus) && matches!(self.clock_divisor, ClockDivisor::_16)
);
self.clock_source = clock;
}
}
impl Default for AdcConfig {
fn default() -> AdcConfig {
AdcConfig {
clock_source: AdcClocks::Bus,
clock_divisor: ClockDivisor::_1,
resolution: AdcResolution::_12bit,
sample_time: AdcSampleTime::Short,
low_power: false,
}
}
}
/// Clock types available to the Adc peripheral
///
/// Dividers will be chosen appropriately to suit requested clock rate.
pub enum AdcClocks {
/// Use the incoming Bus Clock
Bus,
/// jkl
External,
/// Available in Wait AND Stop Mode
Async,
}
/// This enum represents the availabe ADC resolutions
///
/// Regardless of resolution chosen, results are always right justified
#[repr(u8)]
pub enum AdcResolution {
/// 8 bit AD conversion mode
_8bit = 0,
/// 10 bit AD conversion mode
_10bit = 1,
/// 12 bit AD conversion mode
_12bit = 2,
}
/// Adc sample time
pub enum AdcSampleTime {
/// Sample for 3.5 ADC clock (ADCK) cycles.
Short = 0,
/// Sample for 23.5 ADC clock (ADCK) cycles.
///
/// Required for high impedence (>2k @ADCK > 4MHz, >5k @ ADCK < 4MHz)
/// inputs.
Long = 1,
}
/// Adc Clock Divisors
///
/// Note 1/16 divisor is only usable for the Bus clock
pub enum ClockDivisor {
/// Source / 1, No divison
_1 = 0,
/// Source / 2
_2 = 1,
/// Source / 4
_4 = 2,
/// Source / 8
_8 = 3,
/// Source / 16
_16 = 4,
}
/// Enabled state
pub struct Enabled;
/// Disabled state
pub struct Disabled;
impl Adc<Enabled> {
/// Poll to determine if ADC conversion is complete.
///
/// Note: This flag is cleared when the sampling mode is changed,
/// interrupts are enabled, [Adc::set_channel] is called, and when [Adc::result] is
/// called (including [Adc::try_result])
pub fn is_done(&self) -> bool {
self.peripheral.sc1.read().coco().bit()
}
/// Poll to determine if ADC conversion is underway
pub fn is_converting(&self) -> bool {
self.peripheral.sc2.read().adact().bit()
}
/// Grab the last ADC conversion result.
pub fn result(&self) -> u16 {
self.peripheral.r.read().adr().bits()
}
/// Poll for conversion completion, if done return the result.
pub fn try_result(&self) -> Option<u16> {
if self.is_done() {
Some(self.result())
} else {
None
}
}
/// Set ADC target channel.
///
/// In Single conversion mode (OneShot), setting the channel begins the conversion. In FIFO mode
/// the channel is added to the FIFO buffer.
///
/// Note: If the channel is changed while a conversion is in progress the
/// current conversion will be cancelled. If in FIFO mode, conversion will
/// resume once the FIFO channels are refilled.
pub fn set_channel<T: Channel<Adc<Enabled>, ID = u8>>(&self, _pin: &T) {
self.peripheral
.sc1
.modify(|_, w| unsafe { w.adch().bits(T::channel()) });
}
/// Set the ADC's configuration
pub fn configure(self, config: AdcConfig) -> Adc<Enabled> {
self.peripheral.sc3.modify(|_, w| {
use pac::adc::sc3::{ADICLK_A, ADIV_A, ADLSMP_A, MODE_A};
w.adiclk()
.variant(match config.clock_source {
AdcClocks::Bus =>
// If divisor is 16, use the Bus / 2 clock source, else use
// the 1:1 Bus clock source
{
match config.clock_divisor {
ClockDivisor::_16 => ADICLK_A::_01,
_ => ADICLK_A::_00,
}
}
AdcClocks::External => ADICLK_A::_10,
AdcClocks::Async => ADICLK_A::_11,
})
.mode()
.variant(match config.resolution {
AdcResolution::_8bit => MODE_A::_00,
AdcResolution::_10bit => MODE_A::_01,
AdcResolution::_12bit => MODE_A::_10,
})
.adlsmp()
.variant(match config.sample_time {
AdcSampleTime::Short => ADLSMP_A::_0,
AdcSampleTime::Long => ADLSMP_A::_1,
})
.adiv()
.variant(match config.clock_divisor {
ClockDivisor::_1 => ADIV_A::_00,
ClockDivisor::_2 => ADIV_A::_01,
ClockDivisor::_4 => ADIV_A::_10,
_ => ADIV_A::_11,
})
.adlpc()
.bit(config.low_power)
});
// It looks like SCGC has to be set before touching the peripheral
// at all, else hardfault. Go back later to confirm that if using external clock
// scgc can be cleared.
// w.adc().variant(match config.clock_source {
// AdcClocks::Bus => ADC_A::_1,
// _ => ADC_A::_0,
// })
Adc {
peripheral: self.peripheral,
_state: PhantomData,
onchip_channels: self.onchip_channels,
}
}
}
impl Adc<Disabled> {
/// Connects the bus clock to the adc via the SIM peripheral, allowing
/// read and write access to ADC registers.
///
/// Any attempt to access ADC registers while disabled results in a
/// HardFault, generated by hardware.
///
/// This also enables the bandgap voltage reference.
pub fn enable(self) -> Adc<Enabled> {
cortex_m::interrupt::free(|_| {
unsafe { &(*pac::SIM::ptr()) }.scgc.modify(|_, w| {
use pac::sim::scgc::ADC_A;
w.adc().variant(ADC_A::_1)
});
// Don't start a conversion (set channel to DummyDisable)
self.peripheral.sc1.modify(|_, w| w.adch()._11111());
// Bandgap. Grab directly, Currently the bandgap isn't implemented
// in [system::PMC]. We will eventually have to pass in the pmc
// peripheral handle as a variable.
unsafe { &(*pac::PMC::ptr()) }
.spmsc1
.modify(|_, w| w.bgbe()._1());
});
Adc {
peripheral: self.peripheral,
_state: PhantomData,
onchip_channels: self.onchip_channels,
}
}
/// Set the ADC's configuration
///
/// This is a sugar method for calling [Adc<Disabled>::enable] followed by
/// [Adc<Enabled>::configure]
pub fn configure(self, config: AdcConfig) -> Adc<Enabled> {
self.enable().configure(config)
}
}
impl<Mode> Adc<Mode> {
/// Not Implemented
pub fn into_interrupt(self) -> Adc<Mode> {
unimplemented!("Interrupt is not yet implemented");
// Adc::<Mode> {
// peripheral: self.peripheral,
// _state: PhantomData,
// onchip_channels: self.onchip_channels,
// }
}
/// Not Implemented
pub fn into_fifo(self, _depth: u8) -> Adc<Mode> {
// self.peripheral
// .sc4
// .modify(|_r, w| w.afdep().bits(depth & 0x7));
// Adc::<Mode> {
// peripheral: self.peripheral,
// _state: PhantomData,
// onchip_channels: self.onchip_channels,
// }
unimplemented!("FIFO is not yet implemented");
}
/// Not Implemented
pub fn into_continuous(self) -> Adc<Mode> {
unimplemented!("Continuous Conversion mode not yet implemented");
}
}
impl OnChipChannels {
/// Request an instance of an on-chip [Vss] channel.
pub fn vss(&mut self) -> Result<Analog<Vss<Input>>, Error> {
self.vss.take().ok_or(Error::Moved)
}
/// Return the instance of [Vss]
pub fn return_vss(&mut self, inst: Analog<Vss<Input>>) {
self.vss.replace(inst);
}
/// Try to grab an instance of the onchip [TempSense] channel.
pub fn tempsense(&mut self) -> Result<Analog<TempSense<Input>>, Error> {
self.temp_sense.take().ok_or(Error::Moved)
}
/// Return the instance of [TempSense]
pub fn return_tempsense(&mut self, inst: Analog<TempSense<Input>>) {
self.temp_sense.replace(inst);
}
/// Try to grab an instance of the onchip [Bandgap] channel.
///
/// The bandgap reference is a fixed 1.16V (nom, Factory trimmed to +/-
/// 0.02V at Vdd=5.0 at 125C) signal that is available to the ADC Module.
/// It can be used as a voltage reference for the ACMP and as an [Analog]
/// channel that can be used to (roughly) check the VDD voltage
pub fn bandgap(&mut self) -> Result<Analog<Bandgap<Input>>, Error> {
self.bandgap.take().ok_or(Error::Moved)
}
/// Return the instance of [Bandgap]
pub fn return_bandgap(&mut self, inst: Analog<Bandgap<Input>>) {
self.bandgap.replace(inst);
}
/// Try to grab an instance of the onchip Voltage Reference High ([VrefH]) channel.
pub fn vref_h(&mut self) -> Result<Analog<VrefH<Input>>, Error> {
self.vref_h.take().ok_or(Error::Moved)
}
/// Return the instance of [VrefH]
pub fn | (&mut self, inst: Analog<VrefH<Input>>) {
self.vref_h.replace(inst);
}
/// Try to grab an instance of the onchip Voltage Reference Low ([VrefL]) channel.
pub fn vref_l(&mut self) -> Result<Analog<VrefL<Input>>, Error> {
self.vref_l.take().ok_or(Error::Moved)
}
/// Return the instance of [VrefL]
pub fn return_vref_l(&mut self, inst: Analog<VrefL<Input>>) {
self.vref_l.replace(inst);
}
/// Grab a [DummyDisable] instance. Multiple Instances possible.
pub fn dummy_disable(&self) -> Analog<DummyDisable<Input>> {
Analog {
pin: DummyDisable::<Input> { _mode: PhantomData },
}
}
}
/// Holds On-Chip ADC Channel inputs and provides an interface to grab and return them.
// These have to have the Input dummy type to allow them to have the Channel
// trait.
pub struct OnChipChannels {
vss: Option<Analog<Vss<Input>>>,
temp_sense: Option<Analog<TempSense<Input>>>,
bandgap: Option<Analog<Bandgap<Input>>>,
vref_h: Option<Analog<VrefH<Input>>>,
vref_l: Option<Analog<VrefL<Input>>>,
}
/// Dummy type state for on-chip ADC input channels
pub struct Input;
/// Adc Input Channel, measures ground (should be 0?)
pub struct Vss<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, measures internal temperature sensor
pub struct TempSense<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Bandgap internal voltage reference
pub struct Bandgap<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Voltage Reference, High
pub struct VrefH<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Voltage Reference, Low
pub struct VrefL<Input> {
_mode: PhantomData<Input>,
}
/// Dummy Channel that temporarily disables the Adc Module.
pub struct DummyDisable<Input> {
_mode: PhantomData<Input>,
}
macro_rules! adc_input_channels {
( $($Chan:expr => $Pin:ident),+ $(,)*) => {
$(
impl<OldMode> Channel<Adc<Enabled>> for Analog<$Pin<OldMode>> {
type ID = u8;
fn channel() -> u8 { $Chan }
}
)+
};
}
use crate::gpio::{gpioa::*, gpiob::*};
adc_input_channels! (
0_u8 => PTA0,
1_u8 => PTA1,
2_u8 => PTA6,
3_u8 => PTA7,
4_u8 => PTB0,
5_u8 => PTB1,
6_u8 => PTB2,
7_u8 => PTB3,
8_u8 => PTC0,
9_u8 => PTC1,
10_u8 => PTC2,
11_u8 => PTC3,
12_u8 => PTF4,
13_u8 => PTF5,
14_u8 => PTF6,
15_u8 => PTF7,
16_u8 => Vss,
22_u8 => TempSense,
23_u8 => Bandgap,
24_u8 => VrefH,
25_u8 => VrefL,
0x1F_u8 => DummyDisable,
);
macro_rules! impl_analog_pin {
( $($Chan:expr => $Pin:ident),+ $(,)*) => {
$(
impl<OldMode> $Pin<OldMode> {
/// Convert Pin into the [Analog] state for use by the ADC.
///
/// This implementation provides the GPIO interface a method to
/// give an eligible pin to the ADC peripheral for conversion
/// into an Analog pin. This method is only implemented in
/// eligible pins. The ADC peripheral disables the GPIO and
/// PORT control over the pin and connects it to the ADC mux
/// (controlled by [Adc::set_channel].
///
/// Note: The [Analog::outof_analog] method must be used to
/// return the pin to a normal Input/Output typestate. The pin
/// will be returned in the same typestate as it was received.
pub fn into_analog(self) -> Analog<$Pin<OldMode>> {
unsafe {
(*ADC::ptr())
.apctl1
.modify(|r, w| w.adpc().bits(r.adpc().bits() | (1 << $Chan)));
}
Analog { pin: self }
}
}
impl<OldMode> Analog<$Pin<OldMode>> {
/// Return Analog state Pin to normal GPIO-state interface.
///
/// The Pin will be in the same state that it was when it
/// entered the Analog type state.
pub fn outof_analog(self) -> $Pin<OldMode> {
let adc = unsafe { &(*ADC::ptr()) };
adc.apctl1
.modify(|r, w| unsafe { w.adpc().bits(r.adpc().bits() & !(1 << $Chan)) });
self.pin
}
}
)+
};
}
impl_analog_pin!(
0_u8 => PTA0,
1_u8 => PTA1,
2_u8 => PTA6,
3_u8 => PTA7,
4_u8 => PTB0,
5_u8 => PTB1,
6_u8 => PTB2,
7_u8 => PTB3,
8_u8 => PTC0,
9_u8 => PTC1,
10_u8 => PTC2,
11_u8 => PTC3,
12_u8 => PTF4,
13_u8 => PTF5,
14_u8 => PTF6,
15_u8 => PTF7,
);
impl<Pin> OneShot<Adc<Enabled>, u16, Pin> for Adc<Enabled>
where
Pin: Channel<Adc<Enabled>, ID = u8>,
{
type Error = Infallible;
fn read(&mut self, pin: &mut Pin) -> nb::Result<u16, Self::Error> {
self.set_channel(pin);
while !self.is_done() {}
let ret_val = Ok(self.result());
let disable = self.onchip_channels.dummy_disable();
self.set_channel(&disable);
ret_val
}
}
| return_vref_h | identifier_name |
adc.rs | //! The ADC Interface
//!
//! The ADC is disabled at startup and must be enabled (by calling
//! [Adc<Disabled>::enable]) before any of its registers can be accessed
//! (read or write). Attempts to access these registers will trigger a hardware
//! generated HardFault, which by default resets the microcontroller.
//!
//! The ADC can be polled for conversion completion with [Adc::is_done].
//! Completion will trigger an ADC Interrupt if enabled. See
//! [Adc::into_interrupt]
//!
//! ## Input Modes
//!
//! The Adc peripheral can operate in either single input or FIFO modes. Single
//! input mode is the mode most commonly thought of when using an ADC. A
//! multiplexer (via Adc::set_channel) is used to connect a single channel to
//! the ADC, and when the conversion is complete the hardware makes the results
//! available in the results register. The software must call
//! [Adc::set_channel] again to either select a new channel or to restart the
//! conversion on the same channel.
//!
//! The FIFO mode sets up a hardware buffer of selectable depth (2-8 channels).
//! Once the buffer is filled the Adc peripheral shoves the buffer contents
//! into the multiplexer channel by channel. Likewise, as each conversion is
//! completed the results are buffered into the result register in the same
//! order as the channel select buffer.
//!
//! Note: FIFO mode is not yet implemented in this HAL
//!
//! ## Conversion Modes
//!
//! The Adc peripheral offers 2 conversion modes, OneShot and Continuous. In
//! OneShot mode, the conversion is started when the channel is selected (or
//! when the channel select buffer is filled in FIFO mode). After completion no
//! new conversion is started until the channel is set again, even if the same
//! channel is used.
//!
//! In Continuous mode a new conversion is started immediately
//! after the previous one is completed. Changing the channel interrupts the
//! conversion and immediately begins conversion on the new channel (unless the
//! new channel is [DummyDisable], then the conversion is allowed to complete,
//! but no new conversion is started). In FIFO mode the input FIFO is reloaded
//! after completion, in other words the same N values are converted on a loop.
//!
//! Note: Continuous mode is not yet implemented in this HAL
//!
//! ## Comparison Mode
//!
//! Note: Comparison mode is not yet implemented in this HAL
//!
//! Comparison mode is a hardware feature of the Adc Peripheral. If set, the
//! conversion result is compared to the comparison value. If the result
//! is greater than or less than (depending on configuration) the comparison
//! value the result is moved into the result register. Otherwise, the result
//! is discarded \[Note: Unsure if the conversion is restarted in OneShot
//! mode\].
//!
//! A common use case for comparison mode is to enter a low power state with
//! the Adc configured to use the asynchronous clock source and to generate an
//! interrupt on completion. When the input channel crosses the comparison
//! threshold the interrupt is triggered, waking the MCU.
//!
//! ## Clocking
//!
//! The ADC requires a clock signal (ADCK), which is generated from the bus
//! clock, the bus clock divided by 2, the output of the OSC peripheral
//! (OSC_OUT), or an internal asynchronous clock, which, when selected,
//! operates in wait and stop modes. With any of these clock sources a
//! multi-value divider is provided to further divide the incoming clock by 1
//! (i.e. 1:1), 2, 4, or 8.
//!
//! The clock frequency must fall within 400kHz to 8MHz (4MHz in low power
//! mode), This is the same for all KEA MCUs. Ideally, the HAL will only
//! present valid options, but that is not yet implemented (pending clocks
//! improvements to output frequencies). For now you are trusted to input the
//! correct frequency.
//!
//! *Note:* When using the FIFO mode with FIFO scan mode disabled, the bus
//! clock must be faster than half the ADC clock (ADCK). Bus clock >= ADCK / 2.
//!
//! ## Pin Control
//!
//! This functionality is implemented in the GPIO module. See [Analog]
//! for details.
//!
//! ## Conversion Width
//!
//! The ADC can be run in 8, 10, or 12 bit modes. These modes are enumerated in
//! [AdcResolution].
//!
//! ## Hardware Trigger
//!
//! The ADC conversions can be started by a hardware trigger. This is not
//! implemented in all KEA chips, so implementation here will be Delayed. Use
//! the PAC. Enable is ADC_SC2\[ADTRG\] = 1, and trigger is the ADHWT source.
//!
//! ## Usage
//!
//! ### AdcConfig struct
//!
//! [AdcConfig] offers public fields to allow for creation in-place. The
//! [AdcConfig::calculate_divisor] method allows the user to specify the
//! desired Adc Clock frequency (given the clock source frequency). The clock
//! divider which gets the closest to that frequency is chosen.
//!
//! The AdcConfig structure also implements the [Default] trait.
//!
//! ```rust
//! let config: AdcConfig = Default::default();
//!
//! config.calculate_divisor(20_u32.MHz(), 2_u32.MHz());
//! assert!(matches!(config.clock_divisor, ClockDivisor::_8));
//! ```
use crate::hal::adc::{Channel, OneShot};
use crate::{pac::ADC, HALExt};
use core::{convert::Infallible, marker::PhantomData};
use embedded_time::rate::*;
/// Error Enumeration for this module
#[derive(Debug)]
pub enum Error {
/// The Channel has already been moved
Moved,
}
/// Analog type state for a GPIO pin.
///
/// This mode "gives" the pin to the ADC hardware peripheral.
/// The ADC Peripheral can take the GPIO pins in any state. The Peripheral will
/// reconfigure the pin to turn off any output drivers, disable input buffers
/// (reading the pin after configuring as analog will return a zero), and
/// disable the pullup. Electrically, an Analog pin that is not currently under
/// conversion is effectively HighImpedence.
///
/// Once a pin is released from the ADC, it will return to its previous state.
/// The previous state includes output enabled, input enabled, pullup enabled,
/// and level (for outputs). Note to accomplish this the pin implements the
/// outof_analog method, which is semantically different from the other type
/// states.
///
/// For example, [crate::gpio::gpioa::PTA0] is configured to be a Output that is set high is
/// converted into the analog mode with the [crate::gpio::gpioa::PTA0::into_analog] method.
/// Once measurements from that pin are completed it will be returned to an
/// Output that is set high by calling the [Analog::outof_analog] method.
///
/// ```rust
/// let pta0 = gpioa.pta0.into_push_pull_output();
/// pta0.set_high();
/// let mut pta0 = pta0.into_analog(); // pta0 is hi-Z
/// let value = adc.read(&mut pta0).unwrap_or(0);
/// let pta0 = pta0.outof_analog(); // pta0 is push-pull output, set high.
/// ```
///
/// Note: This is a hardware feature that requires effectively no clock cycles
/// to complete. "Manually" reconfiguring the pins to HighImpedence before
/// calling into_analog() is discouraged, but it would not hurt anything.
pub struct Analog<Pin> {
pin: Pin,
}
/// Interface for ADC Peripheral.
///
/// Returned by calling [HALExt::split] on the pac [ADC] structure. Holds state
/// of peripheral.
pub struct Adc<State> {
peripheral: ADC,
_state: PhantomData<State>,
/// Contains the On-Chip ADC Channels, like the MCU's temperature sensor.
pub onchip_channels: OnChipChannels,
}
impl HALExt for ADC {
type T = Adc<Disabled>;
fn split(self) -> Adc<Disabled> {
Adc {
peripheral: self,
_state: PhantomData,
onchip_channels: OnChipChannels {
vss: Some(Analog {
pin: Vss::<Input> { _mode: PhantomData },
}),
temp_sense: Some(Analog {
pin: TempSense::<Input> { _mode: PhantomData },
}),
bandgap: Some(Analog {
pin: Bandgap::<Input> { _mode: PhantomData },
}),
vref_h: Some(Analog {
pin: VrefH::<Input> { _mode: PhantomData },
}),
vref_l: Some(Analog {
pin: VrefL::<Input> { _mode: PhantomData },
}),
},
}
}
}
/// Configuration struct for Adc peripheral.
pub struct AdcConfig {
/// Determines the clock source for the ADC peripheral
///
/// Default is [AdcClocks::Bus]
pub clock_source: AdcClocks,
/// Divides the clock source to get the ADC clock into it's usable range of
/// 400kHz - 8MHz (4MHz in low power mode).
///
/// Default is [ClockDivisor::_1] (no divison)
pub clock_divisor: ClockDivisor,
/// Set the resolution of ADC conversion
///
/// Default is [AdcResolution::_8bit]
pub resolution: AdcResolution,
/// Set ADC sample time.
///
/// Default is [AdcSampleTime::Short]
pub sample_time: AdcSampleTime,
/// Set low power mode
///
/// Default is false.
pub low_power: bool,
}
impl AdcConfig {
    /// Calculate the ADC clock divisor
    ///
    /// Uses the current clock source and clock frequency to determine
    /// the best power-of-two divisor to use in order to have minimal error
    /// between the ADC clock rate and the desired ADC clock rate.
    ///
    /// Note: This relies on trustworthy values for source_freq and valid
    /// values for req_adc_freq. In the future this should know or
    /// determine what the current clock frequency is instead of relying
    /// on the user to provide it.
    ///
    /// # Panics
    ///
    /// Panics if the resulting ADC clock falls outside the valid range of
    /// 400kHz..=8MHz (4MHz in low power mode).
    pub fn calculate_divisor(&mut self, source_freq: Hertz, req_adc_freq: Hertz) {
        // Ideal (integer) ratio between source and requested clock. Widened
        // to i32 so the error terms below cannot underflow: the previous u8
        // arithmetic panicked in debug builds (wrapped in release) whenever a
        // candidate divisor overshot the ideal ratio, and also truncated
        // ratios larger than 255.
        let denom = (source_freq.integer() / req_adc_freq.integer()) as i32;
        let mut output: i32 = 1;
        let mut err_old = (denom - output).abs();
        // Only the Bus clock source supports the /16 divisor.
        let max_divisor = match self.clock_source {
            AdcClocks::Bus => 16,
            _ => 8,
        };
        // Keep doubling the divisor while doing so reduces the error.
        while output < max_divisor {
            let err = (denom - (output << 1)).abs();
            if err <= err_old {
                output <<= 1;
                err_old = err;
            } else {
                break;
            }
        }
        // I am of the mind that this assert is okay, at least until the input
        // clock can be known at compile time.
        let ad_clock = source_freq.integer() / output as u32;
        assert!(400_000 <= ad_clock);
        assert!(
            ad_clock
                <= match self.low_power {
                    false => 8_000_000,
                    true => 4_000_000,
                }
        );
        self.clock_divisor = match output {
            1 => ClockDivisor::_1,
            2 => ClockDivisor::_2,
            4 => ClockDivisor::_4,
            8 => ClockDivisor::_8,
            _ => ClockDivisor::_16,
        }
    }
    /// Set the divisor directly. panics if divisor isn't supported by the
    /// clock source.
    ///
    /// TODO: Refactor to remove assert. Add Clock Source as a type state
    pub fn set_divisor(&mut self, divisor: ClockDivisor) {
        // divisor can't be 16 unless using the Bus clock
        assert!(
            !(!matches!(self.clock_source, AdcClocks::Bus) && matches!(divisor, ClockDivisor::_16))
        );
        self.clock_divisor = divisor;
    }
    /// Sets the clock source, panics if divisor isn't supported
    ///
    /// TODO: Refactor to remove assert. Add Clock Source as a type state
    pub fn set_clock_source(&mut self, clock: AdcClocks) {
        // Panic if setting the clock to anything other than Bus while the
        // divisor is 16 (only the Bus source supports /16). The original
        // assert was missing the outer negation, so it panicked on every
        // valid combination and accepted the invalid one; this mirrors the
        // (correct) check in `set_divisor`.
        assert!(
            !(!matches!(clock, AdcClocks::Bus) && matches!(self.clock_divisor, ClockDivisor::_16))
        );
        self.clock_source = clock;
    }
}
impl Default for AdcConfig {
    /// Default configuration: Bus clock, no division, 12-bit resolution,
    /// short sample time, normal (not low) power.
    fn default() -> AdcConfig {
        let clock_source = AdcClocks::Bus;
        let clock_divisor = ClockDivisor::_1;
        AdcConfig {
            clock_source,
            clock_divisor,
            resolution: AdcResolution::_12bit,
            sample_time: AdcSampleTime::Short,
            low_power: false,
        }
    }
}
/// Clock types available to the Adc peripheral
///
/// Dividers will be chosen appropriately to suit requested clock rate.
pub enum AdcClocks {
    /// Use the incoming Bus Clock
    Bus,
    /// Use the external clock source (OSC_OUT). NOTE(review): the original
    /// doc here was a placeholder ("jkl"); description taken from the
    /// module-level clocking notes — confirm against the reference manual.
    External,
    /// Available in Wait AND Stop Mode
    Async,
}
/// This enum represents the available ADC resolutions
///
/// Regardless of resolution chosen, results are always right justified
#[repr(u8)]
pub enum AdcResolution {
    /// 8 bit AD conversion mode
    _8bit = 0,
    /// 10 bit AD conversion mode
    _10bit = 1,
    /// 12 bit AD conversion mode
    _12bit = 2,
}
/// Adc sample time
pub enum AdcSampleTime {
    /// Sample for 3.5 ADC clock (ADCK) cycles.
    Short = 0,
    /// Sample for 23.5 ADC clock (ADCK) cycles.
    ///
    /// Required for high impedance (>2k @ADCK > 4MHz, >5k @ ADCK < 4MHz)
    /// inputs.
    Long = 1,
}
/// Adc Clock Divisors
///
/// Note 1/16 divisor is only usable for the Bus clock
pub enum ClockDivisor {
    /// Source / 1, No division
    _1 = 0,
    /// Source / 2
    _2 = 1,
    /// Source / 4
    _4 = 2,
    /// Source / 8
    _8 = 3,
    /// Source / 16
    _16 = 4,
}
/// Enabled type state: the ADC's bus clock is gated on and registers are accessible.
pub struct Enabled;
/// Disabled type state: register access would hardfault; see [Adc::enable].
pub struct Disabled;
impl Adc<Enabled> {
    /// Poll to determine if ADC conversion is complete.
    ///
    /// Note: This flag is cleared when the sampling mode is changed,
    /// interrupts are enabled, [Adc::set_channel] is called, and when [Adc::result] is
    /// called (including [Adc::try_result])
    pub fn is_done(&self) -> bool {
        // SC1[COCO]: conversion-complete flag.
        self.peripheral.sc1.read().coco().bit()
    }
    /// Poll to determine if ADC conversion is underway
    pub fn is_converting(&self) -> bool {
        // SC2[ADACT]: conversion-active flag.
        self.peripheral.sc2.read().adact().bit()
    }
    /// Grab the last ADC conversion result.
    ///
    /// Note: per the [Adc::is_done] docs, reading the result also clears the
    /// conversion-complete flag.
    pub fn result(&self) -> u16 {
        self.peripheral.r.read().adr().bits()
    }
    /// Poll for conversion completion, if done return the result.
    pub fn try_result(&self) -> Option<u16> {
        if self.is_done() {
            Some(self.result())
        } else {
            None
        }
    }
    /// Set ADC target channel.
    ///
    /// In Single conversion mode (OneShot), setting the channel begins the conversion. In FIFO mode
    /// the channel is added to the FIFO buffer.
    ///
    /// Note: If the channel is changed while a conversion is in progress the
    /// current conversion will be cancelled. If in FIFO mode, conversion will
    /// resume once the FIFO channels are refilled.
    pub fn set_channel<T: Channel<Adc<Enabled>, ID = u8>>(&self, _pin: &T) {
        // The pin argument is only a type-level witness; the channel number
        // comes from the Channel trait impl.
        self.peripheral
            .sc1
            .modify(|_, w| unsafe { w.adch().bits(T::channel()) });
    }
    /// Set the ADC's configuration
    pub fn configure(self, config: AdcConfig) -> Adc<Enabled> {
        // All configuration fields live in SC3; write them in one modify.
        self.peripheral.sc3.modify(|_, w| {
            use pac::adc::sc3::{ADICLK_A, ADIV_A, ADLSMP_A, MODE_A};
            w.adiclk()
                .variant(match config.clock_source {
                    AdcClocks::Bus =>
                    // If divisor is 16, use the Bus / 2 clock source, else use
                    // the 1:1 Bus clock source
                    {
                        match config.clock_divisor {
                            ClockDivisor::_16 => ADICLK_A::_01,
                            _ => ADICLK_A::_00,
                        }
                    }
                    AdcClocks::External => ADICLK_A::_10,
                    AdcClocks::Async => ADICLK_A::_11,
                })
                .mode()
                .variant(match config.resolution {
                    AdcResolution::_8bit => MODE_A::_00,
                    AdcResolution::_10bit => MODE_A::_01,
                    AdcResolution::_12bit => MODE_A::_10,
                })
                .adlsmp()
                .variant(match config.sample_time {
                    AdcSampleTime::Short => ADLSMP_A::_0,
                    AdcSampleTime::Long => ADLSMP_A::_1,
                })
                .adiv()
                // _16 maps to /8 here because the extra /2 comes from the
                // ADICLK Bus/2 source selected above.
                .variant(match config.clock_divisor {
                    ClockDivisor::_1 => ADIV_A::_00,
                    ClockDivisor::_2 => ADIV_A::_01,
                    ClockDivisor::_4 => ADIV_A::_10,
                    _ => ADIV_A::_11,
                })
                .adlpc()
                .bit(config.low_power)
        });
        // It looks like SCGC has to be set before touching the peripheral
        // at all, else hardfault. Go back later to confirm that if using external clock
        // scgc can be cleared.
        // w.adc().variant(match config.clock_source {
        //     AdcClocks::Bus => ADC_A::_1,
        //     _ => ADC_A::_0,
        // })
        Adc {
            peripheral: self.peripheral,
            _state: PhantomData,
            onchip_channels: self.onchip_channels,
        }
    }
}
impl Adc<Disabled> {
/// Connects the bus clock to the adc via the SIM peripheral, allowing
/// read and write access to ADC registers.
///
/// Any attempt to access ADC registers while disabled results in a
/// HardFault, generated by hardware.
///
/// This also enables the bandgap voltage reference.
pub fn enable(self) -> Adc<Enabled> {
cortex_m::interrupt::free(|_| {
unsafe { &(*pac::SIM::ptr()) }.scgc.modify(|_, w| {
use pac::sim::scgc::ADC_A;
w.adc().variant(ADC_A::_1)
});
|
// Bandgap. Grab directly, Currently the bandgap isn't implemented
// in [system::PMC]. We will eventually have to pass in the pmc
// peripheral handle as a variable.
unsafe { &(*pac::PMC::ptr()) }
.spmsc1
.modify(|_, w| w.bgbe()._1());
});
Adc {
peripheral: self.peripheral,
_state: PhantomData,
onchip_channels: self.onchip_channels,
}
}
/// Set the ADC's configuration
///
/// This is a sugar method for calling [Adc<Disabled>::enable] followed by
/// [Adc<Enabled>::configure]
pub fn configure(self, config: AdcConfig) -> Adc<Enabled> {
self.enable().configure(config)
}
}
impl<Mode> Adc<Mode> {
    /// Not Implemented
    ///
    /// Will eventually enable the conversion-complete interrupt.
    pub fn into_interrupt(self) -> Adc<Mode> {
        unimplemented!("Interrupt is not yet implemented");
        // Adc::<Mode> {
        //     peripheral: self.peripheral,
        //     _state: PhantomData,
        //     onchip_channels: self.onchip_channels,
        // }
    }
    /// Not Implemented
    ///
    /// Will eventually configure the hardware channel-select FIFO
    /// (`_depth` is intended to be 2-8 entries, masked to 3 bits).
    pub fn into_fifo(self, _depth: u8) -> Adc<Mode> {
        // self.peripheral
        //     .sc4
        //     .modify(|_r, w| w.afdep().bits(depth & 0x7));
        // Adc::<Mode> {
        //     peripheral: self.peripheral,
        //     _state: PhantomData,
        //     onchip_channels: self.onchip_channels,
        // }
        unimplemented!("FIFO is not yet implemented");
    }
    /// Not Implemented
    pub fn into_continuous(self) -> Adc<Mode> {
        unimplemented!("Continuous Conversion mode not yet implemented");
    }
}
impl OnChipChannels {
/// Request an instance of an on-chip [Vss] channel.
pub fn vss(&mut self) -> Result<Analog<Vss<Input>>, Error> {
self.vss.take().ok_or(Error::Moved)
}
/// Return the instance of [Vss]
pub fn return_vss(&mut self, inst: Analog<Vss<Input>>) {
self.vss.replace(inst);
}
/// Try to grab an instance of the onchip [TempSense] channel.
pub fn tempsense(&mut self) -> Result<Analog<TempSense<Input>>, Error> {
self.temp_sense.take().ok_or(Error::Moved)
}
/// Return the instance of [TempSense]
pub fn return_tempsense(&mut self, inst: Analog<TempSense<Input>>) {
self.temp_sense.replace(inst);
}
/// Try to grab an instance of the onchip [Bandgap] channel.
///
/// The bandgap reference is a fixed 1.16V (nom, Factory trimmed to +/-
/// 0.02V at Vdd=5.0 at 125C) signal that is available to the ADC Module.
/// It can be used as a voltage reference for the ACMP and as an [Analog]
/// channel that can be used to (roughly) check the VDD voltage
pub fn bandgap(&mut self) -> Result<Analog<Bandgap<Input>>, Error> {
self.bandgap.take().ok_or(Error::Moved)
}
/// Return the instance of [Bandgap]
pub fn return_bandgap(&mut self, inst: Analog<Bandgap<Input>>) {
self.bandgap.replace(inst);
}
/// Try to grab an instance of the onchip Voltage Reference High ([VrefH]) channel.
pub fn vref_h(&mut self) -> Result<Analog<VrefH<Input>>, Error> {
self.vref_h.take().ok_or(Error::Moved)
}
/// Return the instance of [VrefH]
pub fn return_vref_h(&mut self, inst: Analog<VrefH<Input>>) {
self.vref_h.replace(inst);
}
/// Try to grab an instance of the onchip Voltage Reference Low ([VrefL]) channel.
pub fn vref_l(&mut self) -> Result<Analog<VrefL<Input>>, Error> {
self.vref_l.take().ok_or(Error::Moved)
}
/// Return the instance of [VrefL]
pub fn return_vref_l(&mut self, inst: Analog<VrefL<Input>>) {
self.vref_l.replace(inst);
}
/// Grab a [DummyDisable] instance. Multiple Instances possible.
pub fn dummy_disable(&self) -> Analog<DummyDisable<Input>> {
Analog {
pin: DummyDisable::<Input> { _mode: PhantomData },
}
}
}
/// Holds On-Chip ADC Channel inputs and provides an interface to grab and return them.
// These have to have the Input dummy type to allow them to have the Channel
// trait. Each Option acts as a one-slot ownership pool: Some = available,
// None = handed out to a caller.
pub struct OnChipChannels {
    vss: Option<Analog<Vss<Input>>>,
    temp_sense: Option<Analog<TempSense<Input>>>,
    bandgap: Option<Analog<Bandgap<Input>>>,
    vref_h: Option<Analog<VrefH<Input>>>,
    vref_l: Option<Analog<VrefL<Input>>>,
}
/// Dummy type state for on-chip ADC input channels
pub struct Input;
/// Adc Input Channel, measures ground (should be 0?)
pub struct Vss<Input> {
    _mode: PhantomData<Input>,
}
/// Adc Input Channel, measures internal temperature sensor
pub struct TempSense<Input> {
    _mode: PhantomData<Input>,
}
/// Adc Input Channel, Bandgap internal voltage reference
pub struct Bandgap<Input> {
    _mode: PhantomData<Input>,
}
/// Adc Input Channel, Voltage Reference, High
pub struct VrefH<Input> {
    _mode: PhantomData<Input>,
}
/// Adc Input Channel, Voltage Reference, Low
pub struct VrefL<Input> {
    _mode: PhantomData<Input>,
}
/// Dummy Channel that temporarily disables the Adc Module.
pub struct DummyDisable<Input> {
    _mode: PhantomData<Input>,
}
// Implement the embedded-hal `Channel` trait for `Analog<Pin>` wrappers,
// associating each pin (in any prior GPIO mode) with its hardware channel
// number.
macro_rules! adc_input_channels {
    ( $($Chan:expr => $Pin:ident),+ $(,)*) => {
        $(
            impl<OldMode> Channel<Adc<Enabled>> for Analog<$Pin<OldMode>> {
                type ID = u8;
                fn channel() -> u8 { $Chan }
            }
        )+
    };
}
use crate::gpio::{gpioa::*, gpiob::*};
// Channel-number -> input mapping. GPIO pins occupy channels 0-15; channels
// 16+ are on-chip sources, and 0x1F parks the mux (DummyDisable).
adc_input_channels! (
    0_u8 => PTA0,
    1_u8 => PTA1,
    2_u8 => PTA6,
    3_u8 => PTA7,
    4_u8 => PTB0,
    5_u8 => PTB1,
    6_u8 => PTB2,
    7_u8 => PTB3,
    8_u8 => PTC0,
    9_u8 => PTC1,
    10_u8 => PTC2,
    11_u8 => PTC3,
    12_u8 => PTF4,
    13_u8 => PTF5,
    14_u8 => PTF6,
    15_u8 => PTF7,
    16_u8 => Vss,
    22_u8 => TempSense,
    23_u8 => Bandgap,
    24_u8 => VrefH,
    25_u8 => VrefL,
    0x1F_u8 => DummyDisable,
);
// For each ADC-capable GPIO pin, implement conversion into and out of the
// [Analog] type state by setting/clearing that channel's bit in APCTL1.
macro_rules! impl_analog_pin {
    ( $($Chan:expr => $Pin:ident),+ $(,)*) => {
        $(
            impl<OldMode> $Pin<OldMode> {
                /// Convert Pin into the [Analog] state for use by the ADC.
                ///
                /// This implementation provides the GPIO interface a method to
                /// give an eligible pin to the ADC peripheral for conversion
                /// into an Analog pin. This method is only implemented in
                /// eligible pins. The ADC peripheral disables the GPIO and
                /// PORT control over the pin and connects it to the ADC mux
                /// (controlled by [Adc::set_channel].
                ///
                /// Note: The [Analog::outof_analog] method must be used to
                /// return the pin to a normal Input/Output typestate. The pin
                /// will be returned in the same typestate as it was received.
                pub fn into_analog(self) -> Analog<$Pin<OldMode>> {
                    // Set this channel's bit in APCTL1 (pin control disable).
                    unsafe {
                        (*ADC::ptr())
                            .apctl1
                            .modify(|r, w| w.adpc().bits(r.adpc().bits() | (1 << $Chan)));
                    }
                    Analog { pin: self }
                }
            }
            impl<OldMode> Analog<$Pin<OldMode>> {
                /// Return Analog state Pin to normal GPIO-state interface.
                ///
                /// The Pin will be in the same state that it was when it
                /// entered the Analog type state.
                pub fn outof_analog(self) -> $Pin<OldMode> {
                    // Clear this channel's bit in APCTL1, re-enabling the PORT.
                    let adc = unsafe { &(*ADC::ptr()) };
                    adc.apctl1
                        .modify(|r, w| unsafe { w.adpc().bits(r.adpc().bits() & !(1 << $Chan)) });
                    self.pin
                }
            }
        )+
    };
}
// Only real GPIO pins (channels 0-15) get into_analog/outof_analog; the
// on-chip channels are handed out through [OnChipChannels] instead.
impl_analog_pin!(
    0_u8 => PTA0,
    1_u8 => PTA1,
    2_u8 => PTA6,
    3_u8 => PTA7,
    4_u8 => PTB0,
    5_u8 => PTB1,
    6_u8 => PTB2,
    7_u8 => PTB3,
    8_u8 => PTC0,
    9_u8 => PTC1,
    10_u8 => PTC2,
    11_u8 => PTC3,
    12_u8 => PTF4,
    13_u8 => PTF5,
    14_u8 => PTF6,
    15_u8 => PTF7,
);
impl<Pin> OneShot<Adc<Enabled>, u16, Pin> for Adc<Enabled>
where
Pin: Channel<Adc<Enabled>, ID = u8>,
{
type Error = Infallible;
fn read(&mut self, pin: &mut Pin) -> nb::Result<u16, Self::Error> {
self.set_channel(pin);
while !self.is_done() {}
let ret_val = Ok(self.result());
let disable = self.onchip_channels.dummy_disable();
self.set_channel(&disable);
ret_val
}
} | // Don't start a conversion (set channel to DummyDisable)
self.peripheral.sc1.modify(|_, w| w.adch()._11111()); | random_line_split |
//! The ADC Interface
//!
//! The ADC is disabled at startup and must be enabled (by calling
//! [Adc<Disabled>::enable]) before any of its registers can be accessed
//! (read or write). Attempts to access these registers will trigger a hardware
//! generated HardFault, which by default resets the microcontroller.
//!
//! The ADC can be polled for conversion completion with [Adc::is_done].
//! Completion will trigger an ADC Interrupt if enabled. See
//! [Adc::into_interrupt]
//!
//! ## Input Modes
//!
//! The Adc peripheral can operate in either single input or FIFO modes. Single
//! input mode is the mode most commonly thought of when using an ADC. A
//! multiplexer (via Adc::set_channel) is used to connect a single channel to
//! the ADC, and when the conversion is complete the hardware makes the results
//! available in the results register. The software must call
//! [Adc::set_channel] again to either select a new channel or to restart the
//! conversion on the same channel.
//!
//! The FIFO mode sets up a hardware buffer of selectable depth (2-8 channels).
//! Once the buffer is filled the Adc peripheral shoves the buffer contents
//! into the multiplexer channel by channel. Likewise, as each conversion is
//! completed the results are buffered into the result register in the same
//! order as the channel select buffer.
//!
//! Note: FIFO mode is not yet implemented in this HAL
//!
//! ## Conversion Modes
//!
//! The Adc peripheral offers 2 conversion modes, OneShot and Continuous. In
//! OneShot mode, the conversion is started when the channel is selected (or
//! when the channel select buffer is filled in FIFO mode). After completion no
//! new conversion is started until the channel is set again, even if the same
//! channel is used.
//!
//! In Continuous mode a new conversion is started immediately
//! after the previous one is completed. Changing the channel interrupts the
//! conversion and immediately begins conversion on the new channel (unless the
//! new channel is [DummyDisable], then the conversion is allowed to complete,
//! but no new conversion is started). In FIFO mode the input FIFO is reloaded
//! after completion, in other words the same N values are converted on a loop.
//!
//! Note: Continuous mode is not yet implemented in this HAL
//!
//! ## Comparison Mode
//!
//! Note: Comparison mode is not yet implemented in this HAL
//!
//! Comparison mode is a hardware feature of the Adc Peripheral. If set, the
//! conversion result is compared to the comparison value. If the result
//! is greater than or less than (depending on configuration) the comparison
//! value the result is moved into the result register. Otherwise, the result
//! is discarded \[Note: Unsure if the conversion is restarted in OneShot
//! mode\].
//!
//! A common use case for comparison mode is to enter a low power state with
//! the Adc configured to use the asynchronous clock source and to generate an
//! interrupt on completion. When the input channel crosses the comparison
//! threshold the interrupt is triggered, waking the MCU.
//!
//! ## Clocking
//!
//! The ADC requires a clock signal (ADCK), which is generated from the bus
//! clock, the bus clock divided by 2, the output of the OSC peripheral
//! (OSC_OUT), or an internal asynchronous clock, which, when selected,
//! operates in wait and stop modes. With any of these clock sources a
//! multi-value divider is provided to further divide the incoming clock by 1
//! (i.e. 1:1), 2, 4, or 8.
//!
//! The clock frequency must fall within 400kHz to 8MHz (4MHz in low power
//! mode), This is the same for all KEA MCUs. Ideally, the HAL will only
//! present valid options, but that is not yet implemented (pending clocks
//! improvements to output frequencies). For now you are trusted to input the
//! correct frequency.
//!
//! *Note:* When using the FIFO mode with FIFO scan mode disabled, the bus
//! clock must be faster than half the ADC clock (ADCK). Bus clock >= ADCK / 2.
//!
//! ## Pin Control
//!
//! This functionality is implemented in the GPIO module. See [Analog]
//! for details.
//!
//! ## Conversion Width
//!
//! The ADC can be run in 8, 10, or 12 bit modes. These modes are enumerated in
//! [AdcResolution].
//!
//! ## Hardware Trigger
//!
//! The ADC conversions can be started by a hardware trigger. This is not
//! implemented in all KEA chips, so implementation here will be Delayed. Use
//! the PAC. Enable is ADC_SC2\[ADTRG\] = 1, and trigger is the ADHWT source.
//!
//! ## Usage
//!
//! ### AdcConfig struct
//!
//! [AdcConfig] offers public fields to allow for creation in-place. The
//! [AdcConfig::calculate_divisor] method allows the user to specify the
//! desired Adc Clock frequency (given the clock source frequency). The clock
//! divider which gets the closest to that frequency is chosen.
//!
//! The AdcConfig structure also implements the [Default] trait.
//!
//! ```rust
//! let config: AdcConfig = Default::default();
//!
//! config.calculate_divisor(20_u32.MHz(), 2_u32.MHz());
//! assert!(matches!(config.clock_divisor, ClockDivisor::_8));
//! ```
use crate::hal::adc::{Channel, OneShot};
use crate::{pac::ADC, HALExt};
use core::{convert::Infallible, marker::PhantomData};
use embedded_time::rate::*;
/// Error Enumeration for this module
#[derive(Debug)]
pub enum Error {
/// The Channel has already been moved
Moved,
}
/// Analog type state for a GPIO pin.
///
/// This mode "gives" the pin to the ADC hardware peripheral.
/// The ADC Peripheral can take the GPIO pins in any state. The Peripheral will
/// reconfigure the pin to turn off any output drivers, disable input buffers
/// (reading the pin after configuring as analog will return a zero), and
/// disable the pullup. Electrically, an Analog pin that is not currently under
/// conversion is effectively HighImpedence.
///
/// Once a pin is released from the ADC, it will return to its previous state.
/// The previous state includes output enabled, input enabled, pullup enabled,
/// and level (for outputs). Note to accomplish this the pin implements the
/// outof_analog method, which is semantically different from the other type
/// states.
///
/// For example, [crate::gpio::gpioa::PTA0] is configured to be a Output that is set high is
/// converted into the analog mode with the [crate::gpio::gpioa::PTA0::into_analog] method.
/// Once measurements from that pin are completed it will be returned to an
/// Output that is set high by calling the [Analog::outof_analog] method.
///
/// ```rust
/// let pta0 = gpioa.pta0.into_push_pull_output();
/// pta0.set_high();
/// let mut pta0 = pta0.into_analog(); // pta0 is hi-Z
/// let value = adc.read(&mut pta0).unwrap_or(0);
/// let pta0 = pta0.outof_analog(); // pta0 is push-pull output, set high.
/// ```
///
/// Note: This is a hardware feature that requires effectively no clock cycles
/// to complete. "Manually" reconfiguring the pins to HighImpedence before
/// calling into_analog() is discouraged, but it would not hurt anything.
pub struct Analog<Pin> {
pin: Pin,
}
/// Interface for ADC Peripheral.
///
/// Returned by calling [HALExt::split] on the pac [ADC] structure. Holds state
/// of peripheral.
pub struct Adc<State> {
peripheral: ADC,
_state: PhantomData<State>,
/// Contains the On-Chip ADC Channels, like the MCU's temperature sensor.
pub onchip_channels: OnChipChannels,
}
impl HALExt for ADC {
type T = Adc<Disabled>;
fn split(self) -> Adc<Disabled> {
Adc {
peripheral: self,
_state: PhantomData,
onchip_channels: OnChipChannels {
vss: Some(Analog {
pin: Vss::<Input> { _mode: PhantomData },
}),
temp_sense: Some(Analog {
pin: TempSense::<Input> { _mode: PhantomData },
}),
bandgap: Some(Analog {
pin: Bandgap::<Input> { _mode: PhantomData },
}),
vref_h: Some(Analog {
pin: VrefH::<Input> { _mode: PhantomData },
}),
vref_l: Some(Analog {
pin: VrefL::<Input> { _mode: PhantomData },
}),
},
}
}
}
/// Configuration struct for Adc peripheral.
pub struct AdcConfig {
/// Determines the clock source for the ADC peripheral
///
/// Default is [AdcClocks::Bus]
pub clock_source: AdcClocks,
/// Divides the clock source to get the ADC clock into it's usable range of
/// 400kHz - 8MHz (4MHz in low power mode).
///
/// Default is [ClockDivisor::_1] (no divison)
pub clock_divisor: ClockDivisor,
/// Set the resolution of ADC conversion
///
/// Default is [AdcResolution::_8bit]
pub resolution: AdcResolution,
/// Set ADC sample time.
///
/// Default is [AdcSampleTime::Short]
pub sample_time: AdcSampleTime,
/// Set low power mode
///
/// Default is false.
pub low_power: bool,
}
impl AdcConfig {
    /// Calculate the ADC clock divisor
    ///
    /// Uses the current clock source and clock frequency to determine
    /// the best power-of-two divisor to use in order to have minimal error
    /// between the ADC clock rate and the desired ADC clock rate.
    ///
    /// Note: This relies on trustworthy values for source_freq and valid
    /// values for req_adc_freq. In the future this should know or
    /// determine what the current clock frequency is instead of relying
    /// on the user to provide it.
    ///
    /// # Panics
    ///
    /// Panics if the resulting ADC clock falls outside the valid range of
    /// 400kHz..=8MHz (4MHz in low power mode).
    pub fn calculate_divisor(&mut self, source_freq: Hertz, req_adc_freq: Hertz) {
        // Ideal (integer) ratio between source and requested clock. Widened
        // to i32 so the error terms below cannot underflow: the previous u8
        // arithmetic panicked in debug builds (wrapped in release) whenever a
        // candidate divisor overshot the ideal ratio, and also truncated
        // ratios larger than 255.
        let denom = (source_freq.integer() / req_adc_freq.integer()) as i32;
        let mut output: i32 = 1;
        let mut err_old = (denom - output).abs();
        // Only the Bus clock source supports the /16 divisor.
        let max_divisor = match self.clock_source {
            AdcClocks::Bus => 16,
            _ => 8,
        };
        // Keep doubling the divisor while doing so reduces the error.
        while output < max_divisor {
            let err = (denom - (output << 1)).abs();
            if err <= err_old {
                output <<= 1;
                err_old = err;
            } else {
                break;
            }
        }
        // I am of the mind that this assert is okay, at least until the input
        // clock can be known at compile time.
        let ad_clock = source_freq.integer() / output as u32;
        assert!(400_000 <= ad_clock);
        assert!(
            ad_clock
                <= match self.low_power {
                    false => 8_000_000,
                    true => 4_000_000,
                }
        );
        self.clock_divisor = match output {
            1 => ClockDivisor::_1,
            2 => ClockDivisor::_2,
            4 => ClockDivisor::_4,
            8 => ClockDivisor::_8,
            _ => ClockDivisor::_16,
        }
    }
    /// Set the divisor directly. panics if divisor isn't supported by the
    /// clock source.
    ///
    /// TODO: Refactor to remove assert. Add Clock Source as a type state
    pub fn set_divisor(&mut self, divisor: ClockDivisor) {
        // divisor can't be 16 unless using the Bus clock
        assert!(
            !(!matches!(self.clock_source, AdcClocks::Bus) && matches!(divisor, ClockDivisor::_16))
        );
        self.clock_divisor = divisor;
    }
    /// Sets the clock source, panics if divisor isn't supported
    ///
    /// TODO: Refactor to remove assert. Add Clock Source as a type state
    pub fn set_clock_source(&mut self, clock: AdcClocks) {
        // Panic if setting the clock to anything other than Bus while the
        // divisor is 16 (only the Bus source supports /16). The original
        // assert was missing the outer negation, so it panicked on every
        // valid combination and accepted the invalid one; this mirrors the
        // (correct) check in `set_divisor`.
        assert!(
            !(!matches!(clock, AdcClocks::Bus) && matches!(self.clock_divisor, ClockDivisor::_16))
        );
        self.clock_source = clock;
    }
}
impl Default for AdcConfig {
fn default() -> AdcConfig {
AdcConfig {
clock_source: AdcClocks::Bus,
clock_divisor: ClockDivisor::_1,
resolution: AdcResolution::_12bit,
sample_time: AdcSampleTime::Short,
low_power: false,
}
}
}
/// Clock types available to the Adc peripheral
///
/// Dividers will be chosen appropriately to suit requested clock rate.
pub enum AdcClocks {
/// Use the incoming Bus Clock
Bus,
/// jkl
External,
/// Available in Wait AND Stop Mode
Async,
}
/// This enum represents the availabe ADC resolutions
///
/// Regardless of resolution chosen, results are always right justified
#[repr(u8)]
pub enum AdcResolution {
/// 8 bit AD conversion mode
_8bit = 0,
/// 10 bit AD conversion mode
_10bit = 1,
/// 12 bit AD conversion mode
_12bit = 2,
}
/// Adc sample time
pub enum AdcSampleTime {
/// Sample for 3.5 ADC clock (ADCK) cycles.
Short = 0,
/// Sample for 23.5 ADC clock (ADCK) cycles.
///
/// Required for high impedence (>2k @ADCK > 4MHz, >5k @ ADCK < 4MHz)
/// inputs.
Long = 1,
}
/// Adc Clock Divisors
///
/// Note 1/16 divisor is only usable for the Bus clock
pub enum ClockDivisor {
/// Source / 1, No divison
_1 = 0,
/// Source / 2
_2 = 1,
/// Source / 4
_4 = 2,
/// Source / 8
_8 = 3,
/// Source / 16
_16 = 4,
}
/// Enabled state
pub struct Enabled;
/// Disabled state
pub struct Disabled;
impl Adc<Enabled> {
/// Poll to determine if ADC conversion is complete.
///
/// Note: This flag is cleared when the sampling mode is changed,
/// interrupts are enabled, [Adc::set_channel] is called, and when [Adc::result] is
/// called (including [Adc::try_result])
pub fn is_done(&self) -> bool {
self.peripheral.sc1.read().coco().bit()
}
/// Poll to determine if ADC conversion is underway
pub fn is_converting(&self) -> bool {
self.peripheral.sc2.read().adact().bit()
}
/// Grab the last ADC conversion result.
pub fn result(&self) -> u16 {
self.peripheral.r.read().adr().bits()
}
/// Poll for conversion completion, if done return the result.
pub fn try_result(&self) -> Option<u16> {
if self.is_done() {
Some(self.result())
} else {
None
}
}
/// Set ADC target channel.
///
/// In Single conversion mode (OneShot), setting the channel begins the conversion. In FIFO mode
/// the channel is added to the FIFO buffer.
///
/// Note: If the channel is changed while a conversion is in progress the
/// current conversion will be cancelled. If in FIFO mode, conversion will
/// resume once the FIFO channels are refilled.
pub fn set_channel<T: Channel<Adc<Enabled>, ID = u8>>(&self, _pin: &T) {
self.peripheral
.sc1
.modify(|_, w| unsafe { w.adch().bits(T::channel()) });
}
/// Set the ADC's configuration
pub fn configure(self, config: AdcConfig) -> Adc<Enabled> {
self.peripheral.sc3.modify(|_, w| {
use pac::adc::sc3::{ADICLK_A, ADIV_A, ADLSMP_A, MODE_A};
w.adiclk()
.variant(match config.clock_source {
AdcClocks::Bus =>
// If divisor is 16, use the Bus / 2 clock source, else use
// the 1:1 Bus clock source
{
match config.clock_divisor {
ClockDivisor::_16 => ADICLK_A::_01,
_ => ADICLK_A::_00,
}
}
AdcClocks::External => ADICLK_A::_10,
AdcClocks::Async => ADICLK_A::_11,
})
.mode()
.variant(match config.resolution {
AdcResolution::_8bit => MODE_A::_00,
AdcResolution::_10bit => MODE_A::_01,
AdcResolution::_12bit => MODE_A::_10,
})
.adlsmp()
.variant(match config.sample_time {
AdcSampleTime::Short => ADLSMP_A::_0,
AdcSampleTime::Long => ADLSMP_A::_1,
})
.adiv()
.variant(match config.clock_divisor {
ClockDivisor::_1 => ADIV_A::_00,
ClockDivisor::_2 => ADIV_A::_01,
ClockDivisor::_4 => ADIV_A::_10,
_ => ADIV_A::_11,
})
.adlpc()
.bit(config.low_power)
});
// It looks like SCGC has to be set before touching the peripheral
// at all, else hardfault. Go back later to confirm that if using external clock
// scgc can be cleared.
// w.adc().variant(match config.clock_source {
// AdcClocks::Bus => ADC_A::_1,
// _ => ADC_A::_0,
// })
Adc {
peripheral: self.peripheral,
_state: PhantomData,
onchip_channels: self.onchip_channels,
}
}
}
impl Adc<Disabled> {
/// Connects the bus clock to the adc via the SIM peripheral, allowing
/// read and write access to ADC registers.
///
/// Any attempt to access ADC registers while disabled results in a
/// HardFault, generated by hardware.
///
/// This also enables the bandgap voltage reference.
pub fn enable(self) -> Adc<Enabled> {
cortex_m::interrupt::free(|_| {
unsafe { &(*pac::SIM::ptr()) }.scgc.modify(|_, w| {
use pac::sim::scgc::ADC_A;
w.adc().variant(ADC_A::_1)
});
// Don't start a conversion (set channel to DummyDisable)
self.peripheral.sc1.modify(|_, w| w.adch()._11111());
// Bandgap. Grab directly, Currently the bandgap isn't implemented
// in [system::PMC]. We will eventually have to pass in the pmc
// peripheral handle as a variable.
unsafe { &(*pac::PMC::ptr()) }
.spmsc1
.modify(|_, w| w.bgbe()._1());
});
Adc {
peripheral: self.peripheral,
_state: PhantomData,
onchip_channels: self.onchip_channels,
}
}
/// Set the ADC's configuration
///
/// This is a sugar method for calling [Adc<Disabled>::enable] followed by
/// [Adc<Enabled>::configure]
pub fn configure(self, config: AdcConfig) -> Adc<Enabled> {
self.enable().configure(config)
}
}
impl<Mode> Adc<Mode> {
/// Not Implemented
pub fn into_interrupt(self) -> Adc<Mode> {
unimplemented!("Interrupt is not yet implemented");
// Adc::<Mode> {
// peripheral: self.peripheral,
// _state: PhantomData,
// onchip_channels: self.onchip_channels,
// }
}
/// Not Implemented
pub fn into_fifo(self, _depth: u8) -> Adc<Mode> {
// self.peripheral
// .sc4
// .modify(|_r, w| w.afdep().bits(depth & 0x7));
// Adc::<Mode> {
// peripheral: self.peripheral,
// _state: PhantomData,
// onchip_channels: self.onchip_channels,
// }
unimplemented!("FIFO is not yet implemented");
}
/// Not Implemented
pub fn into_continuous(self) -> Adc<Mode> {
unimplemented!("Continuous Conversion mode not yet implemented");
}
}
impl OnChipChannels {
/// Request an instance of an on-chip [Vss] channel.
pub fn vss(&mut self) -> Result<Analog<Vss<Input>>, Error> {
self.vss.take().ok_or(Error::Moved)
}
/// Return the instance of [Vss]
pub fn return_vss(&mut self, inst: Analog<Vss<Input>>) {
self.vss.replace(inst);
}
/// Try to grab an instance of the onchip [TempSense] channel.
pub fn tempsense(&mut self) -> Result<Analog<TempSense<Input>>, Error> {
self.temp_sense.take().ok_or(Error::Moved)
}
/// Return the instance of [TempSense]
pub fn return_tempsense(&mut self, inst: Analog<TempSense<Input>>) {
self.temp_sense.replace(inst);
}
/// Try to grab an instance of the onchip [Bandgap] channel.
///
/// The bandgap reference is a fixed 1.16V (nom, Factory trimmed to +/-
/// 0.02V at Vdd=5.0 at 125C) signal that is available to the ADC Module.
/// It can be used as a voltage reference for the ACMP and as an [Analog]
/// channel that can be used to (roughly) check the VDD voltage
pub fn bandgap(&mut self) -> Result<Analog<Bandgap<Input>>, Error> {
self.bandgap.take().ok_or(Error::Moved)
}
/// Return the instance of [Bandgap]
pub fn return_bandgap(&mut self, inst: Analog<Bandgap<Input>>) |
/// Try to grab an instance of the onchip Voltage Reference High ([VrefH]) channel.
pub fn vref_h(&mut self) -> Result<Analog<VrefH<Input>>, Error> {
self.vref_h.take().ok_or(Error::Moved)
}
/// Return the instance of [VrefH]
pub fn return_vref_h(&mut self, inst: Analog<VrefH<Input>>) {
self.vref_h.replace(inst);
}
/// Try to grab an instance of the onchip Voltage Reference Low ([VrefL]) channel.
pub fn vref_l(&mut self) -> Result<Analog<VrefL<Input>>, Error> {
self.vref_l.take().ok_or(Error::Moved)
}
/// Return the instance of [VrefL]
pub fn return_vref_l(&mut self, inst: Analog<VrefL<Input>>) {
self.vref_l.replace(inst);
}
/// Grab a [DummyDisable] instance. Multiple Instances possible.
pub fn dummy_disable(&self) -> Analog<DummyDisable<Input>> {
Analog {
pin: DummyDisable::<Input> { _mode: PhantomData },
}
}
}
/// Holds On-Chip ADC Channel inputs and provides an interface to grab and return them.
// These have to have the Input dummy type to allow them to have the Channel
// trait.
pub struct OnChipChannels {
vss: Option<Analog<Vss<Input>>>,
temp_sense: Option<Analog<TempSense<Input>>>,
bandgap: Option<Analog<Bandgap<Input>>>,
vref_h: Option<Analog<VrefH<Input>>>,
vref_l: Option<Analog<VrefL<Input>>>,
}
/// Dummy type state for on-chip ADC input channels
pub struct Input;
/// Adc Input Channel, measures ground (should be 0?)
pub struct Vss<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, measures internal temperature sensor
pub struct TempSense<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Bandgap internal voltage reference
pub struct Bandgap<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Voltage Reference, High
pub struct VrefH<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Voltage Reference, Low
pub struct VrefL<Input> {
_mode: PhantomData<Input>,
}
/// Dummy Channel that temporarily disables the Adc Module.
pub struct DummyDisable<Input> {
_mode: PhantomData<Input>,
}
macro_rules! adc_input_channels {
( $($Chan:expr => $Pin:ident),+ $(,)*) => {
$(
impl<OldMode> Channel<Adc<Enabled>> for Analog<$Pin<OldMode>> {
type ID = u8;
fn channel() -> u8 { $Chan }
}
)+
};
}
use crate::gpio::{gpioa::*, gpiob::*};
adc_input_channels! (
0_u8 => PTA0,
1_u8 => PTA1,
2_u8 => PTA6,
3_u8 => PTA7,
4_u8 => PTB0,
5_u8 => PTB1,
6_u8 => PTB2,
7_u8 => PTB3,
8_u8 => PTC0,
9_u8 => PTC1,
10_u8 => PTC2,
11_u8 => PTC3,
12_u8 => PTF4,
13_u8 => PTF5,
14_u8 => PTF6,
15_u8 => PTF7,
16_u8 => Vss,
22_u8 => TempSense,
23_u8 => Bandgap,
24_u8 => VrefH,
25_u8 => VrefL,
0x1F_u8 => DummyDisable,
);
macro_rules! impl_analog_pin {
( $($Chan:expr => $Pin:ident),+ $(,)*) => {
$(
impl<OldMode> $Pin<OldMode> {
/// Convert Pin into the [Analog] state for use by the ADC.
///
/// This implementation provides the GPIO interface a method to
/// give an eligible pin to the ADC peripheral for conversion
/// into an Analog pin. This method is only implemented in
/// eligible pins. The ADC peripheral disables the GPIO and
/// PORT control over the pin and connects it to the ADC mux
/// (controlled by [Adc::set_channel].
///
/// Note: The [Analog::outof_analog] method must be used to
/// return the pin to a normal Input/Output typestate. The pin
/// will be returned in the same typestate as it was received.
pub fn into_analog(self) -> Analog<$Pin<OldMode>> {
unsafe {
(*ADC::ptr())
.apctl1
.modify(|r, w| w.adpc().bits(r.adpc().bits() | (1 << $Chan)));
}
Analog { pin: self }
}
}
impl<OldMode> Analog<$Pin<OldMode>> {
/// Return Analog state Pin to normal GPIO-state interface.
///
/// The Pin will be in the same state that it was when it
/// entered the Analog type state.
pub fn outof_analog(self) -> $Pin<OldMode> {
let adc = unsafe { &(*ADC::ptr()) };
adc.apctl1
.modify(|r, w| unsafe { w.adpc().bits(r.adpc().bits() & !(1 << $Chan)) });
self.pin
}
}
)+
};
}
impl_analog_pin!(
0_u8 => PTA0,
1_u8 => PTA1,
2_u8 => PTA6,
3_u8 => PTA7,
4_u8 => PTB0,
5_u8 => PTB1,
6_u8 => PTB2,
7_u8 => PTB3,
8_u8 => PTC0,
9_u8 => PTC1,
10_u8 => PTC2,
11_u8 => PTC3,
12_u8 => PTF4,
13_u8 => PTF5,
14_u8 => PTF6,
15_u8 => PTF7,
);
impl<Pin> OneShot<Adc<Enabled>, u16, Pin> for Adc<Enabled>
where
Pin: Channel<Adc<Enabled>, ID = u8>,
{
type Error = Infallible;
fn read(&mut self, pin: &mut Pin) -> nb::Result<u16, Self::Error> {
self.set_channel(pin);
while !self.is_done() {}
let ret_val = Ok(self.result());
let disable = self.onchip_channels.dummy_disable();
self.set_channel(&disable);
ret_val
}
}
| {
self.bandgap.replace(inst);
} | identifier_body |
adc.rs | //! The ADC Interface
//!
//! The ADC is disabled at startup and must be enabled (by calling
//! [Adc<Disabled>::enable]) before any of its registers can be accessed
//! (read or write). Attempts to access these registers will trigger a hardware
//! generated HardFault, which by default resets the microcontroller.
//!
//! The ADC can be polled for conversion completion with [Adc::is_done].
//! Completion will trigger an ADC Interrupt if enabled. See
//! [Adc::into_interrupt]
//!
//! ## Input Modes
//!
//! The Adc peripheral can operate in either single input or FIFO modes. Single
//! input mode is the mode most commonly thought of when using an ADC. A
//! multiplexer (via Adc::set_channel) is used to connect a single channel to
//! the ADC, and when the conversion is complete the hardware makes the results
//! available in the results register. The software must call
//! [Adc::set_channel] again to either select a new channel or to restart the
//! conversion on the same channel.
//!
//! The FIFO mode sets up a hardware buffer of selectable depth (2-8 channels).
//! Once the buffer is filled the Adc peripheral shoves the buffer contents
//! into the multiplexer channel by channel. Likewise, as each conversion is
//! completed the results are buffered into the result register in the same
//! order as the channel select buffer.
//!
//! Note: FIFO mode is not yet implemented in this HAL
//!
//! ## Conversion Modes
//!
//! The Adc peripheral offers 2 conversion modes, OneShot and Continuous. In
//! OneShot mode, the conversion is started when the channel is selected (or
//! when the channel select buffer is filled in FIFO mode). After completion no
//! new conversion is started until the channel is set again, even if the same
//! channel is used.
//!
//! In Continuous mode a new conversion is started immediately
//! after the previous one is completed. Changing the channel interrupts the
//! conversion and immediately begins conversion on the new channel (unless the
//! new channel is [DummyDisable], then the conversion is allowed to complete,
//! but no new conversion is started). In FIFO mode the input FIFO is reloaded
//! after completion, in other words the same N values are converted on a loop.
//!
//! Note: Continuous mode is not yet implemented in this HAL
//!
//! ## Comparison Mode
//!
//! Note: Comparison mode is not yet implemented in this HAL
//!
//! Comparison mode is a hardware feature of the Adc Peripheral. If set, the
//! conversion result is compared to the comparison value. If the result
//! is greater than or less than (depending on configuration) the comparison
//! value the result is moved into the result register. Otherwise, the result
//! is discarded \[Note: Unsure if the conversion is restarted in OneShot
//! mode\].
//!
//! A common use case for comparison mode is to enter a low power state with
//! the Adc configured to use the asynchronous clock source and to generate an
//! interrupt on completion. When the input channel crosses the comparison
//! threshold the interrupt is triggered, waking the MCU.
//!
//! ## Clocking
//!
//! The ADC requires a clock signal (ADCK), which is generated from the bus
//! clock, the bus clock divided by 2, the output of the OSC peripheral
//! (OSC_OUT), or an internal asynchronous clock, which, when selected,
//! operates in wait and stop modes. With any of these clock sources a
//! multi-value divider is provided to further divide the incoming clock by 1
//! (i.e. 1:1), 2, 4, or 8.
//!
//! The clock frequency must fall within 400kHz to 8MHz (4MHz in low power
//! mode), This is the same for all KEA MCUs. Ideally, the HAL will only
//! present valid options, but that is not yet implemented (pending clocks
//! improvements to output frequencies). For now you are trusted to input the
//! correct frequency.
//!
//! *Note:* When using the FIFO mode with FIFO scan mode disabled, the bus
//! clock must be faster than half the ADC clock (ADCK). Bus clock >= ADCK / 2.
//!
//! ## Pin Control
//!
//! This functionality is implemented in the GPIO module. See [Analog]
//! for details.
//!
//! ## Conversion Width
//!
//! The ADC can be run in 8, 10, or 12 bit modes. These modes are enumerated in
//! [AdcResolution].
//!
//! ## Hardware Trigger
//!
//! The ADC conversions can be started by a hardware trigger. This is not
//! implemented in all KEA chips, so implementation here will be Delayed. Use
//! the PAC. Enable is ADC_SC2\[ADTRG\] = 1, and trigger is the ADHWT source.
//!
//! ## Usage
//!
//! ### AdcConfig struct
//!
//! [AdcConfig] offers public fields to allow for creation in-place. The
//! [AdcConfig::calculate_divisor] method allows the user to specify the
//! desired Adc Clock frequency (given the clock source frequency). The clock
//! divider which gets the closest to that frequency is chosen.
//!
//! The AdcConfig structure also implements the [Default] trait.
//!
//! ```rust
//! let config: AdcConfig = Default::default();
//!
//! config.calculate_divisor(20_u32.MHz(), 2_u32.MHz());
//! assert!(matches!(config.clock_divisor, ClockDivisor::_8));
//! ```
use crate::hal::adc::{Channel, OneShot};
use crate::{pac::ADC, HALExt};
use core::{convert::Infallible, marker::PhantomData};
use embedded_time::rate::*;
/// Error Enumeration for this module
#[derive(Debug)]
pub enum Error {
/// The Channel has already been moved
Moved,
}
/// Analog type state for a GPIO pin.
///
/// This mode "gives" the pin to the ADC hardware peripheral.
/// The ADC Peripheral can take the GPIO pins in any state. The Peripheral will
/// reconfigure the pin to turn off any output drivers, disable input buffers
/// (reading the pin after configuring as analog will return a zero), and
/// disable the pullup. Electrically, an Analog pin that is not currently under
/// conversion is effectively HighImpedence.
///
/// Once a pin is released from the ADC, it will return to its previous state.
/// The previous state includes output enabled, input enabled, pullup enabled,
/// and level (for outputs). Note to accomplish this the pin implements the
/// outof_analog method, which is semantically different from the other type
/// states.
///
/// For example, [crate::gpio::gpioa::PTA0] is configured to be a Output that is set high is
/// converted into the analog mode with the [crate::gpio::gpioa::PTA0::into_analog] method.
/// Once measurements from that pin are completed it will be returned to an
/// Output that is set high by calling the [Analog::outof_analog] method.
///
/// ```rust
/// let pta0 = gpioa.pta0.into_push_pull_output();
/// pta0.set_high();
/// let mut pta0 = pta0.into_analog(); // pta0 is hi-Z
/// let value = adc.read(&mut pta0).unwrap_or(0);
/// let pta0 = pta0.outof_analog(); // pta0 is push-pull output, set high.
/// ```
///
/// Note: This is a hardware feature that requires effectively no clock cycles
/// to complete. "Manually" reconfiguring the pins to HighImpedence before
/// calling into_analog() is discouraged, but it would not hurt anything.
pub struct Analog<Pin> {
pin: Pin,
}
/// Interface for ADC Peripheral.
///
/// Returned by calling [HALExt::split] on the pac [ADC] structure. Holds state
/// of peripheral.
pub struct Adc<State> {
peripheral: ADC,
_state: PhantomData<State>,
/// Contains the On-Chip ADC Channels, like the MCU's temperature sensor.
pub onchip_channels: OnChipChannels,
}
impl HALExt for ADC {
type T = Adc<Disabled>;
fn split(self) -> Adc<Disabled> {
Adc {
peripheral: self,
_state: PhantomData,
onchip_channels: OnChipChannels {
vss: Some(Analog {
pin: Vss::<Input> { _mode: PhantomData },
}),
temp_sense: Some(Analog {
pin: TempSense::<Input> { _mode: PhantomData },
}),
bandgap: Some(Analog {
pin: Bandgap::<Input> { _mode: PhantomData },
}),
vref_h: Some(Analog {
pin: VrefH::<Input> { _mode: PhantomData },
}),
vref_l: Some(Analog {
pin: VrefL::<Input> { _mode: PhantomData },
}),
},
}
}
}
/// Configuration struct for Adc peripheral.
pub struct AdcConfig {
/// Determines the clock source for the ADC peripheral
///
/// Default is [AdcClocks::Bus]
pub clock_source: AdcClocks,
/// Divides the clock source to get the ADC clock into it's usable range of
/// 400kHz - 8MHz (4MHz in low power mode).
///
/// Default is [ClockDivisor::_1] (no divison)
pub clock_divisor: ClockDivisor,
/// Set the resolution of ADC conversion
///
/// Default is [AdcResolution::_8bit]
pub resolution: AdcResolution,
/// Set ADC sample time.
///
/// Default is [AdcSampleTime::Short]
pub sample_time: AdcSampleTime,
/// Set low power mode
///
/// Default is false.
pub low_power: bool,
}
impl AdcConfig {
/// Calculate the ADC clock divisor
///
/// Uses the current clock source and clock frequency to determine
/// the best divisor to use in order to have minimal error between
/// the ADC clock rate and the desired ADC clock rate.
///
/// Note: This relies on trustworthy values for source_freq and valid
/// values for req_adc_freq. In the future this should know or
/// determine what the current clock frequency is instead of relying
/// on the user to provide it.
pub fn calculate_divisor(&mut self, source_freq: Hertz, req_adc_freq: Hertz) {
let denom: u8 = (source_freq.integer() / req_adc_freq.integer()) as u8;
let mut output: u8 = 1;
let mut err: i8 = (denom - output) as i8;
let mut err_old: i8 = err;
let max_divisor = match self.clock_source {
AdcClocks::Bus => 16,
_ => 8,
};
while output < max_divisor {
err = (denom - (output << 1)) as i8;
if err.is_negative() {
err = err.abs();
}
if err <= err_old {
output <<= 1;
err_old = err;
} else {
break;
}
}
// I am of the mind that this assert is okay, at least until the input
// clock can be known at compile time.
let ad_clock = source_freq.integer() / output as u32;
assert!(400_000 <= ad_clock);
assert!(
ad_clock
<= match self.low_power {
false => 8_000_000,
true => 4_000_000,
}
);
self.clock_divisor = match output {
1 => ClockDivisor::_1,
2 => ClockDivisor::_2,
4 => ClockDivisor::_4,
8 => ClockDivisor::_8,
_ => ClockDivisor::_16,
}
}
/// Set the divisor directly. panics if divisor isn't supported by the
/// clock source.
///
/// TODO: Refactor to remove assert. Add Clock Source as a type state
pub fn set_divisor(&mut self, divisor: ClockDivisor) {
// divisor can't be 16 unless using the Bus clock
assert!(
!(!matches!(self.clock_source, AdcClocks::Bus) && matches!(divisor, ClockDivisor::_16))
);
self.clock_divisor = divisor;
}
/// Sets the clock source, panics if divisor isn't supported
///
/// TODO: Refactor to remove assert. Add Clock Source as a type state
pub fn set_clock_source(&mut self, clock: AdcClocks) {
// Panic if setting the clock to anything other than Bus if the divisor
// is set to 16
assert!(
!matches!(clock, AdcClocks::Bus) && matches!(self.clock_divisor, ClockDivisor::_16)
);
self.clock_source = clock;
}
}
impl Default for AdcConfig {
fn default() -> AdcConfig {
AdcConfig {
clock_source: AdcClocks::Bus,
clock_divisor: ClockDivisor::_1,
resolution: AdcResolution::_12bit,
sample_time: AdcSampleTime::Short,
low_power: false,
}
}
}
/// Clock types available to the Adc peripheral
///
/// Dividers will be chosen appropriately to suit requested clock rate.
pub enum AdcClocks {
/// Use the incoming Bus Clock
Bus,
/// jkl
External,
/// Available in Wait AND Stop Mode
Async,
}
/// This enum represents the availabe ADC resolutions
///
/// Regardless of resolution chosen, results are always right justified
#[repr(u8)]
pub enum AdcResolution {
/// 8 bit AD conversion mode
_8bit = 0,
/// 10 bit AD conversion mode
_10bit = 1,
/// 12 bit AD conversion mode
_12bit = 2,
}
/// Adc sample time
pub enum AdcSampleTime {
/// Sample for 3.5 ADC clock (ADCK) cycles.
Short = 0,
/// Sample for 23.5 ADC clock (ADCK) cycles.
///
/// Required for high impedence (>2k @ADCK > 4MHz, >5k @ ADCK < 4MHz)
/// inputs.
Long = 1,
}
/// Adc Clock Divisors
///
/// Note 1/16 divisor is only usable for the Bus clock
pub enum ClockDivisor {
/// Source / 1, No divison
_1 = 0,
/// Source / 2
_2 = 1,
/// Source / 4
_4 = 2,
/// Source / 8
_8 = 3,
/// Source / 16
_16 = 4,
}
/// Enabled state
pub struct Enabled;
/// Disabled state
pub struct Disabled;
impl Adc<Enabled> {
/// Poll to determine if ADC conversion is complete.
///
/// Note: This flag is cleared when the sampling mode is changed,
/// interrupts are enabled, [Adc::set_channel] is called, and when [Adc::result] is
/// called (including [Adc::try_result])
pub fn is_done(&self) -> bool {
self.peripheral.sc1.read().coco().bit()
}
/// Poll to determine if ADC conversion is underway
pub fn is_converting(&self) -> bool {
self.peripheral.sc2.read().adact().bit()
}
/// Grab the last ADC conversion result.
pub fn result(&self) -> u16 {
self.peripheral.r.read().adr().bits()
}
/// Poll for conversion completion, if done return the result.
pub fn try_result(&self) -> Option<u16> {
if self.is_done() {
Some(self.result())
} else |
}
/// Set ADC target channel.
///
/// In Single conversion mode (OneShot), setting the channel begins the conversion. In FIFO mode
/// the channel is added to the FIFO buffer.
///
/// Note: If the channel is changed while a conversion is in progress the
/// current conversion will be cancelled. If in FIFO mode, conversion will
/// resume once the FIFO channels are refilled.
pub fn set_channel<T: Channel<Adc<Enabled>, ID = u8>>(&self, _pin: &T) {
self.peripheral
.sc1
.modify(|_, w| unsafe { w.adch().bits(T::channel()) });
}
/// Set the ADC's configuration
pub fn configure(self, config: AdcConfig) -> Adc<Enabled> {
self.peripheral.sc3.modify(|_, w| {
use pac::adc::sc3::{ADICLK_A, ADIV_A, ADLSMP_A, MODE_A};
w.adiclk()
.variant(match config.clock_source {
AdcClocks::Bus =>
// If divisor is 16, use the Bus / 2 clock source, else use
// the 1:1 Bus clock source
{
match config.clock_divisor {
ClockDivisor::_16 => ADICLK_A::_01,
_ => ADICLK_A::_00,
}
}
AdcClocks::External => ADICLK_A::_10,
AdcClocks::Async => ADICLK_A::_11,
})
.mode()
.variant(match config.resolution {
AdcResolution::_8bit => MODE_A::_00,
AdcResolution::_10bit => MODE_A::_01,
AdcResolution::_12bit => MODE_A::_10,
})
.adlsmp()
.variant(match config.sample_time {
AdcSampleTime::Short => ADLSMP_A::_0,
AdcSampleTime::Long => ADLSMP_A::_1,
})
.adiv()
.variant(match config.clock_divisor {
ClockDivisor::_1 => ADIV_A::_00,
ClockDivisor::_2 => ADIV_A::_01,
ClockDivisor::_4 => ADIV_A::_10,
_ => ADIV_A::_11,
})
.adlpc()
.bit(config.low_power)
});
// It looks like SCGC has to be set before touching the peripheral
// at all, else hardfault. Go back later to confirm that if using external clock
// scgc can be cleared.
// w.adc().variant(match config.clock_source {
// AdcClocks::Bus => ADC_A::_1,
// _ => ADC_A::_0,
// })
Adc {
peripheral: self.peripheral,
_state: PhantomData,
onchip_channels: self.onchip_channels,
}
}
}
impl Adc<Disabled> {
/// Connects the bus clock to the adc via the SIM peripheral, allowing
/// read and write access to ADC registers.
///
/// Any attempt to access ADC registers while disabled results in a
/// HardFault, generated by hardware.
///
/// This also enables the bandgap voltage reference.
pub fn enable(self) -> Adc<Enabled> {
cortex_m::interrupt::free(|_| {
unsafe { &(*pac::SIM::ptr()) }.scgc.modify(|_, w| {
use pac::sim::scgc::ADC_A;
w.adc().variant(ADC_A::_1)
});
// Don't start a conversion (set channel to DummyDisable)
self.peripheral.sc1.modify(|_, w| w.adch()._11111());
// Bandgap. Grab directly, Currently the bandgap isn't implemented
// in [system::PMC]. We will eventually have to pass in the pmc
// peripheral handle as a variable.
unsafe { &(*pac::PMC::ptr()) }
.spmsc1
.modify(|_, w| w.bgbe()._1());
});
Adc {
peripheral: self.peripheral,
_state: PhantomData,
onchip_channels: self.onchip_channels,
}
}
/// Set the ADC's configuration
///
/// This is a sugar method for calling [Adc<Disabled>::enable] followed by
/// [Adc<Enabled>::configure]
pub fn configure(self, config: AdcConfig) -> Adc<Enabled> {
self.enable().configure(config)
}
}
impl<Mode> Adc<Mode> {
/// Not Implemented
pub fn into_interrupt(self) -> Adc<Mode> {
unimplemented!("Interrupt is not yet implemented");
// Adc::<Mode> {
// peripheral: self.peripheral,
// _state: PhantomData,
// onchip_channels: self.onchip_channels,
// }
}
/// Not Implemented
pub fn into_fifo(self, _depth: u8) -> Adc<Mode> {
// self.peripheral
// .sc4
// .modify(|_r, w| w.afdep().bits(depth & 0x7));
// Adc::<Mode> {
// peripheral: self.peripheral,
// _state: PhantomData,
// onchip_channels: self.onchip_channels,
// }
unimplemented!("FIFO is not yet implemented");
}
/// Not Implemented
pub fn into_continuous(self) -> Adc<Mode> {
unimplemented!("Continuous Conversion mode not yet implemented");
}
}
impl OnChipChannels {
/// Request an instance of an on-chip [Vss] channel.
pub fn vss(&mut self) -> Result<Analog<Vss<Input>>, Error> {
self.vss.take().ok_or(Error::Moved)
}
/// Return the instance of [Vss]
pub fn return_vss(&mut self, inst: Analog<Vss<Input>>) {
self.vss.replace(inst);
}
/// Try to grab an instance of the onchip [TempSense] channel.
pub fn tempsense(&mut self) -> Result<Analog<TempSense<Input>>, Error> {
self.temp_sense.take().ok_or(Error::Moved)
}
/// Return the instance of [TempSense]
pub fn return_tempsense(&mut self, inst: Analog<TempSense<Input>>) {
self.temp_sense.replace(inst);
}
/// Try to grab an instance of the onchip [Bandgap] channel.
///
/// The bandgap reference is a fixed 1.16V (nom, Factory trimmed to +/-
/// 0.02V at Vdd=5.0 at 125C) signal that is available to the ADC Module.
/// It can be used as a voltage reference for the ACMP and as an [Analog]
/// channel that can be used to (roughly) check the VDD voltage
pub fn bandgap(&mut self) -> Result<Analog<Bandgap<Input>>, Error> {
self.bandgap.take().ok_or(Error::Moved)
}
/// Return the instance of [Bandgap]
pub fn return_bandgap(&mut self, inst: Analog<Bandgap<Input>>) {
self.bandgap.replace(inst);
}
/// Try to grab an instance of the onchip Voltage Reference High ([VrefH]) channel.
pub fn vref_h(&mut self) -> Result<Analog<VrefH<Input>>, Error> {
self.vref_h.take().ok_or(Error::Moved)
}
/// Return the instance of [VrefH]
pub fn return_vref_h(&mut self, inst: Analog<VrefH<Input>>) {
self.vref_h.replace(inst);
}
/// Try to grab an instance of the onchip Voltage Reference Low ([VrefL]) channel.
pub fn vref_l(&mut self) -> Result<Analog<VrefL<Input>>, Error> {
self.vref_l.take().ok_or(Error::Moved)
}
/// Return the instance of [VrefL]
pub fn return_vref_l(&mut self, inst: Analog<VrefL<Input>>) {
self.vref_l.replace(inst);
}
/// Grab a [DummyDisable] instance. Multiple Instances possible.
pub fn dummy_disable(&self) -> Analog<DummyDisable<Input>> {
Analog {
pin: DummyDisable::<Input> { _mode: PhantomData },
}
}
}
/// Holds On-Chip ADC Channel inputs and provides an interface to grab and return them.
// These have to have the Input dummy type to allow them to have the Channel
// trait.
pub struct OnChipChannels {
vss: Option<Analog<Vss<Input>>>,
temp_sense: Option<Analog<TempSense<Input>>>,
bandgap: Option<Analog<Bandgap<Input>>>,
vref_h: Option<Analog<VrefH<Input>>>,
vref_l: Option<Analog<VrefL<Input>>>,
}
/// Dummy type state for on-chip ADC input channels
pub struct Input;
/// Adc Input Channel, measures ground (should be 0?)
pub struct Vss<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, measures internal temperature sensor
pub struct TempSense<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Bandgap internal voltage reference
pub struct Bandgap<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Voltage Reference, High
pub struct VrefH<Input> {
_mode: PhantomData<Input>,
}
/// Adc Input Channel, Voltage Reference, Low
pub struct VrefL<Input> {
_mode: PhantomData<Input>,
}
/// Dummy Channel that temporarily disables the Adc Module.
pub struct DummyDisable<Input> {
_mode: PhantomData<Input>,
}
macro_rules! adc_input_channels {
( $($Chan:expr => $Pin:ident),+ $(,)*) => {
$(
impl<OldMode> Channel<Adc<Enabled>> for Analog<$Pin<OldMode>> {
type ID = u8;
fn channel() -> u8 { $Chan }
}
)+
};
}
use crate::gpio::{gpioa::*, gpiob::*};
adc_input_channels! (
0_u8 => PTA0,
1_u8 => PTA1,
2_u8 => PTA6,
3_u8 => PTA7,
4_u8 => PTB0,
5_u8 => PTB1,
6_u8 => PTB2,
7_u8 => PTB3,
8_u8 => PTC0,
9_u8 => PTC1,
10_u8 => PTC2,
11_u8 => PTC3,
12_u8 => PTF4,
13_u8 => PTF5,
14_u8 => PTF6,
15_u8 => PTF7,
16_u8 => Vss,
22_u8 => TempSense,
23_u8 => Bandgap,
24_u8 => VrefH,
25_u8 => VrefL,
0x1F_u8 => DummyDisable,
);
macro_rules! impl_analog_pin {
( $($Chan:expr => $Pin:ident),+ $(,)*) => {
$(
impl<OldMode> $Pin<OldMode> {
/// Convert Pin into the [Analog] state for use by the ADC.
///
/// This implementation provides the GPIO interface a method to
/// give an eligible pin to the ADC peripheral for conversion
/// into an Analog pin. This method is only implemented in
/// eligible pins. The ADC peripheral disables the GPIO and
/// PORT control over the pin and connects it to the ADC mux
/// (controlled by [Adc::set_channel].
///
/// Note: The [Analog::outof_analog] method must be used to
/// return the pin to a normal Input/Output typestate. The pin
/// will be returned in the same typestate as it was received.
pub fn into_analog(self) -> Analog<$Pin<OldMode>> {
unsafe {
(*ADC::ptr())
.apctl1
.modify(|r, w| w.adpc().bits(r.adpc().bits() | (1 << $Chan)));
}
Analog { pin: self }
}
}
impl<OldMode> Analog<$Pin<OldMode>> {
/// Return Analog state Pin to normal GPIO-state interface.
///
/// The Pin will be in the same state that it was when it
/// entered the Analog type state.
pub fn outof_analog(self) -> $Pin<OldMode> {
let adc = unsafe { &(*ADC::ptr()) };
adc.apctl1
.modify(|r, w| unsafe { w.adpc().bits(r.adpc().bits() & !(1 << $Chan)) });
self.pin
}
}
)+
};
}
impl_analog_pin!(
0_u8 => PTA0,
1_u8 => PTA1,
2_u8 => PTA6,
3_u8 => PTA7,
4_u8 => PTB0,
5_u8 => PTB1,
6_u8 => PTB2,
7_u8 => PTB3,
8_u8 => PTC0,
9_u8 => PTC1,
10_u8 => PTC2,
11_u8 => PTC3,
12_u8 => PTF4,
13_u8 => PTF5,
14_u8 => PTF6,
15_u8 => PTF7,
);
impl<Pin> OneShot<Adc<Enabled>, u16, Pin> for Adc<Enabled>
where
Pin: Channel<Adc<Enabled>, ID = u8>,
{
type Error = Infallible;
fn read(&mut self, pin: &mut Pin) -> nb::Result<u16, Self::Error> {
self.set_channel(pin);
while !self.is_done() {}
let ret_val = Ok(self.result());
let disable = self.onchip_channels.dummy_disable();
self.set_channel(&disable);
ret_val
}
}
| {
None
} | conditional_block |
TimeSeries.js | //parse dates - have to be formatted as d3 datetime in order to create a time scale
var parse_dates = function(date){
//first 4 chars = YYYY, last 2 chars = MM (e.g. 1986-01)
var new_date = date.slice(4,) + "-" + date.slice(0,4);
//parse to d3 datetime object
var parse_time = d3.timeParse("%m-%Y");
return parse_time(new_date);
};
var make_TimeSeries = function(dispatch_statechange){
//Set up margins of graph axes - need to make room for tick labels
var margin = {top: 0, right: 10, bottom: 6, left: 25};
//Set up dimensions of the graph
var width = document.getElementById("bottom").offsetWidth - margin.left - margin.right;
var height = document.getElementById("bottom").offsetHeight - margin.top - margin.bottom;
//set up data
var data = [];
//parse the dates/prices and chop off last 6 entries to match the import data timeline
var dates = Object.keys(crude_prices).map(function(date){ return parse_dates(date); });
dates = dates.slice(0,dates.length-6);
var prices = Object.values(crude_prices);
prices = prices.slice(0,prices.length-6);
//append each month in the time series as a new object pair to the data variable
for(i=0; i<Object.keys(dates).length; i++){
var new_entry = {};
new_entry.date = dates[i];
new_entry.price = Object.values(crude_prices)[i];
data.push(new_entry);
}
//set up the x and y values - may need to parse dates from YYYYMM to MM-YYYY
var x = d3.scaleTime()
.domain(d3.extent(dates)) //domain of inputs
.range([0, width]); //range of outputs
var y = d3.scaleLinear()
.domain([0, d3.max(prices)+10]) //domain of inputs
.range([height, 0]); //range of outputs
var line = d3.line()
.x(function(d){ return x(d.date); })
.y(function(d){ return y(d.price); });
//append an SVG element to the bottom bar, reshape it to the bottom dimensions, and append <g> tag with margins
var TS_svg = d3.select("#bottom")
.append("svg")
.attr("width", width)
.attr("height", height)
.append("g")
.attr("class","line-chart")
.attr("id", "WTI-chart")
.attr("transform", "translate(" + margin.left + "," + "-"+margin.bottom + ")");
//apend Y Axis
TS_svg.append("g")
.attr("class","y-axis")
.attr("id", "WTI-axis")
.attr("height", height)
.attr("transform", "translate(0,"+margin.top+")")
.call(d3.axisLeft(y));
//append X Axis
TS_svg.append("g")
.attr("class", "x-axis")
.attr("transform", "translate(0,"+margin.top+")")
.attr("stroke","white")
.call(d3.axisBottom(x));
//append the actual time series line
TS_svg.append("path")
.data(data)
.attr("class", "line")
.attr("d", line(data));
//create invisible popup tip box to show on mouseover of timeseries
var tip_box = d3.select("body")
.append("div")
.attr("id", "tip-box")
.style("opacity", 0);
//create invisible dots on the timeline - when moused over, will give the date & price in popup
var dot_labels = function(){
TS_svg.selectAll(".dot")
.data(data)
.enter().append("circle")
.attr("class","dot")
.attr("cx", function(d){ return x(d.date); })
.attr("cy", function(d){ return y(d.price); })
.attr("r", "4px")
.style("opacity",0.0)
.on("mouseover", function(d){
var h = document.getElementById("tip-box").offsetHeight;
var f = d3.timeFormat("%b-%Y");
tip_box.transition()
.duration(200)
.style("opacity",0.9);
tip_box.html(f(d.date) + "<br/>" + d.price.toFixed(3))
.style("left", (d3.event.pageX) + "px")
.style("top", (d3.event.pageY - h) + "px");
})
.on("mouseout",function(d){
tip_box.transition()
.duration(200)
.style("opacity", 0);
});
};
//remove every other Y Axis label to avoid cluttering
d3.select("#WTI-axis").selectAll(".tick text")
.attr("stroke-width", "1px")
.attr("stroke","white")
.attr("class",function(d,i){
//remove
if(i%3 != 0){
d3.select(this).remove();
}
});
//append the marker line that indicates the time state of the model
TS_svg.append("line")
.attr("x1",x(dates[0]))
.attr("y1",0)
.attr("x2",x(dates[0]))
.attr("y1",height)
.attr("stroke-width","4px")
.attr("class","marker-line")
.attr("id","marker-line");
//transition the marker line across of the time series
var marker_transition = function(start){
var T = 0;
for(i=start; i<dates.length; i++){
d3.select(".marker-line")
.transition()
.duration(1500)
.delay(1500*T)
.ease(d3.easeLinear)
.attr("x1", x(dates[i]) )
.attr("x2", x(dates[i]) );
T++;
}
};
marker_transition(1);
//find the index of the nearest value when marker is dragged/dropped on the timeline
var find_nearest = function(dragged_x){
//get the x-axis coordinate for all the dates
var x_dates = dates.map(function(d){ return x(d); });
//get the distance between each coordinate and the dragged_x
var dist = x_dates.map(function(d){ return Math.abs(d - dragged_x); });
//get the index of the smallest distance
return dist.indexOf(Math.min.apply(null,dist));
};
/*
* When the line is dragged, events need to be dispatched to:
* 1) The bar chart
* 2) The map circles
* 3) The Date: MM-YYYY
*/
//make marker line clickable and dragable (needs to also return its time state)
var drag_line = d3.drag()
.on("start",function(d){
//Stop previous transition
d3.select(".marker-line")
.transition()
.duration(0);
//make the line actively clickable
d3.select(this)
.raise()
.classed("active", true);
})
.on("drag",function(d){
//get the date closest to the new x
time_state = dates[find_nearest(d3.event.x)];
//set the x values to the x value of the closest x
d3.select(this)
.attr("x1", x(time_state))
.attr("x2", x(time_state));
//delete and remake circles as the marker line moves
var index = find_nearest(this.getAttribute("x1"));
//propogate the index to the global variable current_timestate
window.current_timestate = index;
call_dispatch(index);
})
.on("end",function(d){
//restart the transition using that nearest index
var index = find_nearest(this.getAttribute("x1"));
//propogate the index to the global variable current_timestate
window.current_timestate = index;
//marker starts moving again when drag stops
marker_transition(index);
//make dot labels again
dot_labels();
//deactivate marker
d3.select(this)
.classed("active",false);
});
d3.select(".marker-line")
.call(drag_line);
dot_labels();
};
var make_Import_TS = function(selected_city){
//Set up margins of graph axes - need to make room for tick labels
var margin = {top: 0, right: 10, bottom: 6, left: 25};
//Set up dimensions of the graph
var width = document.getElementById("bottom").offsetWidth - margin.left - margin.right;
var height = document.getElementById("bottom").offsetHeight - margin.top - margin.bottom;
//get the imports time series for the selected port
var import_data;
Data.objects.forEach(function(circle){
if(circle.circle.City == selected_city){
import_data = circle.circle.Imports;
}
});
var dates = Object.keys(crude_prices).map(function(date){ return parse_dates(date); });
dates = dates.slice(0,dates.length-6); | for(i=0; i<Object.keys(dates).length; i++){
var new_entry = {};
new_entry.date = dates[i];
new_entry.imports = Object.values(import_data)[i];
data.push(new_entry);
}
var imports_x = d3.scaleTime()
.domain(d3.extent(dates))
.range([0, width]);
var imports_scale = d3.scaleLinear()
.domain([d3.min(import_data), d3.max(import_data)])
.range([height, 0]);
var line = d3.line()
.x(function(d){ return imports_x(d.date); })
.y(function(d){ return imports_scale(d.imports); });
var Import_svg = d3.select(".bottom")
.select("svg")
.append("g")
.attr("id","imports-chart")
.attr("transform", "translate(" + margin.left + "," + "-"+margin.bottom + ")");
Import_svg.append("g")
.attr("class","y-axis")
.attr("id", "Imports-axis")
.attr("height", height)
.attr("transform", "translate("+(width-margin.left)+","+margin.top+")")
.call(d3.axisRight(imports_scale));
Import_svg.append("path")
.data(data)
.attr("id","imports-line")
.attr("class", "line")
.attr("d", line(data));
d3.select("#Imports-axis").selectAll(".tick text")
.attr("class",function(d,i){
//remove
if(i%2 != 0){
d3.select(this).remove();
}
});
};
var make_Rain_TS = function(selected_city){
//Set up margins of graph axes - need to make room for tick labels
var margin = {top: 0, right: 10, bottom: 6, left: 25};
//Set up dimensions of the graph
var width = document.getElementById("bottom").offsetWidth - margin.left - margin.right;
var height = document.getElementById("bottom").offsetHeight - margin.top - margin.bottom;
//get the imports time series for the selected port
var rain_data;
Data.objects.forEach(function(circle){
if(circle.circle.City == selected_city){
rain_data = circle.circle.Precip;
}
});
var dates = Object.keys(crude_prices).map(function(date){ return parse_dates(date); });
dates = dates.slice(0,dates.length-6);
var data =[];
//append each month in the time series as a new object pair to the data variable
for(i=0; i<Object.keys(dates).length; i++){
var new_entry = {};
new_entry.date = dates[i];
new_entry.rain = Object.values(rain_data)[i];
data.push(new_entry);
}
var rain_x = d3.scaleTime()
.domain(d3.extent(dates))
.range([0, width]);
var rain_scale = d3.scaleLinear()
.domain([d3.min(rain_data),d3.max(rain_data)])
.range([height, 0]);
var line = d3.line()
.x(function(d){ return rain_x(d.date); })
.y(function(d){ return rain_scale(d.rain); });
var Rain_svg = d3.select(".bottom")
.select("svg")
.append("g")
.attr("id", "rain-chart")
.attr("transform", "translate(" + margin.left + "," + "-"+margin.bottom + ")");
Rain_svg.append("g")
.attr("class","y-axis")
.attr("id", "Rain-axis")
.attr("height", height)
.attr("transform", "translate("+(width-margin.left)+","+margin.top+")")
.call(d3.axisRight(rain_scale));
Rain_svg.append("path")
.data(data)
.attr("id","rain-line")
.attr("class", "line")
.attr("d", line(data));
d3.select("#Rain-axis").selectAll(".tick text")
.attr("class",function(d,i){
//remove
if(i%2 != 0){
d3.select(this).remove();
}
});
}; |
var data =[];
//append each month in the time series as a new object pair to the data variable | random_line_split |
TimeSeries.js | //parse dates - have to be formatted as d3 datetime in order to create a time scale
var parse_dates = function(date){
//first 4 chars = YYYY, last 2 chars = MM (e.g. 1986-01)
var new_date = date.slice(4,) + "-" + date.slice(0,4);
//parse to d3 datetime object
var parse_time = d3.timeParse("%m-%Y");
return parse_time(new_date);
};
var make_TimeSeries = function(dispatch_statechange){
//Set up margins of graph axes - need to make room for tick labels
var margin = {top: 0, right: 10, bottom: 6, left: 25};
//Set up dimensions of the graph
var width = document.getElementById("bottom").offsetWidth - margin.left - margin.right;
var height = document.getElementById("bottom").offsetHeight - margin.top - margin.bottom;
//set up data
var data = [];
//parse the dates/prices and chop off last 6 entries to match the import data timeline
var dates = Object.keys(crude_prices).map(function(date){ return parse_dates(date); });
dates = dates.slice(0,dates.length-6);
var prices = Object.values(crude_prices);
prices = prices.slice(0,prices.length-6);
//append each month in the time series as a new object pair to the data variable
for(i=0; i<Object.keys(dates).length; i++){
var new_entry = {};
new_entry.date = dates[i];
new_entry.price = Object.values(crude_prices)[i];
data.push(new_entry);
}
//set up the x and y values - may need to parse dates from YYYYMM to MM-YYYY
var x = d3.scaleTime()
.domain(d3.extent(dates)) //domain of inputs
.range([0, width]); //range of outputs
var y = d3.scaleLinear()
.domain([0, d3.max(prices)+10]) //domain of inputs
.range([height, 0]); //range of outputs
var line = d3.line()
.x(function(d){ return x(d.date); })
.y(function(d){ return y(d.price); });
//append an SVG element to the bottom bar, reshape it to the bottom dimensions, and append <g> tag with margins
var TS_svg = d3.select("#bottom")
.append("svg")
.attr("width", width)
.attr("height", height)
.append("g")
.attr("class","line-chart")
.attr("id", "WTI-chart")
.attr("transform", "translate(" + margin.left + "," + "-"+margin.bottom + ")");
//apend Y Axis
TS_svg.append("g")
.attr("class","y-axis")
.attr("id", "WTI-axis")
.attr("height", height)
.attr("transform", "translate(0,"+margin.top+")")
.call(d3.axisLeft(y));
//append X Axis
TS_svg.append("g")
.attr("class", "x-axis")
.attr("transform", "translate(0,"+margin.top+")")
.attr("stroke","white")
.call(d3.axisBottom(x));
//append the actual time series line
TS_svg.append("path")
.data(data)
.attr("class", "line")
.attr("d", line(data));
//create invisible popup tip box to show on mouseover of timeseries
var tip_box = d3.select("body")
.append("div")
.attr("id", "tip-box")
.style("opacity", 0);
//create invisible dots on the timeline - when moused over, will give the date & price in popup
var dot_labels = function(){
TS_svg.selectAll(".dot")
.data(data)
.enter().append("circle")
.attr("class","dot")
.attr("cx", function(d){ return x(d.date); })
.attr("cy", function(d){ return y(d.price); })
.attr("r", "4px")
.style("opacity",0.0)
.on("mouseover", function(d){
var h = document.getElementById("tip-box").offsetHeight;
var f = d3.timeFormat("%b-%Y");
tip_box.transition()
.duration(200)
.style("opacity",0.9);
tip_box.html(f(d.date) + "<br/>" + d.price.toFixed(3))
.style("left", (d3.event.pageX) + "px")
.style("top", (d3.event.pageY - h) + "px");
})
.on("mouseout",function(d){
tip_box.transition()
.duration(200)
.style("opacity", 0);
});
};
//remove every other Y Axis label to avoid cluttering
d3.select("#WTI-axis").selectAll(".tick text")
.attr("stroke-width", "1px")
.attr("stroke","white")
.attr("class",function(d,i){
//remove
if(i%3 != 0){
d3.select(this).remove();
}
});
//append the marker line that indicates the time state of the model
TS_svg.append("line")
.attr("x1",x(dates[0]))
.attr("y1",0)
.attr("x2",x(dates[0]))
.attr("y1",height)
.attr("stroke-width","4px")
.attr("class","marker-line")
.attr("id","marker-line");
//transition the marker line across of the time series
var marker_transition = function(start){
var T = 0;
for(i=start; i<dates.length; i++){
d3.select(".marker-line")
.transition()
.duration(1500)
.delay(1500*T)
.ease(d3.easeLinear)
.attr("x1", x(dates[i]) )
.attr("x2", x(dates[i]) );
T++;
}
};
marker_transition(1);
//find the index of the nearest value when marker is dragged/dropped on the timeline
var find_nearest = function(dragged_x){
//get the x-axis coordinate for all the dates
var x_dates = dates.map(function(d){ return x(d); });
//get the distance between each coordinate and the dragged_x
var dist = x_dates.map(function(d){ return Math.abs(d - dragged_x); });
//get the index of the smallest distance
return dist.indexOf(Math.min.apply(null,dist));
};
/*
* When the line is dragged, events need to be dispatched to:
* 1) The bar chart
* 2) The map circles
* 3) The Date: MM-YYYY
*/
//make marker line clickable and dragable (needs to also return its time state)
var drag_line = d3.drag()
.on("start",function(d){
//Stop previous transition
d3.select(".marker-line")
.transition()
.duration(0);
//make the line actively clickable
d3.select(this)
.raise()
.classed("active", true);
})
.on("drag",function(d){
//get the date closest to the new x
time_state = dates[find_nearest(d3.event.x)];
//set the x values to the x value of the closest x
d3.select(this)
.attr("x1", x(time_state))
.attr("x2", x(time_state));
//delete and remake circles as the marker line moves
var index = find_nearest(this.getAttribute("x1"));
//propogate the index to the global variable current_timestate
window.current_timestate = index;
call_dispatch(index);
})
.on("end",function(d){
//restart the transition using that nearest index
var index = find_nearest(this.getAttribute("x1"));
//propogate the index to the global variable current_timestate
window.current_timestate = index;
//marker starts moving again when drag stops
marker_transition(index);
//make dot labels again
dot_labels();
//deactivate marker
d3.select(this)
.classed("active",false);
});
d3.select(".marker-line")
.call(drag_line);
dot_labels();
};
var make_Import_TS = function(selected_city){
//Set up margins of graph axes - need to make room for tick labels
var margin = {top: 0, right: 10, bottom: 6, left: 25};
//Set up dimensions of the graph
var width = document.getElementById("bottom").offsetWidth - margin.left - margin.right;
var height = document.getElementById("bottom").offsetHeight - margin.top - margin.bottom;
//get the imports time series for the selected port
var import_data;
Data.objects.forEach(function(circle){
if(circle.circle.City == selected_city) |
});
var dates = Object.keys(crude_prices).map(function(date){ return parse_dates(date); });
dates = dates.slice(0,dates.length-6);
var data =[];
//append each month in the time series as a new object pair to the data variable
for(i=0; i<Object.keys(dates).length; i++){
var new_entry = {};
new_entry.date = dates[i];
new_entry.imports = Object.values(import_data)[i];
data.push(new_entry);
}
var imports_x = d3.scaleTime()
.domain(d3.extent(dates))
.range([0, width]);
var imports_scale = d3.scaleLinear()
.domain([d3.min(import_data), d3.max(import_data)])
.range([height, 0]);
var line = d3.line()
.x(function(d){ return imports_x(d.date); })
.y(function(d){ return imports_scale(d.imports); });
var Import_svg = d3.select(".bottom")
.select("svg")
.append("g")
.attr("id","imports-chart")
.attr("transform", "translate(" + margin.left + "," + "-"+margin.bottom + ")");
Import_svg.append("g")
.attr("class","y-axis")
.attr("id", "Imports-axis")
.attr("height", height)
.attr("transform", "translate("+(width-margin.left)+","+margin.top+")")
.call(d3.axisRight(imports_scale));
Import_svg.append("path")
.data(data)
.attr("id","imports-line")
.attr("class", "line")
.attr("d", line(data));
d3.select("#Imports-axis").selectAll(".tick text")
.attr("class",function(d,i){
//remove
if(i%2 != 0){
d3.select(this).remove();
}
});
};
var make_Rain_TS = function(selected_city){
//Set up margins of graph axes - need to make room for tick labels
var margin = {top: 0, right: 10, bottom: 6, left: 25};
//Set up dimensions of the graph
var width = document.getElementById("bottom").offsetWidth - margin.left - margin.right;
var height = document.getElementById("bottom").offsetHeight - margin.top - margin.bottom;
//get the imports time series for the selected port
var rain_data;
Data.objects.forEach(function(circle){
if(circle.circle.City == selected_city){
rain_data = circle.circle.Precip;
}
});
var dates = Object.keys(crude_prices).map(function(date){ return parse_dates(date); });
dates = dates.slice(0,dates.length-6);
var data =[];
//append each month in the time series as a new object pair to the data variable
for(i=0; i<Object.keys(dates).length; i++){
var new_entry = {};
new_entry.date = dates[i];
new_entry.rain = Object.values(rain_data)[i];
data.push(new_entry);
}
var rain_x = d3.scaleTime()
.domain(d3.extent(dates))
.range([0, width]);
var rain_scale = d3.scaleLinear()
.domain([d3.min(rain_data),d3.max(rain_data)])
.range([height, 0]);
var line = d3.line()
.x(function(d){ return rain_x(d.date); })
.y(function(d){ return rain_scale(d.rain); });
var Rain_svg = d3.select(".bottom")
.select("svg")
.append("g")
.attr("id", "rain-chart")
.attr("transform", "translate(" + margin.left + "," + "-"+margin.bottom + ")");
Rain_svg.append("g")
.attr("class","y-axis")
.attr("id", "Rain-axis")
.attr("height", height)
.attr("transform", "translate("+(width-margin.left)+","+margin.top+")")
.call(d3.axisRight(rain_scale));
Rain_svg.append("path")
.data(data)
.attr("id","rain-line")
.attr("class", "line")
.attr("d", line(data));
d3.select("#Rain-axis").selectAll(".tick text")
.attr("class",function(d,i){
//remove
if(i%2 != 0){
d3.select(this).remove();
}
});
};
| {
import_data = circle.circle.Imports;
} | conditional_block |
installed.rs | // Copyright (c) 2016 Google Inc (lewinb@google.com).
//
// Refer to the project root for licensing information.
//
extern crate serde_json;
extern crate url;
use std::borrow::BorrowMut;
use std::convert::AsRef;
use std::error::Error;
use std::io;
use std::io::Read;
use std::sync::Mutex;
use std::sync::mpsc::{channel, Receiver, Sender};
use hyper;
use hyper::{client, header, server, status, uri};
use serde_json::error;
use url::form_urlencoded;
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
use types::{ApplicationSecret, Token};
use authenticator_delegate::AuthenticatorDelegate;
const OOB_REDIRECT_URI: &'static str = "urn:ietf:wg:oauth:2.0:oob";
/// Assembles a URL to request an authorization token (with user interaction).
/// Note that the redirect_uri here has to be either None or some variation of
/// http://localhost:{port}, or the authorization won't work (error "redirect_uri_mismatch")
fn build_authentication_request_url<'a, T, I>(auth_uri: &str,
client_id: &str,
scopes: I,
redirect_uri: Option<String>)
-> String
where T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>
{
let mut url = String::new();
let mut scopes_string = scopes.into_iter().fold(String::new(), |mut acc, sc| {
acc.push_str(sc.as_ref());
acc.push_str(" ");
acc
});
// Remove last space
scopes_string.pop();
url.push_str(auth_uri);
vec![format!("?scope={}", scopes_string),
format!("&redirect_uri={}",
redirect_uri.unwrap_or(OOB_REDIRECT_URI.to_string())),
format!("&response_type=code"),
format!("&client_id={}", client_id)]
.into_iter()
.fold(url, |mut u, param| {
u.push_str(&percent_encode(param.as_ref(), QUERY_ENCODE_SET));
u
})
}
pub struct InstalledFlow<C> {
client: C,
server: Option<server::Listening>,
port: Option<u32>,
auth_code_rcv: Option<Receiver<String>>,
}
/// cf. https://developers.google.com/identity/protocols/OAuth2InstalledApp#choosingredirecturi
pub enum InstalledFlowReturnMethod {
/// Involves showing a URL to the user and asking to copy a code from their browser
/// (default)
Interactive,
/// Involves spinning up a local HTTP server and Google redirecting the browser to
/// the server with a URL containing the code (preferred, but not as reliable). The
/// parameter is the port to listen on.
HTTPRedirect(u32),
}
impl<C> InstalledFlow<C>
where C: BorrowMut<hyper::Client>
{
/// Starts a new Installed App auth flow.
/// If HTTPRedirect is chosen as method and the server can't be started, the flow falls
/// back to Interactive.
pub fn new(client: C, method: Option<InstalledFlowReturnMethod>) -> InstalledFlow<C> {
let default = InstalledFlow {
client: client,
server: None,
port: None,
auth_code_rcv: None,
};
match method {
None => default,
Some(InstalledFlowReturnMethod::Interactive) => default,
// Start server on localhost to accept auth code.
Some(InstalledFlowReturnMethod::HTTPRedirect(port)) => {
let server = server::Server::http(format!("127.0.0.1:{}", port).as_str());
match server {
Result::Err(_) => default,
Result::Ok(server) => {
let (tx, rx) = channel();
let listening =
server.handle(InstalledFlowHandler { auth_code_snd: Mutex::new(tx) });
match listening {
Result::Err(_) => default,
Result::Ok(listening) => {
InstalledFlow {
client: default.client,
server: Some(listening),
port: Some(port),
auth_code_rcv: Some(rx),
}
}
}
}
}
}
}
}
/// Handles the token request flow; it consists of the following steps:
/// . Obtain a auhorization code with user cooperation or internal redirect.
/// . Obtain a token and refresh token using that code.
/// . Return that token
///
/// It's recommended not to use the DefaultAuthenticatorDelegate, but a specialized one.
pub fn obtain_token<'a, AD: AuthenticatorDelegate, S, T>(&mut self,
auth_delegate: &mut AD,
appsecret: &ApplicationSecret,
scopes: S)
-> Result<Token, Box<Error>>
where T: AsRef<str> + 'a,
S: Iterator<Item = &'a T>
{
let authcode = try!(self.get_authorization_code(auth_delegate, &appsecret, scopes));
let tokens = try!(self.request_token(&appsecret, &authcode));
// Successful response
if tokens.access_token.is_some() {
let mut token = Token {
access_token: tokens.access_token.unwrap(),
refresh_token: tokens.refresh_token.unwrap(),
token_type: tokens.token_type.unwrap(),
expires_in: tokens.expires_in,
expires_in_timestamp: None,
};
token.set_expiry_absolute();
Result::Ok(token)
} else {
let err = io::Error::new(io::ErrorKind::Other,
format!("Token API error: {} {}",
tokens.error.unwrap_or("<unknown err>".to_string()),
tokens.error_description
.unwrap_or("".to_string()))
.as_str());
Result::Err(Box::new(err))
}
}
/// Obtains an authorization code either interactively or via HTTP redirect (see
/// InstalledFlowReturnMethod).
fn get_authorization_code<'a, AD: AuthenticatorDelegate, S, T>(&mut self,
auth_delegate: &mut AD,
appsecret: &ApplicationSecret,
scopes: S)
-> Result<String, Box<Error>>
where T: AsRef<str> + 'a,
S: Iterator<Item = &'a T>
{
let result: Result<String, Box<Error>> = match self.server {
None => {
let url = build_authentication_request_url(&appsecret.auth_uri,
&appsecret.client_id,
scopes,
None);
match auth_delegate.present_user_url(&url, true /* need_code */) {
None => {
Result::Err(Box::new(io::Error::new(io::ErrorKind::UnexpectedEof,
"couldn't read code")))
}
// Remove newline
Some(mut code) => {
code.pop();
Result::Ok(code)
}
}
}
Some(_) => {
// The redirect URI must be this very localhost URL, otherwise Google refuses
// authorization.
let url = build_authentication_request_url(&appsecret.auth_uri,
&appsecret.client_id,
scopes,
Some(format!("http://localhost:{}",
self.port
.unwrap_or(8080))));
auth_delegate.present_user_url(&url, false /* need_code */);
match self.auth_code_rcv.as_ref().unwrap().recv() {
Result::Err(e) => Result::Err(Box::new(e)),
Result::Ok(s) => Result::Ok(s),
}
}
};
self.server.as_mut().map(|l| l.close()).is_some();
result
}
/// Sends the authorization code to the provider in order to obtain access and refresh tokens.
fn request_token(&mut self,
appsecret: &ApplicationSecret,
authcode: &str)
-> Result<JSONTokenResponse, Box<Error>> {
let redirect_uri;
match self.port {
None => redirect_uri = OOB_REDIRECT_URI.to_string(),
Some(p) => redirect_uri = format!("http://localhost:{}", p),
}
let body = form_urlencoded::serialize(vec![("code".to_string(), authcode.to_string()),
("client_id".to_string(),
appsecret.client_id.clone()),
("client_secret".to_string(),
appsecret.client_secret.clone()),
("redirect_uri".to_string(), redirect_uri),
("grant_type".to_string(),
"authorization_code".to_string())]);
let result: Result<client::Response, hyper::Error> = self.client
.borrow_mut()
.post(&appsecret.token_uri)
.body(&body)
.header(header::ContentType("application/x-www-form-urlencoded".parse().unwrap()))
.send();
let mut resp = String::new();
match result {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(mut response) => {
let result = response.read_to_string(&mut resp);
match result {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(_) => (),
}
}
}
let token_resp: Result<JSONTokenResponse, error::Error> = serde_json::from_str(&resp);
match token_resp {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(tok) => Result::Ok(tok) as Result<JSONTokenResponse, Box<Error>>,
}
}
}
#[derive(Deserialize)]
struct JSONTokenResponse {
access_token: Option<String>,
refresh_token: Option<String>,
token_type: Option<String>,
expires_in: Option<i64>,
error: Option<String>,
error_description: Option<String>,
}
/// HTTP handler handling the redirect from the provider.
struct InstalledFlowHandler {
auth_code_snd: Mutex<Sender<String>>,
}
impl server::Handler for InstalledFlowHandler {
fn handle(&self, rq: server::Request, mut rp: server::Response) {
match rq.uri {
uri::RequestUri::AbsolutePath(path) => |
_ => {
*rp.status_mut() = status::StatusCode::BadRequest;
let _ = rp.send("Invalid Request!".as_ref());
}
}
}
}
impl InstalledFlowHandler {
fn handle_url(&self, url: hyper::Url) {
// Google redirects to the specified localhost URL, appending the authorization
// code, like this: http://localhost:8080/xyz/?code=4/731fJ3BheyCouCniPufAd280GHNV5Ju35yYcGs
// We take that code and send it to the get_authorization_code() function that
// waits for it.
for (param, val) in url.query_pairs().into_owned() {
if param == "code".to_string() {
let _ = self.auth_code_snd.lock().unwrap().send(val);
}
}
}
}
#[cfg(test)]
mod tests {
use super::build_authentication_request_url;
use super::InstalledFlowHandler;
use std::sync::Mutex;
use std::sync::mpsc::channel;
use hyper::Url;
#[test]
fn test_request_url_builder() {
assert_eq!("https://accounts.google.\
com/o/oauth2/auth?scope=email%20profile&redirect_uri=urn:ietf:wg:oauth:2.0:\
oob&response_type=code&client_id=812741506391-h38jh0j4fv0ce1krdkiq0hfvt6n5amr\
f.apps.googleusercontent.com",
build_authentication_request_url("https://accounts.google.com/o/oauth2/auth",
"812741506391-h38jh0j4fv0ce1krdkiq0hfvt6n5am\
rf.apps.googleusercontent.com",
vec![&"email".to_string(),
&"profile".to_string()],
None));
}
#[test]
fn test_http_handle_url() {
let (tx, rx) = channel();
let handler = InstalledFlowHandler { auth_code_snd: Mutex::new(tx) };
// URLs are usually a bit botched
let url = Url::parse("http://example.com:1234/?code=ab/c%2Fd#").unwrap();
handler.handle_url(url);
assert_eq!(rx.recv().unwrap(), "ab/c/d".to_string());
}
}
| {
// We use a fake URL because the redirect goes to a URL, meaning we
// can't use the url form decode (because there's slashes and hashes and stuff in
// it).
let url = hyper::Url::parse(&format!("http://example.com{}", path));
if url.is_err() {
*rp.status_mut() = status::StatusCode::BadRequest;
let _ = rp.send("Unparseable URL".as_ref());
} else {
self.handle_url(url.unwrap());
*rp.status_mut() = status::StatusCode::Ok;
let _ =
rp.send("<html><head><title>Success</title></head><body>You may now \
close this window.</body></html>"
.as_ref());
}
} | conditional_block |
installed.rs | // Copyright (c) 2016 Google Inc (lewinb@google.com).
//
// Refer to the project root for licensing information.
//
extern crate serde_json;
extern crate url;
use std::borrow::BorrowMut;
use std::convert::AsRef;
use std::error::Error;
use std::io;
use std::io::Read;
use std::sync::Mutex;
use std::sync::mpsc::{channel, Receiver, Sender};
use hyper;
use hyper::{client, header, server, status, uri};
use serde_json::error;
use url::form_urlencoded;
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
use types::{ApplicationSecret, Token};
use authenticator_delegate::AuthenticatorDelegate;
const OOB_REDIRECT_URI: &'static str = "urn:ietf:wg:oauth:2.0:oob";
/// Assembles a URL to request an authorization token (with user interaction).
/// Note that the redirect_uri here has to be either None or some variation of
/// http://localhost:{port}, or the authorization won't work (error "redirect_uri_mismatch")
fn build_authentication_request_url<'a, T, I>(auth_uri: &str,
client_id: &str,
scopes: I,
redirect_uri: Option<String>)
-> String
where T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>
{
let mut url = String::new();
let mut scopes_string = scopes.into_iter().fold(String::new(), |mut acc, sc| {
acc.push_str(sc.as_ref());
acc.push_str(" ");
acc
});
// Remove last space
scopes_string.pop();
url.push_str(auth_uri);
vec![format!("?scope={}", scopes_string),
format!("&redirect_uri={}",
redirect_uri.unwrap_or(OOB_REDIRECT_URI.to_string())),
format!("&response_type=code"),
format!("&client_id={}", client_id)]
.into_iter()
.fold(url, |mut u, param| {
u.push_str(&percent_encode(param.as_ref(), QUERY_ENCODE_SET));
u
})
}
pub struct InstalledFlow<C> {
client: C,
server: Option<server::Listening>,
port: Option<u32>,
auth_code_rcv: Option<Receiver<String>>,
}
/// cf. https://developers.google.com/identity/protocols/OAuth2InstalledApp#choosingredirecturi
pub enum InstalledFlowReturnMethod {
/// Involves showing a URL to the user and asking to copy a code from their browser
/// (default)
Interactive,
/// Involves spinning up a local HTTP server and Google redirecting the browser to
/// the server with a URL containing the code (preferred, but not as reliable). The
/// parameter is the port to listen on.
HTTPRedirect(u32),
}
impl<C> InstalledFlow<C>
where C: BorrowMut<hyper::Client>
{
/// Starts a new Installed App auth flow.
/// If HTTPRedirect is chosen as method and the server can't be started, the flow falls
/// back to Interactive.
pub fn new(client: C, method: Option<InstalledFlowReturnMethod>) -> InstalledFlow<C> {
let default = InstalledFlow {
client: client,
server: None,
port: None,
auth_code_rcv: None,
};
match method {
None => default,
Some(InstalledFlowReturnMethod::Interactive) => default,
// Start server on localhost to accept auth code.
Some(InstalledFlowReturnMethod::HTTPRedirect(port)) => {
let server = server::Server::http(format!("127.0.0.1:{}", port).as_str());
match server {
Result::Err(_) => default,
Result::Ok(server) => {
let (tx, rx) = channel();
let listening =
server.handle(InstalledFlowHandler { auth_code_snd: Mutex::new(tx) });
match listening {
Result::Err(_) => default,
Result::Ok(listening) => {
InstalledFlow {
client: default.client,
server: Some(listening),
port: Some(port),
auth_code_rcv: Some(rx),
}
}
}
}
}
}
}
}
/// Handles the token request flow; it consists of the following steps:
/// . Obtain a auhorization code with user cooperation or internal redirect.
/// . Obtain a token and refresh token using that code.
/// . Return that token
///
/// It's recommended not to use the DefaultAuthenticatorDelegate, but a specialized one.
pub fn obtain_token<'a, AD: AuthenticatorDelegate, S, T>(&mut self,
auth_delegate: &mut AD,
appsecret: &ApplicationSecret,
scopes: S)
-> Result<Token, Box<Error>>
where T: AsRef<str> + 'a,
S: Iterator<Item = &'a T>
{
let authcode = try!(self.get_authorization_code(auth_delegate, &appsecret, scopes));
let tokens = try!(self.request_token(&appsecret, &authcode));
// Successful response
if tokens.access_token.is_some() {
let mut token = Token {
access_token: tokens.access_token.unwrap(),
refresh_token: tokens.refresh_token.unwrap(),
token_type: tokens.token_type.unwrap(),
expires_in: tokens.expires_in,
expires_in_timestamp: None,
};
token.set_expiry_absolute();
Result::Ok(token)
} else {
let err = io::Error::new(io::ErrorKind::Other,
format!("Token API error: {} {}",
tokens.error.unwrap_or("<unknown err>".to_string()),
tokens.error_description
.unwrap_or("".to_string()))
.as_str());
Result::Err(Box::new(err))
}
}
/// Obtains an authorization code either interactively or via HTTP redirect (see
/// InstalledFlowReturnMethod).
fn get_authorization_code<'a, AD: AuthenticatorDelegate, S, T>(&mut self,
auth_delegate: &mut AD,
appsecret: &ApplicationSecret,
scopes: S)
-> Result<String, Box<Error>>
where T: AsRef<str> + 'a,
S: Iterator<Item = &'a T>
{
let result: Result<String, Box<Error>> = match self.server {
None => {
let url = build_authentication_request_url(&appsecret.auth_uri,
&appsecret.client_id,
scopes,
None);
match auth_delegate.present_user_url(&url, true /* need_code */) {
None => {
Result::Err(Box::new(io::Error::new(io::ErrorKind::UnexpectedEof,
"couldn't read code")))
}
// Remove newline
Some(mut code) => {
code.pop();
Result::Ok(code)
}
}
}
Some(_) => {
// The redirect URI must be this very localhost URL, otherwise Google refuses
// authorization.
let url = build_authentication_request_url(&appsecret.auth_uri,
&appsecret.client_id,
scopes,
Some(format!("http://localhost:{}",
self.port
.unwrap_or(8080))));
auth_delegate.present_user_url(&url, false /* need_code */);
match self.auth_code_rcv.as_ref().unwrap().recv() {
Result::Err(e) => Result::Err(Box::new(e)),
Result::Ok(s) => Result::Ok(s),
}
}
};
self.server.as_mut().map(|l| l.close()).is_some();
result
}
/// Sends the authorization code to the provider in order to obtain access and refresh tokens.
fn request_token(&mut self,
appsecret: &ApplicationSecret,
authcode: &str)
-> Result<JSONTokenResponse, Box<Error>> {
let redirect_uri;
match self.port {
None => redirect_uri = OOB_REDIRECT_URI.to_string(),
Some(p) => redirect_uri = format!("http://localhost:{}", p),
}
let body = form_urlencoded::serialize(vec![("code".to_string(), authcode.to_string()),
("client_id".to_string(),
appsecret.client_id.clone()),
("client_secret".to_string(),
appsecret.client_secret.clone()),
("redirect_uri".to_string(), redirect_uri),
("grant_type".to_string(),
"authorization_code".to_string())]);
let result: Result<client::Response, hyper::Error> = self.client
.borrow_mut()
.post(&appsecret.token_uri)
.body(&body)
.header(header::ContentType("application/x-www-form-urlencoded".parse().unwrap()))
.send();
let mut resp = String::new();
match result {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(mut response) => {
let result = response.read_to_string(&mut resp);
match result {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(_) => (),
}
}
}
let token_resp: Result<JSONTokenResponse, error::Error> = serde_json::from_str(&resp);
match token_resp {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(tok) => Result::Ok(tok) as Result<JSONTokenResponse, Box<Error>>,
}
}
}
#[derive(Deserialize)]
struct JSONTokenResponse {
access_token: Option<String>,
refresh_token: Option<String>,
token_type: Option<String>,
expires_in: Option<i64>,
error: Option<String>,
error_description: Option<String>,
}
/// HTTP handler handling the redirect from the provider. | impl server::Handler for InstalledFlowHandler {
fn handle(&self, rq: server::Request, mut rp: server::Response) {
match rq.uri {
uri::RequestUri::AbsolutePath(path) => {
// We use a fake URL because the redirect goes to a URL, meaning we
// can't use the url form decode (because there's slashes and hashes and stuff in
// it).
let url = hyper::Url::parse(&format!("http://example.com{}", path));
if url.is_err() {
*rp.status_mut() = status::StatusCode::BadRequest;
let _ = rp.send("Unparseable URL".as_ref());
} else {
self.handle_url(url.unwrap());
*rp.status_mut() = status::StatusCode::Ok;
let _ =
rp.send("<html><head><title>Success</title></head><body>You may now \
close this window.</body></html>"
.as_ref());
}
}
_ => {
*rp.status_mut() = status::StatusCode::BadRequest;
let _ = rp.send("Invalid Request!".as_ref());
}
}
}
}
impl InstalledFlowHandler {
fn handle_url(&self, url: hyper::Url) {
// Google redirects to the specified localhost URL, appending the authorization
// code, like this: http://localhost:8080/xyz/?code=4/731fJ3BheyCouCniPufAd280GHNV5Ju35yYcGs
// We take that code and send it to the get_authorization_code() function that
// waits for it.
for (param, val) in url.query_pairs().into_owned() {
if param == "code".to_string() {
let _ = self.auth_code_snd.lock().unwrap().send(val);
}
}
}
}
#[cfg(test)]
mod tests {
use super::build_authentication_request_url;
use super::InstalledFlowHandler;
use std::sync::Mutex;
use std::sync::mpsc::channel;
use hyper::Url;
#[test]
fn test_request_url_builder() {
assert_eq!("https://accounts.google.\
com/o/oauth2/auth?scope=email%20profile&redirect_uri=urn:ietf:wg:oauth:2.0:\
oob&response_type=code&client_id=812741506391-h38jh0j4fv0ce1krdkiq0hfvt6n5amr\
f.apps.googleusercontent.com",
build_authentication_request_url("https://accounts.google.com/o/oauth2/auth",
"812741506391-h38jh0j4fv0ce1krdkiq0hfvt6n5am\
rf.apps.googleusercontent.com",
vec![&"email".to_string(),
&"profile".to_string()],
None));
}
#[test]
fn test_http_handle_url() {
let (tx, rx) = channel();
let handler = InstalledFlowHandler { auth_code_snd: Mutex::new(tx) };
// URLs are usually a bit botched
let url = Url::parse("http://example.com:1234/?code=ab/c%2Fd#").unwrap();
handler.handle_url(url);
assert_eq!(rx.recv().unwrap(), "ab/c/d".to_string());
}
} | struct InstalledFlowHandler {
auth_code_snd: Mutex<Sender<String>>,
}
| random_line_split |
installed.rs | // Copyright (c) 2016 Google Inc (lewinb@google.com).
//
// Refer to the project root for licensing information.
//
extern crate serde_json;
extern crate url;
use std::borrow::BorrowMut;
use std::convert::AsRef;
use std::error::Error;
use std::io;
use std::io::Read;
use std::sync::Mutex;
use std::sync::mpsc::{channel, Receiver, Sender};
use hyper;
use hyper::{client, header, server, status, uri};
use serde_json::error;
use url::form_urlencoded;
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
use types::{ApplicationSecret, Token};
use authenticator_delegate::AuthenticatorDelegate;
const OOB_REDIRECT_URI: &'static str = "urn:ietf:wg:oauth:2.0:oob";
/// Assembles a URL to request an authorization token (with user interaction).
/// Note that the redirect_uri here has to be either None or some variation of
/// http://localhost:{port}, or the authorization won't work (error "redirect_uri_mismatch")
fn build_authentication_request_url<'a, T, I>(auth_uri: &str,
client_id: &str,
scopes: I,
redirect_uri: Option<String>)
-> String
where T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>
{
let mut url = String::new();
let mut scopes_string = scopes.into_iter().fold(String::new(), |mut acc, sc| {
acc.push_str(sc.as_ref());
acc.push_str(" ");
acc
});
// Remove last space
scopes_string.pop();
url.push_str(auth_uri);
vec![format!("?scope={}", scopes_string),
format!("&redirect_uri={}",
redirect_uri.unwrap_or(OOB_REDIRECT_URI.to_string())),
format!("&response_type=code"),
format!("&client_id={}", client_id)]
.into_iter()
.fold(url, |mut u, param| {
u.push_str(&percent_encode(param.as_ref(), QUERY_ENCODE_SET));
u
})
}
pub struct InstalledFlow<C> {
client: C,
server: Option<server::Listening>,
port: Option<u32>,
auth_code_rcv: Option<Receiver<String>>,
}
/// cf. https://developers.google.com/identity/protocols/OAuth2InstalledApp#choosingredirecturi
pub enum InstalledFlowReturnMethod {
/// Involves showing a URL to the user and asking to copy a code from their browser
/// (default)
Interactive,
/// Involves spinning up a local HTTP server and Google redirecting the browser to
/// the server with a URL containing the code (preferred, but not as reliable). The
/// parameter is the port to listen on.
HTTPRedirect(u32),
}
impl<C> InstalledFlow<C>
where C: BorrowMut<hyper::Client>
{
/// Starts a new Installed App auth flow.
/// If HTTPRedirect is chosen as method and the server can't be started, the flow falls
/// back to Interactive.
pub fn new(client: C, method: Option<InstalledFlowReturnMethod>) -> InstalledFlow<C> |
/// Handles the token request flow; it consists of the following steps:
/// . Obtain a auhorization code with user cooperation or internal redirect.
/// . Obtain a token and refresh token using that code.
/// . Return that token
///
/// It's recommended not to use the DefaultAuthenticatorDelegate, but a specialized one.
pub fn obtain_token<'a, AD: AuthenticatorDelegate, S, T>(&mut self,
auth_delegate: &mut AD,
appsecret: &ApplicationSecret,
scopes: S)
-> Result<Token, Box<Error>>
where T: AsRef<str> + 'a,
S: Iterator<Item = &'a T>
{
let authcode = try!(self.get_authorization_code(auth_delegate, &appsecret, scopes));
let tokens = try!(self.request_token(&appsecret, &authcode));
// Successful response
if tokens.access_token.is_some() {
let mut token = Token {
access_token: tokens.access_token.unwrap(),
refresh_token: tokens.refresh_token.unwrap(),
token_type: tokens.token_type.unwrap(),
expires_in: tokens.expires_in,
expires_in_timestamp: None,
};
token.set_expiry_absolute();
Result::Ok(token)
} else {
let err = io::Error::new(io::ErrorKind::Other,
format!("Token API error: {} {}",
tokens.error.unwrap_or("<unknown err>".to_string()),
tokens.error_description
.unwrap_or("".to_string()))
.as_str());
Result::Err(Box::new(err))
}
}
/// Obtains an authorization code either interactively or via HTTP redirect (see
/// InstalledFlowReturnMethod).
fn get_authorization_code<'a, AD: AuthenticatorDelegate, S, T>(&mut self,
auth_delegate: &mut AD,
appsecret: &ApplicationSecret,
scopes: S)
-> Result<String, Box<Error>>
where T: AsRef<str> + 'a,
S: Iterator<Item = &'a T>
{
let result: Result<String, Box<Error>> = match self.server {
None => {
let url = build_authentication_request_url(&appsecret.auth_uri,
&appsecret.client_id,
scopes,
None);
match auth_delegate.present_user_url(&url, true /* need_code */) {
None => {
Result::Err(Box::new(io::Error::new(io::ErrorKind::UnexpectedEof,
"couldn't read code")))
}
// Remove newline
Some(mut code) => {
code.pop();
Result::Ok(code)
}
}
}
Some(_) => {
// The redirect URI must be this very localhost URL, otherwise Google refuses
// authorization.
let url = build_authentication_request_url(&appsecret.auth_uri,
&appsecret.client_id,
scopes,
Some(format!("http://localhost:{}",
self.port
.unwrap_or(8080))));
auth_delegate.present_user_url(&url, false /* need_code */);
match self.auth_code_rcv.as_ref().unwrap().recv() {
Result::Err(e) => Result::Err(Box::new(e)),
Result::Ok(s) => Result::Ok(s),
}
}
};
self.server.as_mut().map(|l| l.close()).is_some();
result
}
/// Sends the authorization code to the provider in order to obtain access and refresh tokens.
fn request_token(&mut self,
appsecret: &ApplicationSecret,
authcode: &str)
-> Result<JSONTokenResponse, Box<Error>> {
let redirect_uri;
match self.port {
None => redirect_uri = OOB_REDIRECT_URI.to_string(),
Some(p) => redirect_uri = format!("http://localhost:{}", p),
}
let body = form_urlencoded::serialize(vec![("code".to_string(), authcode.to_string()),
("client_id".to_string(),
appsecret.client_id.clone()),
("client_secret".to_string(),
appsecret.client_secret.clone()),
("redirect_uri".to_string(), redirect_uri),
("grant_type".to_string(),
"authorization_code".to_string())]);
let result: Result<client::Response, hyper::Error> = self.client
.borrow_mut()
.post(&appsecret.token_uri)
.body(&body)
.header(header::ContentType("application/x-www-form-urlencoded".parse().unwrap()))
.send();
let mut resp = String::new();
match result {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(mut response) => {
let result = response.read_to_string(&mut resp);
match result {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(_) => (),
}
}
}
let token_resp: Result<JSONTokenResponse, error::Error> = serde_json::from_str(&resp);
match token_resp {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(tok) => Result::Ok(tok) as Result<JSONTokenResponse, Box<Error>>,
}
}
}
#[derive(Deserialize)]
struct JSONTokenResponse {
access_token: Option<String>,
refresh_token: Option<String>,
token_type: Option<String>,
expires_in: Option<i64>,
error: Option<String>,
error_description: Option<String>,
}
/// HTTP handler handling the redirect from the provider.
struct InstalledFlowHandler {
auth_code_snd: Mutex<Sender<String>>,
}
impl server::Handler for InstalledFlowHandler {
fn handle(&self, rq: server::Request, mut rp: server::Response) {
match rq.uri {
uri::RequestUri::AbsolutePath(path) => {
// We use a fake URL because the redirect goes to a URL, meaning we
// can't use the url form decode (because there's slashes and hashes and stuff in
// it).
let url = hyper::Url::parse(&format!("http://example.com{}", path));
if url.is_err() {
*rp.status_mut() = status::StatusCode::BadRequest;
let _ = rp.send("Unparseable URL".as_ref());
} else {
self.handle_url(url.unwrap());
*rp.status_mut() = status::StatusCode::Ok;
let _ =
rp.send("<html><head><title>Success</title></head><body>You may now \
close this window.</body></html>"
.as_ref());
}
}
_ => {
*rp.status_mut() = status::StatusCode::BadRequest;
let _ = rp.send("Invalid Request!".as_ref());
}
}
}
}
impl InstalledFlowHandler {
fn handle_url(&self, url: hyper::Url) {
// Google redirects to the specified localhost URL, appending the authorization
// code, like this: http://localhost:8080/xyz/?code=4/731fJ3BheyCouCniPufAd280GHNV5Ju35yYcGs
// We take that code and send it to the get_authorization_code() function that
// waits for it.
for (param, val) in url.query_pairs().into_owned() {
if param == "code".to_string() {
let _ = self.auth_code_snd.lock().unwrap().send(val);
}
}
}
}
#[cfg(test)]
mod tests {
use super::build_authentication_request_url;
use super::InstalledFlowHandler;
use std::sync::Mutex;
use std::sync::mpsc::channel;
use hyper::Url;
#[test]
fn test_request_url_builder() {
assert_eq!("https://accounts.google.\
com/o/oauth2/auth?scope=email%20profile&redirect_uri=urn:ietf:wg:oauth:2.0:\
oob&response_type=code&client_id=812741506391-h38jh0j4fv0ce1krdkiq0hfvt6n5amr\
f.apps.googleusercontent.com",
build_authentication_request_url("https://accounts.google.com/o/oauth2/auth",
"812741506391-h38jh0j4fv0ce1krdkiq0hfvt6n5am\
rf.apps.googleusercontent.com",
vec![&"email".to_string(),
&"profile".to_string()],
None));
}
#[test]
fn test_http_handle_url() {
let (tx, rx) = channel();
let handler = InstalledFlowHandler { auth_code_snd: Mutex::new(tx) };
// URLs are usually a bit botched
let url = Url::parse("http://example.com:1234/?code=ab/c%2Fd#").unwrap();
handler.handle_url(url);
assert_eq!(rx.recv().unwrap(), "ab/c/d".to_string());
}
}
| {
let default = InstalledFlow {
client: client,
server: None,
port: None,
auth_code_rcv: None,
};
match method {
None => default,
Some(InstalledFlowReturnMethod::Interactive) => default,
// Start server on localhost to accept auth code.
Some(InstalledFlowReturnMethod::HTTPRedirect(port)) => {
let server = server::Server::http(format!("127.0.0.1:{}", port).as_str());
match server {
Result::Err(_) => default,
Result::Ok(server) => {
let (tx, rx) = channel();
let listening =
server.handle(InstalledFlowHandler { auth_code_snd: Mutex::new(tx) });
match listening {
Result::Err(_) => default,
Result::Ok(listening) => {
InstalledFlow {
client: default.client,
server: Some(listening),
port: Some(port),
auth_code_rcv: Some(rx),
}
}
}
}
}
}
}
} | identifier_body |
installed.rs | // Copyright (c) 2016 Google Inc (lewinb@google.com).
//
// Refer to the project root for licensing information.
//
extern crate serde_json;
extern crate url;
use std::borrow::BorrowMut;
use std::convert::AsRef;
use std::error::Error;
use std::io;
use std::io::Read;
use std::sync::Mutex;
use std::sync::mpsc::{channel, Receiver, Sender};
use hyper;
use hyper::{client, header, server, status, uri};
use serde_json::error;
use url::form_urlencoded;
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
use types::{ApplicationSecret, Token};
use authenticator_delegate::AuthenticatorDelegate;
const OOB_REDIRECT_URI: &'static str = "urn:ietf:wg:oauth:2.0:oob";
/// Assembles a URL to request an authorization token (with user interaction).
/// Note that the redirect_uri here has to be either None or some variation of
/// http://localhost:{port}, or the authorization won't work (error "redirect_uri_mismatch")
fn build_authentication_request_url<'a, T, I>(auth_uri: &str,
client_id: &str,
scopes: I,
redirect_uri: Option<String>)
-> String
where T: AsRef<str> + 'a,
I: IntoIterator<Item = &'a T>
{
let mut url = String::new();
let mut scopes_string = scopes.into_iter().fold(String::new(), |mut acc, sc| {
acc.push_str(sc.as_ref());
acc.push_str(" ");
acc
});
// Remove last space
scopes_string.pop();
url.push_str(auth_uri);
vec![format!("?scope={}", scopes_string),
format!("&redirect_uri={}",
redirect_uri.unwrap_or(OOB_REDIRECT_URI.to_string())),
format!("&response_type=code"),
format!("&client_id={}", client_id)]
.into_iter()
.fold(url, |mut u, param| {
u.push_str(&percent_encode(param.as_ref(), QUERY_ENCODE_SET));
u
})
}
pub struct InstalledFlow<C> {
client: C,
server: Option<server::Listening>,
port: Option<u32>,
auth_code_rcv: Option<Receiver<String>>,
}
/// cf. https://developers.google.com/identity/protocols/OAuth2InstalledApp#choosingredirecturi
pub enum | {
/// Involves showing a URL to the user and asking to copy a code from their browser
/// (default)
Interactive,
/// Involves spinning up a local HTTP server and Google redirecting the browser to
/// the server with a URL containing the code (preferred, but not as reliable). The
/// parameter is the port to listen on.
HTTPRedirect(u32),
}
impl<C> InstalledFlow<C>
where C: BorrowMut<hyper::Client>
{
/// Starts a new Installed App auth flow.
/// If HTTPRedirect is chosen as method and the server can't be started, the flow falls
/// back to Interactive.
pub fn new(client: C, method: Option<InstalledFlowReturnMethod>) -> InstalledFlow<C> {
let default = InstalledFlow {
client: client,
server: None,
port: None,
auth_code_rcv: None,
};
match method {
None => default,
Some(InstalledFlowReturnMethod::Interactive) => default,
// Start server on localhost to accept auth code.
Some(InstalledFlowReturnMethod::HTTPRedirect(port)) => {
let server = server::Server::http(format!("127.0.0.1:{}", port).as_str());
match server {
Result::Err(_) => default,
Result::Ok(server) => {
let (tx, rx) = channel();
let listening =
server.handle(InstalledFlowHandler { auth_code_snd: Mutex::new(tx) });
match listening {
Result::Err(_) => default,
Result::Ok(listening) => {
InstalledFlow {
client: default.client,
server: Some(listening),
port: Some(port),
auth_code_rcv: Some(rx),
}
}
}
}
}
}
}
}
/// Handles the token request flow; it consists of the following steps:
/// . Obtain a auhorization code with user cooperation or internal redirect.
/// . Obtain a token and refresh token using that code.
/// . Return that token
///
/// It's recommended not to use the DefaultAuthenticatorDelegate, but a specialized one.
pub fn obtain_token<'a, AD: AuthenticatorDelegate, S, T>(&mut self,
auth_delegate: &mut AD,
appsecret: &ApplicationSecret,
scopes: S)
-> Result<Token, Box<Error>>
where T: AsRef<str> + 'a,
S: Iterator<Item = &'a T>
{
let authcode = try!(self.get_authorization_code(auth_delegate, &appsecret, scopes));
let tokens = try!(self.request_token(&appsecret, &authcode));
// Successful response
if tokens.access_token.is_some() {
let mut token = Token {
access_token: tokens.access_token.unwrap(),
refresh_token: tokens.refresh_token.unwrap(),
token_type: tokens.token_type.unwrap(),
expires_in: tokens.expires_in,
expires_in_timestamp: None,
};
token.set_expiry_absolute();
Result::Ok(token)
} else {
let err = io::Error::new(io::ErrorKind::Other,
format!("Token API error: {} {}",
tokens.error.unwrap_or("<unknown err>".to_string()),
tokens.error_description
.unwrap_or("".to_string()))
.as_str());
Result::Err(Box::new(err))
}
}
/// Obtains an authorization code either interactively or via HTTP redirect (see
/// InstalledFlowReturnMethod).
fn get_authorization_code<'a, AD: AuthenticatorDelegate, S, T>(&mut self,
auth_delegate: &mut AD,
appsecret: &ApplicationSecret,
scopes: S)
-> Result<String, Box<Error>>
where T: AsRef<str> + 'a,
S: Iterator<Item = &'a T>
{
let result: Result<String, Box<Error>> = match self.server {
None => {
let url = build_authentication_request_url(&appsecret.auth_uri,
&appsecret.client_id,
scopes,
None);
match auth_delegate.present_user_url(&url, true /* need_code */) {
None => {
Result::Err(Box::new(io::Error::new(io::ErrorKind::UnexpectedEof,
"couldn't read code")))
}
// Remove newline
Some(mut code) => {
code.pop();
Result::Ok(code)
}
}
}
Some(_) => {
// The redirect URI must be this very localhost URL, otherwise Google refuses
// authorization.
let url = build_authentication_request_url(&appsecret.auth_uri,
&appsecret.client_id,
scopes,
Some(format!("http://localhost:{}",
self.port
.unwrap_or(8080))));
auth_delegate.present_user_url(&url, false /* need_code */);
match self.auth_code_rcv.as_ref().unwrap().recv() {
Result::Err(e) => Result::Err(Box::new(e)),
Result::Ok(s) => Result::Ok(s),
}
}
};
self.server.as_mut().map(|l| l.close()).is_some();
result
}
/// Sends the authorization code to the provider in order to obtain access and refresh tokens.
fn request_token(&mut self,
appsecret: &ApplicationSecret,
authcode: &str)
-> Result<JSONTokenResponse, Box<Error>> {
let redirect_uri;
match self.port {
None => redirect_uri = OOB_REDIRECT_URI.to_string(),
Some(p) => redirect_uri = format!("http://localhost:{}", p),
}
let body = form_urlencoded::serialize(vec![("code".to_string(), authcode.to_string()),
("client_id".to_string(),
appsecret.client_id.clone()),
("client_secret".to_string(),
appsecret.client_secret.clone()),
("redirect_uri".to_string(), redirect_uri),
("grant_type".to_string(),
"authorization_code".to_string())]);
let result: Result<client::Response, hyper::Error> = self.client
.borrow_mut()
.post(&appsecret.token_uri)
.body(&body)
.header(header::ContentType("application/x-www-form-urlencoded".parse().unwrap()))
.send();
let mut resp = String::new();
match result {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(mut response) => {
let result = response.read_to_string(&mut resp);
match result {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(_) => (),
}
}
}
let token_resp: Result<JSONTokenResponse, error::Error> = serde_json::from_str(&resp);
match token_resp {
Result::Err(e) => return Result::Err(Box::new(e)),
Result::Ok(tok) => Result::Ok(tok) as Result<JSONTokenResponse, Box<Error>>,
}
}
}
#[derive(Deserialize)]
struct JSONTokenResponse {
access_token: Option<String>,
refresh_token: Option<String>,
token_type: Option<String>,
expires_in: Option<i64>,
error: Option<String>,
error_description: Option<String>,
}
/// HTTP handler handling the redirect from the provider.
struct InstalledFlowHandler {
auth_code_snd: Mutex<Sender<String>>,
}
impl server::Handler for InstalledFlowHandler {
fn handle(&self, rq: server::Request, mut rp: server::Response) {
match rq.uri {
uri::RequestUri::AbsolutePath(path) => {
// We use a fake URL because the redirect goes to a URL, meaning we
// can't use the url form decode (because there's slashes and hashes and stuff in
// it).
let url = hyper::Url::parse(&format!("http://example.com{}", path));
if url.is_err() {
*rp.status_mut() = status::StatusCode::BadRequest;
let _ = rp.send("Unparseable URL".as_ref());
} else {
self.handle_url(url.unwrap());
*rp.status_mut() = status::StatusCode::Ok;
let _ =
rp.send("<html><head><title>Success</title></head><body>You may now \
close this window.</body></html>"
.as_ref());
}
}
_ => {
*rp.status_mut() = status::StatusCode::BadRequest;
let _ = rp.send("Invalid Request!".as_ref());
}
}
}
}
impl InstalledFlowHandler {
fn handle_url(&self, url: hyper::Url) {
// Google redirects to the specified localhost URL, appending the authorization
// code, like this: http://localhost:8080/xyz/?code=4/731fJ3BheyCouCniPufAd280GHNV5Ju35yYcGs
// We take that code and send it to the get_authorization_code() function that
// waits for it.
for (param, val) in url.query_pairs().into_owned() {
if param == "code".to_string() {
let _ = self.auth_code_snd.lock().unwrap().send(val);
}
}
}
}
#[cfg(test)]
mod tests {
use super::build_authentication_request_url;
use super::InstalledFlowHandler;
use std::sync::Mutex;
use std::sync::mpsc::channel;
use hyper::Url;
#[test]
fn test_request_url_builder() {
assert_eq!("https://accounts.google.\
com/o/oauth2/auth?scope=email%20profile&redirect_uri=urn:ietf:wg:oauth:2.0:\
oob&response_type=code&client_id=812741506391-h38jh0j4fv0ce1krdkiq0hfvt6n5amr\
f.apps.googleusercontent.com",
build_authentication_request_url("https://accounts.google.com/o/oauth2/auth",
"812741506391-h38jh0j4fv0ce1krdkiq0hfvt6n5am\
rf.apps.googleusercontent.com",
vec![&"email".to_string(),
&"profile".to_string()],
None));
}
#[test]
fn test_http_handle_url() {
let (tx, rx) = channel();
let handler = InstalledFlowHandler { auth_code_snd: Mutex::new(tx) };
// URLs are usually a bit botched
let url = Url::parse("http://example.com:1234/?code=ab/c%2Fd#").unwrap();
handler.handle_url(url);
assert_eq!(rx.recv().unwrap(), "ab/c/d".to_string());
}
}
| InstalledFlowReturnMethod | identifier_name |
types.go | /*
* Copyright (c) 2018. LuCongyao <6congyao@gmail.com> .
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this work except in compliance with the License.
* You may obtain a copy of the License in the LICENSE file, or at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package v2
import (
"net"
"time"
)
// Metadata field can be used to provide additional information about the route.
// It can be used for configuration, stats, and logging.
// The metadata should go under the filter namespace that will need it.
type Metadata map[string]string
// Network Filter's Type
const (
CONNECTION_MANAGER = "connection_manager"
DEFAULT_NETWORK_FILTER = "proxy"
CONTROLLER_NETWORK_FILTER = "controller"
DELEGATION = "delegation"
TCP_PROXY = "tcp_proxy"
)
// Stream Filter's Type
const (
COMMON = "common"
MIXER = "mixer"
FAULT_INJECT_STREAM_FILTER = "fault_inject"
AUTH = "auth"
)
// ClusterType
type ClusterType string
// Group of cluster type
const (
STATIC_CLUSTER ClusterType = "STATIC"
SIMPLE_CLUSTER ClusterType = "SIMPLE"
DYNAMIC_CLUSTER ClusterType = "DYNAMIC"
EDS_CLUSTER ClusterType = "EDS"
)
// LbType
type LbType string
// Group of load balancer type
const (
LB_RANDOM LbType = "LB_RANDOM"
LB_ROUNDROBIN LbType = "LB_ROUNDROBIN"
)
// RoutingPriority
type RoutingPriority string
// Group of routing priority
const (
DEFAULT RoutingPriority = "DEFAULT"
HIGH RoutingPriority = "HIGH"
)
// Cluster represents a cluster's information
type Cluster struct {
Name string `json:"name"`
ClusterType ClusterType `json:"type"`
SubType string `json:"sub_type"` //not used yet
LbType LbType `json:"lb_type"`
MaxRequestPerConn uint32 `json:"max_request_per_conn"`
ConnBufferLimitBytes uint32 `json:"conn_buffer_limit_bytes"`
CirBreThresholds CircuitBreakers `json:"circuit_breakers,omitempty"`
OutlierDetection OutlierDetection `json:"outlier_detection,omitempty"` //not used yet
HealthCheck HealthCheck `json:"health_check,omitempty"`
Spec ClusterSpecInfo `json:"spec,omitempty"`
LBSubSetConfig LBSubsetConfig `json:"lb_subset_config,omitempty"`
TLS TLSConfig `json:"tls_context,omitempty"`
Hosts []Host `json:"hosts"`
}
// HealthCheck is a configuration of health check
// use DurationConfig to parse string to time.Duration
type HealthCheck struct {
HealthCheckConfig
ProtocolCode byte `json:"-"`
Timeout time.Duration `json:"-"`
Interval time.Duration `json:"-"`
IntervalJitter time.Duration `json:"-"`
}
// Host represenets a host information
type Host struct {
HostConfig
MetaData Metadata `json:"-"`
}
// Listener contains the listener's information
type Listener struct {
ListenerConfig
Addr net.Addr `json:"-"`
ListenerTag uint64 `json:"-"`
ListenerScope string `json:"-"`
PerConnBufferLimitBytes uint32 `json:"-"` // do not support config
InheritListener *net.TCPListener `json:"-"`
Remain bool `json:"-"`
LogLevel uint8 `json:"-"`
DisableConnIo bool `json:"-"`
}
// TCPRoute
type TCPRoute struct {
Cluster string
SourceAddrs []CidrRange
DestinationAddrs []CidrRange
SourcePort string
DestinationPort string
}
// CidrRange
type CidrRange struct {
Address string
Length uint32
IpNet *net.IPNet
}
// HealthCheckFilter
type HealthCheckFilter struct {
HealthCheckFilterConfig
CacheTime time.Duration `json:"-"`
}
// FaultInject
type FaultInject struct {
FaultInjectConfig
DelayDuration uint64 `json:"-"`
}
// StreamFaultInject
type StreamFaultInject struct {
Delay *DelayInject `json:"delay"`
Abort *AbortInject `json:"abort"`
UpstreamCluster string `json:"upstream_cluster"`
Headers []HeaderMatcher `json:"headers"`
}
type DelayInject struct {
DelayInjectConfig
Delay time.Duration `json:"-"`
}
type AbortInject struct {
Status int `json:"status"`
Percent uint32 `json:"percentage"`
}
// Router, the list of routes that will be matched, in order, for incoming requests.
// The first route that matches will be used.
type Router struct {
RouterConfig
Metadata Metadata `json:"-"`
}
// RouteAction represents the information of route request to upstream clusters
type RouteAction struct {
RouterActionConfig
MetadataMatch Metadata `json:"-"`
Timeout time.Duration `json:"-"`
}
// Decorator
type Decorator string
// ClusterWeight.
// clusters along with weights that indicate the percentage
// of traffic to be forwarded to each cluster
type ClusterWeight struct {
ClusterWeightConfig
MetadataMatch Metadata `json:"-"`
}
// RetryPolicy represents the retry parameters
type RetryPolicy struct {
RetryPolicyConfig
RetryTimeout time.Duration `json:"-"`
}
// CircuitBreakers is a configuration of circuit breakers
// CircuitBreakers implements json.Marshaler and json.Unmarshaler
type CircuitBreakers struct {
Thresholds []Thresholds
}
type Thresholds struct {
Priority RoutingPriority `json:"priority"`
MaxConnections uint32 `json:"max_connections"`
MaxPendingRequests uint32 `json:"max_pending_requests"`
MaxRequests uint32 `json:"max_requests"`
MaxRetries uint32 `json:"max_retries"`
}
// OutlierDetection not used yet
type OutlierDetection struct {
Consecutive5xx uint32
Interval time.Duration
BaseEjectionTime time.Duration
MaxEjectionPercent uint32
ConsecutiveGatewayFailure uint32
EnforcingConsecutive5xx uint32
EnforcingConsecutiveGatewayFailure uint32
EnforcingSuccessRate uint32
SuccessRateMinimumHosts uint32
SuccessRateRequestVolume uint32
SuccessRateStdevFactor uint32
}
// ClusterSpecInfo is a configuration of subscribe
type ClusterSpecInfo struct {
Subscribes []SubscribeSpec `json:"subscribe,omitempty"`
}
// SubscribeSpec describes the subscribe server
type SubscribeSpec struct {
ServiceName string `json:"service_name,omitempty"`
}
// LBSubsetConfig is a configuration of load balance subset
type LBSubsetConfig struct {
FallBackPolicy uint8 `json:"fall_back_policy"`
DefaultSubset map[string]string `json:"default_subset"`
SubsetSelectors [][]string `json:"subset_selectors"`
}
// TLSConfig is a configuration of tls context
type TLSConfig struct {
Status bool `json:"status"`
Type string `json:"type"`
ServerName string `json:"server_name,omitempty"`
CACert string `json:"ca_cert,omitempty"`
CertChain string `json:"cert_chain,omitempty"`
PrivateKey string `json:"private_key,omitempty"`
VerifyClient bool `json:"verify_client,omitempty"`
InsecureSkip bool `json:"insecure_skip,omitempty"`
CipherSuites string `json:"cipher_suites,omitempty"`
EcdhCurves string `json:"ecdh_curves,omitempty"`
MinVersion string `json:"min_version,omitempty"`
MaxVersion string `json:"max_version,omitempty"`
ALPN string `json:"alpn,omitempty"`
Ticket string `json:"ticket,omitempty"`
Fallback bool `json:"fall_back, omitempty"`
ExtendVerify map[string]interface{} `json:"extend_verify,omitempty"`
}
// AccessLog for making up access log
type AccessLog struct {
Path string `json:"log_path,omitempty"`
Format string `json:"log_format,omitempty"`
}
// FilterChain wraps a set of match criteria, an option TLS context,
// a set of filters, and various other parameters.
type FilterChain struct {
FilterChainMatch string `json:"match,omitempty"`
TLS TLSConfig `json:"tls_context,omitempty"`
Filters []Filter `json:"filters"` // "proxy" and "connection_manager" used at this time
}
// Filter is a config to make up a filter
type Filter struct {
Type string `json:"type,omitempty"`
Config map[string]interface{} `json:"config,omitempty"` |
// TCPProxy
type TCPProxy struct {
StatPrefix string `json:"stat_prefix,omitempty"`
Cluster string `json:"cluster,omitempty"`
IdleTimeout *time.Duration `json:"idle_timeout,omitempty"`
MaxConnectAttempts uint32 `json:"max_connect_attempts,omitempty"`
Routes []*TCPRoute `json:"routes,omitempty"`
}
// WebSocketProxy
type WebSocketProxy struct {
StatPrefix string
IdleTimeout *time.Duration
MaxConnectAttempts uint32
}
// Proxy
type Proxy struct {
Name string `json:"name"`
DownstreamProtocol string `json:"downstream_protocol"`
UpstreamProtocol string `json:"upstream_protocol"`
RouterConfigName string `json:"router_config_name"`
ValidateClusters bool `json:"validate_clusters"`
ExtendConfig map[string]interface{} `json:"extend_config"`
}
// HeaderValueOption is header name/value pair plus option to control append behavior.
type HeaderValueOption struct {
Header *HeaderValue `json:"header"`
Append *bool `json:"append"`
}
// HeaderValue is header name/value pair.
type HeaderValue struct {
Key string `json:"key"`
Value string `json:"value"`
}
// RouterConfiguration is a filter for routers
// Filter type is: "CONNECTION_MANAGER"
type RouterConfiguration struct {
RouterConfigName string `json:"router_config_name"`
VirtualHosts []*VirtualHost `json:"virtual_hosts"`
RequestHeadersToAdd []*HeaderValueOption `json:"request_headers_to_add"`
ResponseHeadersToAdd []*HeaderValueOption `json:"response_headers_to_add"`
ResponseHeadersToRemove []string `json:"response_headers_to_remove"`
}
// VirtualHost is used to make up the route table
type VirtualHost struct {
Name string `json:"name"`
Domains []string `json:"domains"`
VirtualClusters []VirtualCluster `json:"virtual_clusters"`
Routers []Router `json:"routers"`
RequireTLS string `json:"require_tls"` // not used yet
RequestHeadersToAdd []*HeaderValueOption `json:"request_headers_to_add"`
ResponseHeadersToAdd []*HeaderValueOption `json:"response_headers_to_add"`
ResponseHeadersToRemove []string `json:"response_headers_to_remove"`
}
// VirtualCluster is a way of specifying a regex matching rule against certain important endpoints
// such that statistics are generated explicitly for the matched requests
type VirtualCluster struct {
Pattern string `json:"pattern"`
Name string `json:"name"`
Method string `json:"method"`
}
// RouterMatch represents the route matching parameters
type RouterMatch struct {
Prefix string `json:"prefix"`
Path string `json:"path"`
Regex string `json:"regex"`
CaseSensitive bool `json:"case_sensitive"`
Runtime RuntimeUInt32 `json:"runtime"`
Headers []HeaderMatcher `json:"headers"`
}
// RedirectAction represents the redirect parameters
type RedirectAction struct {
HostRedirect string `json:"host_redirect"`
PathRedirect string `json:"path_redirect"`
ResponseCode uint32 `json:"response_code"`
}
// DirectResponseAction represents the direct response parameters
type DirectResponseAction struct {
StatusCode int `json:"status"`
Body string `json:"body"`
}
// WeightedCluster.
// Multiple upstream clusters unsupport stream filter type: healthcheckcan be specified for a given route.
// The request is routed to one of the upstream
// clusters based on weights assigned to each cluster
type WeightedCluster struct {
Cluster ClusterWeight `json:"cluster"`
RuntimeKeyPrefix string `json:"runtime_key_prefix"` // not used currently
}
// RuntimeUInt32 indicates that the route should additionally match on a runtime key
type RuntimeUInt32 struct {
DefaultValue uint32 `json:"default_value"`
RuntimeKey string `json:"runtime_key"`
}
// HeaderMatcher specifies a set of headers that the route should match on.
type HeaderMatcher struct {
Name string `json:"name"`
Value string `json:"value"`
Regex bool `json:"regex"`
}
// XProxyExtendConfig
type XProxyExtendConfig struct {
SubProtocol string `json:"sub_protocol"`
UpstreamAddress string `json:"upstream_address"`
PushControlAddress string `json:"push_control_address"`
}
// ServiceRegistryInfo
type ServiceRegistryInfo struct {
ServiceAppInfo ApplicationInfo `json:"application"`
ServicePubInfo []PublishInfo `json:"publish_info,omitempty"`
}
type ApplicationInfo struct {
AntShareCloud bool `json:"ant_share_cloud"`
DataCenter string `json:"data_center,omitempty"`
AppName string `json:"app_name,omitempty"`
Zone string `json:"zone"`
}
// PublishInfo implements json.Marshaler and json.Unmarshaler
type PublishInfo struct {
Pub PublishContent
}
type PublishContent struct {
ServiceName string `json:"service_name,omitempty"`
PubData string `json:"pub_data,omitempty"`
}
type Delegation struct {
AgentName string `json:"agent_name,omitempty"`
AgentType string `json:"agent_type,omitempty"`
}
// Controller
type Controller struct {
Name string `json:"name"`
SourceProtocol string `json:"source_protocol"`
Scope []string `json:"scope"`
} | }
// Implements of filter config | random_line_split |
server.go | package shardkv
import (
"bytes"
"context"
"encoding/json"
"fmt"
"labgob"
"labrpc"
"raft"
"reflect"
"shardmaster"
"sync"
"time"
)
func init() {
labgob.Register(OpArgs{})
labgob.Register(GetArgs{})
labgob.Register(PutAppendArgs{})
labgob.Register(shardmaster.Config{})
labgob.Register(CheckMigrateShardReply{})
labgob.Register(MigrateShardReply{})
}
type ShardKV struct {
mu sync.RWMutex
me int
rf *raft.Raft
applyCh chan raft.ApplyMsg
make_end func(string) *labrpc.ClientEnd
gid int
masters *shardmaster.Clerk
maxraftstate int // snapshot if log grows this big
persister *raft.Persister
ctx context.Context
close context.CancelFunc
lastApplied int
configs []shardmaster.Config
database *ShardDatabase
applyWait *Wait
lastOpId map[int64]int64 // store client last op id
waitMigration *WaitMigration // store migration shard
waitClean *WaitClean // store clean shard
}
//
// the tester calls Kill() when a ShardKV instance won't
// be needed again. you are not required to do anything
// in Kill(), but it might be convenient to (for example)
// turn off debug output from this instance.
//
func (kv *ShardKV) Kill() {
kv.rf.Kill()
kv.close()
// Your code here, if desired.
}
//
// servers[] contains the ports of the servers in this group.
//
// me is the index of the current server in servers[].
//
// the k/v server should store snapshots through the underlying Raft
// implementation, which should call persister.SaveStateAndSnapshot() to
// atomically save the Raft state along with the snapshot.
//
// the k/v server should snapshot when Raft's saved state exceeds
// maxraftstate bytes, in order to allow Raft to garbage-collect its
// log. if maxraftstate is -1, you don't need to snapshot.
//
// gid is this group's GID, for interacting with the shardmaster.
//
// pass masters[] to shardmaster.MakeClerk() so you can send
// RPCs to the shardmaster.
//
// make_end(servername) turns a server name from a
// Config.Groups[gid][i] into a labrpc.ClientEnd on which you can
// send RPCs. You'll need this to send RPCs to other groups.
//
// look at client.go for examples of how to use masters[]
// and make_end() to send RPCs to the group owning a specific shard.
//
// StartServer() must return quickly, so it should start goroutines
// for any long-running work.
//
func StartServer(servers []*labrpc.ClientEnd, me int, persister *raft.Persister, maxraftstate int, gid int, masters []*labrpc.ClientEnd, make_end func(string) *labrpc.ClientEnd) *ShardKV {
// call labgob.Register on structures you want
// Go's RPC library to marshall/unmarshall.
kv := new(ShardKV)
kv.me = me
kv.maxraftstate = maxraftstate
kv.make_end = make_end
kv.gid = gid
kv.masters = shardmaster.MakeClerk(masters)
kv.applyCh = make(chan raft.ApplyMsg)
kv.applyWait = NewWait()
kv.persister = persister
ctx, cancel := context.WithCancel(context.Background())
kv.ctx = ctx
kv.close = cancel
kv.init()
kv.rf = raft.Make(servers, me, persister, kv.applyCh)
if !kv.waitMigration.IsEmpty() {
go kv.migrationHelper()
}
if !kv.waitClean.IsEmpty() {
go kv.migrationChecker()
}
go kv.newConfigLearner()
go kv.stateMachine()
return kv
}
func (kv *ShardKV) start(args interface{}) (result string, value string) {
var op OpArgs
if getArgs, ok := args.(GetArgs); ok {
op = OpArgs{
ConfigNum: getArgs.ConfigNum,
ClientId: getArgs.ClientId,
OpId: getArgs.OpId,
Key: getArgs.Key,
Value: "",
OpType: "Get",
}
} else if putAppendArgs, ok := args.(PutAppendArgs); ok {
op = OpArgs{
ConfigNum: putAppendArgs.ConfigNum,
ClientId: putAppendArgs.ClientId,
OpId: putAppendArgs.OpId,
Key: putAppendArgs.Key,
Value: putAppendArgs.Value,
OpType: putAppendArgs.Op,
}
} else {
return fmt.Sprintf("ErrArgsType:%+v", args), ""
}
resultCh := kv.applyWait.Register(op)
defer kv.applyWait.Unregister(op)
_, _, isLeader := kv.rf.Start(op)
if !isLeader {
return ErrWrongLeader, ""
}
t := time.NewTimer(OpTimeout)
select {
case <-kv.ctx.Done():
return ErrShardKVClosed, ""
case <-t.C:
return ErrOpTimeout, ""
case opResult := <-resultCh:
//DPrintf("ShardKV %d return client %d by resultCh result = %+v\n", kv.me, op.ClientId, opResult)
return opResult.Result, opResult.Value
}
}
func (kv *ShardKV) newConfigLearner() {
t := time.NewTicker(100 * time.Millisecond)
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if !kv.waitClean.IsEmpty() || !kv.waitMigration.IsEmpty() {
// is applying new Config
kv.mu.RUnlock()
continue
}
latest := kv.configs[len(kv.configs)-1]
kv.mu.RUnlock()
config := kv.masters.Query(latest.Num + 1)
if latest.Num < config.Num {
kv.rf.Start(config)
}
}
}
}
func (kv *ShardKV) migrationChecker() {
t := time.NewTicker(100 * time.Millisecond)
start := time.Now()
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if kv.waitClean.IsEmpty() {
kv.mu.RUnlock()
return
}
gidShards := kv.waitClean.GetGidShard()
config := kv.waitClean.GetConfig()
if time.Now().Sub(start) > WaitCleanTimeOut {
kv.rf.Start(CheckMigrateShardReply{
ConfigNum: config.Num,
Gid: -1,
Result: ErrWaitCleanTimeOut,
})
} else {
for gid, shardNums := range gidShards {
servers := config.Groups[gid]
args := CheckMigrateShardArgs{
config.Num,
gid,
shardNums,
}
go func(servers []string, args CheckMigrateShardArgs) {
for si := 0; si < len(servers); si++ {
srv := kv.make_end(servers[si])
var reply CheckMigrateShardReply
ok := srv.Call("ShardKV.CheckMigrateShard", &args, &reply)
/*if ok {
DPrintf("ShardKV %d (gid = %d) CheckMigrateShard ok = %v args = %+v reply = %+v\n", kv.me, kv.gid, ok, args, reply)
}*/
if ok && reply.Result == OK {
kv.rf.Start(reply)
return
}
}
}(servers, args)
}
}
kv.mu.RUnlock()
}
}
}
func (kv *ShardKV) migrationHelper() {
t := time.NewTicker(100 * time.Millisecond)
start := time.Now()
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if kv.waitMigration.IsEmpty() {
kv.mu.RUnlock()
return
}
gidShards := kv.waitMigration.GetGidShards()
config := kv.waitMigration.GetConfig()
if time.Now().Sub(start) > WaitMigrationTimeOut {
kv.rf.Start(MigrateShardReply{
ConfigNum: config.Num,
Gid: -1,
Result: ErrWaitMigrationTimeOut,
})
} else {
for gid, shardNums := range gidShards {
servers := config.Groups[gid]
args := MigrateShardArgs{
config.Num,
gid,
shardNums,
}
go func(servers []string, args MigrateShardArgs) {
for si := 0; si < len(servers); si++ {
srv := kv.make_end(servers[si])
var reply MigrateShardReply
ok := srv.Call("ShardKV.MigrateShard", &args, &reply)
DPrintf("ShardKV %d (gid = %d) MigrateShard ok = %v args = %+v reply = %+v\n", kv.me, kv.gid, ok, args, reply)
if ok && (reply.Result == OK || reply.Result == ErrShardHasBeenCleaned) {
kv.rf.Start(reply)
return
}
}
}(servers, args)
}
}
kv.mu.RUnlock()
}
}
}
func (kv *ShardKV) init() {
kv.mu.Lock()
defer kv.mu.Unlock()
data := kv.persister.ReadSnapshot()
if len(data) == 0 {
kv.lastApplied = 0
kv.database = NewShardDatabase()
kv.lastOpId = make(map[int64]int64)
kv.configs = make([]shardmaster.Config, 1)
kv.configs[0].Groups = map[int][]string{}
kv.waitClean = NewWaitClean()
kv.waitMigration = NewWaitMigration()
} else {
r := bytes.NewBuffer(data)
d := labgob.NewDecoder(r)
kv.lastApplied = 0
kv.database = nil
kv.lastOpId = nil
kv.configs = nil
kv.waitMigration = nil
kv.waitClean = nil
d.Decode(&kv.lastApplied)
d.Decode(&kv.database)
d.Decode(&kv.lastOpId)
d.Decode(&kv.configs)
d.Decode(&kv.waitClean)
d.Decode(&kv.waitMigration)
}
}
func (kv *ShardKV) saveShardKVState(force bool) {
shouldSave := kv.maxraftstate != -1 && (force || kv.persister.RaftStateSize() > kv.maxraftstate)
if shouldSave {
w := new(bytes.Buffer)
e := labgob.NewEncoder(w)
e.Encode(kv.lastApplied)
e.Encode(kv.database)
e.Encode(kv.lastOpId)
e.Encode(kv.configs)
e.Encode(kv.waitClean)
e.Encode(kv.waitMigration)
snapshot := w.Bytes()
kv.rf.SaveSnapshot(kv.lastApplied, snapshot)
}
}
func (kv *ShardKV) stateMachine() {
for {
select {
case <-kv.ctx.Done():
DPrintf("ShardKV %d (gid=%d)stateMachine closed\n", kv.me, kv.gid)
return
case applyMsg := <-kv.applyCh:
if applyMsg.CommandValid {
kv.mu.Lock()
if kv.lastApplied+1 < applyMsg.CommandIndex {
kv.mu.Unlock()
kv.rf.Replay()
} else {
if kv.lastApplied+1 == applyMsg.CommandIndex {
kv.lastApplied++
//DPrintf("ShardKV(gid=%d) %d stateMachine received command %v %+v\n", kv.me, kv.gid, reflect.TypeOf(applyMsg.Command), applyMsg)
switch command := applyMsg.Command.(type) {
case OpArgs:
op := command
result := OpResult{ClientId: op.ClientId, OpId: op.OpId}
switch op.OpType {
case "Get":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case op.ConfigNum != latest.Num || latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
shard := kv.database.GetShard(shardNum)
if value, ok := shard.Get(op.Key); ok {
result.Result = OK
result.Value = value
} else {
result.Result = ErrNoKey
}
str, _ := json.Marshal(kv.database)
DPrintf("ShardKV(gid=%d) %d shardNum %d database= %s get key:%v result: %s\n", kv.gid, kv.me, shardNum, str, op.Key, result.Result)
kv.saveShardKVState(false)
}
go kv.applyWait.Trigger(result)
case "Put":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case op.ConfigNum != latest.Num || latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
result.Result = OK
if lastOpId, ok := kv.lastOpId[op.ClientId]; !ok || op.OpId > lastOpId {
shard := kv.database.GetShard(shardNum)
shard.Put(op.Key, op.Value, op.ClientId, op.OpId)
kv.lastOpId[op.ClientId] = op.OpId
kv.saveShardKVState(false)
}
}
go kv.applyWait.Trigger(result)
case "Append":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
result.Result = OK
if lastOpId, ok := kv.lastOpId[op.ClientId]; !ok || op.OpId > lastOpId |
}
go kv.applyWait.Trigger(result)
default:
DPrintf("ShardKV %d (gid=%d) stateMachine received wrong opType OpArgs: %+v\n", kv.me, kv.gid, command)
}
case shardmaster.Config:
newConfig := command
oldConfig := kv.configs[len(kv.configs)-1]
if newConfig.Num > oldConfig.Num && kv.waitMigration.IsEmpty() && kv.waitClean.IsEmpty() {
kv.configs = append(kv.configs, newConfig)
if oldConfig.Num > 0 {
kv.waitMigration.Init(shardmaster.Config{
Num: newConfig.Num,
Shards: oldConfig.Shards,
Groups: oldConfig.Groups,
})
kv.waitClean.Init(newConfig)
for shardNum := 0; shardNum < shardmaster.NShards; shardNum++ {
oldGid := oldConfig.Shards[shardNum]
newGid := newConfig.Shards[shardNum]
if kv.gid == oldGid && kv.gid != newGid {
// old shard remove from this group
kv.waitClean.AddGidShard(newGid, shardNum)
}
if kv.gid != oldGid && kv.gid == newGid {
// new shard assign to this group
kv.waitMigration.AddGidShard(oldGid, shardNum)
}
}
// remove shard from kv.Database and store in waitClean
kv.waitClean.StoreCleanData(kv.database)
if !kv.waitMigration.IsEmpty() {
go kv.migrationHelper()
}
if !kv.waitClean.IsEmpty() {
go kv.migrationChecker()
}
}
DPrintf("ShardKV %d (gid=%d) is applying \nold Config = %+v \nnew Config = %+v\nkv.waitMigration:%+v\nkv.waitClean:%+v\n", kv.me,
kv.gid, oldConfig, newConfig, kv.waitMigration, kv.waitClean)
kv.configs = kv.configs[1:]
kv.saveShardKVState(true)
}
case MigrateShardReply:
if kv.waitMigration.GetConfig().Num == command.ConfigNum && command.Result == OK {
if ok := kv.waitMigration.DeleteByGid(command.Gid); ok {
for shardNum, shard := range command.Data {
kv.database.SetShard(shardNum, shard)
}
str, _ := json.Marshal(kv.database)
DPrintf("ShardKV %d (gid=%d) finished MigrateShard from (gid=%d) command= %+v database= %s", kv.me, kv.gid, command.Gid, command, str)
}
} else if kv.waitMigration.GetConfig().Num == command.ConfigNum && command.Result == ErrWaitMigrationTimeOut {
if !kv.waitMigration.IsEmpty() {
kv.waitMigration.Clear()
DPrintf("ShardKV %d (gid=%d) waitMigration timeout (configNum %d)", kv.me, kv.gid, kv.waitClean.GetConfig().Num)
}
}
kv.saveShardKVState(true)
case CheckMigrateShardReply:
if kv.waitClean.GetConfig().Num == command.ConfigNum && command.Result == OK {
kv.waitClean.DeleteByGid(command.Gid)
} else if kv.waitClean.GetConfig().Num == command.ConfigNum && command.Result == ErrWaitCleanTimeOut {
if !kv.waitClean.IsEmpty() {
kv.waitClean.Clear()
DPrintf("ShardKV %d (gid=%d) waitClean timeout (configNum %d)", kv.me, kv.gid, kv.waitClean.GetConfig().Num)
}
}
kv.saveShardKVState(true)
default:
DPrintf("ShardKV %d stateMachine received wrong type command %+v %v\n", kv.gid, applyMsg, reflect.TypeOf(applyMsg.Command))
}
}
kv.mu.Unlock()
}
} else if command, ok := applyMsg.Command.(string); ok {
if command == raft.CommandInstallSnapshot {
DPrintf("ShardMaster %d stateMachine received InstallSnapshot %+v\n", kv.me, applyMsg)
kv.init()
kv.rf.Replay()
}
}
}
}
}
| {
shard := kv.database.GetShard(shardNum)
shard.Append(op.Key, op.Value, op.ClientId, op.OpId)
kv.lastOpId[op.ClientId] = op.OpId
kv.saveShardKVState(false)
} | conditional_block |
server.go | package shardkv
import (
"bytes"
"context"
"encoding/json"
"fmt"
"labgob"
"labrpc"
"raft"
"reflect"
"shardmaster"
"sync"
"time"
)
func init() {
labgob.Register(OpArgs{})
labgob.Register(GetArgs{})
labgob.Register(PutAppendArgs{})
labgob.Register(shardmaster.Config{})
labgob.Register(CheckMigrateShardReply{})
labgob.Register(MigrateShardReply{})
}
type ShardKV struct {
mu sync.RWMutex
me int
rf *raft.Raft
applyCh chan raft.ApplyMsg
make_end func(string) *labrpc.ClientEnd
gid int
masters *shardmaster.Clerk
maxraftstate int // snapshot if log grows this big
persister *raft.Persister
ctx context.Context
close context.CancelFunc
lastApplied int
configs []shardmaster.Config
database *ShardDatabase
applyWait *Wait
lastOpId map[int64]int64 // store client last op id
waitMigration *WaitMigration // store migration shard
waitClean *WaitClean // store clean shard
}
//
// the tester calls Kill() when a ShardKV instance won't
// be needed again. you are not required to do anything
// in Kill(), but it might be convenient to (for example)
// turn off debug output from this instance.
//
func (kv *ShardKV) Kill() {
kv.rf.Kill()
kv.close()
// Your code here, if desired.
}
//
// servers[] contains the ports of the servers in this group.
//
// me is the index of the current server in servers[].
//
// the k/v server should store snapshots through the underlying Raft
// implementation, which should call persister.SaveStateAndSnapshot() to
// atomically save the Raft state along with the snapshot.
//
// the k/v server should snapshot when Raft's saved state exceeds
// maxraftstate bytes, in order to allow Raft to garbage-collect its
// log. if maxraftstate is -1, you don't need to snapshot.
//
// gid is this group's GID, for interacting with the shardmaster.
//
// pass masters[] to shardmaster.MakeClerk() so you can send
// RPCs to the shardmaster.
//
// make_end(servername) turns a server name from a
// Config.Groups[gid][i] into a labrpc.ClientEnd on which you can
// send RPCs. You'll need this to send RPCs to other groups.
//
// look at client.go for examples of how to use masters[]
// and make_end() to send RPCs to the group owning a specific shard.
//
// StartServer() must return quickly, so it should start goroutines
// for any long-running work.
//
func StartServer(servers []*labrpc.ClientEnd, me int, persister *raft.Persister, maxraftstate int, gid int, masters []*labrpc.ClientEnd, make_end func(string) *labrpc.ClientEnd) *ShardKV {
// call labgob.Register on structures you want
// Go's RPC library to marshall/unmarshall.
kv := new(ShardKV)
kv.me = me
kv.maxraftstate = maxraftstate
kv.make_end = make_end
kv.gid = gid
kv.masters = shardmaster.MakeClerk(masters)
kv.applyCh = make(chan raft.ApplyMsg)
kv.applyWait = NewWait()
kv.persister = persister
ctx, cancel := context.WithCancel(context.Background())
kv.ctx = ctx
kv.close = cancel
kv.init()
kv.rf = raft.Make(servers, me, persister, kv.applyCh)
if !kv.waitMigration.IsEmpty() {
go kv.migrationHelper()
}
if !kv.waitClean.IsEmpty() {
go kv.migrationChecker()
}
go kv.newConfigLearner()
go kv.stateMachine()
return kv
}
func (kv *ShardKV) start(args interface{}) (result string, value string) {
var op OpArgs
if getArgs, ok := args.(GetArgs); ok {
op = OpArgs{
ConfigNum: getArgs.ConfigNum,
ClientId: getArgs.ClientId,
OpId: getArgs.OpId,
Key: getArgs.Key,
Value: "",
OpType: "Get",
}
} else if putAppendArgs, ok := args.(PutAppendArgs); ok {
op = OpArgs{
ConfigNum: putAppendArgs.ConfigNum,
ClientId: putAppendArgs.ClientId,
OpId: putAppendArgs.OpId,
Key: putAppendArgs.Key,
Value: putAppendArgs.Value,
OpType: putAppendArgs.Op,
}
} else {
return fmt.Sprintf("ErrArgsType:%+v", args), ""
}
resultCh := kv.applyWait.Register(op)
defer kv.applyWait.Unregister(op)
_, _, isLeader := kv.rf.Start(op)
if !isLeader {
return ErrWrongLeader, ""
}
t := time.NewTimer(OpTimeout)
select {
case <-kv.ctx.Done():
return ErrShardKVClosed, ""
case <-t.C:
return ErrOpTimeout, ""
case opResult := <-resultCh:
//DPrintf("ShardKV %d return client %d by resultCh result = %+v\n", kv.me, op.ClientId, opResult)
return opResult.Result, opResult.Value
}
}
func (kv *ShardKV) newConfigLearner() {
t := time.NewTicker(100 * time.Millisecond)
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if !kv.waitClean.IsEmpty() || !kv.waitMigration.IsEmpty() {
// is applying new Config
kv.mu.RUnlock()
continue
}
latest := kv.configs[len(kv.configs)-1]
kv.mu.RUnlock()
config := kv.masters.Query(latest.Num + 1)
if latest.Num < config.Num {
kv.rf.Start(config)
}
}
}
}
func (kv *ShardKV) migrationChecker() {
t := time.NewTicker(100 * time.Millisecond)
start := time.Now()
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if kv.waitClean.IsEmpty() {
kv.mu.RUnlock()
return
}
gidShards := kv.waitClean.GetGidShard()
config := kv.waitClean.GetConfig()
if time.Now().Sub(start) > WaitCleanTimeOut {
kv.rf.Start(CheckMigrateShardReply{
ConfigNum: config.Num,
Gid: -1,
Result: ErrWaitCleanTimeOut,
})
} else {
for gid, shardNums := range gidShards {
servers := config.Groups[gid]
args := CheckMigrateShardArgs{
config.Num,
gid,
shardNums,
}
go func(servers []string, args CheckMigrateShardArgs) {
for si := 0; si < len(servers); si++ {
srv := kv.make_end(servers[si])
var reply CheckMigrateShardReply
ok := srv.Call("ShardKV.CheckMigrateShard", &args, &reply)
/*if ok {
DPrintf("ShardKV %d (gid = %d) CheckMigrateShard ok = %v args = %+v reply = %+v\n", kv.me, kv.gid, ok, args, reply)
}*/
if ok && reply.Result == OK {
kv.rf.Start(reply)
return
}
}
}(servers, args)
}
}
kv.mu.RUnlock()
}
}
}
func (kv *ShardKV) migrationHelper() {
t := time.NewTicker(100 * time.Millisecond)
start := time.Now()
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if kv.waitMigration.IsEmpty() {
kv.mu.RUnlock()
return
}
gidShards := kv.waitMigration.GetGidShards()
config := kv.waitMigration.GetConfig()
if time.Now().Sub(start) > WaitMigrationTimeOut {
kv.rf.Start(MigrateShardReply{
ConfigNum: config.Num,
Gid: -1,
Result: ErrWaitMigrationTimeOut,
})
} else {
for gid, shardNums := range gidShards {
servers := config.Groups[gid]
args := MigrateShardArgs{
config.Num,
gid,
shardNums,
}
go func(servers []string, args MigrateShardArgs) {
for si := 0; si < len(servers); si++ {
srv := kv.make_end(servers[si])
var reply MigrateShardReply
ok := srv.Call("ShardKV.MigrateShard", &args, &reply)
DPrintf("ShardKV %d (gid = %d) MigrateShard ok = %v args = %+v reply = %+v\n", kv.me, kv.gid, ok, args, reply)
if ok && (reply.Result == OK || reply.Result == ErrShardHasBeenCleaned) {
kv.rf.Start(reply)
return
}
}
}(servers, args)
}
}
kv.mu.RUnlock()
}
}
}
func (kv *ShardKV) init() {
kv.mu.Lock()
defer kv.mu.Unlock()
data := kv.persister.ReadSnapshot()
if len(data) == 0 {
kv.lastApplied = 0
kv.database = NewShardDatabase()
kv.lastOpId = make(map[int64]int64)
kv.configs = make([]shardmaster.Config, 1)
kv.configs[0].Groups = map[int][]string{}
kv.waitClean = NewWaitClean()
kv.waitMigration = NewWaitMigration()
} else {
r := bytes.NewBuffer(data)
d := labgob.NewDecoder(r)
kv.lastApplied = 0
kv.database = nil
kv.lastOpId = nil
kv.configs = nil
kv.waitMigration = nil
kv.waitClean = nil
d.Decode(&kv.lastApplied)
d.Decode(&kv.database)
d.Decode(&kv.lastOpId)
d.Decode(&kv.configs)
d.Decode(&kv.waitClean)
d.Decode(&kv.waitMigration)
}
}
func (kv *ShardKV) saveShardKVState(force bool) {
shouldSave := kv.maxraftstate != -1 && (force || kv.persister.RaftStateSize() > kv.maxraftstate)
if shouldSave {
w := new(bytes.Buffer)
e := labgob.NewEncoder(w)
e.Encode(kv.lastApplied)
e.Encode(kv.database)
e.Encode(kv.lastOpId)
e.Encode(kv.configs)
e.Encode(kv.waitClean)
e.Encode(kv.waitMigration)
snapshot := w.Bytes()
kv.rf.SaveSnapshot(kv.lastApplied, snapshot)
}
}
func (kv *ShardKV) stateMachine() {
for {
select {
case <-kv.ctx.Done():
DPrintf("ShardKV %d (gid=%d)stateMachine closed\n", kv.me, kv.gid)
return
case applyMsg := <-kv.applyCh:
if applyMsg.CommandValid {
kv.mu.Lock()
if kv.lastApplied+1 < applyMsg.CommandIndex {
kv.mu.Unlock()
kv.rf.Replay()
} else {
if kv.lastApplied+1 == applyMsg.CommandIndex {
kv.lastApplied++
//DPrintf("ShardKV(gid=%d) %d stateMachine received command %v %+v\n", kv.me, kv.gid, reflect.TypeOf(applyMsg.Command), applyMsg)
switch command := applyMsg.Command.(type) {
case OpArgs:
op := command
result := OpResult{ClientId: op.ClientId, OpId: op.OpId}
switch op.OpType {
case "Get":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case op.ConfigNum != latest.Num || latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
shard := kv.database.GetShard(shardNum)
if value, ok := shard.Get(op.Key); ok {
result.Result = OK
result.Value = value
} else {
result.Result = ErrNoKey
}
str, _ := json.Marshal(kv.database)
DPrintf("ShardKV(gid=%d) %d shardNum %d database= %s get key:%v result: %s\n", kv.gid, kv.me, shardNum, str, op.Key, result.Result)
kv.saveShardKVState(false)
}
go kv.applyWait.Trigger(result)
case "Put":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case op.ConfigNum != latest.Num || latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
result.Result = OK
if lastOpId, ok := kv.lastOpId[op.ClientId]; !ok || op.OpId > lastOpId {
shard := kv.database.GetShard(shardNum)
shard.Put(op.Key, op.Value, op.ClientId, op.OpId)
kv.lastOpId[op.ClientId] = op.OpId
kv.saveShardKVState(false)
}
}
go kv.applyWait.Trigger(result)
case "Append":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
result.Result = OK
if lastOpId, ok := kv.lastOpId[op.ClientId]; !ok || op.OpId > lastOpId {
shard := kv.database.GetShard(shardNum)
shard.Append(op.Key, op.Value, op.ClientId, op.OpId)
kv.lastOpId[op.ClientId] = op.OpId
kv.saveShardKVState(false)
}
}
go kv.applyWait.Trigger(result)
default:
DPrintf("ShardKV %d (gid=%d) stateMachine received wrong opType OpArgs: %+v\n", kv.me, kv.gid, command)
}
case shardmaster.Config:
newConfig := command
oldConfig := kv.configs[len(kv.configs)-1]
if newConfig.Num > oldConfig.Num && kv.waitMigration.IsEmpty() && kv.waitClean.IsEmpty() {
kv.configs = append(kv.configs, newConfig)
if oldConfig.Num > 0 {
kv.waitMigration.Init(shardmaster.Config{
Num: newConfig.Num,
Shards: oldConfig.Shards,
Groups: oldConfig.Groups,
})
kv.waitClean.Init(newConfig)
for shardNum := 0; shardNum < shardmaster.NShards; shardNum++ {
oldGid := oldConfig.Shards[shardNum]
newGid := newConfig.Shards[shardNum]
if kv.gid == oldGid && kv.gid != newGid {
// old shard remove from this group
kv.waitClean.AddGidShard(newGid, shardNum)
}
if kv.gid != oldGid && kv.gid == newGid {
// new shard assign to this group
kv.waitMigration.AddGidShard(oldGid, shardNum)
} | }
if !kv.waitClean.IsEmpty() {
go kv.migrationChecker()
}
}
DPrintf("ShardKV %d (gid=%d) is applying \nold Config = %+v \nnew Config = %+v\nkv.waitMigration:%+v\nkv.waitClean:%+v\n", kv.me,
kv.gid, oldConfig, newConfig, kv.waitMigration, kv.waitClean)
kv.configs = kv.configs[1:]
kv.saveShardKVState(true)
}
case MigrateShardReply:
if kv.waitMigration.GetConfig().Num == command.ConfigNum && command.Result == OK {
if ok := kv.waitMigration.DeleteByGid(command.Gid); ok {
for shardNum, shard := range command.Data {
kv.database.SetShard(shardNum, shard)
}
str, _ := json.Marshal(kv.database)
DPrintf("ShardKV %d (gid=%d) finished MigrateShard from (gid=%d) command= %+v database= %s", kv.me, kv.gid, command.Gid, command, str)
}
} else if kv.waitMigration.GetConfig().Num == command.ConfigNum && command.Result == ErrWaitMigrationTimeOut {
if !kv.waitMigration.IsEmpty() {
kv.waitMigration.Clear()
DPrintf("ShardKV %d (gid=%d) waitMigration timeout (configNum %d)", kv.me, kv.gid, kv.waitClean.GetConfig().Num)
}
}
kv.saveShardKVState(true)
case CheckMigrateShardReply:
if kv.waitClean.GetConfig().Num == command.ConfigNum && command.Result == OK {
kv.waitClean.DeleteByGid(command.Gid)
} else if kv.waitClean.GetConfig().Num == command.ConfigNum && command.Result == ErrWaitCleanTimeOut {
if !kv.waitClean.IsEmpty() {
kv.waitClean.Clear()
DPrintf("ShardKV %d (gid=%d) waitClean timeout (configNum %d)", kv.me, kv.gid, kv.waitClean.GetConfig().Num)
}
}
kv.saveShardKVState(true)
default:
DPrintf("ShardKV %d stateMachine received wrong type command %+v %v\n", kv.gid, applyMsg, reflect.TypeOf(applyMsg.Command))
}
}
kv.mu.Unlock()
}
} else if command, ok := applyMsg.Command.(string); ok {
if command == raft.CommandInstallSnapshot {
DPrintf("ShardMaster %d stateMachine received InstallSnapshot %+v\n", kv.me, applyMsg)
kv.init()
kv.rf.Replay()
}
}
}
}
} | }
// remove shard from kv.Database and store in waitClean
kv.waitClean.StoreCleanData(kv.database)
if !kv.waitMigration.IsEmpty() {
go kv.migrationHelper() | random_line_split |
server.go | package shardkv
import (
"bytes"
"context"
"encoding/json"
"fmt"
"labgob"
"labrpc"
"raft"
"reflect"
"shardmaster"
"sync"
"time"
)
func init() {
labgob.Register(OpArgs{})
labgob.Register(GetArgs{})
labgob.Register(PutAppendArgs{})
labgob.Register(shardmaster.Config{})
labgob.Register(CheckMigrateShardReply{})
labgob.Register(MigrateShardReply{})
}
type ShardKV struct {
mu sync.RWMutex
me int
rf *raft.Raft
applyCh chan raft.ApplyMsg
make_end func(string) *labrpc.ClientEnd
gid int
masters *shardmaster.Clerk
maxraftstate int // snapshot if log grows this big
persister *raft.Persister
ctx context.Context
close context.CancelFunc
lastApplied int
configs []shardmaster.Config
database *ShardDatabase
applyWait *Wait
lastOpId map[int64]int64 // store client last op id
waitMigration *WaitMigration // store migration shard
waitClean *WaitClean // store clean shard
}
//
// the tester calls Kill() when a ShardKV instance won't
// be needed again. you are not required to do anything
// in Kill(), but it might be convenient to (for example)
// turn off debug output from this instance.
//
func (kv *ShardKV) Kill() {
kv.rf.Kill()
kv.close()
// Your code here, if desired.
}
//
// servers[] contains the ports of the servers in this group.
//
// me is the index of the current server in servers[].
//
// the k/v server should store snapshots through the underlying Raft
// implementation, which should call persister.SaveStateAndSnapshot() to
// atomically save the Raft state along with the snapshot.
//
// the k/v server should snapshot when Raft's saved state exceeds
// maxraftstate bytes, in order to allow Raft to garbage-collect its
// log. if maxraftstate is -1, you don't need to snapshot.
//
// gid is this group's GID, for interacting with the shardmaster.
//
// pass masters[] to shardmaster.MakeClerk() so you can send
// RPCs to the shardmaster.
//
// make_end(servername) turns a server name from a
// Config.Groups[gid][i] into a labrpc.ClientEnd on which you can
// send RPCs. You'll need this to send RPCs to other groups.
//
// look at client.go for examples of how to use masters[]
// and make_end() to send RPCs to the group owning a specific shard.
//
// StartServer() must return quickly, so it should start goroutines
// for any long-running work.
//
func StartServer(servers []*labrpc.ClientEnd, me int, persister *raft.Persister, maxraftstate int, gid int, masters []*labrpc.ClientEnd, make_end func(string) *labrpc.ClientEnd) *ShardKV {
// call labgob.Register on structures you want
// Go's RPC library to marshall/unmarshall.
kv := new(ShardKV)
kv.me = me
kv.maxraftstate = maxraftstate
kv.make_end = make_end
kv.gid = gid
kv.masters = shardmaster.MakeClerk(masters)
kv.applyCh = make(chan raft.ApplyMsg)
kv.applyWait = NewWait()
kv.persister = persister
ctx, cancel := context.WithCancel(context.Background())
kv.ctx = ctx
kv.close = cancel
kv.init()
kv.rf = raft.Make(servers, me, persister, kv.applyCh)
if !kv.waitMigration.IsEmpty() {
go kv.migrationHelper()
}
if !kv.waitClean.IsEmpty() {
go kv.migrationChecker()
}
go kv.newConfigLearner()
go kv.stateMachine()
return kv
}
func (kv *ShardKV) start(args interface{}) (result string, value string) {
var op OpArgs
if getArgs, ok := args.(GetArgs); ok {
op = OpArgs{
ConfigNum: getArgs.ConfigNum,
ClientId: getArgs.ClientId,
OpId: getArgs.OpId,
Key: getArgs.Key,
Value: "",
OpType: "Get",
}
} else if putAppendArgs, ok := args.(PutAppendArgs); ok {
op = OpArgs{
ConfigNum: putAppendArgs.ConfigNum,
ClientId: putAppendArgs.ClientId,
OpId: putAppendArgs.OpId,
Key: putAppendArgs.Key,
Value: putAppendArgs.Value,
OpType: putAppendArgs.Op,
}
} else {
return fmt.Sprintf("ErrArgsType:%+v", args), ""
}
resultCh := kv.applyWait.Register(op)
defer kv.applyWait.Unregister(op)
_, _, isLeader := kv.rf.Start(op)
if !isLeader {
return ErrWrongLeader, ""
}
t := time.NewTimer(OpTimeout)
select {
case <-kv.ctx.Done():
return ErrShardKVClosed, ""
case <-t.C:
return ErrOpTimeout, ""
case opResult := <-resultCh:
//DPrintf("ShardKV %d return client %d by resultCh result = %+v\n", kv.me, op.ClientId, opResult)
return opResult.Result, opResult.Value
}
}
func (kv *ShardKV) newConfigLearner() {
t := time.NewTicker(100 * time.Millisecond)
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if !kv.waitClean.IsEmpty() || !kv.waitMigration.IsEmpty() {
// is applying new Config
kv.mu.RUnlock()
continue
}
latest := kv.configs[len(kv.configs)-1]
kv.mu.RUnlock()
config := kv.masters.Query(latest.Num + 1)
if latest.Num < config.Num {
kv.rf.Start(config)
}
}
}
}
func (kv *ShardKV) migrationChecker() |
func (kv *ShardKV) migrationHelper() {
t := time.NewTicker(100 * time.Millisecond)
start := time.Now()
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if kv.waitMigration.IsEmpty() {
kv.mu.RUnlock()
return
}
gidShards := kv.waitMigration.GetGidShards()
config := kv.waitMigration.GetConfig()
if time.Now().Sub(start) > WaitMigrationTimeOut {
kv.rf.Start(MigrateShardReply{
ConfigNum: config.Num,
Gid: -1,
Result: ErrWaitMigrationTimeOut,
})
} else {
for gid, shardNums := range gidShards {
servers := config.Groups[gid]
args := MigrateShardArgs{
config.Num,
gid,
shardNums,
}
go func(servers []string, args MigrateShardArgs) {
for si := 0; si < len(servers); si++ {
srv := kv.make_end(servers[si])
var reply MigrateShardReply
ok := srv.Call("ShardKV.MigrateShard", &args, &reply)
DPrintf("ShardKV %d (gid = %d) MigrateShard ok = %v args = %+v reply = %+v\n", kv.me, kv.gid, ok, args, reply)
if ok && (reply.Result == OK || reply.Result == ErrShardHasBeenCleaned) {
kv.rf.Start(reply)
return
}
}
}(servers, args)
}
}
kv.mu.RUnlock()
}
}
}
func (kv *ShardKV) init() {
kv.mu.Lock()
defer kv.mu.Unlock()
data := kv.persister.ReadSnapshot()
if len(data) == 0 {
kv.lastApplied = 0
kv.database = NewShardDatabase()
kv.lastOpId = make(map[int64]int64)
kv.configs = make([]shardmaster.Config, 1)
kv.configs[0].Groups = map[int][]string{}
kv.waitClean = NewWaitClean()
kv.waitMigration = NewWaitMigration()
} else {
r := bytes.NewBuffer(data)
d := labgob.NewDecoder(r)
kv.lastApplied = 0
kv.database = nil
kv.lastOpId = nil
kv.configs = nil
kv.waitMigration = nil
kv.waitClean = nil
d.Decode(&kv.lastApplied)
d.Decode(&kv.database)
d.Decode(&kv.lastOpId)
d.Decode(&kv.configs)
d.Decode(&kv.waitClean)
d.Decode(&kv.waitMigration)
}
}
func (kv *ShardKV) saveShardKVState(force bool) {
shouldSave := kv.maxraftstate != -1 && (force || kv.persister.RaftStateSize() > kv.maxraftstate)
if shouldSave {
w := new(bytes.Buffer)
e := labgob.NewEncoder(w)
e.Encode(kv.lastApplied)
e.Encode(kv.database)
e.Encode(kv.lastOpId)
e.Encode(kv.configs)
e.Encode(kv.waitClean)
e.Encode(kv.waitMigration)
snapshot := w.Bytes()
kv.rf.SaveSnapshot(kv.lastApplied, snapshot)
}
}
func (kv *ShardKV) stateMachine() {
for {
select {
case <-kv.ctx.Done():
DPrintf("ShardKV %d (gid=%d)stateMachine closed\n", kv.me, kv.gid)
return
case applyMsg := <-kv.applyCh:
if applyMsg.CommandValid {
kv.mu.Lock()
if kv.lastApplied+1 < applyMsg.CommandIndex {
kv.mu.Unlock()
kv.rf.Replay()
} else {
if kv.lastApplied+1 == applyMsg.CommandIndex {
kv.lastApplied++
//DPrintf("ShardKV(gid=%d) %d stateMachine received command %v %+v\n", kv.me, kv.gid, reflect.TypeOf(applyMsg.Command), applyMsg)
switch command := applyMsg.Command.(type) {
case OpArgs:
op := command
result := OpResult{ClientId: op.ClientId, OpId: op.OpId}
switch op.OpType {
case "Get":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case op.ConfigNum != latest.Num || latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
shard := kv.database.GetShard(shardNum)
if value, ok := shard.Get(op.Key); ok {
result.Result = OK
result.Value = value
} else {
result.Result = ErrNoKey
}
str, _ := json.Marshal(kv.database)
DPrintf("ShardKV(gid=%d) %d shardNum %d database= %s get key:%v result: %s\n", kv.gid, kv.me, shardNum, str, op.Key, result.Result)
kv.saveShardKVState(false)
}
go kv.applyWait.Trigger(result)
case "Put":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case op.ConfigNum != latest.Num || latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
result.Result = OK
if lastOpId, ok := kv.lastOpId[op.ClientId]; !ok || op.OpId > lastOpId {
shard := kv.database.GetShard(shardNum)
shard.Put(op.Key, op.Value, op.ClientId, op.OpId)
kv.lastOpId[op.ClientId] = op.OpId
kv.saveShardKVState(false)
}
}
go kv.applyWait.Trigger(result)
case "Append":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
result.Result = OK
if lastOpId, ok := kv.lastOpId[op.ClientId]; !ok || op.OpId > lastOpId {
shard := kv.database.GetShard(shardNum)
shard.Append(op.Key, op.Value, op.ClientId, op.OpId)
kv.lastOpId[op.ClientId] = op.OpId
kv.saveShardKVState(false)
}
}
go kv.applyWait.Trigger(result)
default:
DPrintf("ShardKV %d (gid=%d) stateMachine received wrong opType OpArgs: %+v\n", kv.me, kv.gid, command)
}
case shardmaster.Config:
newConfig := command
oldConfig := kv.configs[len(kv.configs)-1]
if newConfig.Num > oldConfig.Num && kv.waitMigration.IsEmpty() && kv.waitClean.IsEmpty() {
kv.configs = append(kv.configs, newConfig)
if oldConfig.Num > 0 {
kv.waitMigration.Init(shardmaster.Config{
Num: newConfig.Num,
Shards: oldConfig.Shards,
Groups: oldConfig.Groups,
})
kv.waitClean.Init(newConfig)
for shardNum := 0; shardNum < shardmaster.NShards; shardNum++ {
oldGid := oldConfig.Shards[shardNum]
newGid := newConfig.Shards[shardNum]
if kv.gid == oldGid && kv.gid != newGid {
// old shard remove from this group
kv.waitClean.AddGidShard(newGid, shardNum)
}
if kv.gid != oldGid && kv.gid == newGid {
// new shard assign to this group
kv.waitMigration.AddGidShard(oldGid, shardNum)
}
}
// remove shard from kv.Database and store in waitClean
kv.waitClean.StoreCleanData(kv.database)
if !kv.waitMigration.IsEmpty() {
go kv.migrationHelper()
}
if !kv.waitClean.IsEmpty() {
go kv.migrationChecker()
}
}
DPrintf("ShardKV %d (gid=%d) is applying \nold Config = %+v \nnew Config = %+v\nkv.waitMigration:%+v\nkv.waitClean:%+v\n", kv.me,
kv.gid, oldConfig, newConfig, kv.waitMigration, kv.waitClean)
kv.configs = kv.configs[1:]
kv.saveShardKVState(true)
}
case MigrateShardReply:
if kv.waitMigration.GetConfig().Num == command.ConfigNum && command.Result == OK {
if ok := kv.waitMigration.DeleteByGid(command.Gid); ok {
for shardNum, shard := range command.Data {
kv.database.SetShard(shardNum, shard)
}
str, _ := json.Marshal(kv.database)
DPrintf("ShardKV %d (gid=%d) finished MigrateShard from (gid=%d) command= %+v database= %s", kv.me, kv.gid, command.Gid, command, str)
}
} else if kv.waitMigration.GetConfig().Num == command.ConfigNum && command.Result == ErrWaitMigrationTimeOut {
if !kv.waitMigration.IsEmpty() {
kv.waitMigration.Clear()
DPrintf("ShardKV %d (gid=%d) waitMigration timeout (configNum %d)", kv.me, kv.gid, kv.waitClean.GetConfig().Num)
}
}
kv.saveShardKVState(true)
case CheckMigrateShardReply:
if kv.waitClean.GetConfig().Num == command.ConfigNum && command.Result == OK {
kv.waitClean.DeleteByGid(command.Gid)
} else if kv.waitClean.GetConfig().Num == command.ConfigNum && command.Result == ErrWaitCleanTimeOut {
if !kv.waitClean.IsEmpty() {
kv.waitClean.Clear()
DPrintf("ShardKV %d (gid=%d) waitClean timeout (configNum %d)", kv.me, kv.gid, kv.waitClean.GetConfig().Num)
}
}
kv.saveShardKVState(true)
default:
DPrintf("ShardKV %d stateMachine received wrong type command %+v %v\n", kv.gid, applyMsg, reflect.TypeOf(applyMsg.Command))
}
}
kv.mu.Unlock()
}
} else if command, ok := applyMsg.Command.(string); ok {
if command == raft.CommandInstallSnapshot {
DPrintf("ShardMaster %d stateMachine received InstallSnapshot %+v\n", kv.me, applyMsg)
kv.init()
kv.rf.Replay()
}
}
}
}
}
| {
t := time.NewTicker(100 * time.Millisecond)
start := time.Now()
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if kv.waitClean.IsEmpty() {
kv.mu.RUnlock()
return
}
gidShards := kv.waitClean.GetGidShard()
config := kv.waitClean.GetConfig()
if time.Now().Sub(start) > WaitCleanTimeOut {
kv.rf.Start(CheckMigrateShardReply{
ConfigNum: config.Num,
Gid: -1,
Result: ErrWaitCleanTimeOut,
})
} else {
for gid, shardNums := range gidShards {
servers := config.Groups[gid]
args := CheckMigrateShardArgs{
config.Num,
gid,
shardNums,
}
go func(servers []string, args CheckMigrateShardArgs) {
for si := 0; si < len(servers); si++ {
srv := kv.make_end(servers[si])
var reply CheckMigrateShardReply
ok := srv.Call("ShardKV.CheckMigrateShard", &args, &reply)
/*if ok {
DPrintf("ShardKV %d (gid = %d) CheckMigrateShard ok = %v args = %+v reply = %+v\n", kv.me, kv.gid, ok, args, reply)
}*/
if ok && reply.Result == OK {
kv.rf.Start(reply)
return
}
}
}(servers, args)
}
}
kv.mu.RUnlock()
}
}
} | identifier_body |
server.go | package shardkv
import (
"bytes"
"context"
"encoding/json"
"fmt"
"labgob"
"labrpc"
"raft"
"reflect"
"shardmaster"
"sync"
"time"
)
func init() {
labgob.Register(OpArgs{})
labgob.Register(GetArgs{})
labgob.Register(PutAppendArgs{})
labgob.Register(shardmaster.Config{})
labgob.Register(CheckMigrateShardReply{})
labgob.Register(MigrateShardReply{})
}
type ShardKV struct {
mu sync.RWMutex
me int
rf *raft.Raft
applyCh chan raft.ApplyMsg
make_end func(string) *labrpc.ClientEnd
gid int
masters *shardmaster.Clerk
maxraftstate int // snapshot if log grows this big
persister *raft.Persister
ctx context.Context
close context.CancelFunc
lastApplied int
configs []shardmaster.Config
database *ShardDatabase
applyWait *Wait
lastOpId map[int64]int64 // store client last op id
waitMigration *WaitMigration // store migration shard
waitClean *WaitClean // store clean shard
}
//
// the tester calls Kill() when a ShardKV instance won't
// be needed again. you are not required to do anything
// in Kill(), but it might be convenient to (for example)
// turn off debug output from this instance.
//
func (kv *ShardKV) Kill() {
kv.rf.Kill()
kv.close()
// Your code here, if desired.
}
//
// servers[] contains the ports of the servers in this group.
//
// me is the index of the current server in servers[].
//
// the k/v server should store snapshots through the underlying Raft
// implementation, which should call persister.SaveStateAndSnapshot() to
// atomically save the Raft state along with the snapshot.
//
// the k/v server should snapshot when Raft's saved state exceeds
// maxraftstate bytes, in order to allow Raft to garbage-collect its
// log. if maxraftstate is -1, you don't need to snapshot.
//
// gid is this group's GID, for interacting with the shardmaster.
//
// pass masters[] to shardmaster.MakeClerk() so you can send
// RPCs to the shardmaster.
//
// make_end(servername) turns a server name from a
// Config.Groups[gid][i] into a labrpc.ClientEnd on which you can
// send RPCs. You'll need this to send RPCs to other groups.
//
// look at client.go for examples of how to use masters[]
// and make_end() to send RPCs to the group owning a specific shard.
//
// StartServer() must return quickly, so it should start goroutines
// for any long-running work.
//
func StartServer(servers []*labrpc.ClientEnd, me int, persister *raft.Persister, maxraftstate int, gid int, masters []*labrpc.ClientEnd, make_end func(string) *labrpc.ClientEnd) *ShardKV {
// call labgob.Register on structures you want
// Go's RPC library to marshall/unmarshall.
kv := new(ShardKV)
kv.me = me
kv.maxraftstate = maxraftstate
kv.make_end = make_end
kv.gid = gid
kv.masters = shardmaster.MakeClerk(masters)
kv.applyCh = make(chan raft.ApplyMsg)
kv.applyWait = NewWait()
kv.persister = persister
ctx, cancel := context.WithCancel(context.Background())
kv.ctx = ctx
kv.close = cancel
kv.init()
kv.rf = raft.Make(servers, me, persister, kv.applyCh)
if !kv.waitMigration.IsEmpty() {
go kv.migrationHelper()
}
if !kv.waitClean.IsEmpty() {
go kv.migrationChecker()
}
go kv.newConfigLearner()
go kv.stateMachine()
return kv
}
func (kv *ShardKV) start(args interface{}) (result string, value string) {
var op OpArgs
if getArgs, ok := args.(GetArgs); ok {
op = OpArgs{
ConfigNum: getArgs.ConfigNum,
ClientId: getArgs.ClientId,
OpId: getArgs.OpId,
Key: getArgs.Key,
Value: "",
OpType: "Get",
}
} else if putAppendArgs, ok := args.(PutAppendArgs); ok {
op = OpArgs{
ConfigNum: putAppendArgs.ConfigNum,
ClientId: putAppendArgs.ClientId,
OpId: putAppendArgs.OpId,
Key: putAppendArgs.Key,
Value: putAppendArgs.Value,
OpType: putAppendArgs.Op,
}
} else {
return fmt.Sprintf("ErrArgsType:%+v", args), ""
}
resultCh := kv.applyWait.Register(op)
defer kv.applyWait.Unregister(op)
_, _, isLeader := kv.rf.Start(op)
if !isLeader {
return ErrWrongLeader, ""
}
t := time.NewTimer(OpTimeout)
select {
case <-kv.ctx.Done():
return ErrShardKVClosed, ""
case <-t.C:
return ErrOpTimeout, ""
case opResult := <-resultCh:
//DPrintf("ShardKV %d return client %d by resultCh result = %+v\n", kv.me, op.ClientId, opResult)
return opResult.Result, opResult.Value
}
}
func (kv *ShardKV) newConfigLearner() {
t := time.NewTicker(100 * time.Millisecond)
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if !kv.waitClean.IsEmpty() || !kv.waitMigration.IsEmpty() {
// is applying new Config
kv.mu.RUnlock()
continue
}
latest := kv.configs[len(kv.configs)-1]
kv.mu.RUnlock()
config := kv.masters.Query(latest.Num + 1)
if latest.Num < config.Num {
kv.rf.Start(config)
}
}
}
}
func (kv *ShardKV) migrationChecker() {
t := time.NewTicker(100 * time.Millisecond)
start := time.Now()
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if kv.waitClean.IsEmpty() {
kv.mu.RUnlock()
return
}
gidShards := kv.waitClean.GetGidShard()
config := kv.waitClean.GetConfig()
if time.Now().Sub(start) > WaitCleanTimeOut {
kv.rf.Start(CheckMigrateShardReply{
ConfigNum: config.Num,
Gid: -1,
Result: ErrWaitCleanTimeOut,
})
} else {
for gid, shardNums := range gidShards {
servers := config.Groups[gid]
args := CheckMigrateShardArgs{
config.Num,
gid,
shardNums,
}
go func(servers []string, args CheckMigrateShardArgs) {
for si := 0; si < len(servers); si++ {
srv := kv.make_end(servers[si])
var reply CheckMigrateShardReply
ok := srv.Call("ShardKV.CheckMigrateShard", &args, &reply)
/*if ok {
DPrintf("ShardKV %d (gid = %d) CheckMigrateShard ok = %v args = %+v reply = %+v\n", kv.me, kv.gid, ok, args, reply)
}*/
if ok && reply.Result == OK {
kv.rf.Start(reply)
return
}
}
}(servers, args)
}
}
kv.mu.RUnlock()
}
}
}
func (kv *ShardKV) migrationHelper() {
t := time.NewTicker(100 * time.Millisecond)
start := time.Now()
for {
select {
case <-kv.ctx.Done():
return
case <-t.C:
kv.mu.RLock()
if kv.waitMigration.IsEmpty() {
kv.mu.RUnlock()
return
}
gidShards := kv.waitMigration.GetGidShards()
config := kv.waitMigration.GetConfig()
if time.Now().Sub(start) > WaitMigrationTimeOut {
kv.rf.Start(MigrateShardReply{
ConfigNum: config.Num,
Gid: -1,
Result: ErrWaitMigrationTimeOut,
})
} else {
for gid, shardNums := range gidShards {
servers := config.Groups[gid]
args := MigrateShardArgs{
config.Num,
gid,
shardNums,
}
go func(servers []string, args MigrateShardArgs) {
for si := 0; si < len(servers); si++ {
srv := kv.make_end(servers[si])
var reply MigrateShardReply
ok := srv.Call("ShardKV.MigrateShard", &args, &reply)
DPrintf("ShardKV %d (gid = %d) MigrateShard ok = %v args = %+v reply = %+v\n", kv.me, kv.gid, ok, args, reply)
if ok && (reply.Result == OK || reply.Result == ErrShardHasBeenCleaned) {
kv.rf.Start(reply)
return
}
}
}(servers, args)
}
}
kv.mu.RUnlock()
}
}
}
func (kv *ShardKV) init() {
kv.mu.Lock()
defer kv.mu.Unlock()
data := kv.persister.ReadSnapshot()
if len(data) == 0 {
kv.lastApplied = 0
kv.database = NewShardDatabase()
kv.lastOpId = make(map[int64]int64)
kv.configs = make([]shardmaster.Config, 1)
kv.configs[0].Groups = map[int][]string{}
kv.waitClean = NewWaitClean()
kv.waitMigration = NewWaitMigration()
} else {
r := bytes.NewBuffer(data)
d := labgob.NewDecoder(r)
kv.lastApplied = 0
kv.database = nil
kv.lastOpId = nil
kv.configs = nil
kv.waitMigration = nil
kv.waitClean = nil
d.Decode(&kv.lastApplied)
d.Decode(&kv.database)
d.Decode(&kv.lastOpId)
d.Decode(&kv.configs)
d.Decode(&kv.waitClean)
d.Decode(&kv.waitMigration)
}
}
func (kv *ShardKV) | (force bool) {
shouldSave := kv.maxraftstate != -1 && (force || kv.persister.RaftStateSize() > kv.maxraftstate)
if shouldSave {
w := new(bytes.Buffer)
e := labgob.NewEncoder(w)
e.Encode(kv.lastApplied)
e.Encode(kv.database)
e.Encode(kv.lastOpId)
e.Encode(kv.configs)
e.Encode(kv.waitClean)
e.Encode(kv.waitMigration)
snapshot := w.Bytes()
kv.rf.SaveSnapshot(kv.lastApplied, snapshot)
}
}
func (kv *ShardKV) stateMachine() {
for {
select {
case <-kv.ctx.Done():
DPrintf("ShardKV %d (gid=%d)stateMachine closed\n", kv.me, kv.gid)
return
case applyMsg := <-kv.applyCh:
if applyMsg.CommandValid {
kv.mu.Lock()
if kv.lastApplied+1 < applyMsg.CommandIndex {
kv.mu.Unlock()
kv.rf.Replay()
} else {
if kv.lastApplied+1 == applyMsg.CommandIndex {
kv.lastApplied++
//DPrintf("ShardKV(gid=%d) %d stateMachine received command %v %+v\n", kv.me, kv.gid, reflect.TypeOf(applyMsg.Command), applyMsg)
switch command := applyMsg.Command.(type) {
case OpArgs:
op := command
result := OpResult{ClientId: op.ClientId, OpId: op.OpId}
switch op.OpType {
case "Get":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case op.ConfigNum != latest.Num || latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
shard := kv.database.GetShard(shardNum)
if value, ok := shard.Get(op.Key); ok {
result.Result = OK
result.Value = value
} else {
result.Result = ErrNoKey
}
str, _ := json.Marshal(kv.database)
DPrintf("ShardKV(gid=%d) %d shardNum %d database= %s get key:%v result: %s\n", kv.gid, kv.me, shardNum, str, op.Key, result.Result)
kv.saveShardKVState(false)
}
go kv.applyWait.Trigger(result)
case "Put":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case op.ConfigNum != latest.Num || latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
result.Result = OK
if lastOpId, ok := kv.lastOpId[op.ClientId]; !ok || op.OpId > lastOpId {
shard := kv.database.GetShard(shardNum)
shard.Put(op.Key, op.Value, op.ClientId, op.OpId)
kv.lastOpId[op.ClientId] = op.OpId
kv.saveShardKVState(false)
}
}
go kv.applyWait.Trigger(result)
case "Append":
shardNum := key2shard(op.Key)
latest := kv.configs[len(kv.configs)-1]
switch {
case latest.Shards[shardNum] != kv.gid:
result.Result = ErrWrongGroup
case kv.waitMigration.IsMigrationShard(shardNum):
result.Result = ErrShardIsMigrating
default:
result.Result = OK
if lastOpId, ok := kv.lastOpId[op.ClientId]; !ok || op.OpId > lastOpId {
shard := kv.database.GetShard(shardNum)
shard.Append(op.Key, op.Value, op.ClientId, op.OpId)
kv.lastOpId[op.ClientId] = op.OpId
kv.saveShardKVState(false)
}
}
go kv.applyWait.Trigger(result)
default:
DPrintf("ShardKV %d (gid=%d) stateMachine received wrong opType OpArgs: %+v\n", kv.me, kv.gid, command)
}
case shardmaster.Config:
newConfig := command
oldConfig := kv.configs[len(kv.configs)-1]
if newConfig.Num > oldConfig.Num && kv.waitMigration.IsEmpty() && kv.waitClean.IsEmpty() {
kv.configs = append(kv.configs, newConfig)
if oldConfig.Num > 0 {
kv.waitMigration.Init(shardmaster.Config{
Num: newConfig.Num,
Shards: oldConfig.Shards,
Groups: oldConfig.Groups,
})
kv.waitClean.Init(newConfig)
for shardNum := 0; shardNum < shardmaster.NShards; shardNum++ {
oldGid := oldConfig.Shards[shardNum]
newGid := newConfig.Shards[shardNum]
if kv.gid == oldGid && kv.gid != newGid {
// old shard remove from this group
kv.waitClean.AddGidShard(newGid, shardNum)
}
if kv.gid != oldGid && kv.gid == newGid {
// new shard assign to this group
kv.waitMigration.AddGidShard(oldGid, shardNum)
}
}
// remove shard from kv.Database and store in waitClean
kv.waitClean.StoreCleanData(kv.database)
if !kv.waitMigration.IsEmpty() {
go kv.migrationHelper()
}
if !kv.waitClean.IsEmpty() {
go kv.migrationChecker()
}
}
DPrintf("ShardKV %d (gid=%d) is applying \nold Config = %+v \nnew Config = %+v\nkv.waitMigration:%+v\nkv.waitClean:%+v\n", kv.me,
kv.gid, oldConfig, newConfig, kv.waitMigration, kv.waitClean)
kv.configs = kv.configs[1:]
kv.saveShardKVState(true)
}
case MigrateShardReply:
if kv.waitMigration.GetConfig().Num == command.ConfigNum && command.Result == OK {
if ok := kv.waitMigration.DeleteByGid(command.Gid); ok {
for shardNum, shard := range command.Data {
kv.database.SetShard(shardNum, shard)
}
str, _ := json.Marshal(kv.database)
DPrintf("ShardKV %d (gid=%d) finished MigrateShard from (gid=%d) command= %+v database= %s", kv.me, kv.gid, command.Gid, command, str)
}
} else if kv.waitMigration.GetConfig().Num == command.ConfigNum && command.Result == ErrWaitMigrationTimeOut {
if !kv.waitMigration.IsEmpty() {
kv.waitMigration.Clear()
DPrintf("ShardKV %d (gid=%d) waitMigration timeout (configNum %d)", kv.me, kv.gid, kv.waitClean.GetConfig().Num)
}
}
kv.saveShardKVState(true)
case CheckMigrateShardReply:
if kv.waitClean.GetConfig().Num == command.ConfigNum && command.Result == OK {
kv.waitClean.DeleteByGid(command.Gid)
} else if kv.waitClean.GetConfig().Num == command.ConfigNum && command.Result == ErrWaitCleanTimeOut {
if !kv.waitClean.IsEmpty() {
kv.waitClean.Clear()
DPrintf("ShardKV %d (gid=%d) waitClean timeout (configNum %d)", kv.me, kv.gid, kv.waitClean.GetConfig().Num)
}
}
kv.saveShardKVState(true)
default:
DPrintf("ShardKV %d stateMachine received wrong type command %+v %v\n", kv.gid, applyMsg, reflect.TypeOf(applyMsg.Command))
}
}
kv.mu.Unlock()
}
} else if command, ok := applyMsg.Command.(string); ok {
if command == raft.CommandInstallSnapshot {
DPrintf("ShardMaster %d stateMachine received InstallSnapshot %+v\n", kv.me, applyMsg)
kv.init()
kv.rf.Replay()
}
}
}
}
}
| saveShardKVState | identifier_name |
xor.go | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/transitional.dtd">
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Source file /src/pkg/crypto/block/xor.go</title>
<link rel="stylesheet" type="text/css" href="../../../../doc/style.css">
<script type="text/javascript" src="../../../../doc/godocs.js"></script>
</head>
<body>
<script>
// Catch 'enter' key down events and trigger the search form submission.
function codesearchKeyDown(event) {
if (event.which == 13) {
var form = document.getElementById('codesearch');
var query = document.getElementById('codesearchQuery');
form.q.value = "lang:go package:go.googlecode.com " + query.value;
document.getElementById('codesearch').submit();
} return true;
}
// Capture the submission event and construct the query parameter.
function codeSearchSubmit() {
var query = document.getElementById('codesearchQuery');
var form = document.getElementById('codesearch');
form.q.value = "lang:go package:go.googlecode.com " + query.value;
return true;
} </script>
<div id="topnav">
<table summary="">
<tr>
<td id="headerImage">
<a href="../../../../index.html"><img src="../../../../doc/logo-153x55.png" height="55" width="153" alt="Go Home Page" style="border:0" /></a>
</td>
<td>
<div id="headerDocSetTitle">The Go Programming Language</div>
</td>
<td>
<!-- <table>
<tr>
<td>
<! The input box is outside of the form because we want to add
a couple of restricts to the query before submitting. If we just
add the restricts to the text box before submitting, then they
appear in the box when the user presses 'back'. Thus we use a
hidden field in the form. However, there's no way to stop the
non-hidden text box from also submitting a value unless we move
it outside of the form
<input type="search" id="codesearchQuery" value="" size="30" onkeydown="return codesearchKeyDown(event);"/>
<form method="GET" action="http://www.google.com/codesearch" id="codesearch" class="search" onsubmit="return codeSearchSubmit();" style="display:inline;">
<input type="hidden" name="q" value=""/> | <span style="color: red">(TODO: remove for now?)</span>
</form>
</td>
</tr>
<tr>
<td>
<span style="color: gray;">(e.g. “pem” or “xml”)</span>
</td>
</tr>
</table> -->
</td>
</tr>
</table>
</div>
<div id="linkList">
<ul>
<li class="navhead"><a href="../../../../index.html">Home</a></li>
<li class="blank"> </li>
<li class="navhead">Documents</li>
<li><a href="../../../../doc/go_tutorial.html">Tutorial</a></li>
<li><a href="../../../../doc/effective_go.html">Effective Go</a></li>
<li><a href="../../../../doc/go_faq.html">FAQ</a></li>
<li><a href="../../../../doc/go_lang_faq.html">Language Design FAQ</a></li>
<li><a href="http://www.youtube.com/watch?v=rKnDgT73v8s">Tech talk (1 hour)</a> (<a href="../../../../doc/go_talk-20091030.pdf">PDF</a>)</li>
<li><a href="../../../../doc/go_spec.html">Language Specification</a></li>
<li><a href="../../../../doc/go_mem.html">Memory Model</a></li>
<li><a href="../../../../doc/go_for_cpp_programmers.html">Go for C++ Programmers</a></li>
<li class="blank"> </li>
<li class="navhead">How To</li>
<li><a href="../../../../doc/install.html">Install Go</a></li>
<li><a href="../../../../doc/contribute.html">Contribute code</a></li>
<li class="blank"> </li>
<li class="navhead">Programming</li>
<li><a href="../../../../cmd/index.html">Command documentation</a></li>
<li><a href="../../../../pkg/index.html">Package documentation</a></li>
<li><a href="../../../index.html">Source files</a></li>
<li class="blank"> </li>
<li class="navhead">Help</li>
<li>#go-nuts on irc.freenode.net</li>
<li><a href="http://groups.google.com/group/golang-nuts">Go Nuts mailing list</a></li>
<li><a href="http://code.google.com/p/go/issues/list">Issue tracker</a></li>
<li class="blank"> </li>
<li class="navhead">Go code search</li>
<form method="GET" action="http://golang.org/search" class="search">
<input type="search" name="q" value="" size="25" style="width:80%; max-width:200px" />
<input type="submit" value="Go" />
</form>
<li class="blank"> </li>
<li class="navhead">Last update</li>
<li>Thu Nov 12 15:48:37 PST 2009</li>
</ul>
</div>
<div id="content">
<h1 id="generatedHeader">Source file /src/pkg/crypto/block/xor.go</h1>
<!-- The Table of Contents is automatically inserted in this <div>.
Do not delete this <div>. -->
<div id="nav"></div>
<!-- Content is HTML-escaped elsewhere -->
<pre>
<a id="L1"></a><span class="comment">// Copyright 2009 The Go Authors. All rights reserved.</span>
<a id="L2"></a><span class="comment">// Use of this source code is governed by a BSD-style</span>
<a id="L3"></a><span class="comment">// license that can be found in the LICENSE file.</span>
<a id="L5"></a><span class="comment">// Encrypt/decrypt data by xor with a pseudo-random data stream.</span>
<a id="L7"></a>package block
<a id="L9"></a>import (
<a id="L10"></a>"io";
<a id="L11"></a>"os";
<a id="L12"></a>)
<a id="L14"></a><span class="comment">// A dataStream is an interface to an unending stream of data,</span>
<a id="L15"></a><span class="comment">// used by XorReader and XorWriter to model a pseudo-random generator.</span>
<a id="L16"></a><span class="comment">// Calls to Next() return sequential blocks of data from the stream.</span>
<a id="L17"></a><span class="comment">// Each call must return at least one byte: there is no EOF.</span>
<a id="L18"></a>type dataStream interface {
<a id="L19"></a>Next() []byte;
<a id="L20"></a>}
<a id="L22"></a>type xorReader struct {
<a id="L23"></a>r io.Reader;
<a id="L24"></a>rand dataStream; <span class="comment">// pseudo-random</span>
<a id="L25"></a>buf []byte; <span class="comment">// data available from last call to rand</span>
<a id="L26"></a>}
<a id="L28"></a>func newXorReader(rand dataStream, r io.Reader) io.Reader {
<a id="L29"></a>x := new(xorReader);
<a id="L30"></a>x.r = r;
<a id="L31"></a>x.rand = rand;
<a id="L32"></a>return x;
<a id="L33"></a>}
<a id="L35"></a>func (x *xorReader) Read(p []byte) (n int, err os.Error) {
<a id="L36"></a>n, err = x.r.Read(p);
<a id="L38"></a><span class="comment">// xor input with stream.</span>
<a id="L39"></a>bp := 0;
<a id="L40"></a>buf := x.buf;
<a id="L41"></a>for i := 0; i < n; i++ {
<a id="L42"></a>if bp >= len(buf) {
<a id="L43"></a>buf = x.rand.Next();
<a id="L44"></a>bp = 0;
<a id="L45"></a>}
<a id="L46"></a>p[i] ^= buf[bp];
<a id="L47"></a>bp++;
<a id="L48"></a>}
<a id="L49"></a>x.buf = buf[bp:len(buf)];
<a id="L50"></a>return n, err;
<a id="L51"></a>}
<a id="L53"></a>type xorWriter struct {
<a id="L54"></a>w io.Writer;
<a id="L55"></a>rand dataStream; <span class="comment">// pseudo-random</span>
<a id="L56"></a>buf []byte; <span class="comment">// last buffer returned by rand</span>
<a id="L57"></a>extra []byte; <span class="comment">// extra random data (use before buf)</span>
<a id="L58"></a>work []byte; <span class="comment">// work space</span>
<a id="L59"></a>}
<a id="L61"></a>func newXorWriter(rand dataStream, w io.Writer) io.Writer {
<a id="L62"></a>x := new(xorWriter);
<a id="L63"></a>x.w = w;
<a id="L64"></a>x.rand = rand;
<a id="L65"></a>x.work = make([]byte, 4096);
<a id="L66"></a>return x;
<a id="L67"></a>}
<a id="L69"></a>func (x *xorWriter) Write(p []byte) (n int, err os.Error) {
<a id="L70"></a>for len(p) > 0 {
<a id="L71"></a><span class="comment">// Determine next chunk of random data</span>
<a id="L72"></a><span class="comment">// and xor with p into x.work.</span>
<a id="L73"></a>var chunk []byte;
<a id="L74"></a>m := len(p);
<a id="L75"></a>if nn := len(x.extra); nn > 0 {
<a id="L76"></a><span class="comment">// extra points into work, so edit directly</span>
<a id="L77"></a>if m > nn {
<a id="L78"></a>m = nn
<a id="L79"></a>}
<a id="L80"></a>for i := 0; i < m; i++ {
<a id="L81"></a>x.extra[i] ^= p[i]
<a id="L82"></a>}
<a id="L83"></a>chunk = x.extra[0:m];
<a id="L84"></a>} else {
<a id="L85"></a><span class="comment">// xor p ^ buf into work, refreshing buf as needed</span>
<a id="L86"></a>if nn := len(x.work); m > nn {
<a id="L87"></a>m = nn
<a id="L88"></a>}
<a id="L89"></a>bp := 0;
<a id="L90"></a>buf := x.buf;
<a id="L91"></a>for i := 0; i < m; i++ {
<a id="L92"></a>if bp >= len(buf) {
<a id="L93"></a>buf = x.rand.Next();
<a id="L94"></a>bp = 0;
<a id="L95"></a>}
<a id="L96"></a>x.work[i] = buf[bp] ^ p[i];
<a id="L97"></a>bp++;
<a id="L98"></a>}
<a id="L99"></a>x.buf = buf[bp:len(buf)];
<a id="L100"></a>chunk = x.work[0:m];
<a id="L101"></a>}
<a id="L103"></a><span class="comment">// Write chunk.</span>
<a id="L104"></a>var nn int;
<a id="L105"></a>nn, err = x.w.Write(chunk);
<a id="L106"></a>if nn != len(chunk) && err == nil {
<a id="L107"></a>err = io.ErrShortWrite
<a id="L108"></a>}
<a id="L109"></a>if nn < len(chunk) {
<a id="L110"></a><span class="comment">// Reconstruct the random bits from the unwritten</span>
<a id="L111"></a><span class="comment">// data and save them for next time.</span>
<a id="L112"></a>for i := nn; i < m; i++ {
<a id="L113"></a>chunk[i] ^= p[i]
<a id="L114"></a>}
<a id="L115"></a>x.extra = chunk[nn:len(chunk)];
<a id="L116"></a>}
<a id="L117"></a>n += nn;
<a id="L118"></a>if err != nil {
<a id="L119"></a>return
<a id="L120"></a>}
<a id="L121"></a>p = p[m:len(p)];
<a id="L122"></a>}
<a id="L123"></a>return;
<a id="L124"></a>}
</pre>
</div>
<div id="footer">
<p>Except as noted, this content is
licensed under <a href="http://creativecommons.org/licenses/by/3.0/">
Creative Commons Attribution 3.0</a>.
</div>
<script type="text/javascript">
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
</script>
<script type="text/javascript">
var pageTracker = _gat._getTracker("UA-11222381-2");
pageTracker._trackPageview();
</script>
</body>
</html>
<!-- generated at Thu Nov 12 15:42:51 PST 2009 --> | <input type="submit" value="Code search" /> | random_line_split |
app.js | Ext.Loader.setConfig({
enabled : true
});
Ext.Loader.setPath('Ext.ux', 'ux');
Ext.require(['Ext.form.*', 'Ext.data.*', 'Ext.chart.*', 'Ext.grid.Panel', 'Ext.layout.container.Column', 'Ext.ux.grid.FiltersFeature']);
Ext.onReady(function() {
//use a renderer for values in the data view.
function perc(value, metaData, record, rowIndex, colIndex, store, view) {
if (value > 50) {
metaData.style = "";
return '<span style="color:green;font-weight:bolder;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
} else if (value < 25) {
return '<span style="color:red;font-weight:bolder;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
}
return '<span style="color:blue;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
}
var bd = Ext.getBody(), form = false, rec = false, selectedStoreItem = false,
//performs the highlight of an item in the bar series
selectItem = function(storeItem) {
var name = storeItem.get('company'), series = barChart.series.get(0), i, items, l;
series.highlight = true;
series.unHighlightItem();
series.cleanHighlights();
for ( i = 0, items = series.items, l = items.length; i < l; i++) {
if (name == items[i].storeItem.get('company')) {
selectedStoreItem = items[i].storeItem;
series.highlightItem(items[i]);
break;
}
}
series.highlight = false;
},
//updates a record modified via the form
updateRecord = function(rec) {
var name, series, i, l, items, json = [{
'Name' : 'Price',
'Data' : rec.get('price')
}, {
'Name' : 'Revenue %',
'Data' : rec.get('revenue %')
}, {
'Name' : 'Growth %',
'Data' : rec.get('growth %')
}, {
'Name' : 'Product %',
'Data' : rec.get('product %')
}, {
'Name' : 'Market %',
'Data' : rec.get('market %')
}];
chs.loadData(json);
selectItem(rec);
}, createListeners = function() {
return {
// buffer so we don't refire while the user is still typing
buffer : 200,
change : function(field, newValue, oldValue, listener) {
if (rec && form) {
if (newValue > field.maxValue) {
field.setValue(field.maxValue);
} else {
form.updateRecord(rec);
updateRecord(rec);
}
}
}
};
};
// sample static data for the store
var myData = [['3m Co'], ['Alcoa Inc'], ['Altria Group Inc'], ['American Express Company'], ['American International Group, Inc.'], ['AT&T Inc'], ['Boeing Co.'], ['Caterpillar Inc.'], ['Citigroup, Inc.'], ['E.I. du Pont de Nemours and Company'], ['Exxon Mobil Corp'], ['General Electric Company'], ['General Motors Corporation'], ['Hewlett-Packard Co'], ['Honeywell Intl Inc'], ['Intel Corporation'], ['International Business Machines'], ['Johnson & Johnson'], ['JP Morgan & Chase & Co'], ['McDonald\'s Corporation'], ['Merck & Co., Inc.'], ['Microsoft Corporation'], ['Pfizer Inc'], ['The Coca-Cola Company'], ['The Home Depot, Inc.'], ['The Procter & Gamble Company'], ['United Technologies Corporation'], ['Verizon Communications'], ['Wal-Mart Stores, Inc.']];
for (var i = 0, l = myData.length, rand = Math.random; i < l; i++) {
var data = myData[i];
data[1] = ((rand() * 10000) >> 0) / 100;
data[2] = ((rand() * 10000) >> 0) / 100;
data[3] = ((rand() * 10000) >> 0) / 100;
data[4] = ((rand() * 10000) >> 0) / 100;
data[5] = ((rand() * 10000) >> 0) / 100;
}
//create data store to be shared among the grid and bar series.
var ds = Ext.create('Ext.data.ArrayStore', {
fields : [{
name : 'company'
}, {
name : 'price',
type : 'float'
}, {
name : 'revenue %',
type : 'float'
}, {
name : 'growth %',
type : 'float'
}, {
name : 'product %',
type : 'float'
}, {
name : 'market %',
type : 'float'
}],
data : myData
});
//create radar dataset model.
var chs = Ext.create('Ext.data.JsonStore', {
fields : ['Name', 'Data'],
data : [{
'Name' : 'Price',
'Data' : 100
}, {
'Name' : 'Revenue %',
'Data' : 100
}, {
'Name' : 'Growth %',
'Data' : 100
}, {
'Name' : 'Product %',
'Data' : 100
}, {
'Name' : 'Market %',
'Data' : 100
}]
});
//Radar chart will render information for a selected company in the
//list. Selection can also be done via clicking on the bars in the series.
var radarChart = Ext.create('Ext.chart.Chart', {
margin : '0 0 0 0',
insetPadding : 20,
flex : 1.2,
animate : true,
store : chs,
axes : [{
steps : 5,
type : 'Radial',
position : 'radial',
maximum : 100
}],
series : [{
type : 'radar',
xField : 'Name',
yField : 'Data',
showInLegend : false,
showMarkers : true,
markerConfig : {
radius : 4,
size : 4
},
style : {
fill : 'rgb(194,214,240)',
opacity : 0.5,
'stroke-width' : 0.5
}
}]
});
//create a grid that will list the dataset items.
var gridPanel = Ext.create('Ext.grid.Panel', {
id : 'company-form',
flex : 0.60,
store : ds,
title : 'Company Data',
requires : ['Ext.ux.grid.filter.*'],
features : [{
ftype : 'filters',
local : true
}],
dockedItems : [{
xtype : 'toolbar',
items : [{
xtype : 'combo',
queryMode : 'local',
store : ds,
displayField : 'company',
listeners : {
change : function(me, newVal, oldVal, eOpts) {
console.log(me.getStore());
me.getStore().filter("company", newVal);
}
}
}, {
xtype: 'button',
text: 'reset',
handler: function(me){
console.log("click");
me.up('toolbar').down('combo').getStore().clearFilter();
}
}]
}],
columns : [{
id : 'company',
text : 'Company',
flex : 1,
filterable : true,
sortable : true, | } else if (record.get("price") < 25) {
metaData.style = "background-color:red;";
return '<span style="font-weight:bolder;">' + value + '</span>';
} else {
metaData.style = "background-color:blue;";
}
return '<span style="font-weight:bolder;color:white;">' + value + '</span>';
}
}, {
text : 'Price',
width : 75,
sortable : true,
dataIndex : 'price',
align : 'right',
renderer : function(value, metaData, record, rowIndex, colIndex, store, view) {
if (value > 50) {
metaData.style = "";
return '<span style="color:green;font-weight:bolder;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
} else if (value < 25) {
return '<span style="color:red;font-weight:bolder;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
}
return '<span style="color:blue;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
}
}, {
text : 'Revenue',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'revenue %',
renderer : perc
}, {
text : 'Growth',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'growth %',
renderer : perc
}, {
text : 'Product',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'product %',
renderer : perc
}, {
text : 'Market',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'market %',
renderer : perc
}],
listeners : {
selectionchange : function(model, records) {
var json, name, i, l, items, series, fields;
if (records[0]) {
rec = records[0];
if (!form) {
form = this.up('form').getForm();
fields = form.getFields();
fields.each(function(field) {
if (field.name != 'company') {
field.setDisabled(false);
}
});
} else {
fields = form.getFields();
}
// prevent change events from firing
fields.each(function(field) {
field.suspendEvents();
});
form.loadRecord(rec);
updateRecord(rec);
fields.each(function(field) {
field.resumeEvents();
});
}
}
}
});
//create a bar series to be at the top of the panel.
var barChart = Ext.create('Ext.chart.Chart', {
flex : 1,
shadow : true,
animate : true,
store : ds,
legend : {
position : 'right'
},
axes : [{
type : 'Numeric',
position : 'left',
fields : ['price', 'revenue %', 'growth %'],
minimum : 0,
hidden : true
}, {
type : 'Category',
position : 'bottom',
fields : ['company'],
label : {
renderer : function(v) {
return Ext.String.ellipsis(v, 15, false);
},
font : '9px Arial',
rotate : {
degrees : 330
}
}
}],
series : [{
type : 'column',
axis : 'left',
highlight : true,
// style : {
// fill : '#456d9f'
// },
highlightCfg : {
fill : '#a2b5ca'
},
label : {
contrast : true,
display : 'insideEnd',
field : 'price',
color : '#000',
orientation : 'vertical',
'text-anchor' : 'middle'
},
listeners : {
'itemmouseup' : function(item) {
var series = barChart.series.get(0), index = Ext.Array.indexOf(series.items, item), selectionModel = gridPanel.getSelectionModel();
selectedStoreItem = item.storeItem;
selectionModel.select(index);
}
},
xField : 'name',
yField : ['price', 'revenue %', 'growth %'],
}, {
type : 'line',
axis : ['left', 'bottom'],
xField : 'company',
yField : 'product %',
style : {
stroke : '#0f22f0',
'stroke-width' : 5,
fill : '#0f22f0',
opacity : 0.2
},
label : {
display : 'none',
field : 'company',
renderer : function(v) {
return v >> 0;
},
'text-anchor' : 'middle'
},
markerConfig : {
radius : 3,
size : 5
}
}, {
type : 'line',
axis : ['left', 'bottom'],
xField : 'company',
yField : 'market %',
style : {
stroke : '#000000',
'stroke-width' : 5,
fill : '#000000',
opacity : 0.2
},
label : {
display : 'none',
field : 'company',
renderer : function(v) {
return v >> 0;
},
'text-anchor' : 'middle'
},
markerConfig : {
radius : 3,
size : 5
}
}]
});
//disable highlighting by default.
barChart.series.get(0).highlight = false;
//add listener to (re)select bar item after sorting or refreshing the dataset.
barChart.addListener('beforerefresh', (function() {
var timer = false;
return function() {
clearTimeout(timer);
if (selectedStoreItem) {
timer = setTimeout(function() {
selectItem(selectedStoreItem);
}, 900);
}
};
})());
var areaChart = Ext.create('Ext.chart.Chart', {
animate : true,
store : ds,
shadow : true,
theme : 'Category1',
legend : {
position : 'top'
},
axes : [{
type : 'Numeric',
position : 'left',
fields : ['price', 'revenue %', 'growth %', 'product %', 'market %'],
title : 'LINE CHART WIHT LEGEND',
label : {
font : '9px Verdana'
},
grid : {
odd : {
opacity : 1,
fill : '#ddd',
stroke : '#bbb',
'stroke-width' : 1
}
},
minimum : 0,
adjustMinimumByMajorUnit : 0
}, {
type : 'Category',
position : 'bottom',
fields : ['company'],
// title : 'COMPANY DETAILS',
grid : true,
label : {
font : '9px Verdana',
rotate : {
degrees : 10
}
}
}],
series : [{
type : 'area',
highlight : false,
axis : 'left',
xField : 'company',
yField : ['price', 'revenue %', 'growth %', 'product %', 'market %'],
style : {
opacity : 0.93
}
}]
});
var pieChart = Ext.create('Ext.chart.Chart', {
animate : true,
store : ds,
shadow : false,
theme : 'Base:gradients',
series : [{
type : 'pie',
angleField : 'price',
showInLegend : true,
tips : {
trackMouse : true,
width : 140,
height : 28,
renderer : function(storeItem, item) {
// calculate and display percentage on hover
var total = 0;
ds.each(function(rec) {
total += rec.get('price');
});
this.setTitle(storeItem.get('company') + ': ' + Math.round(storeItem.get('price') / total * 100) + '%');
}
},
highlight : {
segment : {
margin : 2
}
},
label : {
field : 'company',
display : 'rotate',
contrast : true,
font : '8px Arial'
}
}]
});
/*
* Here is where we create the Form
*/
var gridForm = Ext.create('Ext.form.Panel', {
title : 'Company data',
frame : true,
bodyPadding : 5,
width : 870,
height : 1120,
fieldDefaults : {
labelAlign : 'left',
msgTarget : 'side'
},
layout : {
type : 'vbox',
align : 'stretch'
},
items : [{
height : 200,
layout : 'fit',
margin : '0 0 5 0',
items : [barChart]
}, {
/* MY CUSTOM PANEL*/
xtype : 'container',
layout : {
type : 'hbox',
align : 'stretch'
},
margin : '0 0 5 0',
items : [{
height : 300,
layout : 'fit',
border : true,
flex : 1,
items : [areaChart]
}, {
height : 300,
width : 300,
layout : 'fit',
margin : '0 0 0 5',
border : true,
items : [pieChart]
}]
}, {
layout : {
type : 'hbox',
align : 'stretch'
},
flex : 3,
border : false,
bodyStyle : 'background-color: transparent',
items : [gridPanel, {
flex : 0.4,
layout : {
type : 'vbox',
align : 'stretch'
},
margin : '0 0 0 5',
title : 'Company Details',
items : [{
margin : '5',
xtype : 'fieldset',
flex : 1,
title : 'Company details',
defaults : {
width : 240,
labelWidth : 90,
disabled : true
},
defaultType : 'numberfield',
items : [{
fieldLabel : 'Name',
name : 'company',
xtype : 'textfield'
}, {
fieldLabel : 'Price',
name : 'price',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('price')
}, {
fieldLabel : 'Revenue %',
name : 'revenue %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('revenue %')
}, {
fieldLabel : 'Growth %',
name : 'growth %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('growth %')
}, {
fieldLabel : 'Product %',
name : 'product %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('product %')
}, {
fieldLabel : 'Market %',
name : 'market %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('market %')
}]
}, radarChart]
}]
}],
renderTo : bd
});
var gp = Ext.getCmp('company-form');
}); | dataIndex : 'company',
renderer : function(value, metaData, record, rowIndex, colIndex, store, view) {
if (record.get("price") > 50) {
metaData.style = "background-color:green;";
return '<span style="font-weight:bolder;">' + value + '</span>'; | random_line_split |
app.js | Ext.Loader.setConfig({
enabled : true
});
Ext.Loader.setPath('Ext.ux', 'ux');
Ext.require(['Ext.form.*', 'Ext.data.*', 'Ext.chart.*', 'Ext.grid.Panel', 'Ext.layout.container.Column', 'Ext.ux.grid.FiltersFeature']);
Ext.onReady(function() {
//use a renderer for values in the data view.
function perc(value, metaData, record, rowIndex, colIndex, store, view) |
var bd = Ext.getBody(), form = false, rec = false, selectedStoreItem = false,
//performs the highlight of an item in the bar series
selectItem = function(storeItem) {
var name = storeItem.get('company'), series = barChart.series.get(0), i, items, l;
series.highlight = true;
series.unHighlightItem();
series.cleanHighlights();
for ( i = 0, items = series.items, l = items.length; i < l; i++) {
if (name == items[i].storeItem.get('company')) {
selectedStoreItem = items[i].storeItem;
series.highlightItem(items[i]);
break;
}
}
series.highlight = false;
},
//updates a record modified via the form
updateRecord = function(rec) {
var name, series, i, l, items, json = [{
'Name' : 'Price',
'Data' : rec.get('price')
}, {
'Name' : 'Revenue %',
'Data' : rec.get('revenue %')
}, {
'Name' : 'Growth %',
'Data' : rec.get('growth %')
}, {
'Name' : 'Product %',
'Data' : rec.get('product %')
}, {
'Name' : 'Market %',
'Data' : rec.get('market %')
}];
chs.loadData(json);
selectItem(rec);
}, createListeners = function() {
return {
// buffer so we don't refire while the user is still typing
buffer : 200,
change : function(field, newValue, oldValue, listener) {
if (rec && form) {
if (newValue > field.maxValue) {
field.setValue(field.maxValue);
} else {
form.updateRecord(rec);
updateRecord(rec);
}
}
}
};
};
// sample static data for the store
var myData = [['3m Co'], ['Alcoa Inc'], ['Altria Group Inc'], ['American Express Company'], ['American International Group, Inc.'], ['AT&T Inc'], ['Boeing Co.'], ['Caterpillar Inc.'], ['Citigroup, Inc.'], ['E.I. du Pont de Nemours and Company'], ['Exxon Mobil Corp'], ['General Electric Company'], ['General Motors Corporation'], ['Hewlett-Packard Co'], ['Honeywell Intl Inc'], ['Intel Corporation'], ['International Business Machines'], ['Johnson & Johnson'], ['JP Morgan & Chase & Co'], ['McDonald\'s Corporation'], ['Merck & Co., Inc.'], ['Microsoft Corporation'], ['Pfizer Inc'], ['The Coca-Cola Company'], ['The Home Depot, Inc.'], ['The Procter & Gamble Company'], ['United Technologies Corporation'], ['Verizon Communications'], ['Wal-Mart Stores, Inc.']];
for (var i = 0, l = myData.length, rand = Math.random; i < l; i++) {
var data = myData[i];
data[1] = ((rand() * 10000) >> 0) / 100;
data[2] = ((rand() * 10000) >> 0) / 100;
data[3] = ((rand() * 10000) >> 0) / 100;
data[4] = ((rand() * 10000) >> 0) / 100;
data[5] = ((rand() * 10000) >> 0) / 100;
}
//create data store to be shared among the grid and bar series.
var ds = Ext.create('Ext.data.ArrayStore', {
fields : [{
name : 'company'
}, {
name : 'price',
type : 'float'
}, {
name : 'revenue %',
type : 'float'
}, {
name : 'growth %',
type : 'float'
}, {
name : 'product %',
type : 'float'
}, {
name : 'market %',
type : 'float'
}],
data : myData
});
//create radar dataset model.
var chs = Ext.create('Ext.data.JsonStore', {
fields : ['Name', 'Data'],
data : [{
'Name' : 'Price',
'Data' : 100
}, {
'Name' : 'Revenue %',
'Data' : 100
}, {
'Name' : 'Growth %',
'Data' : 100
}, {
'Name' : 'Product %',
'Data' : 100
}, {
'Name' : 'Market %',
'Data' : 100
}]
});
//Radar chart will render information for a selected company in the
//list. Selection can also be done via clicking on the bars in the series.
var radarChart = Ext.create('Ext.chart.Chart', {
margin : '0 0 0 0',
insetPadding : 20,
flex : 1.2,
animate : true,
store : chs,
axes : [{
steps : 5,
type : 'Radial',
position : 'radial',
maximum : 100
}],
series : [{
type : 'radar',
xField : 'Name',
yField : 'Data',
showInLegend : false,
showMarkers : true,
markerConfig : {
radius : 4,
size : 4
},
style : {
fill : 'rgb(194,214,240)',
opacity : 0.5,
'stroke-width' : 0.5
}
}]
});
//create a grid that will list the dataset items.
var gridPanel = Ext.create('Ext.grid.Panel', {
id : 'company-form',
flex : 0.60,
store : ds,
title : 'Company Data',
requires : ['Ext.ux.grid.filter.*'],
features : [{
ftype : 'filters',
local : true
}],
dockedItems : [{
xtype : 'toolbar',
items : [{
xtype : 'combo',
queryMode : 'local',
store : ds,
displayField : 'company',
listeners : {
change : function(me, newVal, oldVal, eOpts) {
console.log(me.getStore());
me.getStore().filter("company", newVal);
}
}
}, {
xtype: 'button',
text: 'reset',
handler: function(me){
console.log("click");
me.up('toolbar').down('combo').getStore().clearFilter();
}
}]
}],
columns : [{
id : 'company',
text : 'Company',
flex : 1,
filterable : true,
sortable : true,
dataIndex : 'company',
renderer : function(value, metaData, record, rowIndex, colIndex, store, view) {
if (record.get("price") > 50) {
metaData.style = "background-color:green;";
return '<span style="font-weight:bolder;">' + value + '</span>';
} else if (record.get("price") < 25) {
metaData.style = "background-color:red;";
return '<span style="font-weight:bolder;">' + value + '</span>';
} else {
metaData.style = "background-color:blue;";
}
return '<span style="font-weight:bolder;color:white;">' + value + '</span>';
}
}, {
text : 'Price',
width : 75,
sortable : true,
dataIndex : 'price',
align : 'right',
renderer : function(value, metaData, record, rowIndex, colIndex, store, view) {
if (value > 50) {
metaData.style = "";
return '<span style="color:green;font-weight:bolder;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
} else if (value < 25) {
return '<span style="color:red;font-weight:bolder;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
}
return '<span style="color:blue;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
}
}, {
text : 'Revenue',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'revenue %',
renderer : perc
}, {
text : 'Growth',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'growth %',
renderer : perc
}, {
text : 'Product',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'product %',
renderer : perc
}, {
text : 'Market',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'market %',
renderer : perc
}],
listeners : {
selectionchange : function(model, records) {
var json, name, i, l, items, series, fields;
if (records[0]) {
rec = records[0];
if (!form) {
form = this.up('form').getForm();
fields = form.getFields();
fields.each(function(field) {
if (field.name != 'company') {
field.setDisabled(false);
}
});
} else {
fields = form.getFields();
}
// prevent change events from firing
fields.each(function(field) {
field.suspendEvents();
});
form.loadRecord(rec);
updateRecord(rec);
fields.each(function(field) {
field.resumeEvents();
});
}
}
}
});
//create a bar series to be at the top of the panel.
var barChart = Ext.create('Ext.chart.Chart', {
flex : 1,
shadow : true,
animate : true,
store : ds,
legend : {
position : 'right'
},
axes : [{
type : 'Numeric',
position : 'left',
fields : ['price', 'revenue %', 'growth %'],
minimum : 0,
hidden : true
}, {
type : 'Category',
position : 'bottom',
fields : ['company'],
label : {
renderer : function(v) {
return Ext.String.ellipsis(v, 15, false);
},
font : '9px Arial',
rotate : {
degrees : 330
}
}
}],
series : [{
type : 'column',
axis : 'left',
highlight : true,
// style : {
// fill : '#456d9f'
// },
highlightCfg : {
fill : '#a2b5ca'
},
label : {
contrast : true,
display : 'insideEnd',
field : 'price',
color : '#000',
orientation : 'vertical',
'text-anchor' : 'middle'
},
listeners : {
'itemmouseup' : function(item) {
var series = barChart.series.get(0), index = Ext.Array.indexOf(series.items, item), selectionModel = gridPanel.getSelectionModel();
selectedStoreItem = item.storeItem;
selectionModel.select(index);
}
},
xField : 'name',
yField : ['price', 'revenue %', 'growth %'],
}, {
type : 'line',
axis : ['left', 'bottom'],
xField : 'company',
yField : 'product %',
style : {
stroke : '#0f22f0',
'stroke-width' : 5,
fill : '#0f22f0',
opacity : 0.2
},
label : {
display : 'none',
field : 'company',
renderer : function(v) {
return v >> 0;
},
'text-anchor' : 'middle'
},
markerConfig : {
radius : 3,
size : 5
}
}, {
type : 'line',
axis : ['left', 'bottom'],
xField : 'company',
yField : 'market %',
style : {
stroke : '#000000',
'stroke-width' : 5,
fill : '#000000',
opacity : 0.2
},
label : {
display : 'none',
field : 'company',
renderer : function(v) {
return v >> 0;
},
'text-anchor' : 'middle'
},
markerConfig : {
radius : 3,
size : 5
}
}]
});
//disable highlighting by default.
barChart.series.get(0).highlight = false;
//add listener to (re)select bar item after sorting or refreshing the dataset.
barChart.addListener('beforerefresh', (function() {
var timer = false;
return function() {
clearTimeout(timer);
if (selectedStoreItem) {
timer = setTimeout(function() {
selectItem(selectedStoreItem);
}, 900);
}
};
})());
var areaChart = Ext.create('Ext.chart.Chart', {
animate : true,
store : ds,
shadow : true,
theme : 'Category1',
legend : {
position : 'top'
},
axes : [{
type : 'Numeric',
position : 'left',
fields : ['price', 'revenue %', 'growth %', 'product %', 'market %'],
title : 'LINE CHART WIHT LEGEND',
label : {
font : '9px Verdana'
},
grid : {
odd : {
opacity : 1,
fill : '#ddd',
stroke : '#bbb',
'stroke-width' : 1
}
},
minimum : 0,
adjustMinimumByMajorUnit : 0
}, {
type : 'Category',
position : 'bottom',
fields : ['company'],
// title : 'COMPANY DETAILS',
grid : true,
label : {
font : '9px Verdana',
rotate : {
degrees : 10
}
}
}],
series : [{
type : 'area',
highlight : false,
axis : 'left',
xField : 'company',
yField : ['price', 'revenue %', 'growth %', 'product %', 'market %'],
style : {
opacity : 0.93
}
}]
});
var pieChart = Ext.create('Ext.chart.Chart', {
animate : true,
store : ds,
shadow : false,
theme : 'Base:gradients',
series : [{
type : 'pie',
angleField : 'price',
showInLegend : true,
tips : {
trackMouse : true,
width : 140,
height : 28,
renderer : function(storeItem, item) {
// calculate and display percentage on hover
var total = 0;
ds.each(function(rec) {
total += rec.get('price');
});
this.setTitle(storeItem.get('company') + ': ' + Math.round(storeItem.get('price') / total * 100) + '%');
}
},
highlight : {
segment : {
margin : 2
}
},
label : {
field : 'company',
display : 'rotate',
contrast : true,
font : '8px Arial'
}
}]
});
/*
* Here is where we create the Form
*/
var gridForm = Ext.create('Ext.form.Panel', {
title : 'Company data',
frame : true,
bodyPadding : 5,
width : 870,
height : 1120,
fieldDefaults : {
labelAlign : 'left',
msgTarget : 'side'
},
layout : {
type : 'vbox',
align : 'stretch'
},
items : [{
height : 200,
layout : 'fit',
margin : '0 0 5 0',
items : [barChart]
}, {
/* MY CUSTOM PANEL*/
xtype : 'container',
layout : {
type : 'hbox',
align : 'stretch'
},
margin : '0 0 5 0',
items : [{
height : 300,
layout : 'fit',
border : true,
flex : 1,
items : [areaChart]
}, {
height : 300,
width : 300,
layout : 'fit',
margin : '0 0 0 5',
border : true,
items : [pieChart]
}]
}, {
layout : {
type : 'hbox',
align : 'stretch'
},
flex : 3,
border : false,
bodyStyle : 'background-color: transparent',
items : [gridPanel, {
flex : 0.4,
layout : {
type : 'vbox',
align : 'stretch'
},
margin : '0 0 0 5',
title : 'Company Details',
items : [{
margin : '5',
xtype : 'fieldset',
flex : 1,
title : 'Company details',
defaults : {
width : 240,
labelWidth : 90,
disabled : true
},
defaultType : 'numberfield',
items : [{
fieldLabel : 'Name',
name : 'company',
xtype : 'textfield'
}, {
fieldLabel : 'Price',
name : 'price',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('price')
}, {
fieldLabel : 'Revenue %',
name : 'revenue %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('revenue %')
}, {
fieldLabel : 'Growth %',
name : 'growth %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('growth %')
}, {
fieldLabel : 'Product %',
name : 'product %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('product %')
}, {
fieldLabel : 'Market %',
name : 'market %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('market %')
}]
}, radarChart]
}]
}],
renderTo : bd
});
var gp = Ext.getCmp('company-form');
}); | {
if (value > 50) {
metaData.style = "";
return '<span style="color:green;font-weight:bolder;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
} else if (value < 25) {
return '<span style="color:red;font-weight:bolder;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
}
return '<span style="color:blue;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
} | identifier_body |
app.js | Ext.Loader.setConfig({
enabled : true
});
Ext.Loader.setPath('Ext.ux', 'ux');
Ext.require(['Ext.form.*', 'Ext.data.*', 'Ext.chart.*', 'Ext.grid.Panel', 'Ext.layout.container.Column', 'Ext.ux.grid.FiltersFeature']);
Ext.onReady(function() {
//use a renderer for values in the data view.
function perc(value, metaData, record, rowIndex, colIndex, store, view) {
if (value > 50) {
metaData.style = "";
return '<span style="color:green;font-weight:bolder;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
} else if (value < 25) {
return '<span style="color:red;font-weight:bolder;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
}
return '<span style="color:blue;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
}
var bd = Ext.getBody(), form = false, rec = false, selectedStoreItem = false,
//performs the highlight of an item in the bar series
selectItem = function(storeItem) {
var name = storeItem.get('company'), series = barChart.series.get(0), i, items, l;
series.highlight = true;
series.unHighlightItem();
series.cleanHighlights();
for ( i = 0, items = series.items, l = items.length; i < l; i++) |
series.highlight = false;
},
//updates a record modified via the form
updateRecord = function(rec) {
var name, series, i, l, items, json = [{
'Name' : 'Price',
'Data' : rec.get('price')
}, {
'Name' : 'Revenue %',
'Data' : rec.get('revenue %')
}, {
'Name' : 'Growth %',
'Data' : rec.get('growth %')
}, {
'Name' : 'Product %',
'Data' : rec.get('product %')
}, {
'Name' : 'Market %',
'Data' : rec.get('market %')
}];
chs.loadData(json);
selectItem(rec);
}, createListeners = function() {
return {
// buffer so we don't refire while the user is still typing
buffer : 200,
change : function(field, newValue, oldValue, listener) {
if (rec && form) {
if (newValue > field.maxValue) {
field.setValue(field.maxValue);
} else {
form.updateRecord(rec);
updateRecord(rec);
}
}
}
};
};
// sample static data for the store
var myData = [['3m Co'], ['Alcoa Inc'], ['Altria Group Inc'], ['American Express Company'], ['American International Group, Inc.'], ['AT&T Inc'], ['Boeing Co.'], ['Caterpillar Inc.'], ['Citigroup, Inc.'], ['E.I. du Pont de Nemours and Company'], ['Exxon Mobil Corp'], ['General Electric Company'], ['General Motors Corporation'], ['Hewlett-Packard Co'], ['Honeywell Intl Inc'], ['Intel Corporation'], ['International Business Machines'], ['Johnson & Johnson'], ['JP Morgan & Chase & Co'], ['McDonald\'s Corporation'], ['Merck & Co., Inc.'], ['Microsoft Corporation'], ['Pfizer Inc'], ['The Coca-Cola Company'], ['The Home Depot, Inc.'], ['The Procter & Gamble Company'], ['United Technologies Corporation'], ['Verizon Communications'], ['Wal-Mart Stores, Inc.']];
for (var i = 0, l = myData.length, rand = Math.random; i < l; i++) {
var data = myData[i];
data[1] = ((rand() * 10000) >> 0) / 100;
data[2] = ((rand() * 10000) >> 0) / 100;
data[3] = ((rand() * 10000) >> 0) / 100;
data[4] = ((rand() * 10000) >> 0) / 100;
data[5] = ((rand() * 10000) >> 0) / 100;
}
//create data store to be shared among the grid and bar series.
var ds = Ext.create('Ext.data.ArrayStore', {
fields : [{
name : 'company'
}, {
name : 'price',
type : 'float'
}, {
name : 'revenue %',
type : 'float'
}, {
name : 'growth %',
type : 'float'
}, {
name : 'product %',
type : 'float'
}, {
name : 'market %',
type : 'float'
}],
data : myData
});
//create radar dataset model.
var chs = Ext.create('Ext.data.JsonStore', {
fields : ['Name', 'Data'],
data : [{
'Name' : 'Price',
'Data' : 100
}, {
'Name' : 'Revenue %',
'Data' : 100
}, {
'Name' : 'Growth %',
'Data' : 100
}, {
'Name' : 'Product %',
'Data' : 100
}, {
'Name' : 'Market %',
'Data' : 100
}]
});
//Radar chart will render information for a selected company in the
//list. Selection can also be done via clicking on the bars in the series.
var radarChart = Ext.create('Ext.chart.Chart', {
margin : '0 0 0 0',
insetPadding : 20,
flex : 1.2,
animate : true,
store : chs,
axes : [{
steps : 5,
type : 'Radial',
position : 'radial',
maximum : 100
}],
series : [{
type : 'radar',
xField : 'Name',
yField : 'Data',
showInLegend : false,
showMarkers : true,
markerConfig : {
radius : 4,
size : 4
},
style : {
fill : 'rgb(194,214,240)',
opacity : 0.5,
'stroke-width' : 0.5
}
}]
});
//create a grid that will list the dataset items.
var gridPanel = Ext.create('Ext.grid.Panel', {
id : 'company-form',
flex : 0.60,
store : ds,
title : 'Company Data',
requires : ['Ext.ux.grid.filter.*'],
features : [{
ftype : 'filters',
local : true
}],
dockedItems : [{
xtype : 'toolbar',
items : [{
xtype : 'combo',
queryMode : 'local',
store : ds,
displayField : 'company',
listeners : {
change : function(me, newVal, oldVal, eOpts) {
console.log(me.getStore());
me.getStore().filter("company", newVal);
}
}
}, {
xtype: 'button',
text: 'reset',
handler: function(me){
console.log("click");
me.up('toolbar').down('combo').getStore().clearFilter();
}
}]
}],
columns : [{
id : 'company',
text : 'Company',
flex : 1,
filterable : true,
sortable : true,
dataIndex : 'company',
renderer : function(value, metaData, record, rowIndex, colIndex, store, view) {
if (record.get("price") > 50) {
metaData.style = "background-color:green;";
return '<span style="font-weight:bolder;">' + value + '</span>';
} else if (record.get("price") < 25) {
metaData.style = "background-color:red;";
return '<span style="font-weight:bolder;">' + value + '</span>';
} else {
metaData.style = "background-color:blue;";
}
return '<span style="font-weight:bolder;color:white;">' + value + '</span>';
}
}, {
text : 'Price',
width : 75,
sortable : true,
dataIndex : 'price',
align : 'right',
renderer : function(value, metaData, record, rowIndex, colIndex, store, view) {
if (value > 50) {
metaData.style = "";
return '<span style="color:green;font-weight:bolder;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
} else if (value < 25) {
return '<span style="color:red;font-weight:bolder;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
}
return '<span style="color:blue;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
}
}, {
text : 'Revenue',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'revenue %',
renderer : perc
}, {
text : 'Growth',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'growth %',
renderer : perc
}, {
text : 'Product',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'product %',
renderer : perc
}, {
text : 'Market',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'market %',
renderer : perc
}],
listeners : {
selectionchange : function(model, records) {
var json, name, i, l, items, series, fields;
if (records[0]) {
rec = records[0];
if (!form) {
form = this.up('form').getForm();
fields = form.getFields();
fields.each(function(field) {
if (field.name != 'company') {
field.setDisabled(false);
}
});
} else {
fields = form.getFields();
}
// prevent change events from firing
fields.each(function(field) {
field.suspendEvents();
});
form.loadRecord(rec);
updateRecord(rec);
fields.each(function(field) {
field.resumeEvents();
});
}
}
}
});
//create a bar series to be at the top of the panel.
var barChart = Ext.create('Ext.chart.Chart', {
flex : 1,
shadow : true,
animate : true,
store : ds,
legend : {
position : 'right'
},
axes : [{
type : 'Numeric',
position : 'left',
fields : ['price', 'revenue %', 'growth %'],
minimum : 0,
hidden : true
}, {
type : 'Category',
position : 'bottom',
fields : ['company'],
label : {
renderer : function(v) {
return Ext.String.ellipsis(v, 15, false);
},
font : '9px Arial',
rotate : {
degrees : 330
}
}
}],
series : [{
type : 'column',
axis : 'left',
highlight : true,
// style : {
// fill : '#456d9f'
// },
highlightCfg : {
fill : '#a2b5ca'
},
label : {
contrast : true,
display : 'insideEnd',
field : 'price',
color : '#000',
orientation : 'vertical',
'text-anchor' : 'middle'
},
listeners : {
'itemmouseup' : function(item) {
var series = barChart.series.get(0), index = Ext.Array.indexOf(series.items, item), selectionModel = gridPanel.getSelectionModel();
selectedStoreItem = item.storeItem;
selectionModel.select(index);
}
},
xField : 'name',
yField : ['price', 'revenue %', 'growth %'],
}, {
type : 'line',
axis : ['left', 'bottom'],
xField : 'company',
yField : 'product %',
style : {
stroke : '#0f22f0',
'stroke-width' : 5,
fill : '#0f22f0',
opacity : 0.2
},
label : {
display : 'none',
field : 'company',
renderer : function(v) {
return v >> 0;
},
'text-anchor' : 'middle'
},
markerConfig : {
radius : 3,
size : 5
}
}, {
type : 'line',
axis : ['left', 'bottom'],
xField : 'company',
yField : 'market %',
style : {
stroke : '#000000',
'stroke-width' : 5,
fill : '#000000',
opacity : 0.2
},
label : {
display : 'none',
field : 'company',
renderer : function(v) {
return v >> 0;
},
'text-anchor' : 'middle'
},
markerConfig : {
radius : 3,
size : 5
}
}]
});
//disable highlighting by default.
barChart.series.get(0).highlight = false;
//add listener to (re)select bar item after sorting or refreshing the dataset.
barChart.addListener('beforerefresh', (function() {
var timer = false;
return function() {
clearTimeout(timer);
if (selectedStoreItem) {
timer = setTimeout(function() {
selectItem(selectedStoreItem);
}, 900);
}
};
})());
var areaChart = Ext.create('Ext.chart.Chart', {
animate : true,
store : ds,
shadow : true,
theme : 'Category1',
legend : {
position : 'top'
},
axes : [{
type : 'Numeric',
position : 'left',
fields : ['price', 'revenue %', 'growth %', 'product %', 'market %'],
title : 'LINE CHART WIHT LEGEND',
label : {
font : '9px Verdana'
},
grid : {
odd : {
opacity : 1,
fill : '#ddd',
stroke : '#bbb',
'stroke-width' : 1
}
},
minimum : 0,
adjustMinimumByMajorUnit : 0
}, {
type : 'Category',
position : 'bottom',
fields : ['company'],
// title : 'COMPANY DETAILS',
grid : true,
label : {
font : '9px Verdana',
rotate : {
degrees : 10
}
}
}],
series : [{
type : 'area',
highlight : false,
axis : 'left',
xField : 'company',
yField : ['price', 'revenue %', 'growth %', 'product %', 'market %'],
style : {
opacity : 0.93
}
}]
});
var pieChart = Ext.create('Ext.chart.Chart', {
animate : true,
store : ds,
shadow : false,
theme : 'Base:gradients',
series : [{
type : 'pie',
angleField : 'price',
showInLegend : true,
tips : {
trackMouse : true,
width : 140,
height : 28,
renderer : function(storeItem, item) {
// calculate and display percentage on hover
var total = 0;
ds.each(function(rec) {
total += rec.get('price');
});
this.setTitle(storeItem.get('company') + ': ' + Math.round(storeItem.get('price') / total * 100) + '%');
}
},
highlight : {
segment : {
margin : 2
}
},
label : {
field : 'company',
display : 'rotate',
contrast : true,
font : '8px Arial'
}
}]
});
/*
* Here is where we create the Form
*/
var gridForm = Ext.create('Ext.form.Panel', {
title : 'Company data',
frame : true,
bodyPadding : 5,
width : 870,
height : 1120,
fieldDefaults : {
labelAlign : 'left',
msgTarget : 'side'
},
layout : {
type : 'vbox',
align : 'stretch'
},
items : [{
height : 200,
layout : 'fit',
margin : '0 0 5 0',
items : [barChart]
}, {
/* MY CUSTOM PANEL*/
xtype : 'container',
layout : {
type : 'hbox',
align : 'stretch'
},
margin : '0 0 5 0',
items : [{
height : 300,
layout : 'fit',
border : true,
flex : 1,
items : [areaChart]
}, {
height : 300,
width : 300,
layout : 'fit',
margin : '0 0 0 5',
border : true,
items : [pieChart]
}]
}, {
layout : {
type : 'hbox',
align : 'stretch'
},
flex : 3,
border : false,
bodyStyle : 'background-color: transparent',
items : [gridPanel, {
flex : 0.4,
layout : {
type : 'vbox',
align : 'stretch'
},
margin : '0 0 0 5',
title : 'Company Details',
items : [{
margin : '5',
xtype : 'fieldset',
flex : 1,
title : 'Company details',
defaults : {
width : 240,
labelWidth : 90,
disabled : true
},
defaultType : 'numberfield',
items : [{
fieldLabel : 'Name',
name : 'company',
xtype : 'textfield'
}, {
fieldLabel : 'Price',
name : 'price',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('price')
}, {
fieldLabel : 'Revenue %',
name : 'revenue %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('revenue %')
}, {
fieldLabel : 'Growth %',
name : 'growth %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('growth %')
}, {
fieldLabel : 'Product %',
name : 'product %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('product %')
}, {
fieldLabel : 'Market %',
name : 'market %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('market %')
}]
}, radarChart]
}]
}],
renderTo : bd
});
var gp = Ext.getCmp('company-form');
}); | {
if (name == items[i].storeItem.get('company')) {
selectedStoreItem = items[i].storeItem;
series.highlightItem(items[i]);
break;
}
} | conditional_block |
app.js | Ext.Loader.setConfig({
enabled : true
});
Ext.Loader.setPath('Ext.ux', 'ux');
Ext.require(['Ext.form.*', 'Ext.data.*', 'Ext.chart.*', 'Ext.grid.Panel', 'Ext.layout.container.Column', 'Ext.ux.grid.FiltersFeature']);
Ext.onReady(function() {
//use a renderer for values in the data view.
function | (value, metaData, record, rowIndex, colIndex, store, view) {
if (value > 50) {
metaData.style = "";
return '<span style="color:green;font-weight:bolder;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
} else if (value < 25) {
return '<span style="color:red;font-weight:bolder;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
}
return '<span style="color:blue;">' + Ext.util.Format.number(value, '0.00 %') + '</span>';
}
var bd = Ext.getBody(), form = false, rec = false, selectedStoreItem = false,
//performs the highlight of an item in the bar series
selectItem = function(storeItem) {
var name = storeItem.get('company'), series = barChart.series.get(0), i, items, l;
series.highlight = true;
series.unHighlightItem();
series.cleanHighlights();
for ( i = 0, items = series.items, l = items.length; i < l; i++) {
if (name == items[i].storeItem.get('company')) {
selectedStoreItem = items[i].storeItem;
series.highlightItem(items[i]);
break;
}
}
series.highlight = false;
},
//updates a record modified via the form
updateRecord = function(rec) {
var name, series, i, l, items, json = [{
'Name' : 'Price',
'Data' : rec.get('price')
}, {
'Name' : 'Revenue %',
'Data' : rec.get('revenue %')
}, {
'Name' : 'Growth %',
'Data' : rec.get('growth %')
}, {
'Name' : 'Product %',
'Data' : rec.get('product %')
}, {
'Name' : 'Market %',
'Data' : rec.get('market %')
}];
chs.loadData(json);
selectItem(rec);
}, createListeners = function() {
return {
// buffer so we don't refire while the user is still typing
buffer : 200,
change : function(field, newValue, oldValue, listener) {
if (rec && form) {
if (newValue > field.maxValue) {
field.setValue(field.maxValue);
} else {
form.updateRecord(rec);
updateRecord(rec);
}
}
}
};
};
// sample static data for the store
var myData = [['3m Co'], ['Alcoa Inc'], ['Altria Group Inc'], ['American Express Company'], ['American International Group, Inc.'], ['AT&T Inc'], ['Boeing Co.'], ['Caterpillar Inc.'], ['Citigroup, Inc.'], ['E.I. du Pont de Nemours and Company'], ['Exxon Mobil Corp'], ['General Electric Company'], ['General Motors Corporation'], ['Hewlett-Packard Co'], ['Honeywell Intl Inc'], ['Intel Corporation'], ['International Business Machines'], ['Johnson & Johnson'], ['JP Morgan & Chase & Co'], ['McDonald\'s Corporation'], ['Merck & Co., Inc.'], ['Microsoft Corporation'], ['Pfizer Inc'], ['The Coca-Cola Company'], ['The Home Depot, Inc.'], ['The Procter & Gamble Company'], ['United Technologies Corporation'], ['Verizon Communications'], ['Wal-Mart Stores, Inc.']];
for (var i = 0, l = myData.length, rand = Math.random; i < l; i++) {
var data = myData[i];
data[1] = ((rand() * 10000) >> 0) / 100;
data[2] = ((rand() * 10000) >> 0) / 100;
data[3] = ((rand() * 10000) >> 0) / 100;
data[4] = ((rand() * 10000) >> 0) / 100;
data[5] = ((rand() * 10000) >> 0) / 100;
}
//create data store to be shared among the grid and bar series.
var ds = Ext.create('Ext.data.ArrayStore', {
fields : [{
name : 'company'
}, {
name : 'price',
type : 'float'
}, {
name : 'revenue %',
type : 'float'
}, {
name : 'growth %',
type : 'float'
}, {
name : 'product %',
type : 'float'
}, {
name : 'market %',
type : 'float'
}],
data : myData
});
//create radar dataset model.
var chs = Ext.create('Ext.data.JsonStore', {
fields : ['Name', 'Data'],
data : [{
'Name' : 'Price',
'Data' : 100
}, {
'Name' : 'Revenue %',
'Data' : 100
}, {
'Name' : 'Growth %',
'Data' : 100
}, {
'Name' : 'Product %',
'Data' : 100
}, {
'Name' : 'Market %',
'Data' : 100
}]
});
//Radar chart will render information for a selected company in the
//list. Selection can also be done via clicking on the bars in the series.
var radarChart = Ext.create('Ext.chart.Chart', {
margin : '0 0 0 0',
insetPadding : 20,
flex : 1.2,
animate : true,
store : chs,
axes : [{
steps : 5,
type : 'Radial',
position : 'radial',
maximum : 100
}],
series : [{
type : 'radar',
xField : 'Name',
yField : 'Data',
showInLegend : false,
showMarkers : true,
markerConfig : {
radius : 4,
size : 4
},
style : {
fill : 'rgb(194,214,240)',
opacity : 0.5,
'stroke-width' : 0.5
}
}]
});
//create a grid that will list the dataset items.
var gridPanel = Ext.create('Ext.grid.Panel', {
id : 'company-form',
flex : 0.60,
store : ds,
title : 'Company Data',
requires : ['Ext.ux.grid.filter.*'],
features : [{
ftype : 'filters',
local : true
}],
dockedItems : [{
xtype : 'toolbar',
items : [{
xtype : 'combo',
queryMode : 'local',
store : ds,
displayField : 'company',
listeners : {
change : function(me, newVal, oldVal, eOpts) {
console.log(me.getStore());
me.getStore().filter("company", newVal);
}
}
}, {
xtype: 'button',
text: 'reset',
handler: function(me){
console.log("click");
me.up('toolbar').down('combo').getStore().clearFilter();
}
}]
}],
columns : [{
id : 'company',
text : 'Company',
flex : 1,
filterable : true,
sortable : true,
dataIndex : 'company',
renderer : function(value, metaData, record, rowIndex, colIndex, store, view) {
if (record.get("price") > 50) {
metaData.style = "background-color:green;";
return '<span style="font-weight:bolder;">' + value + '</span>';
} else if (record.get("price") < 25) {
metaData.style = "background-color:red;";
return '<span style="font-weight:bolder;">' + value + '</span>';
} else {
metaData.style = "background-color:blue;";
}
return '<span style="font-weight:bolder;color:white;">' + value + '</span>';
}
}, {
text : 'Price',
width : 75,
sortable : true,
dataIndex : 'price',
align : 'right',
renderer : function(value, metaData, record, rowIndex, colIndex, store, view) {
if (value > 50) {
metaData.style = "";
return '<span style="color:green;font-weight:bolder;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
} else if (value < 25) {
return '<span style="color:red;font-weight:bolder;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
}
return '<span style="color:blue;">' + Ext.util.Format.currency(value, ' $', 2, true) + '</span>';
}
}, {
text : 'Revenue',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'revenue %',
renderer : perc
}, {
text : 'Growth',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'growth %',
renderer : perc
}, {
text : 'Product',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'product %',
renderer : perc
}, {
text : 'Market',
width : 75,
sortable : true,
align : 'right',
dataIndex : 'market %',
renderer : perc
}],
listeners : {
selectionchange : function(model, records) {
var json, name, i, l, items, series, fields;
if (records[0]) {
rec = records[0];
if (!form) {
form = this.up('form').getForm();
fields = form.getFields();
fields.each(function(field) {
if (field.name != 'company') {
field.setDisabled(false);
}
});
} else {
fields = form.getFields();
}
// prevent change events from firing
fields.each(function(field) {
field.suspendEvents();
});
form.loadRecord(rec);
updateRecord(rec);
fields.each(function(field) {
field.resumeEvents();
});
}
}
}
});
//create a bar series to be at the top of the panel.
var barChart = Ext.create('Ext.chart.Chart', {
flex : 1,
shadow : true,
animate : true,
store : ds,
legend : {
position : 'right'
},
axes : [{
type : 'Numeric',
position : 'left',
fields : ['price', 'revenue %', 'growth %'],
minimum : 0,
hidden : true
}, {
type : 'Category',
position : 'bottom',
fields : ['company'],
label : {
renderer : function(v) {
return Ext.String.ellipsis(v, 15, false);
},
font : '9px Arial',
rotate : {
degrees : 330
}
}
}],
series : [{
type : 'column',
axis : 'left',
highlight : true,
// style : {
// fill : '#456d9f'
// },
highlightCfg : {
fill : '#a2b5ca'
},
label : {
contrast : true,
display : 'insideEnd',
field : 'price',
color : '#000',
orientation : 'vertical',
'text-anchor' : 'middle'
},
listeners : {
'itemmouseup' : function(item) {
var series = barChart.series.get(0), index = Ext.Array.indexOf(series.items, item), selectionModel = gridPanel.getSelectionModel();
selectedStoreItem = item.storeItem;
selectionModel.select(index);
}
},
xField : 'name',
yField : ['price', 'revenue %', 'growth %'],
}, {
type : 'line',
axis : ['left', 'bottom'],
xField : 'company',
yField : 'product %',
style : {
stroke : '#0f22f0',
'stroke-width' : 5,
fill : '#0f22f0',
opacity : 0.2
},
label : {
display : 'none',
field : 'company',
renderer : function(v) {
return v >> 0;
},
'text-anchor' : 'middle'
},
markerConfig : {
radius : 3,
size : 5
}
}, {
type : 'line',
axis : ['left', 'bottom'],
xField : 'company',
yField : 'market %',
style : {
stroke : '#000000',
'stroke-width' : 5,
fill : '#000000',
opacity : 0.2
},
label : {
display : 'none',
field : 'company',
renderer : function(v) {
return v >> 0;
},
'text-anchor' : 'middle'
},
markerConfig : {
radius : 3,
size : 5
}
}]
});
//disable highlighting by default.
barChart.series.get(0).highlight = false;
//add listener to (re)select bar item after sorting or refreshing the dataset.
barChart.addListener('beforerefresh', (function() {
var timer = false;
return function() {
clearTimeout(timer);
if (selectedStoreItem) {
timer = setTimeout(function() {
selectItem(selectedStoreItem);
}, 900);
}
};
})());
var areaChart = Ext.create('Ext.chart.Chart', {
animate : true,
store : ds,
shadow : true,
theme : 'Category1',
legend : {
position : 'top'
},
axes : [{
type : 'Numeric',
position : 'left',
fields : ['price', 'revenue %', 'growth %', 'product %', 'market %'],
title : 'LINE CHART WIHT LEGEND',
label : {
font : '9px Verdana'
},
grid : {
odd : {
opacity : 1,
fill : '#ddd',
stroke : '#bbb',
'stroke-width' : 1
}
},
minimum : 0,
adjustMinimumByMajorUnit : 0
}, {
type : 'Category',
position : 'bottom',
fields : ['company'],
// title : 'COMPANY DETAILS',
grid : true,
label : {
font : '9px Verdana',
rotate : {
degrees : 10
}
}
}],
series : [{
type : 'area',
highlight : false,
axis : 'left',
xField : 'company',
yField : ['price', 'revenue %', 'growth %', 'product %', 'market %'],
style : {
opacity : 0.93
}
}]
});
var pieChart = Ext.create('Ext.chart.Chart', {
animate : true,
store : ds,
shadow : false,
theme : 'Base:gradients',
series : [{
type : 'pie',
angleField : 'price',
showInLegend : true,
tips : {
trackMouse : true,
width : 140,
height : 28,
renderer : function(storeItem, item) {
// calculate and display percentage on hover
var total = 0;
ds.each(function(rec) {
total += rec.get('price');
});
this.setTitle(storeItem.get('company') + ': ' + Math.round(storeItem.get('price') / total * 100) + '%');
}
},
highlight : {
segment : {
margin : 2
}
},
label : {
field : 'company',
display : 'rotate',
contrast : true,
font : '8px Arial'
}
}]
});
/*
* Here is where we create the Form
*/
var gridForm = Ext.create('Ext.form.Panel', {
title : 'Company data',
frame : true,
bodyPadding : 5,
width : 870,
height : 1120,
fieldDefaults : {
labelAlign : 'left',
msgTarget : 'side'
},
layout : {
type : 'vbox',
align : 'stretch'
},
items : [{
height : 200,
layout : 'fit',
margin : '0 0 5 0',
items : [barChart]
}, {
/* MY CUSTOM PANEL*/
xtype : 'container',
layout : {
type : 'hbox',
align : 'stretch'
},
margin : '0 0 5 0',
items : [{
height : 300,
layout : 'fit',
border : true,
flex : 1,
items : [areaChart]
}, {
height : 300,
width : 300,
layout : 'fit',
margin : '0 0 0 5',
border : true,
items : [pieChart]
}]
}, {
layout : {
type : 'hbox',
align : 'stretch'
},
flex : 3,
border : false,
bodyStyle : 'background-color: transparent',
items : [gridPanel, {
flex : 0.4,
layout : {
type : 'vbox',
align : 'stretch'
},
margin : '0 0 0 5',
title : 'Company Details',
items : [{
margin : '5',
xtype : 'fieldset',
flex : 1,
title : 'Company details',
defaults : {
width : 240,
labelWidth : 90,
disabled : true
},
defaultType : 'numberfield',
items : [{
fieldLabel : 'Name',
name : 'company',
xtype : 'textfield'
}, {
fieldLabel : 'Price',
name : 'price',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('price')
}, {
fieldLabel : 'Revenue %',
name : 'revenue %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('revenue %')
}, {
fieldLabel : 'Growth %',
name : 'growth %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('growth %')
}, {
fieldLabel : 'Product %',
name : 'product %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('product %')
}, {
fieldLabel : 'Market %',
name : 'market %',
maxValue : 100,
minValue : 0,
enforceMaxLength : true,
maxLength : 5,
listeners : createListeners('market %')
}]
}, radarChart]
}]
}],
renderTo : bd
});
var gp = Ext.getCmp('company-form');
}); | perc | identifier_name |
voice_recognition.py | #!/usr/bin/env python
import rospy
import sys
import alsaaudio, wave
import numpy as np
import psw
import gapi
import commands
from ros_mary_tts.srv import *
import baxter_interface
from baxter_interface import CHECK_VERSION
from copy import deepcopy
from std_msgs.msg import (
Empty,
Bool,
)
from baxter_core_msgs.msg import (
CollisionAvoidanceState,
)
from baxter_demos.msg import obj_hypotheses
def Tuck_arms(tuck):
#tuck_group.add_argument("-t", "--tuck", dest="tuck",
#tuck_group.add_argument("-u", "--untuck", dest="untuck",
rospy.loginfo("Initializing node... ")
#rospy.init_node("rsdk_tuck_arms")
rospy.loginfo("%sucking arms" % ("T" if tuck else "Unt",))
tucker = Tuck(tuck)
#rospy.on_shutdown(tucker.clean_shutdown)
tucker.supervised_tuck()
rospy.loginfo("Finished tuck")
#------------------------------------------------------------------#
class Tuck(object):
def __init__(self, tuck_cmd):
self._done = False
self._limbs = ('left', 'right')
self._arms = {
'left': baxter_interface.Limb('left'),
'right': baxter_interface.Limb('right'),
}
self._tuck = tuck_cmd
self._tuck_rate = rospy.Rate(20.0) # Hz
self._tuck_threshold = 0.2 # radians
self._peak_angle = -1.6 # radians
self._arm_state = {
'tuck': {'left': 'none', 'right': 'none'},
'collide': {'left': False, 'right': False},
'flipped': {'left': False, 'right': False}
}
self._joint_moves = {
'tuck': {
'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0],
'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0]
},
'untuck': {
'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50],
'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50]
}
}
self._collide_lsub = rospy.Subscriber(
'robot/limb/left/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'left')
self._collide_rsub = rospy.Subscriber(
'robot/limb/right/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'right')
self._disable_pub = {
'left': rospy.Publisher(
'robot/limb/left/suppress_collision_avoidance',
Empty, queue_size=10),
'right': rospy.Publisher(
'robot/limb/right/suppress_collision_avoidance',
Empty, queue_size=10)
}
self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
self._enable_pub = rospy.Publisher('robot/set_super_enable',
Bool, queue_size=10)
def _update_collision(self, data, limb):
self._arm_state['collide'][limb] = len(data.collision_object) > 0
self._check_arm_state()
def _check_arm_state(self):
"""
Check for goals and behind collision field.
If s1 joint is over the peak, collision will need to be disabled
to get the arm around the head-arm collision force-field.
"""
diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold
for limb in self._limbs:
angles = [self._arms[limb].joint_angle(joint)
for joint in self._arms[limb].joint_names()]
# Check if in a goal position
untuck_goal = map(diff_check, angles,
self._joint_moves['untuck'][limb])
tuck_goal = map(diff_check, angles[0:2],
self._joint_moves['tuck'][limb][0:2])
if all(untuck_goal):
self._arm_state['tuck'][limb] = 'untuck'
elif all(tuck_goal):
self._arm_state['tuck'][limb] = 'tuck'
else:
self._arm_state['tuck'][limb] = 'none'
# Check if shoulder is flipped over peak
self._arm_state['flipped'][limb] = (
self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle)
def _prepare_to_tuck(self):
# If arms are in "tucked" state, disable collision avoidance
# before enabling robot, to avoid arm jerking from "force-field".
head = baxter_interface.Head()
start_disabled = not self._rs.state().enabled
at_goal = lambda: (abs(head.pan()) <=
baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE)
rospy.loginfo("Moving head to neutral position")
while not at_goal() and not rospy.is_shutdown():
if start_disabled:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
if not self._rs.state().enabled:
self._enable_pub.publish(True)
head.set_pan(0.0, 50.0, timeout=0)
self._tuck_rate.sleep()
if start_disabled:
while self._rs.state().enabled == True and not rospy.is_shutdown():
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
def _move_to(self, tuck, disabled):
if any(disabled.values()):
[pub.publish(Empty()) for pub in self._disable_pub.values()]
while (any(self._arm_state['tuck'][limb] != goal
for limb, goal in tuck.viewitems())
and not rospy.is_shutdown()):
if self._rs.state().enabled == False:
self._enable_pub.publish(True)
for limb in self._limbs:
if disabled[limb]:
self._disable_pub[limb].publish(Empty())
if limb in tuck:
self._arms[limb].set_joint_positions(dict(zip(
self._arms[limb].joint_names(),
self._joint_moves[tuck[limb]][limb])))
self._check_arm_state()
self._tuck_rate.sleep()
if any(self._arm_state['collide'].values()):
self._rs.disable()
return
def supervised_tuck(self):
# Update our starting state to check if arms are tucked
self._prepare_to_tuck()
self._check_arm_state()
# Tuck Arms
if self._tuck == True:
# If arms are already tucked, report this to user and exit.
if all(self._arm_state['tuck'][limb] == 'tuck'
for limb in self._limbs):
rospy.loginfo("Tucking: Arms already in 'Tucked' position.")
self._done = True
return
else:
rospy.loginfo("Tucking: One or more arms not Tucked.")
any_flipped = not all(self._arm_state['flipped'].values())
if any_flipped:
rospy.loginfo(
"Moving to neutral start position with collision %s.",
"on" if any_flipped else "off")
# Move to neutral pose before tucking arms to avoid damage
self._check_arm_state()
actions = dict()
disabled = {'left': True, 'right': True}
for limb in self._limbs:
if not self._arm_state['flipped'][limb]:
actions[limb] = 'untuck'
disabled[limb] = False
self._move_to(actions, disabled)
# Disable collision and Tuck Arms
rospy.loginfo("Tucking: Tucking with collision avoidance off.")
actions = {'left': 'tuck', 'right': 'tuck'}
disabled = {'left': True, 'right': True}
self._move_to(actions, disabled)
self._done = True
return
# Untuck Arms
else:
# If arms are tucked disable collision and untuck arms
if any(self._arm_state['flipped'].values()):
rospy.loginfo("Untucking: One or more arms Tucked;"
" Disabling Collision Avoidance and untucking.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
# If arms already untucked, move to neutral location
else:
rospy.loginfo("Untucking: Arms already Untucked;"
" Moving to neutral position.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
def clean_shutdown(self):
"""Handles ROS shutdown (Ctrl-C) safely."""
if not self._done:
rospy.logwarn('Aborting: Shutting down safely...')
if any(self._arm_state['collide'].values()):
while self._rs.state().enabled != False:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
#------------------------------------------------------------------#
def enable_robot(act):
#rospy.init_node('rsdk_robot_enable')
rs = baxter_interface.RobotEnable(CHECK_VERSION)
if act == 'state':
print rs.state()
elif act == 'enable':
rs.enable()
elif act == 'disable':
rs.disable()
elif act == 'reset':
rs.reset()
elif act == 'stop':
rs.stop()
return 0
#------------------------------------------------------------------#
def speak(x):
rospy.wait_for_service('ros_mary')
try:
add_two_ints = rospy.ServiceProxy('ros_mary',ros_mary)
resp1 = add_two_ints(x)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
#------------------------------------------------------------------#
speech = gapi.Speech('sp')
if len(sys.argv)==2:
if sys.argv[1] in gapi.languages.keys():
speech.lang = gapi.languages[sys.argv[1]]
elif sys.argv[1] in gapi.languages.values():
speech.lang = sys.argv[1]
def handler(fileName):
global speech
translator = gapi.Translator(speech.lang, 'en-uk')
try:
cfileName = psw.convert(fileName)
phrase = speech.getText(cfileName)
import os
os.remove(fileName)
os.remove(cfileName)
all_words = phrase.split(' ')
words = phrase.split(' ')
for i in range(len(words)):
words[i] = str(words[i])
all_words[i] = str(all_words[i])
print all_words[i]
print 'the phrase is:',phrase
if 'wake' in words:
|
elif 'sleep' in words:
speak('Going to sleep!, sir.')
Tuck_arms(True)
elif 'yourself' in words:
speak('Welcome to the school of computing! my name is Lucas. Which stands for. Leeds university cognative artificial system. Researchers here in leeds are teaching me how to become a smarter robot! so that I can help humans in their daily activities! One of the many interesting things I can do is, you can ask me to pick up an object and I will pick it up for you! Please try and ask me to pick something!')
elif 'pick' in words or 'picked' in words:
speak('going to pick up the object')
print 'pick detected'
pub2.publish(all_words,[],[],[],[],[],[],[],[])
elif 'lucas' in words or 'hello' in words:
speak('Hello, sir.')
except Exception, e:
print "Unexpected error:", sys.exc_info()[0], e
return True
pub2 = rospy.Publisher('obj_manipulation_voice', obj_hypotheses, queue_size=1)
rospy.init_node('Voice_recognition')
mic = psw.Microphone()
print 'sampling...'
sample = np.array(mic.sample(200))
print 'done'
speak('Ready.')
mic.listen(handler, sample.mean(), sample.std())
| speak('Ready to work!, sir.')
Tuck_arms(False) | conditional_block |
voice_recognition.py | #!/usr/bin/env python
import rospy
import sys
import alsaaudio, wave
import numpy as np
import psw
import gapi
import commands
from ros_mary_tts.srv import *
import baxter_interface
from baxter_interface import CHECK_VERSION
from copy import deepcopy
from std_msgs.msg import (
Empty,
Bool,
)
from baxter_core_msgs.msg import (
CollisionAvoidanceState,
)
from baxter_demos.msg import obj_hypotheses
def Tuck_arms(tuck):
#tuck_group.add_argument("-t", "--tuck", dest="tuck",
#tuck_group.add_argument("-u", "--untuck", dest="untuck",
rospy.loginfo("Initializing node... ")
#rospy.init_node("rsdk_tuck_arms")
rospy.loginfo("%sucking arms" % ("T" if tuck else "Unt",))
tucker = Tuck(tuck)
#rospy.on_shutdown(tucker.clean_shutdown)
tucker.supervised_tuck()
rospy.loginfo("Finished tuck")
#------------------------------------------------------------------#
class Tuck(object):
def __init__(self, tuck_cmd):
self._done = False
self._limbs = ('left', 'right')
self._arms = {
'left': baxter_interface.Limb('left'),
'right': baxter_interface.Limb('right'),
}
self._tuck = tuck_cmd
self._tuck_rate = rospy.Rate(20.0) # Hz
self._tuck_threshold = 0.2 # radians
self._peak_angle = -1.6 # radians
self._arm_state = {
'tuck': {'left': 'none', 'right': 'none'},
'collide': {'left': False, 'right': False},
'flipped': {'left': False, 'right': False}
}
self._joint_moves = {
'tuck': {
'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0],
'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0]
},
'untuck': {
'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50],
'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50]
}
}
self._collide_lsub = rospy.Subscriber(
'robot/limb/left/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'left')
self._collide_rsub = rospy.Subscriber(
'robot/limb/right/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'right')
self._disable_pub = {
'left': rospy.Publisher(
'robot/limb/left/suppress_collision_avoidance',
Empty, queue_size=10),
'right': rospy.Publisher(
'robot/limb/right/suppress_collision_avoidance',
Empty, queue_size=10)
}
self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
self._enable_pub = rospy.Publisher('robot/set_super_enable',
Bool, queue_size=10)
def _update_collision(self, data, limb):
self._arm_state['collide'][limb] = len(data.collision_object) > 0
self._check_arm_state()
def _check_arm_state(self):
"""
Check for goals and behind collision field.
If s1 joint is over the peak, collision will need to be disabled
to get the arm around the head-arm collision force-field.
"""
diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold
for limb in self._limbs:
angles = [self._arms[limb].joint_angle(joint)
for joint in self._arms[limb].joint_names()]
# Check if in a goal position
untuck_goal = map(diff_check, angles,
self._joint_moves['untuck'][limb])
tuck_goal = map(diff_check, angles[0:2],
self._joint_moves['tuck'][limb][0:2])
if all(untuck_goal):
self._arm_state['tuck'][limb] = 'untuck'
elif all(tuck_goal):
self._arm_state['tuck'][limb] = 'tuck'
else:
self._arm_state['tuck'][limb] = 'none'
# Check if shoulder is flipped over peak
self._arm_state['flipped'][limb] = (
self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle)
def _prepare_to_tuck(self):
# If arms are in "tucked" state, disable collision avoidance
# before enabling robot, to avoid arm jerking from "force-field".
head = baxter_interface.Head()
start_disabled = not self._rs.state().enabled
at_goal = lambda: (abs(head.pan()) <=
baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE)
rospy.loginfo("Moving head to neutral position")
while not at_goal() and not rospy.is_shutdown():
if start_disabled:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
if not self._rs.state().enabled:
self._enable_pub.publish(True)
head.set_pan(0.0, 50.0, timeout=0)
self._tuck_rate.sleep()
if start_disabled:
while self._rs.state().enabled == True and not rospy.is_shutdown():
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
def _move_to(self, tuck, disabled):
if any(disabled.values()):
[pub.publish(Empty()) for pub in self._disable_pub.values()]
while (any(self._arm_state['tuck'][limb] != goal
for limb, goal in tuck.viewitems())
and not rospy.is_shutdown()):
if self._rs.state().enabled == False:
self._enable_pub.publish(True)
for limb in self._limbs:
if disabled[limb]:
self._disable_pub[limb].publish(Empty())
if limb in tuck:
self._arms[limb].set_joint_positions(dict(zip(
self._arms[limb].joint_names(),
self._joint_moves[tuck[limb]][limb])))
self._check_arm_state()
self._tuck_rate.sleep()
if any(self._arm_state['collide'].values()):
self._rs.disable()
return
def supervised_tuck(self):
# Update our starting state to check if arms are tucked
self._prepare_to_tuck()
self._check_arm_state()
# Tuck Arms
if self._tuck == True:
# If arms are already tucked, report this to user and exit.
if all(self._arm_state['tuck'][limb] == 'tuck'
for limb in self._limbs):
rospy.loginfo("Tucking: Arms already in 'Tucked' position.")
self._done = True
return
else:
rospy.loginfo("Tucking: One or more arms not Tucked.")
any_flipped = not all(self._arm_state['flipped'].values())
if any_flipped:
rospy.loginfo(
"Moving to neutral start position with collision %s.",
"on" if any_flipped else "off")
# Move to neutral pose before tucking arms to avoid damage
self._check_arm_state()
actions = dict()
disabled = {'left': True, 'right': True}
for limb in self._limbs:
if not self._arm_state['flipped'][limb]:
actions[limb] = 'untuck'
disabled[limb] = False
self._move_to(actions, disabled)
# Disable collision and Tuck Arms
rospy.loginfo("Tucking: Tucking with collision avoidance off.")
actions = {'left': 'tuck', 'right': 'tuck'}
disabled = {'left': True, 'right': True}
self._move_to(actions, disabled)
self._done = True
return
# Untuck Arms
else:
# If arms are tucked disable collision and untuck arms
if any(self._arm_state['flipped'].values()):
rospy.loginfo("Untucking: One or more arms Tucked;"
" Disabling Collision Avoidance and untucking.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped']) | else:
rospy.loginfo("Untucking: Arms already Untucked;"
" Moving to neutral position.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
def clean_shutdown(self):
"""Handles ROS shutdown (Ctrl-C) safely."""
if not self._done:
rospy.logwarn('Aborting: Shutting down safely...')
if any(self._arm_state['collide'].values()):
while self._rs.state().enabled != False:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
#------------------------------------------------------------------#
def enable_robot(act):
#rospy.init_node('rsdk_robot_enable')
rs = baxter_interface.RobotEnable(CHECK_VERSION)
if act == 'state':
print rs.state()
elif act == 'enable':
rs.enable()
elif act == 'disable':
rs.disable()
elif act == 'reset':
rs.reset()
elif act == 'stop':
rs.stop()
return 0
#------------------------------------------------------------------#
def speak(x):
rospy.wait_for_service('ros_mary')
try:
add_two_ints = rospy.ServiceProxy('ros_mary',ros_mary)
resp1 = add_two_ints(x)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
#------------------------------------------------------------------#
speech = gapi.Speech('sp')
if len(sys.argv)==2:
if sys.argv[1] in gapi.languages.keys():
speech.lang = gapi.languages[sys.argv[1]]
elif sys.argv[1] in gapi.languages.values():
speech.lang = sys.argv[1]
def handler(fileName):
global speech
translator = gapi.Translator(speech.lang, 'en-uk')
try:
cfileName = psw.convert(fileName)
phrase = speech.getText(cfileName)
import os
os.remove(fileName)
os.remove(cfileName)
all_words = phrase.split(' ')
words = phrase.split(' ')
for i in range(len(words)):
words[i] = str(words[i])
all_words[i] = str(all_words[i])
print all_words[i]
print 'the phrase is:',phrase
if 'wake' in words:
speak('Ready to work!, sir.')
Tuck_arms(False)
elif 'sleep' in words:
speak('Going to sleep!, sir.')
Tuck_arms(True)
elif 'yourself' in words:
speak('Welcome to the school of computing! my name is Lucas. Which stands for. Leeds university cognative artificial system. Researchers here in leeds are teaching me how to become a smarter robot! so that I can help humans in their daily activities! One of the many interesting things I can do is, you can ask me to pick up an object and I will pick it up for you! Please try and ask me to pick something!')
elif 'pick' in words or 'picked' in words:
speak('going to pick up the object')
print 'pick detected'
pub2.publish(all_words,[],[],[],[],[],[],[],[])
elif 'lucas' in words or 'hello' in words:
speak('Hello, sir.')
except Exception, e:
print "Unexpected error:", sys.exc_info()[0], e
return True
pub2 = rospy.Publisher('obj_manipulation_voice', obj_hypotheses, queue_size=1)
rospy.init_node('Voice_recognition')
mic = psw.Microphone()
print 'sampling...'
sample = np.array(mic.sample(200))
print 'done'
speak('Ready.')
mic.listen(handler, sample.mean(), sample.std()) | actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
# If arms already untucked, move to neutral location | random_line_split |
voice_recognition.py | #!/usr/bin/env python
import rospy
import sys
import alsaaudio, wave
import numpy as np
import psw
import gapi
import commands
from ros_mary_tts.srv import *
import baxter_interface
from baxter_interface import CHECK_VERSION
from copy import deepcopy
from std_msgs.msg import (
Empty,
Bool,
)
from baxter_core_msgs.msg import (
CollisionAvoidanceState,
)
from baxter_demos.msg import obj_hypotheses
def Tuck_arms(tuck):
#tuck_group.add_argument("-t", "--tuck", dest="tuck",
#tuck_group.add_argument("-u", "--untuck", dest="untuck",
rospy.loginfo("Initializing node... ")
#rospy.init_node("rsdk_tuck_arms")
rospy.loginfo("%sucking arms" % ("T" if tuck else "Unt",))
tucker = Tuck(tuck)
#rospy.on_shutdown(tucker.clean_shutdown)
tucker.supervised_tuck()
rospy.loginfo("Finished tuck")
#------------------------------------------------------------------#
class Tuck(object):
def __init__(self, tuck_cmd):
self._done = False
self._limbs = ('left', 'right')
self._arms = {
'left': baxter_interface.Limb('left'),
'right': baxter_interface.Limb('right'),
}
self._tuck = tuck_cmd
self._tuck_rate = rospy.Rate(20.0) # Hz
self._tuck_threshold = 0.2 # radians
self._peak_angle = -1.6 # radians
self._arm_state = {
'tuck': {'left': 'none', 'right': 'none'},
'collide': {'left': False, 'right': False},
'flipped': {'left': False, 'right': False}
}
self._joint_moves = {
'tuck': {
'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0],
'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0]
},
'untuck': {
'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50],
'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50]
}
}
self._collide_lsub = rospy.Subscriber(
'robot/limb/left/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'left')
self._collide_rsub = rospy.Subscriber(
'robot/limb/right/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'right')
self._disable_pub = {
'left': rospy.Publisher(
'robot/limb/left/suppress_collision_avoidance',
Empty, queue_size=10),
'right': rospy.Publisher(
'robot/limb/right/suppress_collision_avoidance',
Empty, queue_size=10)
}
self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
self._enable_pub = rospy.Publisher('robot/set_super_enable',
Bool, queue_size=10)
def _update_collision(self, data, limb):
|
def _check_arm_state(self):
"""
Check for goals and behind collision field.
If s1 joint is over the peak, collision will need to be disabled
to get the arm around the head-arm collision force-field.
"""
diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold
for limb in self._limbs:
angles = [self._arms[limb].joint_angle(joint)
for joint in self._arms[limb].joint_names()]
# Check if in a goal position
untuck_goal = map(diff_check, angles,
self._joint_moves['untuck'][limb])
tuck_goal = map(diff_check, angles[0:2],
self._joint_moves['tuck'][limb][0:2])
if all(untuck_goal):
self._arm_state['tuck'][limb] = 'untuck'
elif all(tuck_goal):
self._arm_state['tuck'][limb] = 'tuck'
else:
self._arm_state['tuck'][limb] = 'none'
# Check if shoulder is flipped over peak
self._arm_state['flipped'][limb] = (
self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle)
def _prepare_to_tuck(self):
# If arms are in "tucked" state, disable collision avoidance
# before enabling robot, to avoid arm jerking from "force-field".
head = baxter_interface.Head()
start_disabled = not self._rs.state().enabled
at_goal = lambda: (abs(head.pan()) <=
baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE)
rospy.loginfo("Moving head to neutral position")
while not at_goal() and not rospy.is_shutdown():
if start_disabled:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
if not self._rs.state().enabled:
self._enable_pub.publish(True)
head.set_pan(0.0, 50.0, timeout=0)
self._tuck_rate.sleep()
if start_disabled:
while self._rs.state().enabled == True and not rospy.is_shutdown():
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
def _move_to(self, tuck, disabled):
if any(disabled.values()):
[pub.publish(Empty()) for pub in self._disable_pub.values()]
while (any(self._arm_state['tuck'][limb] != goal
for limb, goal in tuck.viewitems())
and not rospy.is_shutdown()):
if self._rs.state().enabled == False:
self._enable_pub.publish(True)
for limb in self._limbs:
if disabled[limb]:
self._disable_pub[limb].publish(Empty())
if limb in tuck:
self._arms[limb].set_joint_positions(dict(zip(
self._arms[limb].joint_names(),
self._joint_moves[tuck[limb]][limb])))
self._check_arm_state()
self._tuck_rate.sleep()
if any(self._arm_state['collide'].values()):
self._rs.disable()
return
def supervised_tuck(self):
# Update our starting state to check if arms are tucked
self._prepare_to_tuck()
self._check_arm_state()
# Tuck Arms
if self._tuck == True:
# If arms are already tucked, report this to user and exit.
if all(self._arm_state['tuck'][limb] == 'tuck'
for limb in self._limbs):
rospy.loginfo("Tucking: Arms already in 'Tucked' position.")
self._done = True
return
else:
rospy.loginfo("Tucking: One or more arms not Tucked.")
any_flipped = not all(self._arm_state['flipped'].values())
if any_flipped:
rospy.loginfo(
"Moving to neutral start position with collision %s.",
"on" if any_flipped else "off")
# Move to neutral pose before tucking arms to avoid damage
self._check_arm_state()
actions = dict()
disabled = {'left': True, 'right': True}
for limb in self._limbs:
if not self._arm_state['flipped'][limb]:
actions[limb] = 'untuck'
disabled[limb] = False
self._move_to(actions, disabled)
# Disable collision and Tuck Arms
rospy.loginfo("Tucking: Tucking with collision avoidance off.")
actions = {'left': 'tuck', 'right': 'tuck'}
disabled = {'left': True, 'right': True}
self._move_to(actions, disabled)
self._done = True
return
# Untuck Arms
else:
# If arms are tucked disable collision and untuck arms
if any(self._arm_state['flipped'].values()):
rospy.loginfo("Untucking: One or more arms Tucked;"
" Disabling Collision Avoidance and untucking.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
# If arms already untucked, move to neutral location
else:
rospy.loginfo("Untucking: Arms already Untucked;"
" Moving to neutral position.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
def clean_shutdown(self):
"""Handles ROS shutdown (Ctrl-C) safely."""
if not self._done:
rospy.logwarn('Aborting: Shutting down safely...')
if any(self._arm_state['collide'].values()):
while self._rs.state().enabled != False:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
#------------------------------------------------------------------#
def enable_robot(act):
#rospy.init_node('rsdk_robot_enable')
rs = baxter_interface.RobotEnable(CHECK_VERSION)
if act == 'state':
print rs.state()
elif act == 'enable':
rs.enable()
elif act == 'disable':
rs.disable()
elif act == 'reset':
rs.reset()
elif act == 'stop':
rs.stop()
return 0
#------------------------------------------------------------------#
def speak(x):
rospy.wait_for_service('ros_mary')
try:
add_two_ints = rospy.ServiceProxy('ros_mary',ros_mary)
resp1 = add_two_ints(x)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
#------------------------------------------------------------------#
speech = gapi.Speech('sp')
if len(sys.argv)==2:
if sys.argv[1] in gapi.languages.keys():
speech.lang = gapi.languages[sys.argv[1]]
elif sys.argv[1] in gapi.languages.values():
speech.lang = sys.argv[1]
def handler(fileName):
global speech
translator = gapi.Translator(speech.lang, 'en-uk')
try:
cfileName = psw.convert(fileName)
phrase = speech.getText(cfileName)
import os
os.remove(fileName)
os.remove(cfileName)
all_words = phrase.split(' ')
words = phrase.split(' ')
for i in range(len(words)):
words[i] = str(words[i])
all_words[i] = str(all_words[i])
print all_words[i]
print 'the phrase is:',phrase
if 'wake' in words:
speak('Ready to work!, sir.')
Tuck_arms(False)
elif 'sleep' in words:
speak('Going to sleep!, sir.')
Tuck_arms(True)
elif 'yourself' in words:
speak('Welcome to the school of computing! my name is Lucas. Which stands for. Leeds university cognative artificial system. Researchers here in leeds are teaching me how to become a smarter robot! so that I can help humans in their daily activities! One of the many interesting things I can do is, you can ask me to pick up an object and I will pick it up for you! Please try and ask me to pick something!')
elif 'pick' in words or 'picked' in words:
speak('going to pick up the object')
print 'pick detected'
pub2.publish(all_words,[],[],[],[],[],[],[],[])
elif 'lucas' in words or 'hello' in words:
speak('Hello, sir.')
except Exception, e:
print "Unexpected error:", sys.exc_info()[0], e
return True
pub2 = rospy.Publisher('obj_manipulation_voice', obj_hypotheses, queue_size=1)
rospy.init_node('Voice_recognition')
mic = psw.Microphone()
print 'sampling...'
sample = np.array(mic.sample(200))
print 'done'
speak('Ready.')
mic.listen(handler, sample.mean(), sample.std())
| self._arm_state['collide'][limb] = len(data.collision_object) > 0
self._check_arm_state() | identifier_body |
voice_recognition.py | #!/usr/bin/env python
import rospy
import sys
import alsaaudio, wave
import numpy as np
import psw
import gapi
import commands
from ros_mary_tts.srv import *
import baxter_interface
from baxter_interface import CHECK_VERSION
from copy import deepcopy
from std_msgs.msg import (
Empty,
Bool,
)
from baxter_core_msgs.msg import (
CollisionAvoidanceState,
)
from baxter_demos.msg import obj_hypotheses
def Tuck_arms(tuck):
#tuck_group.add_argument("-t", "--tuck", dest="tuck",
#tuck_group.add_argument("-u", "--untuck", dest="untuck",
rospy.loginfo("Initializing node... ")
#rospy.init_node("rsdk_tuck_arms")
rospy.loginfo("%sucking arms" % ("T" if tuck else "Unt",))
tucker = Tuck(tuck)
#rospy.on_shutdown(tucker.clean_shutdown)
tucker.supervised_tuck()
rospy.loginfo("Finished tuck")
#------------------------------------------------------------------#
class Tuck(object):
def __init__(self, tuck_cmd):
self._done = False
self._limbs = ('left', 'right')
self._arms = {
'left': baxter_interface.Limb('left'),
'right': baxter_interface.Limb('right'),
}
self._tuck = tuck_cmd
self._tuck_rate = rospy.Rate(20.0) # Hz
self._tuck_threshold = 0.2 # radians
self._peak_angle = -1.6 # radians
self._arm_state = {
'tuck': {'left': 'none', 'right': 'none'},
'collide': {'left': False, 'right': False},
'flipped': {'left': False, 'right': False}
}
self._joint_moves = {
'tuck': {
'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0],
'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0]
},
'untuck': {
'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50],
'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50]
}
}
self._collide_lsub = rospy.Subscriber(
'robot/limb/left/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'left')
self._collide_rsub = rospy.Subscriber(
'robot/limb/right/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'right')
self._disable_pub = {
'left': rospy.Publisher(
'robot/limb/left/suppress_collision_avoidance',
Empty, queue_size=10),
'right': rospy.Publisher(
'robot/limb/right/suppress_collision_avoidance',
Empty, queue_size=10)
}
self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
self._enable_pub = rospy.Publisher('robot/set_super_enable',
Bool, queue_size=10)
def _update_collision(self, data, limb):
self._arm_state['collide'][limb] = len(data.collision_object) > 0
self._check_arm_state()
def _check_arm_state(self):
"""
Check for goals and behind collision field.
If s1 joint is over the peak, collision will need to be disabled
to get the arm around the head-arm collision force-field.
"""
diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold
for limb in self._limbs:
angles = [self._arms[limb].joint_angle(joint)
for joint in self._arms[limb].joint_names()]
# Check if in a goal position
untuck_goal = map(diff_check, angles,
self._joint_moves['untuck'][limb])
tuck_goal = map(diff_check, angles[0:2],
self._joint_moves['tuck'][limb][0:2])
if all(untuck_goal):
self._arm_state['tuck'][limb] = 'untuck'
elif all(tuck_goal):
self._arm_state['tuck'][limb] = 'tuck'
else:
self._arm_state['tuck'][limb] = 'none'
# Check if shoulder is flipped over peak
self._arm_state['flipped'][limb] = (
self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle)
def _prepare_to_tuck(self):
# If arms are in "tucked" state, disable collision avoidance
# before enabling robot, to avoid arm jerking from "force-field".
head = baxter_interface.Head()
start_disabled = not self._rs.state().enabled
at_goal = lambda: (abs(head.pan()) <=
baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE)
rospy.loginfo("Moving head to neutral position")
while not at_goal() and not rospy.is_shutdown():
if start_disabled:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
if not self._rs.state().enabled:
self._enable_pub.publish(True)
head.set_pan(0.0, 50.0, timeout=0)
self._tuck_rate.sleep()
if start_disabled:
while self._rs.state().enabled == True and not rospy.is_shutdown():
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
def _move_to(self, tuck, disabled):
if any(disabled.values()):
[pub.publish(Empty()) for pub in self._disable_pub.values()]
while (any(self._arm_state['tuck'][limb] != goal
for limb, goal in tuck.viewitems())
and not rospy.is_shutdown()):
if self._rs.state().enabled == False:
self._enable_pub.publish(True)
for limb in self._limbs:
if disabled[limb]:
self._disable_pub[limb].publish(Empty())
if limb in tuck:
self._arms[limb].set_joint_positions(dict(zip(
self._arms[limb].joint_names(),
self._joint_moves[tuck[limb]][limb])))
self._check_arm_state()
self._tuck_rate.sleep()
if any(self._arm_state['collide'].values()):
self._rs.disable()
return
def supervised_tuck(self):
# Update our starting state to check if arms are tucked
self._prepare_to_tuck()
self._check_arm_state()
# Tuck Arms
if self._tuck == True:
# If arms are already tucked, report this to user and exit.
if all(self._arm_state['tuck'][limb] == 'tuck'
for limb in self._limbs):
rospy.loginfo("Tucking: Arms already in 'Tucked' position.")
self._done = True
return
else:
rospy.loginfo("Tucking: One or more arms not Tucked.")
any_flipped = not all(self._arm_state['flipped'].values())
if any_flipped:
rospy.loginfo(
"Moving to neutral start position with collision %s.",
"on" if any_flipped else "off")
# Move to neutral pose before tucking arms to avoid damage
self._check_arm_state()
actions = dict()
disabled = {'left': True, 'right': True}
for limb in self._limbs:
if not self._arm_state['flipped'][limb]:
actions[limb] = 'untuck'
disabled[limb] = False
self._move_to(actions, disabled)
# Disable collision and Tuck Arms
rospy.loginfo("Tucking: Tucking with collision avoidance off.")
actions = {'left': 'tuck', 'right': 'tuck'}
disabled = {'left': True, 'right': True}
self._move_to(actions, disabled)
self._done = True
return
# Untuck Arms
else:
# If arms are tucked disable collision and untuck arms
if any(self._arm_state['flipped'].values()):
rospy.loginfo("Untucking: One or more arms Tucked;"
" Disabling Collision Avoidance and untucking.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
# If arms already untucked, move to neutral location
else:
rospy.loginfo("Untucking: Arms already Untucked;"
" Moving to neutral position.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
def clean_shutdown(self):
"""Handles ROS shutdown (Ctrl-C) safely."""
if not self._done:
rospy.logwarn('Aborting: Shutting down safely...')
if any(self._arm_state['collide'].values()):
while self._rs.state().enabled != False:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
#------------------------------------------------------------------#
def enable_robot(act):
#rospy.init_node('rsdk_robot_enable')
rs = baxter_interface.RobotEnable(CHECK_VERSION)
if act == 'state':
print rs.state()
elif act == 'enable':
rs.enable()
elif act == 'disable':
rs.disable()
elif act == 'reset':
rs.reset()
elif act == 'stop':
rs.stop()
return 0
#------------------------------------------------------------------#
def speak(x):
rospy.wait_for_service('ros_mary')
try:
add_two_ints = rospy.ServiceProxy('ros_mary',ros_mary)
resp1 = add_two_ints(x)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
#------------------------------------------------------------------#
speech = gapi.Speech('sp')
if len(sys.argv)==2:
if sys.argv[1] in gapi.languages.keys():
speech.lang = gapi.languages[sys.argv[1]]
elif sys.argv[1] in gapi.languages.values():
speech.lang = sys.argv[1]
def | (fileName):
global speech
translator = gapi.Translator(speech.lang, 'en-uk')
try:
cfileName = psw.convert(fileName)
phrase = speech.getText(cfileName)
import os
os.remove(fileName)
os.remove(cfileName)
all_words = phrase.split(' ')
words = phrase.split(' ')
for i in range(len(words)):
words[i] = str(words[i])
all_words[i] = str(all_words[i])
print all_words[i]
print 'the phrase is:',phrase
if 'wake' in words:
speak('Ready to work!, sir.')
Tuck_arms(False)
elif 'sleep' in words:
speak('Going to sleep!, sir.')
Tuck_arms(True)
elif 'yourself' in words:
speak('Welcome to the school of computing! my name is Lucas. Which stands for. Leeds university cognative artificial system. Researchers here in leeds are teaching me how to become a smarter robot! so that I can help humans in their daily activities! One of the many interesting things I can do is, you can ask me to pick up an object and I will pick it up for you! Please try and ask me to pick something!')
elif 'pick' in words or 'picked' in words:
speak('going to pick up the object')
print 'pick detected'
pub2.publish(all_words,[],[],[],[],[],[],[],[])
elif 'lucas' in words or 'hello' in words:
speak('Hello, sir.')
except Exception, e:
print "Unexpected error:", sys.exc_info()[0], e
return True
pub2 = rospy.Publisher('obj_manipulation_voice', obj_hypotheses, queue_size=1)
rospy.init_node('Voice_recognition')
mic = psw.Microphone()
print 'sampling...'
sample = np.array(mic.sample(200))
print 'done'
speak('Ready.')
mic.listen(handler, sample.mean(), sample.std())
| handler | identifier_name |
_criticizer_base.py | import re
import warnings
from collections import Counter, OrderedDict
from numbers import Number
import numpy as np
import tensorflow as tf
from tqdm import tqdm
from odin.bay import distributions as tfd
from odin.bay.distributions import CombinedDistribution
from odin.bay.vi import utils
from odin.bay.vi.autoencoder.variational_autoencoder import \
VariationalAutoencoder
from odin.bay.vi.data_utils import Factor
from odin.stats import is_discrete
from odin.utils import as_tuple
def prepare_inputs_factors(inputs, latents, factors, verbose):
if inputs is None:
if latents is None:
raise ValueError("Either inputs or latents must be provided")
assert factors is not None, \
"If latents is provided directly, factors must not be None."
latents = tf.nest.flatten(latents)
assert all(isinstance(z, tfd.Distribution) for z in latents), \
("All latents must be instance of Distribution but given: "
f"{[type(z).__name__ for z in latents]}")
### inputs is a tensorflow Dataset, convert everything to numpy
elif isinstance(inputs, tf.data.Dataset):
struct = tf.data.experimental.get_structure(inputs)
if isinstance(struct, dict):
struct = struct['inputs']
struct = tf.nest.flatten(struct)
n_inputs = len(struct)
if verbose:
inputs = tqdm(inputs, desc="Reading data")
if factors is None: # include factors
assert n_inputs >= 2, \
"factors are not included in the dataset: %s" % str(inputs)
x, y = [list() for _ in range((n_inputs - 1))], []
for data in inputs:
if isinstance(data, dict): # this is an ad-hoc hack
data = data['inputs']
for i, j in enumerate(data[:-1]):
x[i].append(j)
y.append(data[-1])
inputs = [tf.concat(i, axis=0).numpy() for i in x]
if n_inputs == 2:
inputs = inputs[0]
factors = tf.concat(y, axis=0).numpy()
else: # factors separated
x = [list() for _ in range(n_inputs)]
for data in inputs:
for i, j in enumerate(tf.nest.flatten(data)):
x[i].append(j)
inputs = [tf.concat(i, axis=0).numpy() for i in x]
if n_inputs == 1:
inputs = inputs[0]
if isinstance(factors, tf.data.Dataset):
if verbose:
factors = tqdm(factors, desc="Reading factors")
factors = tf.concat([i for i in factors], axis=0)
# end the progress
if isinstance(inputs, tqdm):
inputs.clear()
inputs.close()
# post-processing
else:
inputs = tf.nest.flatten(inputs)
assert len(factors.shape) == 2, "factors must be a matrix"
return inputs, latents, factors
class CriticizerBase(object):
def __init__(self,
vae: VariationalAutoencoder,
latent_indices=slice(None),
random_state=1):
super().__init__()
assert isinstance(vae, VariationalAutoencoder), \
"vae must be instance of odin.bay.vi.VariationalAutoencoder, given: %s" \
% str(type(vae))
self._vae = vae
if latent_indices is None:
latent_indices = slice(None)
self._latent_indices = latent_indices
if isinstance(random_state, Number):
random_state = np.random.RandomState(seed=random_state)
# main arguments
self._inputs = None
self._factors = None
self._original_factors = None
self._factor_names = None
self._representations = None
self._reconstructions = None
# concatenated train and test
self._representations_full = None
self._factors_full = None
self._original_factors_full = None
# others
self._rand = random_state
self._is_multi_latents = 0
@property
def is_multi_latents(self):
return self._is_multi_latents
@property
def is_sampled(self):
if self._factors is None or self._representations is None:
return False
return True
def assert_sampled(self):
if not self.is_sampled:
raise RuntimeError("Call the `sample_batch` method to sample mini-batch "
"of ground-truth data and learned representations.")
@property
def inputs(self):
self.assert_sampled()
return self._inputs
@property
def representations_full(self) -> tfd.Distribution:
return self._representations_full
@property
def latents_full(self) -> tfd.Distribution:
return self._representations_full
@property
def factors_full(self) -> tf.Tensor:
return self._factors_full
@property
def original_factors_full(self) -> tf.Tensor:
return self._original_factors_full
@property
def representations(self):
r""" Return the learned latent representations `Distribution`
(i.e. the latent code) for training and testing """
self.assert_sampled()
return self._representations
@property
def latents(self):
r""" Return the learned latent representations `Distribution`
(i.e. the latent code) for training and testing """
self.assert_sampled()
return self._representations
@property
def representations_mean(self):
r""" Return the mean of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [z.mean().numpy() for z in self.representations]
@property
def representations_variance(self):
r""" Return the variance of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [z.variance().numpy() for z in self.representations]
def representations_sample(self, n=()):
r""" Return the mean of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [
z.sample(sample_shape=n, seed=self.randint).numpy()
for z in self.representations
]
@property
def reconstructions(self):
r""" Return the reconstructed `Distributions` of inputs for training and
testing """
self.assert_sampled()
return self._reconstructions
@property
def reconstructions_mean(self):
r""" Return the mean of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.mean().numpy() for j in i] for i in self._reconstructions]
@property
def reconstructions_variance(self):
r""" Return the variance of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.variance().numpy() for j in i] for i in self._reconstructions]
def reconstructions_sample(self, n=()):
r""" Return the mean of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.sample(sample_shape=n, seed=self.randint).numpy()
for j in i]
for i in self._reconstructions]
@property
def original_factors(self):
r""" Return the training and testing original factors, i.e. the factors
before discretizing """
self.assert_sampled()
# the original factors is the same for all samples set
return self._original_factors
@property
def n_factors(self):
return self.factors[0].shape[1]
@property
def n_representations(self):
r""" return the number of latent codes """
return self.representations[0].event_shape[0]
@property
def n_codes(self):
r""" same as `n_representations`, return the number of latent codes """
return self.n_representations
@property
def n_train(self):
r""" Return number of samples for training """
return self.factors[0].shape[0]
@property
def n_test(self):
r""" Return number of samples for testing """
return self.factors[1].shape[0]
@property
def factors(self):
r""" Return the target variable (i.e. the factors of variation) for
training and testing """
self.assert_sampled()
return self._factors
@property
def factor_names(self):
self.assert_sampled()
# the dataset is unchanged, always at 0-th index
return np.array(self._factor_names)
@property
def code_names(self):
return np.array([f"Z{i}" for i in range(self.n_representations)])
@property
def random_state(self):
return self._rand
@property
def randint(self):
return self._rand.randint(1e8)
############## proxy to VAE methods
def index(self, factor_name):
r""" Return the column index of given factor_names within the
factor matrix """
return self._factor_names.index(str(factor_name))
def | (self, inputs, mask=None, sample_shape=()):
r""" Encode inputs to latent codes
Arguments:
inputs : a single Tensor or list of Tensor
Returns:
`tensorflow_probability.Distribution`, q(z|x) the latent distribution
"""
inputs = tf.nest.flatten(inputs)[:len(self._vae.encoder.inputs)]
latents = self._vae.encode(inputs[0] if len(inputs) == 1 else inputs,
training=False,
mask=mask,
sample_shape=sample_shape)
# only support single returned latent variable now
for z in tf.nest.flatten(latents):
assert isinstance(z, tfd.Distribution), \
"The latent code return from `vae.encode` must be instance of " + \
"tensorflow_probability.Distribution, but returned: %s" % \
str(z)
return latents
def decode(self, latents, mask=None, sample_shape=()):
r""" Decode the latents into reconstruction distribution """
outputs = self._vae.decode(latents,
training=False,
mask=mask,
sample_shape=sample_shape)
for o in tf.nest.flatten(outputs):
assert isinstance(o, tfd.Distribution), \
"vae decode method must return reconstruction distribution, but " + \
"returned: %s" % str(o)
return outputs
############## Experiment setup
def traversing(self,
indices=None,
min_val=-1.,
max_val=1.,
num=10,
n_samples=2,
mode='linear'):
r"""
Arguments:
indices : a list of Integer or None. The indices of latent code for
traversing. If None, all latent codes are used.
Return:
numpy.ndarray : traversed latent codes for training and testing,
the shape is `[len(indices) * n_samples * num, n_representations]`
"""
self.assert_sampled()
num = int(num)
n_samples = int(n_samples)
assert num > 1 and n_samples > 0, "num > 1 and n_samples > 0"
# ====== indices ====== #
if indices is None:
indices = list(range(self.n_representations))
else:
indices = [int(i) for i in tf.nest.flatten(indices)]
assert all(i < self.n_factors for i in indices), \
"There are %d factors, but the factor indices are: %s" % \
(self.n_factors, str(indices))
indices = np.array(indices)
# ====== check the mode ====== #
all_mode = ('quantile', 'linear')
mode = str(mode).strip().lower()
assert mode in all_mode, \
"Only support %s, but given mode='%s'" % (str(all_mode), mode)
# ====== helpers ====== #
def _traverse(z):
sampled_indices = self._rand.choice(z.shape[0],
size=int(n_samples),
replace=False)
Zs = []
for i in sampled_indices:
n = len(indices) * num
z_i = np.repeat(np.expand_dims(z[i], 0), n, axis=0)
for j, idx in enumerate(indices):
start = j * num
end = (j + 1) * num
# linear
if mode == 'linear':
z_i[start:end, idx] = np.linspace(min_val, max_val, num)
# Gaussian quantile
elif mode == 'quantile':
base_code = z_i[0, idx]
print(base_code)
exit()
# Gaussian linear
elif mode == '':
raise NotImplementedError
Zs.append(z_i)
Zs = np.concatenate(Zs, axis=0)
return Zs, sampled_indices
# ====== traverse through latent space ====== #
z_train, z_test = self.representations_mean
z_train, train_ids = _traverse(z_train)
z_test, test_ids = _traverse(z_test)
return z_train, z_test
def conditioning(self, known={}, logical_not=False, n_samples=None):
r""" Conditioning the sampled dataset on known factors
Arguments:
known : a mapping from index or name of factor to a callable, the
callable must return a list of boolean indices, which indicates
the samples to be selected
logical_not : a Boolean, if True applying the opposed conditioning
of the known factors
n_samples : an Integer (Optional), maximum number of selected samples.
Return:
a new `Criticizer` with the conditioned data and representations
Example:
```
# conditioning on: (1st-factor > 2) and (2nd-factor == 3)
conditioning({1: lambda x: x > 2, 2: lambda x: x==3})
```
"""
self.assert_sampled()
known = {
int(k) if isinstance(k, Number) else self.index(str(k)): v
for k, v in dict(known).items()
}
assert len(known) > 0 and all(callable(v) for v in known.values()), \
"'known' factors must be mapping from factor index to callable " + \
"but given: %s" % str(known)
# start conditioning
x_train, x_test = self.inputs
f_train, f_test = self.factors
train_ids = np.full(shape=f_train.shape[0], fill_value=True, dtype=np.bool)
test_ids = np.full(shape=f_test.shape[0], fill_value=True, dtype=np.bool)
for f_idx, fn_filter in known.items():
train_ids = np.logical_and(train_ids, fn_filter(f_train[:, f_idx]))
test_ids = np.logical_and(test_ids, fn_filter(f_test[:, f_idx]))
# select n_samples
if n_samples is not None:
n_samples = int(n_samples)
ratio = n_samples / (len(train_ids) + len(test_ids))
train_ids = train_ids[:int(ratio * len(train_ids))]
test_ids = test_ids[:int(ratio * len(test_ids))]
# opposing the conditions
if logical_not:
train_ids = np.logical_not(train_ids)
test_ids = np.logical_not(test_ids)
# add new samples set to stack
o_train, o_test = self.original_factors
x_train = [x[train_ids] for x in x_train]
x_test = [x[test_ids] for x in x_test]
# convert boolean indices to integer
z_train = self.encode(x_train)
z_test = self.encode(x_test)
r_train = self.decode(z_train)
r_test = self.decode(z_test)
if isinstance(z_train, (tuple, list)):
z_train = z_train[self._latent_indices]
z_test = z_test[self._latent_indices]
if self.is_multi_latents:
z_train = CombinedDistribution(z_train, name="LatentsTrain")
z_test = CombinedDistribution(z_test, name="LatentsTest")
# create a new critizer
crt = self.copy()
crt._representations = (\
z_train[0] if isinstance(z_train, (tuple, list)) else z_train,
z_test[0] if isinstance(z_test, (tuple, list)) else z_test)
crt._inputs = (x_train, x_test)
crt._reconstructions = (r_train, r_test)
crt._factors = (f_train[train_ids], f_test[test_ids])
crt._original_factors = (o_train[train_ids], o_test[test_ids])
return crt
def sample_batch(self,
inputs=None,
latents=None,
factors=None,
n_bins=5,
strategy=None,
factor_names=None,
train_percent=0.8,
n_samples=[2000, 1000],
batch_size=64,
verbose=True):
r""" Sample a batch of training and testing for evaluation of VAE
Arguments:
inputs : list of `ndarray` or `tensorflow.data.Dataset`.
Inputs to the model, note all data will be loaded in-memory
latents : list of `Distribution`
distribution of learned representation
factors : a `ndarray` or `tensorflow.data.Dataset`.
a matrix of groundtruth factors, note all data will be loaded in-memory
n_bins : int or array-like, shape (n_features,) (default=5)
The number of bins to produce. Raises ValueError if ``n_bins < 2``.
strategy : {'uniform', 'quantile', 'kmeans', 'gmm'}, (default='quantile')
Strategy used to define the widths of the bins.
`None` - No discretization performed
uniform - All bins in each feature have identical widths.
quantile - All bins in each feature have the same number of points.
kmeans - Values in each bin have the same nearest center of a 1D
k-means cluster.
gmm - using the components (in sorted order of mean) of Gaussian
mixture to label.
factor_names :
train_percent :
n_samples :
batch_size :
Returns:
`Criticizer` with sampled data
"""
from odin.bay.helpers import concat_distributions
inputs, latents, factors = prepare_inputs_factors(inputs,
latents,
factors,
verbose=verbose)
n_samples = as_tuple(n_samples, t=int, N=2)
n_inputs = factors.shape[0]
# ====== split train test ====== #
if inputs is None:
latents = latents[self._latent_indices]
split = int(n_inputs * train_percent)
train_ids = slice(None, split)
test_ids = slice(split, None)
train_latents = [z[train_ids] for z in latents]
test_latents = [z[test_ids] for z in latents]
if len(latents) == 1:
train_latents = train_latents[0]
test_latents = test_latents[0]
else:
self._is_multi_latents = len(latents)
train_latents = CombinedDistribution(train_latents, name="Latents")
test_latents = CombinedDistribution(test_latents, name="Latents")
else:
ids = self.random_state.permutation(n_inputs)
split = int(train_percent * n_inputs)
train_ids, test_ids = ids[:split], ids[split:]
train_inputs = [i[train_ids] for i in inputs]
test_inputs = [i[test_ids] for i in inputs]
# ====== create discretized factors ====== #
f_original = (factors[train_ids], factors[test_ids])
# discretizing the factors
if strategy is not None:
if verbose:
print(f"Discretizing factors: {n_bins} - {strategy}")
factors = utils.discretizing(factors,
n_bins=int(n_bins),
strategy=strategy)
# check for singular factor and ignore it
ids = []
for i, (name, f) in enumerate(zip(factor_names, factors.T)):
c = Counter(f)
if len(c) < 2:
warnings.warn(f"Ignore factor with name '{name}', singular data: {f}")
else:
ids.append(i)
if len(ids) != len(factor_names):
f_original = (f_original[0][:, ids], f_original[1][:, ids])
factor_names = factor_names[ids]
factors = factors[:, ids]
# create the factor class for sampling
train_factors = Factor(factors[train_ids],
factor_names=factor_names,
random_state=self.randint)
test_factors = Factor(factors[test_ids],
factor_names=factor_names,
random_state=self.randint)
# ====== sampling ====== #
def sampling(inputs_, factors_, nsamples, title):
Xs = [list() for _ in range(len(inputs))] # inputs
Ys = [] # factors
Zs = [] # latents
Os = [] # outputs
indices = []
n = 0
if verbose:
prog = tqdm(desc='Sampling %s' % title, total=nsamples)
while n < nsamples:
batch = min(batch_size, nsamples - n, factors_.shape[0])
if verbose:
prog.update(int(batch))
# factors
y, ids = factors_.sample_factors(num=batch, return_indices=True)
indices.append(ids)
Ys.append(y)
# inputs
inps = []
for x, i in zip(Xs, inputs_):
i = i[ids, :]
x.append(i)
inps.append(i)
# latents representation
z = self.encode(inps, sample_shape=())
o = tf.nest.flatten(self.decode(z))
if isinstance(z, (tuple, list)):
z = z[self._latent_indices]
if len(z) == 1:
z = z[0]
else:
self._is_multi_latents = len(z)
Os.append(o)
Zs.append(z)
# update the counter
n += len(y)
# end progress
if verbose:
prog.clear()
prog.close()
# aggregate all data
Xs = [np.concatenate(x, axis=0) for x in Xs]
Ys = np.concatenate(Ys, axis=0)
if self.is_multi_latents:
Zs = CombinedDistribution(
[
concat_distributions(
[z[zi] for z in Zs],
name="Latents%d" % zi,
) for zi in range(self.is_multi_latents)
],
name="Latents",
)
else:
Zs = concat_distributions(Zs, name="Latents")
Os = [
concat_distributions(
[j[i] for j in Os],
name="Output%d" % i,
) for i in range(len(Os[0]))
]
return Xs, Ys, Zs, Os, np.concatenate(indices, axis=0)
# perform sampling
if inputs is not None:
train = sampling(inputs_=train_inputs,
factors_=train_factors,
nsamples=n_samples[0],
title="Train")
test = sampling(inputs_=test_inputs,
factors_=test_factors,
nsamples=n_samples[1],
title="Test ")
ids_train = train[4]
ids_test = test[4]
# assign the variables
self._inputs = (train[0], test[0])
self._factors = (train[1], test[1])
self._representations = (train[2], test[2])
self._reconstructions = (train[3], test[3])
self._original_factors = (f_original[0][ids_train],
f_original[1][ids_test])
else:
self._inputs = (None, None)
self._factors = (train_factors.factors, test_factors.factors)
self._representations = (train_latents, test_latents)
self._reconstructions = (None, None)
self._original_factors = (f_original[0], f_original[1])
self._factor_names = train_factors.factor_names
# concatenated
self._representations_full = concat_distributions(self.representations)
self._factors_full = np.concatenate(self.factors, axis=0)
self._original_factors_full = np.concatenate(self.original_factors, axis=0)
return self
| encode | identifier_name |
_criticizer_base.py | import re
import warnings
from collections import Counter, OrderedDict
from numbers import Number
import numpy as np
import tensorflow as tf
from tqdm import tqdm
from odin.bay import distributions as tfd
from odin.bay.distributions import CombinedDistribution
from odin.bay.vi import utils
from odin.bay.vi.autoencoder.variational_autoencoder import \
VariationalAutoencoder
from odin.bay.vi.data_utils import Factor
from odin.stats import is_discrete
from odin.utils import as_tuple
def prepare_inputs_factors(inputs, latents, factors, verbose):
if inputs is None:
if latents is None:
raise ValueError("Either inputs or latents must be provided")
assert factors is not None, \
"If latents is provided directly, factors must not be None."
latents = tf.nest.flatten(latents)
assert all(isinstance(z, tfd.Distribution) for z in latents), \
("All latents must be instance of Distribution but given: "
f"{[type(z).__name__ for z in latents]}")
### inputs is a tensorflow Dataset, convert everything to numpy
elif isinstance(inputs, tf.data.Dataset):
struct = tf.data.experimental.get_structure(inputs)
if isinstance(struct, dict):
struct = struct['inputs']
struct = tf.nest.flatten(struct)
n_inputs = len(struct)
if verbose:
inputs = tqdm(inputs, desc="Reading data")
if factors is None: # include factors
assert n_inputs >= 2, \
"factors are not included in the dataset: %s" % str(inputs)
x, y = [list() for _ in range((n_inputs - 1))], []
for data in inputs:
if isinstance(data, dict): # this is an ad-hoc hack
data = data['inputs']
for i, j in enumerate(data[:-1]):
x[i].append(j)
y.append(data[-1])
inputs = [tf.concat(i, axis=0).numpy() for i in x]
if n_inputs == 2:
inputs = inputs[0]
factors = tf.concat(y, axis=0).numpy()
else: # factors separated
x = [list() for _ in range(n_inputs)]
for data in inputs:
for i, j in enumerate(tf.nest.flatten(data)):
x[i].append(j)
inputs = [tf.concat(i, axis=0).numpy() for i in x]
if n_inputs == 1:
inputs = inputs[0]
if isinstance(factors, tf.data.Dataset):
if verbose:
factors = tqdm(factors, desc="Reading factors")
factors = tf.concat([i for i in factors], axis=0)
# end the progress
if isinstance(inputs, tqdm):
inputs.clear()
inputs.close()
# post-processing
else:
inputs = tf.nest.flatten(inputs)
assert len(factors.shape) == 2, "factors must be a matrix"
return inputs, latents, factors
class CriticizerBase(object):
def __init__(self,
vae: VariationalAutoencoder,
latent_indices=slice(None),
random_state=1):
super().__init__()
assert isinstance(vae, VariationalAutoencoder), \
"vae must be instance of odin.bay.vi.VariationalAutoencoder, given: %s" \
% str(type(vae))
self._vae = vae
if latent_indices is None:
latent_indices = slice(None)
self._latent_indices = latent_indices
if isinstance(random_state, Number):
random_state = np.random.RandomState(seed=random_state)
# main arguments
self._inputs = None
self._factors = None
self._original_factors = None
self._factor_names = None
self._representations = None
self._reconstructions = None
# concatenated train and test
self._representations_full = None
self._factors_full = None
self._original_factors_full = None
# others
self._rand = random_state
self._is_multi_latents = 0
@property
def is_multi_latents(self):
return self._is_multi_latents
@property
def is_sampled(self):
if self._factors is None or self._representations is None:
return False
return True
def assert_sampled(self):
if not self.is_sampled:
raise RuntimeError("Call the `sample_batch` method to sample mini-batch "
"of ground-truth data and learned representations.")
@property
def inputs(self):
self.assert_sampled()
return self._inputs
@property
def representations_full(self) -> tfd.Distribution:
return self._representations_full
@property
def latents_full(self) -> tfd.Distribution:
return self._representations_full
@property
def factors_full(self) -> tf.Tensor:
return self._factors_full
@property
def original_factors_full(self) -> tf.Tensor:
return self._original_factors_full
@property
def representations(self):
r""" Return the learned latent representations `Distribution`
(i.e. the latent code) for training and testing """
self.assert_sampled()
return self._representations
@property
def latents(self):
r""" Return the learned latent representations `Distribution`
(i.e. the latent code) for training and testing """
self.assert_sampled()
return self._representations
@property
def representations_mean(self):
r""" Return the mean of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [z.mean().numpy() for z in self.representations]
@property
def representations_variance(self):
r""" Return the variance of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [z.variance().numpy() for z in self.representations]
def representations_sample(self, n=()):
r""" Return the mean of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [
z.sample(sample_shape=n, seed=self.randint).numpy()
for z in self.representations
]
@property
def reconstructions(self):
r""" Return the reconstructed `Distributions` of inputs for training and
testing """
self.assert_sampled()
return self._reconstructions
@property
def reconstructions_mean(self):
r""" Return the mean of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.mean().numpy() for j in i] for i in self._reconstructions]
@property
def reconstructions_variance(self):
r""" Return the variance of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.variance().numpy() for j in i] for i in self._reconstructions]
def reconstructions_sample(self, n=()):
r""" Return the mean of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.sample(sample_shape=n, seed=self.randint).numpy()
for j in i]
for i in self._reconstructions]
@property
def original_factors(self):
r""" Return the training and testing original factors, i.e. the factors
before discretizing """
self.assert_sampled()
# the original factors is the same for all samples set
return self._original_factors
@property
def n_factors(self):
return self.factors[0].shape[1]
@property
def n_representations(self):
r""" return the number of latent codes """
return self.representations[0].event_shape[0]
@property
def n_codes(self):
r""" same as `n_representations`, return the number of latent codes """
return self.n_representations
@property
def n_train(self):
r""" Return number of samples for training """
return self.factors[0].shape[0]
@property
def n_test(self):
r""" Return number of samples for testing """
return self.factors[1].shape[0]
@property
def factors(self):
r""" Return the target variable (i.e. the factors of variation) for
training and testing """
self.assert_sampled()
return self._factors
@property
def factor_names(self):
self.assert_sampled()
# the dataset is unchanged, always at 0-th index
return np.array(self._factor_names)
@property
def code_names(self):
return np.array([f"Z{i}" for i in range(self.n_representations)])
@property
def random_state(self):
return self._rand
@property
def randint(self):
return self._rand.randint(1e8)
############## proxy to VAE methods
def index(self, factor_name):
r""" Return the column index of given factor_names within the
factor matrix """
return self._factor_names.index(str(factor_name))
def encode(self, inputs, mask=None, sample_shape=()):
r""" Encode inputs to latent codes
Arguments:
inputs : a single Tensor or list of Tensor
Returns:
`tensorflow_probability.Distribution`, q(z|x) the latent distribution
"""
inputs = tf.nest.flatten(inputs)[:len(self._vae.encoder.inputs)]
latents = self._vae.encode(inputs[0] if len(inputs) == 1 else inputs,
training=False,
mask=mask,
sample_shape=sample_shape) | for z in tf.nest.flatten(latents):
assert isinstance(z, tfd.Distribution), \
"The latent code return from `vae.encode` must be instance of " + \
"tensorflow_probability.Distribution, but returned: %s" % \
str(z)
return latents
def decode(self, latents, mask=None, sample_shape=()):
r""" Decode the latents into reconstruction distribution """
outputs = self._vae.decode(latents,
training=False,
mask=mask,
sample_shape=sample_shape)
for o in tf.nest.flatten(outputs):
assert isinstance(o, tfd.Distribution), \
"vae decode method must return reconstruction distribution, but " + \
"returned: %s" % str(o)
return outputs
############## Experiment setup
def traversing(self,
indices=None,
min_val=-1.,
max_val=1.,
num=10,
n_samples=2,
mode='linear'):
r"""
Arguments:
indices : a list of Integer or None. The indices of latent code for
traversing. If None, all latent codes are used.
Return:
numpy.ndarray : traversed latent codes for training and testing,
the shape is `[len(indices) * n_samples * num, n_representations]`
"""
self.assert_sampled()
num = int(num)
n_samples = int(n_samples)
assert num > 1 and n_samples > 0, "num > 1 and n_samples > 0"
# ====== indices ====== #
if indices is None:
indices = list(range(self.n_representations))
else:
indices = [int(i) for i in tf.nest.flatten(indices)]
assert all(i < self.n_factors for i in indices), \
"There are %d factors, but the factor indices are: %s" % \
(self.n_factors, str(indices))
indices = np.array(indices)
# ====== check the mode ====== #
all_mode = ('quantile', 'linear')
mode = str(mode).strip().lower()
assert mode in all_mode, \
"Only support %s, but given mode='%s'" % (str(all_mode), mode)
# ====== helpers ====== #
def _traverse(z):
sampled_indices = self._rand.choice(z.shape[0],
size=int(n_samples),
replace=False)
Zs = []
for i in sampled_indices:
n = len(indices) * num
z_i = np.repeat(np.expand_dims(z[i], 0), n, axis=0)
for j, idx in enumerate(indices):
start = j * num
end = (j + 1) * num
# linear
if mode == 'linear':
z_i[start:end, idx] = np.linspace(min_val, max_val, num)
# Gaussian quantile
elif mode == 'quantile':
base_code = z_i[0, idx]
print(base_code)
exit()
# Gaussian linear
elif mode == '':
raise NotImplementedError
Zs.append(z_i)
Zs = np.concatenate(Zs, axis=0)
return Zs, sampled_indices
# ====== traverse through latent space ====== #
z_train, z_test = self.representations_mean
z_train, train_ids = _traverse(z_train)
z_test, test_ids = _traverse(z_test)
return z_train, z_test
def conditioning(self, known={}, logical_not=False, n_samples=None):
r""" Conditioning the sampled dataset on known factors
Arguments:
known : a mapping from index or name of factor to a callable, the
callable must return a list of boolean indices, which indicates
the samples to be selected
logical_not : a Boolean, if True applying the opposed conditioning
of the known factors
n_samples : an Integer (Optional), maximum number of selected samples.
Return:
a new `Criticizer` with the conditioned data and representations
Example:
```
# conditioning on: (1st-factor > 2) and (2nd-factor == 3)
conditioning({1: lambda x: x > 2, 2: lambda x: x==3})
```
"""
self.assert_sampled()
known = {
int(k) if isinstance(k, Number) else self.index(str(k)): v
for k, v in dict(known).items()
}
assert len(known) > 0 and all(callable(v) for v in known.values()), \
"'known' factors must be mapping from factor index to callable " + \
"but given: %s" % str(known)
# start conditioning
x_train, x_test = self.inputs
f_train, f_test = self.factors
train_ids = np.full(shape=f_train.shape[0], fill_value=True, dtype=np.bool)
test_ids = np.full(shape=f_test.shape[0], fill_value=True, dtype=np.bool)
for f_idx, fn_filter in known.items():
train_ids = np.logical_and(train_ids, fn_filter(f_train[:, f_idx]))
test_ids = np.logical_and(test_ids, fn_filter(f_test[:, f_idx]))
# select n_samples
if n_samples is not None:
n_samples = int(n_samples)
ratio = n_samples / (len(train_ids) + len(test_ids))
train_ids = train_ids[:int(ratio * len(train_ids))]
test_ids = test_ids[:int(ratio * len(test_ids))]
# opposing the conditions
if logical_not:
train_ids = np.logical_not(train_ids)
test_ids = np.logical_not(test_ids)
# add new samples set to stack
o_train, o_test = self.original_factors
x_train = [x[train_ids] for x in x_train]
x_test = [x[test_ids] for x in x_test]
# convert boolean indices to integer
z_train = self.encode(x_train)
z_test = self.encode(x_test)
r_train = self.decode(z_train)
r_test = self.decode(z_test)
if isinstance(z_train, (tuple, list)):
z_train = z_train[self._latent_indices]
z_test = z_test[self._latent_indices]
if self.is_multi_latents:
z_train = CombinedDistribution(z_train, name="LatentsTrain")
z_test = CombinedDistribution(z_test, name="LatentsTest")
# create a new critizer
crt = self.copy()
crt._representations = (\
z_train[0] if isinstance(z_train, (tuple, list)) else z_train,
z_test[0] if isinstance(z_test, (tuple, list)) else z_test)
crt._inputs = (x_train, x_test)
crt._reconstructions = (r_train, r_test)
crt._factors = (f_train[train_ids], f_test[test_ids])
crt._original_factors = (o_train[train_ids], o_test[test_ids])
return crt
def sample_batch(self,
inputs=None,
latents=None,
factors=None,
n_bins=5,
strategy=None,
factor_names=None,
train_percent=0.8,
n_samples=[2000, 1000],
batch_size=64,
verbose=True):
r""" Sample a batch of training and testing for evaluation of VAE
Arguments:
inputs : list of `ndarray` or `tensorflow.data.Dataset`.
Inputs to the model, note all data will be loaded in-memory
latents : list of `Distribution`
distribution of learned representation
factors : a `ndarray` or `tensorflow.data.Dataset`.
a matrix of groundtruth factors, note all data will be loaded in-memory
n_bins : int or array-like, shape (n_features,) (default=5)
The number of bins to produce. Raises ValueError if ``n_bins < 2``.
strategy : {'uniform', 'quantile', 'kmeans', 'gmm'}, (default='quantile')
Strategy used to define the widths of the bins.
`None` - No discretization performed
uniform - All bins in each feature have identical widths.
quantile - All bins in each feature have the same number of points.
kmeans - Values in each bin have the same nearest center of a 1D
k-means cluster.
gmm - using the components (in sorted order of mean) of Gaussian
mixture to label.
factor_names :
train_percent :
n_samples :
batch_size :
Returns:
`Criticizer` with sampled data
"""
from odin.bay.helpers import concat_distributions
inputs, latents, factors = prepare_inputs_factors(inputs,
latents,
factors,
verbose=verbose)
n_samples = as_tuple(n_samples, t=int, N=2)
n_inputs = factors.shape[0]
# ====== split train test ====== #
if inputs is None:
latents = latents[self._latent_indices]
split = int(n_inputs * train_percent)
train_ids = slice(None, split)
test_ids = slice(split, None)
train_latents = [z[train_ids] for z in latents]
test_latents = [z[test_ids] for z in latents]
if len(latents) == 1:
train_latents = train_latents[0]
test_latents = test_latents[0]
else:
self._is_multi_latents = len(latents)
train_latents = CombinedDistribution(train_latents, name="Latents")
test_latents = CombinedDistribution(test_latents, name="Latents")
else:
ids = self.random_state.permutation(n_inputs)
split = int(train_percent * n_inputs)
train_ids, test_ids = ids[:split], ids[split:]
train_inputs = [i[train_ids] for i in inputs]
test_inputs = [i[test_ids] for i in inputs]
# ====== create discretized factors ====== #
f_original = (factors[train_ids], factors[test_ids])
# discretizing the factors
if strategy is not None:
if verbose:
print(f"Discretizing factors: {n_bins} - {strategy}")
factors = utils.discretizing(factors,
n_bins=int(n_bins),
strategy=strategy)
# check for singular factor and ignore it
ids = []
for i, (name, f) in enumerate(zip(factor_names, factors.T)):
c = Counter(f)
if len(c) < 2:
warnings.warn(f"Ignore factor with name '{name}', singular data: {f}")
else:
ids.append(i)
if len(ids) != len(factor_names):
f_original = (f_original[0][:, ids], f_original[1][:, ids])
factor_names = factor_names[ids]
factors = factors[:, ids]
# create the factor class for sampling
train_factors = Factor(factors[train_ids],
factor_names=factor_names,
random_state=self.randint)
test_factors = Factor(factors[test_ids],
factor_names=factor_names,
random_state=self.randint)
# ====== sampling ====== #
def sampling(inputs_, factors_, nsamples, title):
Xs = [list() for _ in range(len(inputs))] # inputs
Ys = [] # factors
Zs = [] # latents
Os = [] # outputs
indices = []
n = 0
if verbose:
prog = tqdm(desc='Sampling %s' % title, total=nsamples)
while n < nsamples:
batch = min(batch_size, nsamples - n, factors_.shape[0])
if verbose:
prog.update(int(batch))
# factors
y, ids = factors_.sample_factors(num=batch, return_indices=True)
indices.append(ids)
Ys.append(y)
# inputs
inps = []
for x, i in zip(Xs, inputs_):
i = i[ids, :]
x.append(i)
inps.append(i)
# latents representation
z = self.encode(inps, sample_shape=())
o = tf.nest.flatten(self.decode(z))
if isinstance(z, (tuple, list)):
z = z[self._latent_indices]
if len(z) == 1:
z = z[0]
else:
self._is_multi_latents = len(z)
Os.append(o)
Zs.append(z)
# update the counter
n += len(y)
# end progress
if verbose:
prog.clear()
prog.close()
# aggregate all data
Xs = [np.concatenate(x, axis=0) for x in Xs]
Ys = np.concatenate(Ys, axis=0)
if self.is_multi_latents:
Zs = CombinedDistribution(
[
concat_distributions(
[z[zi] for z in Zs],
name="Latents%d" % zi,
) for zi in range(self.is_multi_latents)
],
name="Latents",
)
else:
Zs = concat_distributions(Zs, name="Latents")
Os = [
concat_distributions(
[j[i] for j in Os],
name="Output%d" % i,
) for i in range(len(Os[0]))
]
return Xs, Ys, Zs, Os, np.concatenate(indices, axis=0)
# perform sampling
if inputs is not None:
train = sampling(inputs_=train_inputs,
factors_=train_factors,
nsamples=n_samples[0],
title="Train")
test = sampling(inputs_=test_inputs,
factors_=test_factors,
nsamples=n_samples[1],
title="Test ")
ids_train = train[4]
ids_test = test[4]
# assign the variables
self._inputs = (train[0], test[0])
self._factors = (train[1], test[1])
self._representations = (train[2], test[2])
self._reconstructions = (train[3], test[3])
self._original_factors = (f_original[0][ids_train],
f_original[1][ids_test])
else:
self._inputs = (None, None)
self._factors = (train_factors.factors, test_factors.factors)
self._representations = (train_latents, test_latents)
self._reconstructions = (None, None)
self._original_factors = (f_original[0], f_original[1])
self._factor_names = train_factors.factor_names
# concatenated
self._representations_full = concat_distributions(self.representations)
self._factors_full = np.concatenate(self.factors, axis=0)
self._original_factors_full = np.concatenate(self.original_factors, axis=0)
return self | # only support single returned latent variable now | random_line_split |
_criticizer_base.py | import re
import warnings
from collections import Counter, OrderedDict
from numbers import Number
import numpy as np
import tensorflow as tf
from tqdm import tqdm
from odin.bay import distributions as tfd
from odin.bay.distributions import CombinedDistribution
from odin.bay.vi import utils
from odin.bay.vi.autoencoder.variational_autoencoder import \
VariationalAutoencoder
from odin.bay.vi.data_utils import Factor
from odin.stats import is_discrete
from odin.utils import as_tuple
def prepare_inputs_factors(inputs, latents, factors, verbose):
if inputs is None:
if latents is None:
raise ValueError("Either inputs or latents must be provided")
assert factors is not None, \
"If latents is provided directly, factors must not be None."
latents = tf.nest.flatten(latents)
assert all(isinstance(z, tfd.Distribution) for z in latents), \
("All latents must be instance of Distribution but given: "
f"{[type(z).__name__ for z in latents]}")
### inputs is a tensorflow Dataset, convert everything to numpy
elif isinstance(inputs, tf.data.Dataset):
struct = tf.data.experimental.get_structure(inputs)
if isinstance(struct, dict):
struct = struct['inputs']
struct = tf.nest.flatten(struct)
n_inputs = len(struct)
if verbose:
inputs = tqdm(inputs, desc="Reading data")
if factors is None: # include factors
assert n_inputs >= 2, \
"factors are not included in the dataset: %s" % str(inputs)
x, y = [list() for _ in range((n_inputs - 1))], []
for data in inputs:
if isinstance(data, dict): # this is an ad-hoc hack
data = data['inputs']
for i, j in enumerate(data[:-1]):
x[i].append(j)
y.append(data[-1])
inputs = [tf.concat(i, axis=0).numpy() for i in x]
if n_inputs == 2:
inputs = inputs[0]
factors = tf.concat(y, axis=0).numpy()
else: # factors separated
x = [list() for _ in range(n_inputs)]
for data in inputs:
for i, j in enumerate(tf.nest.flatten(data)):
x[i].append(j)
inputs = [tf.concat(i, axis=0).numpy() for i in x]
if n_inputs == 1:
inputs = inputs[0]
if isinstance(factors, tf.data.Dataset):
if verbose:
factors = tqdm(factors, desc="Reading factors")
factors = tf.concat([i for i in factors], axis=0)
# end the progress
if isinstance(inputs, tqdm):
inputs.clear()
inputs.close()
# post-processing
else:
inputs = tf.nest.flatten(inputs)
assert len(factors.shape) == 2, "factors must be a matrix"
return inputs, latents, factors
class CriticizerBase(object):
def __init__(self,
vae: VariationalAutoencoder,
latent_indices=slice(None),
random_state=1):
super().__init__()
assert isinstance(vae, VariationalAutoencoder), \
"vae must be instance of odin.bay.vi.VariationalAutoencoder, given: %s" \
% str(type(vae))
self._vae = vae
if latent_indices is None:
latent_indices = slice(None)
self._latent_indices = latent_indices
if isinstance(random_state, Number):
random_state = np.random.RandomState(seed=random_state)
# main arguments
self._inputs = None
self._factors = None
self._original_factors = None
self._factor_names = None
self._representations = None
self._reconstructions = None
# concatenated train and test
self._representations_full = None
self._factors_full = None
self._original_factors_full = None
# others
self._rand = random_state
self._is_multi_latents = 0
@property
def is_multi_latents(self):
return self._is_multi_latents
@property
def is_sampled(self):
if self._factors is None or self._representations is None:
return False
return True
def assert_sampled(self):
if not self.is_sampled:
raise RuntimeError("Call the `sample_batch` method to sample mini-batch "
"of ground-truth data and learned representations.")
@property
def inputs(self):
self.assert_sampled()
return self._inputs
@property
def representations_full(self) -> tfd.Distribution:
|
@property
def latents_full(self) -> tfd.Distribution:
return self._representations_full
@property
def factors_full(self) -> tf.Tensor:
return self._factors_full
@property
def original_factors_full(self) -> tf.Tensor:
return self._original_factors_full
@property
def representations(self):
r""" Return the learned latent representations `Distribution`
(i.e. the latent code) for training and testing """
self.assert_sampled()
return self._representations
@property
def latents(self):
r""" Return the learned latent representations `Distribution`
(i.e. the latent code) for training and testing """
self.assert_sampled()
return self._representations
@property
def representations_mean(self):
r""" Return the mean of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [z.mean().numpy() for z in self.representations]
@property
def representations_variance(self):
r""" Return the variance of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [z.variance().numpy() for z in self.representations]
def representations_sample(self, n=()):
r""" Return the mean of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [
z.sample(sample_shape=n, seed=self.randint).numpy()
for z in self.representations
]
@property
def reconstructions(self):
r""" Return the reconstructed `Distributions` of inputs for training and
testing """
self.assert_sampled()
return self._reconstructions
@property
def reconstructions_mean(self):
r""" Return the mean of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.mean().numpy() for j in i] for i in self._reconstructions]
@property
def reconstructions_variance(self):
r""" Return the variance of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.variance().numpy() for j in i] for i in self._reconstructions]
def reconstructions_sample(self, n=()):
r""" Return the mean of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.sample(sample_shape=n, seed=self.randint).numpy()
for j in i]
for i in self._reconstructions]
@property
def original_factors(self):
r""" Return the training and testing original factors, i.e. the factors
before discretizing """
self.assert_sampled()
# the original factors is the same for all samples set
return self._original_factors
@property
def n_factors(self):
return self.factors[0].shape[1]
@property
def n_representations(self):
r""" return the number of latent codes """
return self.representations[0].event_shape[0]
@property
def n_codes(self):
r""" same as `n_representations`, return the number of latent codes """
return self.n_representations
@property
def n_train(self):
r""" Return number of samples for training """
return self.factors[0].shape[0]
@property
def n_test(self):
r""" Return number of samples for testing """
return self.factors[1].shape[0]
@property
def factors(self):
r""" Return the target variable (i.e. the factors of variation) for
training and testing """
self.assert_sampled()
return self._factors
@property
def factor_names(self):
self.assert_sampled()
# the dataset is unchanged, always at 0-th index
return np.array(self._factor_names)
@property
def code_names(self):
return np.array([f"Z{i}" for i in range(self.n_representations)])
@property
def random_state(self):
return self._rand
@property
def randint(self):
return self._rand.randint(1e8)
############## proxy to VAE methods
def index(self, factor_name):
r""" Return the column index of given factor_names within the
factor matrix """
return self._factor_names.index(str(factor_name))
def encode(self, inputs, mask=None, sample_shape=()):
r""" Encode inputs to latent codes
Arguments:
inputs : a single Tensor or list of Tensor
Returns:
`tensorflow_probability.Distribution`, q(z|x) the latent distribution
"""
inputs = tf.nest.flatten(inputs)[:len(self._vae.encoder.inputs)]
latents = self._vae.encode(inputs[0] if len(inputs) == 1 else inputs,
training=False,
mask=mask,
sample_shape=sample_shape)
# only support single returned latent variable now
for z in tf.nest.flatten(latents):
assert isinstance(z, tfd.Distribution), \
"The latent code return from `vae.encode` must be instance of " + \
"tensorflow_probability.Distribution, but returned: %s" % \
str(z)
return latents
def decode(self, latents, mask=None, sample_shape=()):
r""" Decode the latents into reconstruction distribution """
outputs = self._vae.decode(latents,
training=False,
mask=mask,
sample_shape=sample_shape)
for o in tf.nest.flatten(outputs):
assert isinstance(o, tfd.Distribution), \
"vae decode method must return reconstruction distribution, but " + \
"returned: %s" % str(o)
return outputs
############## Experiment setup
def traversing(self,
indices=None,
min_val=-1.,
max_val=1.,
num=10,
n_samples=2,
mode='linear'):
r"""
Arguments:
indices : a list of Integer or None. The indices of latent code for
traversing. If None, all latent codes are used.
Return:
numpy.ndarray : traversed latent codes for training and testing,
the shape is `[len(indices) * n_samples * num, n_representations]`
"""
self.assert_sampled()
num = int(num)
n_samples = int(n_samples)
assert num > 1 and n_samples > 0, "num > 1 and n_samples > 0"
# ====== indices ====== #
if indices is None:
indices = list(range(self.n_representations))
else:
indices = [int(i) for i in tf.nest.flatten(indices)]
assert all(i < self.n_factors for i in indices), \
"There are %d factors, but the factor indices are: %s" % \
(self.n_factors, str(indices))
indices = np.array(indices)
# ====== check the mode ====== #
all_mode = ('quantile', 'linear')
mode = str(mode).strip().lower()
assert mode in all_mode, \
"Only support %s, but given mode='%s'" % (str(all_mode), mode)
# ====== helpers ====== #
def _traverse(z):
sampled_indices = self._rand.choice(z.shape[0],
size=int(n_samples),
replace=False)
Zs = []
for i in sampled_indices:
n = len(indices) * num
z_i = np.repeat(np.expand_dims(z[i], 0), n, axis=0)
for j, idx in enumerate(indices):
start = j * num
end = (j + 1) * num
# linear
if mode == 'linear':
z_i[start:end, idx] = np.linspace(min_val, max_val, num)
# Gaussian quantile
elif mode == 'quantile':
base_code = z_i[0, idx]
print(base_code)
exit()
# Gaussian linear
elif mode == '':
raise NotImplementedError
Zs.append(z_i)
Zs = np.concatenate(Zs, axis=0)
return Zs, sampled_indices
# ====== traverse through latent space ====== #
z_train, z_test = self.representations_mean
z_train, train_ids = _traverse(z_train)
z_test, test_ids = _traverse(z_test)
return z_train, z_test
def conditioning(self, known={}, logical_not=False, n_samples=None):
r""" Conditioning the sampled dataset on known factors
Arguments:
known : a mapping from index or name of factor to a callable, the
callable must return a list of boolean indices, which indicates
the samples to be selected
logical_not : a Boolean, if True applying the opposed conditioning
of the known factors
n_samples : an Integer (Optional), maximum number of selected samples.
Return:
a new `Criticizer` with the conditioned data and representations
Example:
```
# conditioning on: (1st-factor > 2) and (2nd-factor == 3)
conditioning({1: lambda x: x > 2, 2: lambda x: x==3})
```
"""
self.assert_sampled()
known = {
int(k) if isinstance(k, Number) else self.index(str(k)): v
for k, v in dict(known).items()
}
assert len(known) > 0 and all(callable(v) for v in known.values()), \
"'known' factors must be mapping from factor index to callable " + \
"but given: %s" % str(known)
# start conditioning
x_train, x_test = self.inputs
f_train, f_test = self.factors
train_ids = np.full(shape=f_train.shape[0], fill_value=True, dtype=np.bool)
test_ids = np.full(shape=f_test.shape[0], fill_value=True, dtype=np.bool)
for f_idx, fn_filter in known.items():
train_ids = np.logical_and(train_ids, fn_filter(f_train[:, f_idx]))
test_ids = np.logical_and(test_ids, fn_filter(f_test[:, f_idx]))
# select n_samples
if n_samples is not None:
n_samples = int(n_samples)
ratio = n_samples / (len(train_ids) + len(test_ids))
train_ids = train_ids[:int(ratio * len(train_ids))]
test_ids = test_ids[:int(ratio * len(test_ids))]
# opposing the conditions
if logical_not:
train_ids = np.logical_not(train_ids)
test_ids = np.logical_not(test_ids)
# add new samples set to stack
o_train, o_test = self.original_factors
x_train = [x[train_ids] for x in x_train]
x_test = [x[test_ids] for x in x_test]
# convert boolean indices to integer
z_train = self.encode(x_train)
z_test = self.encode(x_test)
r_train = self.decode(z_train)
r_test = self.decode(z_test)
if isinstance(z_train, (tuple, list)):
z_train = z_train[self._latent_indices]
z_test = z_test[self._latent_indices]
if self.is_multi_latents:
z_train = CombinedDistribution(z_train, name="LatentsTrain")
z_test = CombinedDistribution(z_test, name="LatentsTest")
# create a new critizer
crt = self.copy()
crt._representations = (\
z_train[0] if isinstance(z_train, (tuple, list)) else z_train,
z_test[0] if isinstance(z_test, (tuple, list)) else z_test)
crt._inputs = (x_train, x_test)
crt._reconstructions = (r_train, r_test)
crt._factors = (f_train[train_ids], f_test[test_ids])
crt._original_factors = (o_train[train_ids], o_test[test_ids])
return crt
def sample_batch(self,
inputs=None,
latents=None,
factors=None,
n_bins=5,
strategy=None,
factor_names=None,
train_percent=0.8,
n_samples=[2000, 1000],
batch_size=64,
verbose=True):
r""" Sample a batch of training and testing for evaluation of VAE
Arguments:
inputs : list of `ndarray` or `tensorflow.data.Dataset`.
Inputs to the model, note all data will be loaded in-memory
latents : list of `Distribution`
distribution of learned representation
factors : a `ndarray` or `tensorflow.data.Dataset`.
a matrix of groundtruth factors, note all data will be loaded in-memory
n_bins : int or array-like, shape (n_features,) (default=5)
The number of bins to produce. Raises ValueError if ``n_bins < 2``.
strategy : {'uniform', 'quantile', 'kmeans', 'gmm'}, (default='quantile')
Strategy used to define the widths of the bins.
`None` - No discretization performed
uniform - All bins in each feature have identical widths.
quantile - All bins in each feature have the same number of points.
kmeans - Values in each bin have the same nearest center of a 1D
k-means cluster.
gmm - using the components (in sorted order of mean) of Gaussian
mixture to label.
factor_names :
train_percent :
n_samples :
batch_size :
Returns:
`Criticizer` with sampled data
"""
from odin.bay.helpers import concat_distributions
inputs, latents, factors = prepare_inputs_factors(inputs,
latents,
factors,
verbose=verbose)
n_samples = as_tuple(n_samples, t=int, N=2)
n_inputs = factors.shape[0]
# ====== split train test ====== #
if inputs is None:
latents = latents[self._latent_indices]
split = int(n_inputs * train_percent)
train_ids = slice(None, split)
test_ids = slice(split, None)
train_latents = [z[train_ids] for z in latents]
test_latents = [z[test_ids] for z in latents]
if len(latents) == 1:
train_latents = train_latents[0]
test_latents = test_latents[0]
else:
self._is_multi_latents = len(latents)
train_latents = CombinedDistribution(train_latents, name="Latents")
test_latents = CombinedDistribution(test_latents, name="Latents")
else:
ids = self.random_state.permutation(n_inputs)
split = int(train_percent * n_inputs)
train_ids, test_ids = ids[:split], ids[split:]
train_inputs = [i[train_ids] for i in inputs]
test_inputs = [i[test_ids] for i in inputs]
# ====== create discretized factors ====== #
f_original = (factors[train_ids], factors[test_ids])
# discretizing the factors
if strategy is not None:
if verbose:
print(f"Discretizing factors: {n_bins} - {strategy}")
factors = utils.discretizing(factors,
n_bins=int(n_bins),
strategy=strategy)
# check for singular factor and ignore it
ids = []
for i, (name, f) in enumerate(zip(factor_names, factors.T)):
c = Counter(f)
if len(c) < 2:
warnings.warn(f"Ignore factor with name '{name}', singular data: {f}")
else:
ids.append(i)
if len(ids) != len(factor_names):
f_original = (f_original[0][:, ids], f_original[1][:, ids])
factor_names = factor_names[ids]
factors = factors[:, ids]
# create the factor class for sampling
train_factors = Factor(factors[train_ids],
factor_names=factor_names,
random_state=self.randint)
test_factors = Factor(factors[test_ids],
factor_names=factor_names,
random_state=self.randint)
# ====== sampling ====== #
def sampling(inputs_, factors_, nsamples, title):
Xs = [list() for _ in range(len(inputs))] # inputs
Ys = [] # factors
Zs = [] # latents
Os = [] # outputs
indices = []
n = 0
if verbose:
prog = tqdm(desc='Sampling %s' % title, total=nsamples)
while n < nsamples:
batch = min(batch_size, nsamples - n, factors_.shape[0])
if verbose:
prog.update(int(batch))
# factors
y, ids = factors_.sample_factors(num=batch, return_indices=True)
indices.append(ids)
Ys.append(y)
# inputs
inps = []
for x, i in zip(Xs, inputs_):
i = i[ids, :]
x.append(i)
inps.append(i)
# latents representation
z = self.encode(inps, sample_shape=())
o = tf.nest.flatten(self.decode(z))
if isinstance(z, (tuple, list)):
z = z[self._latent_indices]
if len(z) == 1:
z = z[0]
else:
self._is_multi_latents = len(z)
Os.append(o)
Zs.append(z)
# update the counter
n += len(y)
# end progress
if verbose:
prog.clear()
prog.close()
# aggregate all data
Xs = [np.concatenate(x, axis=0) for x in Xs]
Ys = np.concatenate(Ys, axis=0)
if self.is_multi_latents:
Zs = CombinedDistribution(
[
concat_distributions(
[z[zi] for z in Zs],
name="Latents%d" % zi,
) for zi in range(self.is_multi_latents)
],
name="Latents",
)
else:
Zs = concat_distributions(Zs, name="Latents")
Os = [
concat_distributions(
[j[i] for j in Os],
name="Output%d" % i,
) for i in range(len(Os[0]))
]
return Xs, Ys, Zs, Os, np.concatenate(indices, axis=0)
# perform sampling
if inputs is not None:
train = sampling(inputs_=train_inputs,
factors_=train_factors,
nsamples=n_samples[0],
title="Train")
test = sampling(inputs_=test_inputs,
factors_=test_factors,
nsamples=n_samples[1],
title="Test ")
ids_train = train[4]
ids_test = test[4]
# assign the variables
self._inputs = (train[0], test[0])
self._factors = (train[1], test[1])
self._representations = (train[2], test[2])
self._reconstructions = (train[3], test[3])
self._original_factors = (f_original[0][ids_train],
f_original[1][ids_test])
else:
self._inputs = (None, None)
self._factors = (train_factors.factors, test_factors.factors)
self._representations = (train_latents, test_latents)
self._reconstructions = (None, None)
self._original_factors = (f_original[0], f_original[1])
self._factor_names = train_factors.factor_names
# concatenated
self._representations_full = concat_distributions(self.representations)
self._factors_full = np.concatenate(self.factors, axis=0)
self._original_factors_full = np.concatenate(self.original_factors, axis=0)
return self
| return self._representations_full | identifier_body |
_criticizer_base.py | import re
import warnings
from collections import Counter, OrderedDict
from numbers import Number
import numpy as np
import tensorflow as tf
from tqdm import tqdm
from odin.bay import distributions as tfd
from odin.bay.distributions import CombinedDistribution
from odin.bay.vi import utils
from odin.bay.vi.autoencoder.variational_autoencoder import \
VariationalAutoencoder
from odin.bay.vi.data_utils import Factor
from odin.stats import is_discrete
from odin.utils import as_tuple
def prepare_inputs_factors(inputs, latents, factors, verbose):
if inputs is None:
if latents is None:
raise ValueError("Either inputs or latents must be provided")
assert factors is not None, \
"If latents is provided directly, factors must not be None."
latents = tf.nest.flatten(latents)
assert all(isinstance(z, tfd.Distribution) for z in latents), \
("All latents must be instance of Distribution but given: "
f"{[type(z).__name__ for z in latents]}")
### inputs is a tensorflow Dataset, convert everything to numpy
elif isinstance(inputs, tf.data.Dataset):
struct = tf.data.experimental.get_structure(inputs)
if isinstance(struct, dict):
struct = struct['inputs']
struct = tf.nest.flatten(struct)
n_inputs = len(struct)
if verbose:
inputs = tqdm(inputs, desc="Reading data")
if factors is None: # include factors
assert n_inputs >= 2, \
"factors are not included in the dataset: %s" % str(inputs)
x, y = [list() for _ in range((n_inputs - 1))], []
for data in inputs:
if isinstance(data, dict): # this is an ad-hoc hack
data = data['inputs']
for i, j in enumerate(data[:-1]):
x[i].append(j)
y.append(data[-1])
inputs = [tf.concat(i, axis=0).numpy() for i in x]
if n_inputs == 2:
inputs = inputs[0]
factors = tf.concat(y, axis=0).numpy()
else: # factors separated
x = [list() for _ in range(n_inputs)]
for data in inputs:
for i, j in enumerate(tf.nest.flatten(data)):
x[i].append(j)
inputs = [tf.concat(i, axis=0).numpy() for i in x]
if n_inputs == 1:
inputs = inputs[0]
if isinstance(factors, tf.data.Dataset):
if verbose:
factors = tqdm(factors, desc="Reading factors")
factors = tf.concat([i for i in factors], axis=0)
# end the progress
if isinstance(inputs, tqdm):
inputs.clear()
inputs.close()
# post-processing
else:
inputs = tf.nest.flatten(inputs)
assert len(factors.shape) == 2, "factors must be a matrix"
return inputs, latents, factors
class CriticizerBase(object):
def __init__(self,
vae: VariationalAutoencoder,
latent_indices=slice(None),
random_state=1):
super().__init__()
assert isinstance(vae, VariationalAutoencoder), \
"vae must be instance of odin.bay.vi.VariationalAutoencoder, given: %s" \
% str(type(vae))
self._vae = vae
if latent_indices is None:
latent_indices = slice(None)
self._latent_indices = latent_indices
if isinstance(random_state, Number):
|
# main arguments
self._inputs = None
self._factors = None
self._original_factors = None
self._factor_names = None
self._representations = None
self._reconstructions = None
# concatenated train and test
self._representations_full = None
self._factors_full = None
self._original_factors_full = None
# others
self._rand = random_state
self._is_multi_latents = 0
@property
def is_multi_latents(self):
return self._is_multi_latents
@property
def is_sampled(self):
if self._factors is None or self._representations is None:
return False
return True
def assert_sampled(self):
if not self.is_sampled:
raise RuntimeError("Call the `sample_batch` method to sample mini-batch "
"of ground-truth data and learned representations.")
@property
def inputs(self):
self.assert_sampled()
return self._inputs
@property
def representations_full(self) -> tfd.Distribution:
return self._representations_full
@property
def latents_full(self) -> tfd.Distribution:
return self._representations_full
@property
def factors_full(self) -> tf.Tensor:
return self._factors_full
@property
def original_factors_full(self) -> tf.Tensor:
return self._original_factors_full
@property
def representations(self):
r""" Return the learned latent representations `Distribution`
(i.e. the latent code) for training and testing """
self.assert_sampled()
return self._representations
@property
def latents(self):
r""" Return the learned latent representations `Distribution`
(i.e. the latent code) for training and testing """
self.assert_sampled()
return self._representations
@property
def representations_mean(self):
r""" Return the mean of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [z.mean().numpy() for z in self.representations]
@property
def representations_variance(self):
r""" Return the variance of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [z.variance().numpy() for z in self.representations]
def representations_sample(self, n=()):
r""" Return the mean of learned representations distribution
(i.e. the latent code) for training and testing """
self.assert_sampled()
return [
z.sample(sample_shape=n, seed=self.randint).numpy()
for z in self.representations
]
@property
def reconstructions(self):
r""" Return the reconstructed `Distributions` of inputs for training and
testing """
self.assert_sampled()
return self._reconstructions
@property
def reconstructions_mean(self):
r""" Return the mean of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.mean().numpy() for j in i] for i in self._reconstructions]
@property
def reconstructions_variance(self):
r""" Return the variance of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.variance().numpy() for j in i] for i in self._reconstructions]
def reconstructions_sample(self, n=()):
r""" Return the mean of reconstructed distributions of inputs for
training and testing """
self.assert_sampled()
return [[j.sample(sample_shape=n, seed=self.randint).numpy()
for j in i]
for i in self._reconstructions]
@property
def original_factors(self):
r""" Return the training and testing original factors, i.e. the factors
before discretizing """
self.assert_sampled()
# the original factors is the same for all samples set
return self._original_factors
@property
def n_factors(self):
return self.factors[0].shape[1]
@property
def n_representations(self):
r""" return the number of latent codes """
return self.representations[0].event_shape[0]
@property
def n_codes(self):
r""" same as `n_representations`, return the number of latent codes """
return self.n_representations
@property
def n_train(self):
r""" Return number of samples for training """
return self.factors[0].shape[0]
@property
def n_test(self):
r""" Return number of samples for testing """
return self.factors[1].shape[0]
@property
def factors(self):
r""" Return the target variable (i.e. the factors of variation) for
training and testing """
self.assert_sampled()
return self._factors
@property
def factor_names(self):
self.assert_sampled()
# the dataset is unchanged, always at 0-th index
return np.array(self._factor_names)
@property
def code_names(self):
return np.array([f"Z{i}" for i in range(self.n_representations)])
@property
def random_state(self):
return self._rand
@property
def randint(self):
return self._rand.randint(1e8)
############## proxy to VAE methods
def index(self, factor_name):
r""" Return the column index of given factor_names within the
factor matrix """
return self._factor_names.index(str(factor_name))
def encode(self, inputs, mask=None, sample_shape=()):
r""" Encode inputs to latent codes
Arguments:
inputs : a single Tensor or list of Tensor
Returns:
`tensorflow_probability.Distribution`, q(z|x) the latent distribution
"""
inputs = tf.nest.flatten(inputs)[:len(self._vae.encoder.inputs)]
latents = self._vae.encode(inputs[0] if len(inputs) == 1 else inputs,
training=False,
mask=mask,
sample_shape=sample_shape)
# only support single returned latent variable now
for z in tf.nest.flatten(latents):
assert isinstance(z, tfd.Distribution), \
"The latent code return from `vae.encode` must be instance of " + \
"tensorflow_probability.Distribution, but returned: %s" % \
str(z)
return latents
def decode(self, latents, mask=None, sample_shape=()):
r""" Decode the latents into reconstruction distribution """
outputs = self._vae.decode(latents,
training=False,
mask=mask,
sample_shape=sample_shape)
for o in tf.nest.flatten(outputs):
assert isinstance(o, tfd.Distribution), \
"vae decode method must return reconstruction distribution, but " + \
"returned: %s" % str(o)
return outputs
############## Experiment setup
def traversing(self,
indices=None,
min_val=-1.,
max_val=1.,
num=10,
n_samples=2,
mode='linear'):
r"""
Arguments:
indices : a list of Integer or None. The indices of latent code for
traversing. If None, all latent codes are used.
Return:
numpy.ndarray : traversed latent codes for training and testing,
the shape is `[len(indices) * n_samples * num, n_representations]`
"""
self.assert_sampled()
num = int(num)
n_samples = int(n_samples)
assert num > 1 and n_samples > 0, "num > 1 and n_samples > 0"
# ====== indices ====== #
if indices is None:
indices = list(range(self.n_representations))
else:
indices = [int(i) for i in tf.nest.flatten(indices)]
assert all(i < self.n_factors for i in indices), \
"There are %d factors, but the factor indices are: %s" % \
(self.n_factors, str(indices))
indices = np.array(indices)
# ====== check the mode ====== #
all_mode = ('quantile', 'linear')
mode = str(mode).strip().lower()
assert mode in all_mode, \
"Only support %s, but given mode='%s'" % (str(all_mode), mode)
# ====== helpers ====== #
def _traverse(z):
sampled_indices = self._rand.choice(z.shape[0],
size=int(n_samples),
replace=False)
Zs = []
for i in sampled_indices:
n = len(indices) * num
z_i = np.repeat(np.expand_dims(z[i], 0), n, axis=0)
for j, idx in enumerate(indices):
start = j * num
end = (j + 1) * num
# linear
if mode == 'linear':
z_i[start:end, idx] = np.linspace(min_val, max_val, num)
# Gaussian quantile
elif mode == 'quantile':
base_code = z_i[0, idx]
print(base_code)
exit()
# Gaussian linear
elif mode == '':
raise NotImplementedError
Zs.append(z_i)
Zs = np.concatenate(Zs, axis=0)
return Zs, sampled_indices
# ====== traverse through latent space ====== #
z_train, z_test = self.representations_mean
z_train, train_ids = _traverse(z_train)
z_test, test_ids = _traverse(z_test)
return z_train, z_test
def conditioning(self, known={}, logical_not=False, n_samples=None):
r""" Conditioning the sampled dataset on known factors
Arguments:
known : a mapping from index or name of factor to a callable, the
callable must return a list of boolean indices, which indicates
the samples to be selected
logical_not : a Boolean, if True applying the opposed conditioning
of the known factors
n_samples : an Integer (Optional), maximum number of selected samples.
Return:
a new `Criticizer` with the conditioned data and representations
Example:
```
# conditioning on: (1st-factor > 2) and (2nd-factor == 3)
conditioning({1: lambda x: x > 2, 2: lambda x: x==3})
```
"""
self.assert_sampled()
known = {
int(k) if isinstance(k, Number) else self.index(str(k)): v
for k, v in dict(known).items()
}
assert len(known) > 0 and all(callable(v) for v in known.values()), \
"'known' factors must be mapping from factor index to callable " + \
"but given: %s" % str(known)
# start conditioning
x_train, x_test = self.inputs
f_train, f_test = self.factors
train_ids = np.full(shape=f_train.shape[0], fill_value=True, dtype=np.bool)
test_ids = np.full(shape=f_test.shape[0], fill_value=True, dtype=np.bool)
for f_idx, fn_filter in known.items():
train_ids = np.logical_and(train_ids, fn_filter(f_train[:, f_idx]))
test_ids = np.logical_and(test_ids, fn_filter(f_test[:, f_idx]))
# select n_samples
if n_samples is not None:
n_samples = int(n_samples)
ratio = n_samples / (len(train_ids) + len(test_ids))
train_ids = train_ids[:int(ratio * len(train_ids))]
test_ids = test_ids[:int(ratio * len(test_ids))]
# opposing the conditions
if logical_not:
train_ids = np.logical_not(train_ids)
test_ids = np.logical_not(test_ids)
# add new samples set to stack
o_train, o_test = self.original_factors
x_train = [x[train_ids] for x in x_train]
x_test = [x[test_ids] for x in x_test]
# convert boolean indices to integer
z_train = self.encode(x_train)
z_test = self.encode(x_test)
r_train = self.decode(z_train)
r_test = self.decode(z_test)
if isinstance(z_train, (tuple, list)):
z_train = z_train[self._latent_indices]
z_test = z_test[self._latent_indices]
if self.is_multi_latents:
z_train = CombinedDistribution(z_train, name="LatentsTrain")
z_test = CombinedDistribution(z_test, name="LatentsTest")
# create a new critizer
crt = self.copy()
crt._representations = (\
z_train[0] if isinstance(z_train, (tuple, list)) else z_train,
z_test[0] if isinstance(z_test, (tuple, list)) else z_test)
crt._inputs = (x_train, x_test)
crt._reconstructions = (r_train, r_test)
crt._factors = (f_train[train_ids], f_test[test_ids])
crt._original_factors = (o_train[train_ids], o_test[test_ids])
return crt
def sample_batch(self,
inputs=None,
latents=None,
factors=None,
n_bins=5,
strategy=None,
factor_names=None,
train_percent=0.8,
n_samples=[2000, 1000],
batch_size=64,
verbose=True):
r""" Sample a batch of training and testing for evaluation of VAE
Arguments:
inputs : list of `ndarray` or `tensorflow.data.Dataset`.
Inputs to the model, note all data will be loaded in-memory
latents : list of `Distribution`
distribution of learned representation
factors : a `ndarray` or `tensorflow.data.Dataset`.
a matrix of groundtruth factors, note all data will be loaded in-memory
n_bins : int or array-like, shape (n_features,) (default=5)
The number of bins to produce. Raises ValueError if ``n_bins < 2``.
strategy : {'uniform', 'quantile', 'kmeans', 'gmm'}, (default='quantile')
Strategy used to define the widths of the bins.
`None` - No discretization performed
uniform - All bins in each feature have identical widths.
quantile - All bins in each feature have the same number of points.
kmeans - Values in each bin have the same nearest center of a 1D
k-means cluster.
gmm - using the components (in sorted order of mean) of Gaussian
mixture to label.
factor_names :
train_percent :
n_samples :
batch_size :
Returns:
`Criticizer` with sampled data
"""
from odin.bay.helpers import concat_distributions
inputs, latents, factors = prepare_inputs_factors(inputs,
latents,
factors,
verbose=verbose)
n_samples = as_tuple(n_samples, t=int, N=2)
n_inputs = factors.shape[0]
# ====== split train test ====== #
if inputs is None:
latents = latents[self._latent_indices]
split = int(n_inputs * train_percent)
train_ids = slice(None, split)
test_ids = slice(split, None)
train_latents = [z[train_ids] for z in latents]
test_latents = [z[test_ids] for z in latents]
if len(latents) == 1:
train_latents = train_latents[0]
test_latents = test_latents[0]
else:
self._is_multi_latents = len(latents)
train_latents = CombinedDistribution(train_latents, name="Latents")
test_latents = CombinedDistribution(test_latents, name="Latents")
else:
ids = self.random_state.permutation(n_inputs)
split = int(train_percent * n_inputs)
train_ids, test_ids = ids[:split], ids[split:]
train_inputs = [i[train_ids] for i in inputs]
test_inputs = [i[test_ids] for i in inputs]
# ====== create discretized factors ====== #
f_original = (factors[train_ids], factors[test_ids])
# discretizing the factors
if strategy is not None:
if verbose:
print(f"Discretizing factors: {n_bins} - {strategy}")
factors = utils.discretizing(factors,
n_bins=int(n_bins),
strategy=strategy)
# check for singular factor and ignore it
ids = []
for i, (name, f) in enumerate(zip(factor_names, factors.T)):
c = Counter(f)
if len(c) < 2:
warnings.warn(f"Ignore factor with name '{name}', singular data: {f}")
else:
ids.append(i)
if len(ids) != len(factor_names):
f_original = (f_original[0][:, ids], f_original[1][:, ids])
factor_names = factor_names[ids]
factors = factors[:, ids]
# create the factor class for sampling
train_factors = Factor(factors[train_ids],
factor_names=factor_names,
random_state=self.randint)
test_factors = Factor(factors[test_ids],
factor_names=factor_names,
random_state=self.randint)
# ====== sampling ====== #
def sampling(inputs_, factors_, nsamples, title):
Xs = [list() for _ in range(len(inputs))] # inputs
Ys = [] # factors
Zs = [] # latents
Os = [] # outputs
indices = []
n = 0
if verbose:
prog = tqdm(desc='Sampling %s' % title, total=nsamples)
while n < nsamples:
batch = min(batch_size, nsamples - n, factors_.shape[0])
if verbose:
prog.update(int(batch))
# factors
y, ids = factors_.sample_factors(num=batch, return_indices=True)
indices.append(ids)
Ys.append(y)
# inputs
inps = []
for x, i in zip(Xs, inputs_):
i = i[ids, :]
x.append(i)
inps.append(i)
# latents representation
z = self.encode(inps, sample_shape=())
o = tf.nest.flatten(self.decode(z))
if isinstance(z, (tuple, list)):
z = z[self._latent_indices]
if len(z) == 1:
z = z[0]
else:
self._is_multi_latents = len(z)
Os.append(o)
Zs.append(z)
# update the counter
n += len(y)
# end progress
if verbose:
prog.clear()
prog.close()
# aggregate all data
Xs = [np.concatenate(x, axis=0) for x in Xs]
Ys = np.concatenate(Ys, axis=0)
if self.is_multi_latents:
Zs = CombinedDistribution(
[
concat_distributions(
[z[zi] for z in Zs],
name="Latents%d" % zi,
) for zi in range(self.is_multi_latents)
],
name="Latents",
)
else:
Zs = concat_distributions(Zs, name="Latents")
Os = [
concat_distributions(
[j[i] for j in Os],
name="Output%d" % i,
) for i in range(len(Os[0]))
]
return Xs, Ys, Zs, Os, np.concatenate(indices, axis=0)
# perform sampling
if inputs is not None:
train = sampling(inputs_=train_inputs,
factors_=train_factors,
nsamples=n_samples[0],
title="Train")
test = sampling(inputs_=test_inputs,
factors_=test_factors,
nsamples=n_samples[1],
title="Test ")
ids_train = train[4]
ids_test = test[4]
# assign the variables
self._inputs = (train[0], test[0])
self._factors = (train[1], test[1])
self._representations = (train[2], test[2])
self._reconstructions = (train[3], test[3])
self._original_factors = (f_original[0][ids_train],
f_original[1][ids_test])
else:
self._inputs = (None, None)
self._factors = (train_factors.factors, test_factors.factors)
self._representations = (train_latents, test_latents)
self._reconstructions = (None, None)
self._original_factors = (f_original[0], f_original[1])
self._factor_names = train_factors.factor_names
# concatenated
self._representations_full = concat_distributions(self.representations)
self._factors_full = np.concatenate(self.factors, axis=0)
self._original_factors_full = np.concatenate(self.original_factors, axis=0)
return self
| random_state = np.random.RandomState(seed=random_state) | conditional_block |
patterns.rs | use insta::assert_snapshot;
use test_utils::mark;
use super::{infer, infer_with_mismatches};
#[test]
fn infer_pattern() {
assert_snapshot!(
infer(r#"
fn test(x: &i32) {
let y = x;
let &z = x;
let a = z;
let (c, d) = (1, "hello");
for (e, f) in some_iter {
let g = e;
}
if let [val] = opt {
let h = val;
}
let lambda = |a: u64, b, c: i32| { a + b; c };
let ref ref_to_x = x;
let mut mut_x = x;
let ref mut mut_ref_to_x = x;
let k = mut_ref_to_x;
}
"#),
@r###"
9..10 'x': &i32
18..369 '{ ...o_x; }': ()
28..29 'y': &i32
32..33 'x': &i32
43..45 '&z': &i32
44..45 'z': i32
48..49 'x': &i32
59..60 'a': i32
63..64 'z': i32
74..80 '(c, d)': (i32, &str)
75..76 'c': i32
78..79 'd': &str
83..95 '(1, "hello")': (i32, &str)
84..85 '1': i32
87..94 '"hello"': &str
102..152 'for (e... }': ()
106..112 '(e, f)': ({unknown}, {unknown})
107..108 'e': {unknown}
110..111 'f': {unknown}
116..125 'some_iter': {unknown}
126..152 '{ ... }': ()
140..141 'g': {unknown}
144..145 'e': {unknown}
158..205 'if let... }': ()
165..170 '[val]': [{unknown}]
166..169 'val': {unknown}
173..176 'opt': [{unknown}]
177..205 '{ ... }': ()
191..192 'h': {unknown}
195..198 'val': {unknown}
215..221 'lambda': |u64, u64, i32| -> i32
224..256 '|a: u6...b; c }': |u64, u64, i32| -> i32
225..226 'a': u64
233..234 'b': u64
236..237 'c': i32
244..256 '{ a + b; c }': i32
246..247 'a': u64
246..251 'a + b': u64
250..251 'b': u64
253..254 'c': i32
267..279 'ref ref_to_x': &&i32
282..283 'x': &i32
293..302 'mut mut_x': &i32
305..306 'x': &i32
316..336 'ref mu...f_to_x': &mut &i32
339..340 'x': &i32
350..351 'k': &mut &i32
354..366 'mut_ref_to_x': &mut &i32
"###
);
}
#[test]
fn infer_literal_pattern() {
assert_snapshot!(
infer_with_mismatches(r#"
fn any<T>() -> T { loop {} }
fn test(x: &i32) {
if let "foo" = any() {}
if let 1 = any() {}
if let 1u32 = any() {}
if let 1f32 = any() {}
if let 1.0 = any() {}
if let true = any() {}
}
"#, true),
@r###"
18..29 '{ loop {} }': T
20..27 'loop {}': !
25..27 '{}': ()
38..39 'x': &i32
47..209 '{ ...) {} }': ()
53..76 'if let...y() {}': ()
60..65 '"foo"': &str
60..65 '"foo"': &str
68..71 'any': fn any<&str>() -> &str
68..73 'any()': &str
74..76 '{}': ()
81..100 'if let...y() {}': ()
88..89 '1': i32
88..89 '1': i32
92..95 'any': fn any<i32>() -> i32
92..97 'any()': i32
98..100 '{}': ()
105..127 'if let...y() {}': ()
112..116 '1u32': u32
112..116 '1u32': u32
119..122 'any': fn any<u32>() -> u32
119..124 'any()': u32
125..127 '{}': ()
132..154 'if let...y() {}': ()
139..143 '1f32': f32
139..143 '1f32': f32
146..149 'any': fn any<f32>() -> f32
146..151 'any()': f32
152..154 '{}': ()
159..180 'if let...y() {}': ()
166..169 '1.0': f64
166..169 '1.0': f64
172..175 'any': fn any<f64>() -> f64
172..177 'any()': f64
178..180 '{}': ()
185..207 'if let...y() {}': ()
192..196 'true': bool
192..196 'true': bool
199..202 'any': fn any<bool>() -> bool
199..204 'any()': bool
205..207 '{}': ()
"###
);
}
#[test]
fn infer_range_pattern() |
#[test]
fn infer_pattern_match_ergonomics() {
assert_snapshot!(
infer(r#"
struct A<T>(T);
fn test() {
let A(n) = &A(1);
let A(n) = &mut A(1);
}
"#),
@r###"
28..79 '{ ...(1); }': ()
38..42 'A(n)': A<i32>
40..41 'n': &i32
45..50 '&A(1)': &A<i32>
46..47 'A': A<i32>(i32) -> A<i32>
46..50 'A(1)': A<i32>
48..49 '1': i32
60..64 'A(n)': A<i32>
62..63 'n': &mut i32
67..76 '&mut A(1)': &mut A<i32>
72..73 'A': A<i32>(i32) -> A<i32>
72..76 'A(1)': A<i32>
74..75 '1': i32
"###
);
}
#[test]
fn infer_pattern_match_ergonomics_ref() {
mark::check!(match_ergonomics_ref);
assert_snapshot!(
infer(r#"
fn test() {
let v = &(1, &2);
let (_, &w) = v;
}
"#),
@r###"
11..57 '{ ...= v; }': ()
21..22 'v': &(i32, &i32)
25..33 '&(1, &2)': &(i32, &i32)
26..33 '(1, &2)': (i32, &i32)
27..28 '1': i32
30..32 '&2': &i32
31..32 '2': i32
43..50 '(_, &w)': (i32, &i32)
44..45 '_': i32
47..49 '&w': &i32
48..49 'w': i32
53..54 'v': &(i32, &i32)
"###
);
}
#[test]
fn infer_pattern_match_slice() {
assert_snapshot!(
infer(r#"
fn test() {
let slice: &[f64] = &[0.0];
match slice {
&[] => {},
&[a] => {
a;
},
&[b, c] => {
b;
c;
}
_ => {}
}
}
"#),
@r###"
11..210 '{ ... } }': ()
21..26 'slice': &[f64]
37..43 '&[0.0]': &[f64; _]
38..43 '[0.0]': [f64; _]
39..42 '0.0': f64
49..208 'match ... }': ()
55..60 'slice': &[f64]
71..74 '&[]': &[f64]
72..74 '[]': [f64]
78..80 '{}': ()
90..94 '&[a]': &[f64]
91..94 '[a]': [f64]
92..93 'a': f64
98..124 '{ ... }': ()
112..113 'a': f64
134..141 '&[b, c]': &[f64]
135..141 '[b, c]': [f64]
136..137 'b': f64
139..140 'c': f64
145..186 '{ ... }': ()
159..160 'b': f64
174..175 'c': f64
195..196 '_': &[f64]
200..202 '{}': ()
"###
);
}
#[test]
fn infer_pattern_match_arr() {
assert_snapshot!(
infer(r#"
fn test() {
let arr: [f64; 2] = [0.0, 1.0];
match arr {
[1.0, a] => {
a;
},
[b, c] => {
b;
c;
}
}
}
"#),
@r###"
11..180 '{ ... } }': ()
21..24 'arr': [f64; _]
37..47 '[0.0, 1.0]': [f64; _]
38..41 '0.0': f64
43..46 '1.0': f64
53..178 'match ... }': ()
59..62 'arr': [f64; _]
73..81 '[1.0, a]': [f64; _]
74..77 '1.0': f64
74..77 '1.0': f64
79..80 'a': f64
85..111 '{ ... }': ()
99..100 'a': f64
121..127 '[b, c]': [f64; _]
122..123 'b': f64
125..126 'c': f64
131..172 '{ ... }': ()
145..146 'b': f64
160..161 'c': f64
"###
);
}
#[test]
fn infer_adt_pattern() {
assert_snapshot!(
infer(r#"
enum E {
A { x: usize },
B
}
struct S(u32, E);
fn test() {
let e = E::A { x: 3 };
let S(y, z) = foo;
let E::A { x: new_var } = e;
match e {
E::A { x } => x,
E::B if foo => 1,
E::B => 10,
};
let ref d @ E::A { .. } = e;
d;
}
"#),
@r###"
68..289 '{ ... d; }': ()
78..79 'e': E
82..95 'E::A { x: 3 }': E
92..93 '3': usize
106..113 'S(y, z)': S
108..109 'y': u32
111..112 'z': E
116..119 'foo': S
129..148 'E::A {..._var }': E
139..146 'new_var': usize
151..152 'e': E
159..245 'match ... }': usize
165..166 'e': E
177..187 'E::A { x }': E
184..185 'x': usize
191..192 'x': usize
202..206 'E::B': E
210..213 'foo': bool
217..218 '1': usize
228..232 'E::B': E
236..238 '10': usize
256..275 'ref d ...{ .. }': &E
264..275 'E::A { .. }': E
278..279 'e': E
285..286 'd': &E
"###
);
}
#[test]
fn enum_variant_through_self_in_pattern() {
assert_snapshot!(
infer(r#"
enum E {
A { x: usize },
B(usize),
C
}
impl E {
fn test() {
match (loop {}) {
Self::A { x } => { x; },
Self::B(x) => { x; },
Self::C => {},
};
}
}
"#),
@r###"
76..218 '{ ... }': ()
86..211 'match ... }': ()
93..100 'loop {}': !
98..100 '{}': ()
116..129 'Self::A { x }': E
126..127 'x': usize
133..139 '{ x; }': ()
135..136 'x': usize
153..163 'Self::B(x)': E
161..162 'x': usize
167..173 '{ x; }': ()
169..170 'x': usize
187..194 'Self::C': E
198..200 '{}': ()
"###
);
}
#[test]
fn infer_generics_in_patterns() {
assert_snapshot!(
infer(r#"
struct A<T> {
x: T,
}
enum Option<T> {
Some(T),
None,
}
fn test(a1: A<u32>, o: Option<u64>) {
let A { x: x2 } = a1;
let A::<i64> { x: x3 } = A { x: 1 };
match o {
Option::Some(t) => t,
_ => 1,
};
}
"#),
@r###"
79..81 'a1': A<u32>
91..92 'o': Option<u64>
107..244 '{ ... }; }': ()
117..128 'A { x: x2 }': A<u32>
124..126 'x2': u32
131..133 'a1': A<u32>
143..161 'A::<i6...: x3 }': A<i64>
157..159 'x3': i64
164..174 'A { x: 1 }': A<i64>
171..172 '1': i64
180..241 'match ... }': u64
186..187 'o': Option<u64>
198..213 'Option::Some(t)': Option<u64>
211..212 't': u64
217..218 't': u64
228..229 '_': Option<u64>
233..234 '1': u64
"###
);
}
#[test]
fn infer_const_pattern() {
assert_snapshot!(
infer_with_mismatches(r#"
enum Option<T> { None }
use Option::None;
struct Foo;
const Bar: usize = 1;
fn test() {
let a: Option<u32> = None;
let b: Option<i64> = match a {
None => None,
};
let _: () = match () { Foo => Foo }; // Expected mismatch
let _: () = match () { Bar => Bar }; // Expected mismatch
}
"#, true),
@r###"
74..75 '1': usize
88..310 '{ ...atch }': ()
98..99 'a': Option<u32>
115..119 'None': Option<u32>
129..130 'b': Option<i64>
146..183 'match ... }': Option<i64>
152..153 'a': Option<u32>
164..168 'None': Option<u32>
172..176 'None': Option<i64>
193..194 '_': ()
201..224 'match ... Foo }': Foo
207..209 '()': ()
212..215 'Foo': Foo
219..222 'Foo': Foo
255..256 '_': ()
263..286 'match ... Bar }': usize
269..271 '()': ()
274..277 'Bar': usize
281..284 'Bar': usize
201..224: expected (), got Foo
263..286: expected (), got usize
"###
);
}
#[test]
fn infer_guard() {
assert_snapshot!(
infer(r#"
struct S;
impl S { fn foo(&self) -> bool { false } }
fn main() {
match S {
s if s.foo() => (),
}
}
"#), @"
28..32 'self': &S
42..51 '{ false }': bool
44..49 'false': bool
65..116 '{ ... } }': ()
71..114 'match ... }': ()
77..78 'S': S
89..90 's': S
94..95 's': S
94..101 's.foo()': bool
105..107 '()': ()
")
}
#[test]
fn match_ergonomics_in_closure_params() {
assert_snapshot!(
infer(r#"
#[lang = "fn_once"]
trait FnOnce<Args> {
type Output;
}
fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
fn test() {
foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
foo(&(1, "a"), |(x, y)| x);
}
"#),
@r###"
94..95 't': T
100..101 'f': F
111..122 '{ loop {} }': U
113..120 'loop {}': !
118..120 '{}': ()
134..233 '{ ... x); }': ()
140..143 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
140..167 'foo(&(...y)| x)': i32
144..153 '&(1, "a")': &(i32, &str)
145..153 '(1, "a")': (i32, &str)
146..147 '1': i32
149..152 '"a"': &str
155..166 '|&(x, y)| x': |&(i32, &str)| -> i32
156..163 '&(x, y)': &(i32, &str)
157..163 '(x, y)': (i32, &str)
158..159 'x': i32
161..162 'y': &str
165..166 'x': i32
204..207 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
204..230 'foo(&(...y)| x)': &i32
208..217 '&(1, "a")': &(i32, &str)
209..217 '(1, "a")': (i32, &str)
210..211 '1': i32
213..216 '"a"': &str
219..229 '|(x, y)| x': |&(i32, &str)| -> &i32
220..226 '(x, y)': (i32, &str)
221..222 'x': &i32
224..225 'y': &&str
228..229 'x': &i32
"###
);
}
| {
assert_snapshot!(
infer_with_mismatches(r#"
fn test(x: &i32) {
if let 1..76 = 2u32 {}
if let 1..=76 = 2u32 {}
}
"#, true),
@r###"
9..10 'x': &i32
18..76 '{ ...2 {} }': ()
24..46 'if let...u32 {}': ()
31..36 '1..76': u32
39..43 '2u32': u32
44..46 '{}': ()
51..74 'if let...u32 {}': ()
58..64 '1..=76': u32
67..71 '2u32': u32
72..74 '{}': ()
"###
);
} | identifier_body |
patterns.rs | use insta::assert_snapshot;
use test_utils::mark;
use super::{infer, infer_with_mismatches};
#[test]
fn infer_pattern() {
assert_snapshot!(
infer(r#"
fn test(x: &i32) {
let y = x;
let &z = x;
let a = z;
let (c, d) = (1, "hello");
for (e, f) in some_iter {
let g = e;
}
if let [val] = opt {
let h = val;
}
let lambda = |a: u64, b, c: i32| { a + b; c };
let ref ref_to_x = x;
let mut mut_x = x;
let ref mut mut_ref_to_x = x;
let k = mut_ref_to_x;
}
"#),
@r###"
9..10 'x': &i32
18..369 '{ ...o_x; }': ()
28..29 'y': &i32
32..33 'x': &i32
43..45 '&z': &i32
44..45 'z': i32
48..49 'x': &i32
59..60 'a': i32
63..64 'z': i32
74..80 '(c, d)': (i32, &str)
75..76 'c': i32
78..79 'd': &str
83..95 '(1, "hello")': (i32, &str)
84..85 '1': i32
87..94 '"hello"': &str
102..152 'for (e... }': ()
106..112 '(e, f)': ({unknown}, {unknown})
107..108 'e': {unknown}
110..111 'f': {unknown}
116..125 'some_iter': {unknown}
126..152 '{ ... }': ()
140..141 'g': {unknown}
144..145 'e': {unknown}
158..205 'if let... }': ()
165..170 '[val]': [{unknown}]
166..169 'val': {unknown}
173..176 'opt': [{unknown}]
177..205 '{ ... }': ()
191..192 'h': {unknown}
195..198 'val': {unknown}
215..221 'lambda': |u64, u64, i32| -> i32
224..256 '|a: u6...b; c }': |u64, u64, i32| -> i32
225..226 'a': u64
233..234 'b': u64
236..237 'c': i32
244..256 '{ a + b; c }': i32
246..247 'a': u64
246..251 'a + b': u64
250..251 'b': u64
253..254 'c': i32
267..279 'ref ref_to_x': &&i32
282..283 'x': &i32
293..302 'mut mut_x': &i32
305..306 'x': &i32
316..336 'ref mu...f_to_x': &mut &i32
339..340 'x': &i32
350..351 'k': &mut &i32
354..366 'mut_ref_to_x': &mut &i32
"###
);
}
#[test]
fn infer_literal_pattern() {
assert_snapshot!(
infer_with_mismatches(r#"
fn any<T>() -> T { loop {} }
fn test(x: &i32) {
if let "foo" = any() {}
if let 1 = any() {}
if let 1u32 = any() {}
if let 1f32 = any() {}
if let 1.0 = any() {}
if let true = any() {}
}
"#, true),
@r###"
18..29 '{ loop {} }': T
20..27 'loop {}': !
25..27 '{}': ()
38..39 'x': &i32
47..209 '{ ...) {} }': ()
53..76 'if let...y() {}': ()
60..65 '"foo"': &str
60..65 '"foo"': &str
68..71 'any': fn any<&str>() -> &str
68..73 'any()': &str
74..76 '{}': ()
81..100 'if let...y() {}': ()
88..89 '1': i32
88..89 '1': i32
92..95 'any': fn any<i32>() -> i32
92..97 'any()': i32
98..100 '{}': ()
105..127 'if let...y() {}': ()
112..116 '1u32': u32
112..116 '1u32': u32
119..122 'any': fn any<u32>() -> u32
119..124 'any()': u32
125..127 '{}': ()
132..154 'if let...y() {}': ()
139..143 '1f32': f32
139..143 '1f32': f32
146..149 'any': fn any<f32>() -> f32
146..151 'any()': f32
152..154 '{}': ()
159..180 'if let...y() {}': ()
166..169 '1.0': f64
166..169 '1.0': f64
172..175 'any': fn any<f64>() -> f64
172..177 'any()': f64
178..180 '{}': ()
185..207 'if let...y() {}': ()
192..196 'true': bool
192..196 'true': bool
199..202 'any': fn any<bool>() -> bool
199..204 'any()': bool
205..207 '{}': ()
"###
);
}
#[test]
fn infer_range_pattern() {
assert_snapshot!(
infer_with_mismatches(r#"
fn test(x: &i32) {
if let 1..76 = 2u32 {}
if let 1..=76 = 2u32 {}
}
"#, true),
@r###"
9..10 'x': &i32
18..76 '{ ...2 {} }': ()
24..46 'if let...u32 {}': ()
31..36 '1..76': u32
39..43 '2u32': u32
44..46 '{}': ()
51..74 'if let...u32 {}': ()
58..64 '1..=76': u32
67..71 '2u32': u32
72..74 '{}': ()
"###
);
}
#[test]
fn infer_pattern_match_ergonomics() {
assert_snapshot!(
infer(r#"
struct A<T>(T);
fn test() {
let A(n) = &A(1);
let A(n) = &mut A(1);
}
"#),
@r###"
28..79 '{ ...(1); }': ()
38..42 'A(n)': A<i32>
40..41 'n': &i32
45..50 '&A(1)': &A<i32>
46..47 'A': A<i32>(i32) -> A<i32>
46..50 'A(1)': A<i32>
48..49 '1': i32
60..64 'A(n)': A<i32>
62..63 'n': &mut i32
67..76 '&mut A(1)': &mut A<i32>
72..73 'A': A<i32>(i32) -> A<i32>
72..76 'A(1)': A<i32>
74..75 '1': i32
"###
);
}
#[test]
fn | () {
mark::check!(match_ergonomics_ref);
assert_snapshot!(
infer(r#"
fn test() {
let v = &(1, &2);
let (_, &w) = v;
}
"#),
@r###"
11..57 '{ ...= v; }': ()
21..22 'v': &(i32, &i32)
25..33 '&(1, &2)': &(i32, &i32)
26..33 '(1, &2)': (i32, &i32)
27..28 '1': i32
30..32 '&2': &i32
31..32 '2': i32
43..50 '(_, &w)': (i32, &i32)
44..45 '_': i32
47..49 '&w': &i32
48..49 'w': i32
53..54 'v': &(i32, &i32)
"###
);
}
#[test]
fn infer_pattern_match_slice() {
assert_snapshot!(
infer(r#"
fn test() {
let slice: &[f64] = &[0.0];
match slice {
&[] => {},
&[a] => {
a;
},
&[b, c] => {
b;
c;
}
_ => {}
}
}
"#),
@r###"
11..210 '{ ... } }': ()
21..26 'slice': &[f64]
37..43 '&[0.0]': &[f64; _]
38..43 '[0.0]': [f64; _]
39..42 '0.0': f64
49..208 'match ... }': ()
55..60 'slice': &[f64]
71..74 '&[]': &[f64]
72..74 '[]': [f64]
78..80 '{}': ()
90..94 '&[a]': &[f64]
91..94 '[a]': [f64]
92..93 'a': f64
98..124 '{ ... }': ()
112..113 'a': f64
134..141 '&[b, c]': &[f64]
135..141 '[b, c]': [f64]
136..137 'b': f64
139..140 'c': f64
145..186 '{ ... }': ()
159..160 'b': f64
174..175 'c': f64
195..196 '_': &[f64]
200..202 '{}': ()
"###
);
}
#[test]
fn infer_pattern_match_arr() {
assert_snapshot!(
infer(r#"
fn test() {
let arr: [f64; 2] = [0.0, 1.0];
match arr {
[1.0, a] => {
a;
},
[b, c] => {
b;
c;
}
}
}
"#),
@r###"
11..180 '{ ... } }': ()
21..24 'arr': [f64; _]
37..47 '[0.0, 1.0]': [f64; _]
38..41 '0.0': f64
43..46 '1.0': f64
53..178 'match ... }': ()
59..62 'arr': [f64; _]
73..81 '[1.0, a]': [f64; _]
74..77 '1.0': f64
74..77 '1.0': f64
79..80 'a': f64
85..111 '{ ... }': ()
99..100 'a': f64
121..127 '[b, c]': [f64; _]
122..123 'b': f64
125..126 'c': f64
131..172 '{ ... }': ()
145..146 'b': f64
160..161 'c': f64
"###
);
}
#[test]
fn infer_adt_pattern() {
assert_snapshot!(
infer(r#"
enum E {
A { x: usize },
B
}
struct S(u32, E);
fn test() {
let e = E::A { x: 3 };
let S(y, z) = foo;
let E::A { x: new_var } = e;
match e {
E::A { x } => x,
E::B if foo => 1,
E::B => 10,
};
let ref d @ E::A { .. } = e;
d;
}
"#),
@r###"
68..289 '{ ... d; }': ()
78..79 'e': E
82..95 'E::A { x: 3 }': E
92..93 '3': usize
106..113 'S(y, z)': S
108..109 'y': u32
111..112 'z': E
116..119 'foo': S
129..148 'E::A {..._var }': E
139..146 'new_var': usize
151..152 'e': E
159..245 'match ... }': usize
165..166 'e': E
177..187 'E::A { x }': E
184..185 'x': usize
191..192 'x': usize
202..206 'E::B': E
210..213 'foo': bool
217..218 '1': usize
228..232 'E::B': E
236..238 '10': usize
256..275 'ref d ...{ .. }': &E
264..275 'E::A { .. }': E
278..279 'e': E
285..286 'd': &E
"###
);
}
#[test]
fn enum_variant_through_self_in_pattern() {
assert_snapshot!(
infer(r#"
enum E {
A { x: usize },
B(usize),
C
}
impl E {
fn test() {
match (loop {}) {
Self::A { x } => { x; },
Self::B(x) => { x; },
Self::C => {},
};
}
}
"#),
@r###"
76..218 '{ ... }': ()
86..211 'match ... }': ()
93..100 'loop {}': !
98..100 '{}': ()
116..129 'Self::A { x }': E
126..127 'x': usize
133..139 '{ x; }': ()
135..136 'x': usize
153..163 'Self::B(x)': E
161..162 'x': usize
167..173 '{ x; }': ()
169..170 'x': usize
187..194 'Self::C': E
198..200 '{}': ()
"###
);
}
#[test]
fn infer_generics_in_patterns() {
assert_snapshot!(
infer(r#"
struct A<T> {
x: T,
}
enum Option<T> {
Some(T),
None,
}
fn test(a1: A<u32>, o: Option<u64>) {
let A { x: x2 } = a1;
let A::<i64> { x: x3 } = A { x: 1 };
match o {
Option::Some(t) => t,
_ => 1,
};
}
"#),
@r###"
79..81 'a1': A<u32>
91..92 'o': Option<u64>
107..244 '{ ... }; }': ()
117..128 'A { x: x2 }': A<u32>
124..126 'x2': u32
131..133 'a1': A<u32>
143..161 'A::<i6...: x3 }': A<i64>
157..159 'x3': i64
164..174 'A { x: 1 }': A<i64>
171..172 '1': i64
180..241 'match ... }': u64
186..187 'o': Option<u64>
198..213 'Option::Some(t)': Option<u64>
211..212 't': u64
217..218 't': u64
228..229 '_': Option<u64>
233..234 '1': u64
"###
);
}
#[test]
fn infer_const_pattern() {
assert_snapshot!(
infer_with_mismatches(r#"
enum Option<T> { None }
use Option::None;
struct Foo;
const Bar: usize = 1;
fn test() {
let a: Option<u32> = None;
let b: Option<i64> = match a {
None => None,
};
let _: () = match () { Foo => Foo }; // Expected mismatch
let _: () = match () { Bar => Bar }; // Expected mismatch
}
"#, true),
@r###"
74..75 '1': usize
88..310 '{ ...atch }': ()
98..99 'a': Option<u32>
115..119 'None': Option<u32>
129..130 'b': Option<i64>
146..183 'match ... }': Option<i64>
152..153 'a': Option<u32>
164..168 'None': Option<u32>
172..176 'None': Option<i64>
193..194 '_': ()
201..224 'match ... Foo }': Foo
207..209 '()': ()
212..215 'Foo': Foo
219..222 'Foo': Foo
255..256 '_': ()
263..286 'match ... Bar }': usize
269..271 '()': ()
274..277 'Bar': usize
281..284 'Bar': usize
201..224: expected (), got Foo
263..286: expected (), got usize
"###
);
}
#[test]
fn infer_guard() {
assert_snapshot!(
infer(r#"
struct S;
impl S { fn foo(&self) -> bool { false } }
fn main() {
match S {
s if s.foo() => (),
}
}
"#), @"
28..32 'self': &S
42..51 '{ false }': bool
44..49 'false': bool
65..116 '{ ... } }': ()
71..114 'match ... }': ()
77..78 'S': S
89..90 's': S
94..95 's': S
94..101 's.foo()': bool
105..107 '()': ()
")
}
#[test]
fn match_ergonomics_in_closure_params() {
assert_snapshot!(
infer(r#"
#[lang = "fn_once"]
trait FnOnce<Args> {
type Output;
}
fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
fn test() {
foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
foo(&(1, "a"), |(x, y)| x);
}
"#),
@r###"
94..95 't': T
100..101 'f': F
111..122 '{ loop {} }': U
113..120 'loop {}': !
118..120 '{}': ()
134..233 '{ ... x); }': ()
140..143 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
140..167 'foo(&(...y)| x)': i32
144..153 '&(1, "a")': &(i32, &str)
145..153 '(1, "a")': (i32, &str)
146..147 '1': i32
149..152 '"a"': &str
155..166 '|&(x, y)| x': |&(i32, &str)| -> i32
156..163 '&(x, y)': &(i32, &str)
157..163 '(x, y)': (i32, &str)
158..159 'x': i32
161..162 'y': &str
165..166 'x': i32
204..207 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
204..230 'foo(&(...y)| x)': &i32
208..217 '&(1, "a")': &(i32, &str)
209..217 '(1, "a")': (i32, &str)
210..211 '1': i32
213..216 '"a"': &str
219..229 '|(x, y)| x': |&(i32, &str)| -> &i32
220..226 '(x, y)': (i32, &str)
221..222 'x': &i32
224..225 'y': &&str
228..229 'x': &i32
"###
);
}
| infer_pattern_match_ergonomics_ref | identifier_name |
patterns.rs | use insta::assert_snapshot;
use test_utils::mark;
use super::{infer, infer_with_mismatches};
#[test]
fn infer_pattern() {
assert_snapshot!( | let (c, d) = (1, "hello");
for (e, f) in some_iter {
let g = e;
}
if let [val] = opt {
let h = val;
}
let lambda = |a: u64, b, c: i32| { a + b; c };
let ref ref_to_x = x;
let mut mut_x = x;
let ref mut mut_ref_to_x = x;
let k = mut_ref_to_x;
}
"#),
@r###"
9..10 'x': &i32
18..369 '{ ...o_x; }': ()
28..29 'y': &i32
32..33 'x': &i32
43..45 '&z': &i32
44..45 'z': i32
48..49 'x': &i32
59..60 'a': i32
63..64 'z': i32
74..80 '(c, d)': (i32, &str)
75..76 'c': i32
78..79 'd': &str
83..95 '(1, "hello")': (i32, &str)
84..85 '1': i32
87..94 '"hello"': &str
102..152 'for (e... }': ()
106..112 '(e, f)': ({unknown}, {unknown})
107..108 'e': {unknown}
110..111 'f': {unknown}
116..125 'some_iter': {unknown}
126..152 '{ ... }': ()
140..141 'g': {unknown}
144..145 'e': {unknown}
158..205 'if let... }': ()
165..170 '[val]': [{unknown}]
166..169 'val': {unknown}
173..176 'opt': [{unknown}]
177..205 '{ ... }': ()
191..192 'h': {unknown}
195..198 'val': {unknown}
215..221 'lambda': |u64, u64, i32| -> i32
224..256 '|a: u6...b; c }': |u64, u64, i32| -> i32
225..226 'a': u64
233..234 'b': u64
236..237 'c': i32
244..256 '{ a + b; c }': i32
246..247 'a': u64
246..251 'a + b': u64
250..251 'b': u64
253..254 'c': i32
267..279 'ref ref_to_x': &&i32
282..283 'x': &i32
293..302 'mut mut_x': &i32
305..306 'x': &i32
316..336 'ref mu...f_to_x': &mut &i32
339..340 'x': &i32
350..351 'k': &mut &i32
354..366 'mut_ref_to_x': &mut &i32
"###
);
}
#[test]
fn infer_literal_pattern() {
assert_snapshot!(
infer_with_mismatches(r#"
fn any<T>() -> T { loop {} }
fn test(x: &i32) {
if let "foo" = any() {}
if let 1 = any() {}
if let 1u32 = any() {}
if let 1f32 = any() {}
if let 1.0 = any() {}
if let true = any() {}
}
"#, true),
@r###"
18..29 '{ loop {} }': T
20..27 'loop {}': !
25..27 '{}': ()
38..39 'x': &i32
47..209 '{ ...) {} }': ()
53..76 'if let...y() {}': ()
60..65 '"foo"': &str
60..65 '"foo"': &str
68..71 'any': fn any<&str>() -> &str
68..73 'any()': &str
74..76 '{}': ()
81..100 'if let...y() {}': ()
88..89 '1': i32
88..89 '1': i32
92..95 'any': fn any<i32>() -> i32
92..97 'any()': i32
98..100 '{}': ()
105..127 'if let...y() {}': ()
112..116 '1u32': u32
112..116 '1u32': u32
119..122 'any': fn any<u32>() -> u32
119..124 'any()': u32
125..127 '{}': ()
132..154 'if let...y() {}': ()
139..143 '1f32': f32
139..143 '1f32': f32
146..149 'any': fn any<f32>() -> f32
146..151 'any()': f32
152..154 '{}': ()
159..180 'if let...y() {}': ()
166..169 '1.0': f64
166..169 '1.0': f64
172..175 'any': fn any<f64>() -> f64
172..177 'any()': f64
178..180 '{}': ()
185..207 'if let...y() {}': ()
192..196 'true': bool
192..196 'true': bool
199..202 'any': fn any<bool>() -> bool
199..204 'any()': bool
205..207 '{}': ()
"###
);
}
#[test]
fn infer_range_pattern() {
assert_snapshot!(
infer_with_mismatches(r#"
fn test(x: &i32) {
if let 1..76 = 2u32 {}
if let 1..=76 = 2u32 {}
}
"#, true),
@r###"
9..10 'x': &i32
18..76 '{ ...2 {} }': ()
24..46 'if let...u32 {}': ()
31..36 '1..76': u32
39..43 '2u32': u32
44..46 '{}': ()
51..74 'if let...u32 {}': ()
58..64 '1..=76': u32
67..71 '2u32': u32
72..74 '{}': ()
"###
);
}
#[test]
fn infer_pattern_match_ergonomics() {
assert_snapshot!(
infer(r#"
struct A<T>(T);
fn test() {
let A(n) = &A(1);
let A(n) = &mut A(1);
}
"#),
@r###"
28..79 '{ ...(1); }': ()
38..42 'A(n)': A<i32>
40..41 'n': &i32
45..50 '&A(1)': &A<i32>
46..47 'A': A<i32>(i32) -> A<i32>
46..50 'A(1)': A<i32>
48..49 '1': i32
60..64 'A(n)': A<i32>
62..63 'n': &mut i32
67..76 '&mut A(1)': &mut A<i32>
72..73 'A': A<i32>(i32) -> A<i32>
72..76 'A(1)': A<i32>
74..75 '1': i32
"###
);
}
#[test]
fn infer_pattern_match_ergonomics_ref() {
mark::check!(match_ergonomics_ref);
assert_snapshot!(
infer(r#"
fn test() {
let v = &(1, &2);
let (_, &w) = v;
}
"#),
@r###"
11..57 '{ ...= v; }': ()
21..22 'v': &(i32, &i32)
25..33 '&(1, &2)': &(i32, &i32)
26..33 '(1, &2)': (i32, &i32)
27..28 '1': i32
30..32 '&2': &i32
31..32 '2': i32
43..50 '(_, &w)': (i32, &i32)
44..45 '_': i32
47..49 '&w': &i32
48..49 'w': i32
53..54 'v': &(i32, &i32)
"###
);
}
#[test]
fn infer_pattern_match_slice() {
assert_snapshot!(
infer(r#"
fn test() {
let slice: &[f64] = &[0.0];
match slice {
&[] => {},
&[a] => {
a;
},
&[b, c] => {
b;
c;
}
_ => {}
}
}
"#),
@r###"
11..210 '{ ... } }': ()
21..26 'slice': &[f64]
37..43 '&[0.0]': &[f64; _]
38..43 '[0.0]': [f64; _]
39..42 '0.0': f64
49..208 'match ... }': ()
55..60 'slice': &[f64]
71..74 '&[]': &[f64]
72..74 '[]': [f64]
78..80 '{}': ()
90..94 '&[a]': &[f64]
91..94 '[a]': [f64]
92..93 'a': f64
98..124 '{ ... }': ()
112..113 'a': f64
134..141 '&[b, c]': &[f64]
135..141 '[b, c]': [f64]
136..137 'b': f64
139..140 'c': f64
145..186 '{ ... }': ()
159..160 'b': f64
174..175 'c': f64
195..196 '_': &[f64]
200..202 '{}': ()
"###
);
}
#[test]
fn infer_pattern_match_arr() {
assert_snapshot!(
infer(r#"
fn test() {
let arr: [f64; 2] = [0.0, 1.0];
match arr {
[1.0, a] => {
a;
},
[b, c] => {
b;
c;
}
}
}
"#),
@r###"
11..180 '{ ... } }': ()
21..24 'arr': [f64; _]
37..47 '[0.0, 1.0]': [f64; _]
38..41 '0.0': f64
43..46 '1.0': f64
53..178 'match ... }': ()
59..62 'arr': [f64; _]
73..81 '[1.0, a]': [f64; _]
74..77 '1.0': f64
74..77 '1.0': f64
79..80 'a': f64
85..111 '{ ... }': ()
99..100 'a': f64
121..127 '[b, c]': [f64; _]
122..123 'b': f64
125..126 'c': f64
131..172 '{ ... }': ()
145..146 'b': f64
160..161 'c': f64
"###
);
}
#[test]
fn infer_adt_pattern() {
assert_snapshot!(
infer(r#"
enum E {
A { x: usize },
B
}
struct S(u32, E);
fn test() {
let e = E::A { x: 3 };
let S(y, z) = foo;
let E::A { x: new_var } = e;
match e {
E::A { x } => x,
E::B if foo => 1,
E::B => 10,
};
let ref d @ E::A { .. } = e;
d;
}
"#),
@r###"
68..289 '{ ... d; }': ()
78..79 'e': E
82..95 'E::A { x: 3 }': E
92..93 '3': usize
106..113 'S(y, z)': S
108..109 'y': u32
111..112 'z': E
116..119 'foo': S
129..148 'E::A {..._var }': E
139..146 'new_var': usize
151..152 'e': E
159..245 'match ... }': usize
165..166 'e': E
177..187 'E::A { x }': E
184..185 'x': usize
191..192 'x': usize
202..206 'E::B': E
210..213 'foo': bool
217..218 '1': usize
228..232 'E::B': E
236..238 '10': usize
256..275 'ref d ...{ .. }': &E
264..275 'E::A { .. }': E
278..279 'e': E
285..286 'd': &E
"###
);
}
#[test]
fn enum_variant_through_self_in_pattern() {
assert_snapshot!(
infer(r#"
enum E {
A { x: usize },
B(usize),
C
}
impl E {
fn test() {
match (loop {}) {
Self::A { x } => { x; },
Self::B(x) => { x; },
Self::C => {},
};
}
}
"#),
@r###"
76..218 '{ ... }': ()
86..211 'match ... }': ()
93..100 'loop {}': !
98..100 '{}': ()
116..129 'Self::A { x }': E
126..127 'x': usize
133..139 '{ x; }': ()
135..136 'x': usize
153..163 'Self::B(x)': E
161..162 'x': usize
167..173 '{ x; }': ()
169..170 'x': usize
187..194 'Self::C': E
198..200 '{}': ()
"###
);
}
#[test]
fn infer_generics_in_patterns() {
assert_snapshot!(
infer(r#"
struct A<T> {
x: T,
}
enum Option<T> {
Some(T),
None,
}
fn test(a1: A<u32>, o: Option<u64>) {
let A { x: x2 } = a1;
let A::<i64> { x: x3 } = A { x: 1 };
match o {
Option::Some(t) => t,
_ => 1,
};
}
"#),
@r###"
79..81 'a1': A<u32>
91..92 'o': Option<u64>
107..244 '{ ... }; }': ()
117..128 'A { x: x2 }': A<u32>
124..126 'x2': u32
131..133 'a1': A<u32>
143..161 'A::<i6...: x3 }': A<i64>
157..159 'x3': i64
164..174 'A { x: 1 }': A<i64>
171..172 '1': i64
180..241 'match ... }': u64
186..187 'o': Option<u64>
198..213 'Option::Some(t)': Option<u64>
211..212 't': u64
217..218 't': u64
228..229 '_': Option<u64>
233..234 '1': u64
"###
);
}
#[test]
fn infer_const_pattern() {
assert_snapshot!(
infer_with_mismatches(r#"
enum Option<T> { None }
use Option::None;
struct Foo;
const Bar: usize = 1;
fn test() {
let a: Option<u32> = None;
let b: Option<i64> = match a {
None => None,
};
let _: () = match () { Foo => Foo }; // Expected mismatch
let _: () = match () { Bar => Bar }; // Expected mismatch
}
"#, true),
@r###"
74..75 '1': usize
88..310 '{ ...atch }': ()
98..99 'a': Option<u32>
115..119 'None': Option<u32>
129..130 'b': Option<i64>
146..183 'match ... }': Option<i64>
152..153 'a': Option<u32>
164..168 'None': Option<u32>
172..176 'None': Option<i64>
193..194 '_': ()
201..224 'match ... Foo }': Foo
207..209 '()': ()
212..215 'Foo': Foo
219..222 'Foo': Foo
255..256 '_': ()
263..286 'match ... Bar }': usize
269..271 '()': ()
274..277 'Bar': usize
281..284 'Bar': usize
201..224: expected (), got Foo
263..286: expected (), got usize
"###
);
}
#[test]
fn infer_guard() {
assert_snapshot!(
infer(r#"
struct S;
impl S { fn foo(&self) -> bool { false } }
fn main() {
match S {
s if s.foo() => (),
}
}
"#), @"
28..32 'self': &S
42..51 '{ false }': bool
44..49 'false': bool
65..116 '{ ... } }': ()
71..114 'match ... }': ()
77..78 'S': S
89..90 's': S
94..95 's': S
94..101 's.foo()': bool
105..107 '()': ()
")
}
#[test]
fn match_ergonomics_in_closure_params() {
assert_snapshot!(
infer(r#"
#[lang = "fn_once"]
trait FnOnce<Args> {
type Output;
}
fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
fn test() {
foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
foo(&(1, "a"), |(x, y)| x);
}
"#),
@r###"
94..95 't': T
100..101 'f': F
111..122 '{ loop {} }': U
113..120 'loop {}': !
118..120 '{}': ()
134..233 '{ ... x); }': ()
140..143 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
140..167 'foo(&(...y)| x)': i32
144..153 '&(1, "a")': &(i32, &str)
145..153 '(1, "a")': (i32, &str)
146..147 '1': i32
149..152 '"a"': &str
155..166 '|&(x, y)| x': |&(i32, &str)| -> i32
156..163 '&(x, y)': &(i32, &str)
157..163 '(x, y)': (i32, &str)
158..159 'x': i32
161..162 'y': &str
165..166 'x': i32
204..207 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
204..230 'foo(&(...y)| x)': &i32
208..217 '&(1, "a")': &(i32, &str)
209..217 '(1, "a")': (i32, &str)
210..211 '1': i32
213..216 '"a"': &str
219..229 '|(x, y)| x': |&(i32, &str)| -> &i32
220..226 '(x, y)': (i32, &str)
221..222 'x': &i32
224..225 'y': &&str
228..229 'x': &i32
"###
);
} | infer(r#"
fn test(x: &i32) {
let y = x;
let &z = x;
let a = z; | random_line_split |
dual_encoder.py | import array
import numpy as np
import tensorflow as tf
from collections import defaultdict
import codecs
from tensorflow.contrib.metrics import streaming_sparse_recall_at_k as recall_at_k
def loadVOCAB(filename):
vocab = None
with open(filename) as f:
vocab = f.read().splitlines()
dct = defaultdict(int)
for idx, word in enumerate(vocab):
dct[word] = idx
return [vocab, dct]
def loadGLOVE(filename, vocab):
"""
Load glove vectors from a .txt file.
Optionally limit the vocabulary to save memory. `vocab` should be a set.
"""
dct = {}
vectors = array.array('d')
current_idx = 0
with codecs.open(filename, "r", encoding="utf-8") as f:
for _, line in enumerate(f):
tokens = line.split(" ")
word = tokens[0]
entries = tokens[1:]
if not vocab or word in vocab:
dct[word] = current_idx
vectors.extend(float(x) for x in entries)
current_idx += 1
word_dim = len(entries)
num_vectors = len(dct)
tf.logging.info("Found {} out of {} vectors in Glove".format(num_vectors, len(vocab)))
return [np.array(vectors).reshape(num_vectors, word_dim), dct]
def buildEMBMatrix(vocab_dict, glove_dict, glove_vectors, embedding_dim):
initial_embeddings = np.random.uniform(-0.25, 0.25, (len(vocab_dict), embedding_dim)).astype("float32")
for word, glove_word_idx in glove_dict.items():
word_idx = vocab_dict.get(word)
initial_embeddings[word_idx, :] = glove_vectors[glove_word_idx]
return initial_embeddings
FLAGS = tf.flags.FLAGS
def get_embeddings(hparams):
if hparams.glove_path and hparams.vocab_path:
tf.logging.info("Loading Glove embeddings...")
vocab_array, vocab_dict = loadVOCAB(hparams.vocab_path)
glove_vectors, glove_dict = loadGLOVE(hparams.glove_path, vocab=set(vocab_array))
initializer = buildEMBMatrix(vocab_dict, glove_dict, glove_vectors, hparams.embedding_dim)
else:
tf.logging.info("No glove/vocab path specificed, starting with random embeddings.")
initializer = tf.random_uniform_initializer(-0.25, 0.25)
return tf.get_variable("word_embeddings", shape=[hparams.vocab_size, hparams.embedding_dim],
initializer=initializer)
class DualEncoders:
| def __init__(self, hparams):
self.hparams = hparams
self.global_step = tf.Variable(0, trainable=False, name='global_step')
self.learning_rate = tf.train.exponential_decay(
self.hparams.learning_rate, # Base learning rate.
self.global_step, # Current index into the dataset.
self.hparams.decay_step, # Decay step.
self.hparams.decay_rate, # Decay rate.
staircase=self.hparams.staircase, name="learning_rate_decay")
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
self.context = tf.placeholder(tf.int64, [None, hparams.max_context_len], name="Context")
self.context_len = tf.placeholder(tf.int64, [None], name="ContextLenValue")
self.utterance = tf.placeholder(tf.int64, [None, hparams.max_context_len], name="Utterance")
self.utterance_len = tf.placeholder(tf.int64, [None], name="UtteranceLenValue")
self.targets = tf.placeholder(tf.int64, [None], name="TargetLabels")
self.val_targets = tf.placeholder(tf.int64, [None], name="ValidationLabels")
logits = self.inference()
probs = tf.sigmoid(logits, name="probs_op")
losses = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=tf.to_float(self.targets), name="CrossEntropy")
mean_loss = tf.reduce_mean(losses, name="Mean_CE_Loss")
train_op = tf.contrib.layers.optimize_loss(loss=mean_loss,
global_step=self.global_step,
learning_rate=self.learning_rate,
clip_gradients=self.hparams.max_grad_norm,
optimizer=hparams.optimizer)
ema = tf.train.ExponentialMovingAverage(decay=0.99)
mean_loss_ema_op = ema.apply([mean_loss])
with tf.control_dependencies([self.targets]): # update only when train targets passed
train_op_group = tf.group(train_op, mean_loss_ema_op)
self.probs_op = probs
self.train_loss_op = ema.average(mean_loss)
self.train_op = train_op_group
self.train_summaries = tf.summary.merge([tf.summary.scalar("loss", mean_loss),
tf.summary.scalar("learning_rate", self.learning_rate)])
self.val_probs, self.val_summary = self.validation_accuracy(probs, self.val_targets, mean_loss)
def inference(self):
W_emb = get_embeddings(self.hparams)
context_emb = tf.nn.embedding_lookup(W_emb, self.context, name="ContextEmbedding")
utterance_emb = tf.nn.embedding_lookup(W_emb, self.utterance, name="UtteranceEmbedding")
with tf.variable_scope("BidirectionalLSTM"):
argsdict = {"forget_bias": 2.0, "use_peepholes": True, "state_is_tuple": True}
fw_cell = tf.contrib.rnn.LSTMCell(self.hparams.rnn_dim, **argsdict)
bw_cell = tf.contrib.rnn.LSTMCell(self.hparams.rnn_dim, **argsdict)
seq = tf.concat([context_emb, utterance_emb],axis=0)
seqlen = tf.concat([self.context_len,self.utterance_len], axis=0)
_, rnn_states = tf.nn.bidirectional_dynamic_rnn(fw_cell, bw_cell,
inputs=seq,
sequence_length=seqlen,
dtype=tf.float32)
fw_encoding_context, fw_encoding_utter = tf.split(rnn_states[0].h, 2, axis=0)
bw_encoding_context, bw_encoding_utter = tf.split(rnn_states[1].h, 2, axis=0)
encoding_context = tf.concat([fw_encoding_context, bw_encoding_context], axis=1)
encoding_utterance = tf.concat([fw_encoding_utter, bw_encoding_utter], axis=1)
with tf.variable_scope("Prediction"):
M = tf.get_variable(name="M", shape=[2 * self.hparams.rnn_dim, 2 * self.hparams.rnn_dim],
initializer=tf.random_uniform_initializer(-0.25, 0.25))
generated_response = tf.matmul(encoding_context, M)
generated_response = tf.expand_dims(generated_response, 2)
encoding_utterance = tf.expand_dims(encoding_utterance, 2)
logits = tf.matmul(generated_response, encoding_utterance, True)
logits = tf.reshape(logits, [-1])
return logits
def validation_accuracy(self, pred_labels, val_labels, val_loss):
shaped_probs = tf.reshape(pred_labels, [-1, 10])
def get_top(k):
return tf.reduce_mean(tf.cast(tf.nn.in_top_k(shaped_probs, val_labels, k=k), tf.float32))
ema = tf.train.ExponentialMovingAverage(decay=0.99)
top1, top2, top3, top5 = [get_top(k) for k in [1, 2, 3, 5]]
maintain_averages = ema.apply([top1, top2, top3, top5, val_loss])
with tf.control_dependencies([self.val_targets]): # update only when validation targets passed
self.update_averages = tf.group(maintain_averages)
# TODO reset shadow variables between validation sessions
self.val_loss = ema.average(val_loss)
self.top1_av = ema.average(top1)
self.top2_av = ema.average(top2)
self.top3_av = ema.average(top3)
self.top5_av = ema.average(top5)
val_summary = tf.summary.merge([tf.summary.scalar("validation_loss", self.val_loss),
tf.summary.scalar("top1", self.top1_av),
tf.summary.scalar("top2", self.top2_av),
tf.summary.scalar("top3", self.top3_av),
tf.summary.scalar("top5", self.top5_av),
tf.summary.histogram("correct_probs_distribution", shaped_probs[:, 0]),
tf.summary.histogram("incorrect_probs_distribution", shaped_probs[:, 1:])])
return shaped_probs, val_summary
def setSession(self, session):
self._sess = session
def save_model(self, saver, location, step):
saver.save(self._sess, location, global_step=step)
def load_model(self, saver, location):
print("Variable initializaion")
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
self._sess.run(init_op)
ckpt = tf.train.get_checkpoint_state(location)
if ckpt and ckpt.model_checkpoint_path:
print('Restoring model')
saver.restore(self._sess, ckpt.model_checkpoint_path)
def batch_fit(self, batch_dict):
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"],
self.targets: batch_dict["label"]}
train_summary, step, _, loss = self._sess.run([self.train_summaries, self.global_step,
self.train_op, self.train_loss_op], feed_dict=feed_dict)
return loss, step, train_summary
def predict(self, batch_dict):
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"]}
return self._sess.run([self.probs_op], feed_dict=feed_dict)
def validate(self, batch_dict):
val_targets = np.zeros([len(batch_dict["label"]) / 10], dtype=np.int64)
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"],
self.targets: batch_dict["label"],
self.val_targets: val_targets}
_, val_loss, t1, t2, t3, t5, val_probs, val_summary = self._sess.run([self.update_averages, self.val_loss,
self.top1_av, self.top2_av,
self.top3_av, self.top5_av,
self.val_probs,
self.val_summary],
feed_dict=feed_dict)
return [t1, t2, t3, t5], val_loss, val_summary | identifier_body | |
dual_encoder.py | import array
import numpy as np
import tensorflow as tf
from collections import defaultdict
import codecs
from tensorflow.contrib.metrics import streaming_sparse_recall_at_k as recall_at_k
def loadVOCAB(filename):
vocab = None
with open(filename) as f:
vocab = f.read().splitlines()
dct = defaultdict(int)
for idx, word in enumerate(vocab):
dct[word] = idx
return [vocab, dct]
def loadGLOVE(filename, vocab):
"""
Load glove vectors from a .txt file.
Optionally limit the vocabulary to save memory. `vocab` should be a set.
"""
dct = {}
vectors = array.array('d')
current_idx = 0
with codecs.open(filename, "r", encoding="utf-8") as f:
for _, line in enumerate(f):
tokens = line.split(" ")
word = tokens[0]
entries = tokens[1:]
if not vocab or word in vocab:
dct[word] = current_idx
vectors.extend(float(x) for x in entries)
current_idx += 1
word_dim = len(entries)
num_vectors = len(dct)
tf.logging.info("Found {} out of {} vectors in Glove".format(num_vectors, len(vocab)))
return [np.array(vectors).reshape(num_vectors, word_dim), dct]
def buildEMBMatrix(vocab_dict, glove_dict, glove_vectors, embedding_dim):
initial_embeddings = np.random.uniform(-0.25, 0.25, (len(vocab_dict), embedding_dim)).astype("float32")
for word, glove_word_idx in glove_dict.items():
word_idx = vocab_dict.get(word)
initial_embeddings[word_idx, :] = glove_vectors[glove_word_idx]
return initial_embeddings
FLAGS = tf.flags.FLAGS
def get_embeddings(hparams):
if hparams.glove_path and hparams.vocab_path:
tf.logging.info("Loading Glove embeddings...")
vocab_array, vocab_dict = loadVOCAB(hparams.vocab_path)
glove_vectors, glove_dict = loadGLOVE(hparams.glove_path, vocab=set(vocab_array))
initializer = buildEMBMatrix(vocab_dict, glove_dict, glove_vectors, hparams.embedding_dim)
else:
tf.logging.info("No glove/vocab path specificed, starting with random embeddings.")
initializer = tf.random_uniform_initializer(-0.25, 0.25)
return tf.get_variable("word_embeddings", shape=[hparams.vocab_size, hparams.embedding_dim],
initializer=initializer)
class DualEncoders:
def __init__(self, hparams):
self.hparams = hparams
self.global_step = tf.Variable(0, trainable=False, name='global_step')
| self.hparams.learning_rate, # Base learning rate.
self.global_step, # Current index into the dataset.
self.hparams.decay_step, # Decay step.
self.hparams.decay_rate, # Decay rate.
staircase=self.hparams.staircase, name="learning_rate_decay")
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
self.context = tf.placeholder(tf.int64, [None, hparams.max_context_len], name="Context")
self.context_len = tf.placeholder(tf.int64, [None], name="ContextLenValue")
self.utterance = tf.placeholder(tf.int64, [None, hparams.max_context_len], name="Utterance")
self.utterance_len = tf.placeholder(tf.int64, [None], name="UtteranceLenValue")
self.targets = tf.placeholder(tf.int64, [None], name="TargetLabels")
self.val_targets = tf.placeholder(tf.int64, [None], name="ValidationLabels")
logits = self.inference()
probs = tf.sigmoid(logits, name="probs_op")
losses = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=tf.to_float(self.targets), name="CrossEntropy")
mean_loss = tf.reduce_mean(losses, name="Mean_CE_Loss")
train_op = tf.contrib.layers.optimize_loss(loss=mean_loss,
global_step=self.global_step,
learning_rate=self.learning_rate,
clip_gradients=self.hparams.max_grad_norm,
optimizer=hparams.optimizer)
ema = tf.train.ExponentialMovingAverage(decay=0.99)
mean_loss_ema_op = ema.apply([mean_loss])
with tf.control_dependencies([self.targets]): # update only when train targets passed
train_op_group = tf.group(train_op, mean_loss_ema_op)
self.probs_op = probs
self.train_loss_op = ema.average(mean_loss)
self.train_op = train_op_group
self.train_summaries = tf.summary.merge([tf.summary.scalar("loss", mean_loss),
tf.summary.scalar("learning_rate", self.learning_rate)])
self.val_probs, self.val_summary = self.validation_accuracy(probs, self.val_targets, mean_loss)
def inference(self):
W_emb = get_embeddings(self.hparams)
context_emb = tf.nn.embedding_lookup(W_emb, self.context, name="ContextEmbedding")
utterance_emb = tf.nn.embedding_lookup(W_emb, self.utterance, name="UtteranceEmbedding")
with tf.variable_scope("BidirectionalLSTM"):
argsdict = {"forget_bias": 2.0, "use_peepholes": True, "state_is_tuple": True}
fw_cell = tf.contrib.rnn.LSTMCell(self.hparams.rnn_dim, **argsdict)
bw_cell = tf.contrib.rnn.LSTMCell(self.hparams.rnn_dim, **argsdict)
seq = tf.concat([context_emb, utterance_emb],axis=0)
seqlen = tf.concat([self.context_len,self.utterance_len], axis=0)
_, rnn_states = tf.nn.bidirectional_dynamic_rnn(fw_cell, bw_cell,
inputs=seq,
sequence_length=seqlen,
dtype=tf.float32)
fw_encoding_context, fw_encoding_utter = tf.split(rnn_states[0].h, 2, axis=0)
bw_encoding_context, bw_encoding_utter = tf.split(rnn_states[1].h, 2, axis=0)
encoding_context = tf.concat([fw_encoding_context, bw_encoding_context], axis=1)
encoding_utterance = tf.concat([fw_encoding_utter, bw_encoding_utter], axis=1)
with tf.variable_scope("Prediction"):
M = tf.get_variable(name="M", shape=[2 * self.hparams.rnn_dim, 2 * self.hparams.rnn_dim],
initializer=tf.random_uniform_initializer(-0.25, 0.25))
generated_response = tf.matmul(encoding_context, M)
generated_response = tf.expand_dims(generated_response, 2)
encoding_utterance = tf.expand_dims(encoding_utterance, 2)
logits = tf.matmul(generated_response, encoding_utterance, True)
logits = tf.reshape(logits, [-1])
return logits
def validation_accuracy(self, pred_labels, val_labels, val_loss):
shaped_probs = tf.reshape(pred_labels, [-1, 10])
def get_top(k):
return tf.reduce_mean(tf.cast(tf.nn.in_top_k(shaped_probs, val_labels, k=k), tf.float32))
ema = tf.train.ExponentialMovingAverage(decay=0.99)
top1, top2, top3, top5 = [get_top(k) for k in [1, 2, 3, 5]]
maintain_averages = ema.apply([top1, top2, top3, top5, val_loss])
with tf.control_dependencies([self.val_targets]): # update only when validation targets passed
self.update_averages = tf.group(maintain_averages)
# TODO reset shadow variables between validation sessions
self.val_loss = ema.average(val_loss)
self.top1_av = ema.average(top1)
self.top2_av = ema.average(top2)
self.top3_av = ema.average(top3)
self.top5_av = ema.average(top5)
val_summary = tf.summary.merge([tf.summary.scalar("validation_loss", self.val_loss),
tf.summary.scalar("top1", self.top1_av),
tf.summary.scalar("top2", self.top2_av),
tf.summary.scalar("top3", self.top3_av),
tf.summary.scalar("top5", self.top5_av),
tf.summary.histogram("correct_probs_distribution", shaped_probs[:, 0]),
tf.summary.histogram("incorrect_probs_distribution", shaped_probs[:, 1:])])
return shaped_probs, val_summary
def setSession(self, session):
self._sess = session
def save_model(self, saver, location, step):
saver.save(self._sess, location, global_step=step)
def load_model(self, saver, location):
print("Variable initializaion")
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
self._sess.run(init_op)
ckpt = tf.train.get_checkpoint_state(location)
if ckpt and ckpt.model_checkpoint_path:
print('Restoring model')
saver.restore(self._sess, ckpt.model_checkpoint_path)
def batch_fit(self, batch_dict):
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"],
self.targets: batch_dict["label"]}
train_summary, step, _, loss = self._sess.run([self.train_summaries, self.global_step,
self.train_op, self.train_loss_op], feed_dict=feed_dict)
return loss, step, train_summary
def predict(self, batch_dict):
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"]}
return self._sess.run([self.probs_op], feed_dict=feed_dict)
def validate(self, batch_dict):
val_targets = np.zeros([len(batch_dict["label"]) / 10], dtype=np.int64)
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"],
self.targets: batch_dict["label"],
self.val_targets: val_targets}
_, val_loss, t1, t2, t3, t5, val_probs, val_summary = self._sess.run([self.update_averages, self.val_loss,
self.top1_av, self.top2_av,
self.top3_av, self.top5_av,
self.val_probs,
self.val_summary],
feed_dict=feed_dict)
return [t1, t2, t3, t5], val_loss, val_summary | self.learning_rate = tf.train.exponential_decay( | random_line_split |
dual_encoder.py | import array
import numpy as np
import tensorflow as tf
from collections import defaultdict
import codecs
from tensorflow.contrib.metrics import streaming_sparse_recall_at_k as recall_at_k
def loadVOCAB(filename):
vocab = None
with open(filename) as f:
vocab = f.read().splitlines()
dct = defaultdict(int)
for idx, word in enumerate(vocab):
|
return [vocab, dct]
def loadGLOVE(filename, vocab):
"""
Load glove vectors from a .txt file.
Optionally limit the vocabulary to save memory. `vocab` should be a set.
"""
dct = {}
vectors = array.array('d')
current_idx = 0
with codecs.open(filename, "r", encoding="utf-8") as f:
for _, line in enumerate(f):
tokens = line.split(" ")
word = tokens[0]
entries = tokens[1:]
if not vocab or word in vocab:
dct[word] = current_idx
vectors.extend(float(x) for x in entries)
current_idx += 1
word_dim = len(entries)
num_vectors = len(dct)
tf.logging.info("Found {} out of {} vectors in Glove".format(num_vectors, len(vocab)))
return [np.array(vectors).reshape(num_vectors, word_dim), dct]
def buildEMBMatrix(vocab_dict, glove_dict, glove_vectors, embedding_dim):
initial_embeddings = np.random.uniform(-0.25, 0.25, (len(vocab_dict), embedding_dim)).astype("float32")
for word, glove_word_idx in glove_dict.items():
word_idx = vocab_dict.get(word)
initial_embeddings[word_idx, :] = glove_vectors[glove_word_idx]
return initial_embeddings
FLAGS = tf.flags.FLAGS
def get_embeddings(hparams):
if hparams.glove_path and hparams.vocab_path:
tf.logging.info("Loading Glove embeddings...")
vocab_array, vocab_dict = loadVOCAB(hparams.vocab_path)
glove_vectors, glove_dict = loadGLOVE(hparams.glove_path, vocab=set(vocab_array))
initializer = buildEMBMatrix(vocab_dict, glove_dict, glove_vectors, hparams.embedding_dim)
else:
tf.logging.info("No glove/vocab path specificed, starting with random embeddings.")
initializer = tf.random_uniform_initializer(-0.25, 0.25)
return tf.get_variable("word_embeddings", shape=[hparams.vocab_size, hparams.embedding_dim],
initializer=initializer)
class DualEncoders:
def __init__(self, hparams):
self.hparams = hparams
self.global_step = tf.Variable(0, trainable=False, name='global_step')
self.learning_rate = tf.train.exponential_decay(
self.hparams.learning_rate, # Base learning rate.
self.global_step, # Current index into the dataset.
self.hparams.decay_step, # Decay step.
self.hparams.decay_rate, # Decay rate.
staircase=self.hparams.staircase, name="learning_rate_decay")
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
self.context = tf.placeholder(tf.int64, [None, hparams.max_context_len], name="Context")
self.context_len = tf.placeholder(tf.int64, [None], name="ContextLenValue")
self.utterance = tf.placeholder(tf.int64, [None, hparams.max_context_len], name="Utterance")
self.utterance_len = tf.placeholder(tf.int64, [None], name="UtteranceLenValue")
self.targets = tf.placeholder(tf.int64, [None], name="TargetLabels")
self.val_targets = tf.placeholder(tf.int64, [None], name="ValidationLabels")
logits = self.inference()
probs = tf.sigmoid(logits, name="probs_op")
losses = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=tf.to_float(self.targets), name="CrossEntropy")
mean_loss = tf.reduce_mean(losses, name="Mean_CE_Loss")
train_op = tf.contrib.layers.optimize_loss(loss=mean_loss,
global_step=self.global_step,
learning_rate=self.learning_rate,
clip_gradients=self.hparams.max_grad_norm,
optimizer=hparams.optimizer)
ema = tf.train.ExponentialMovingAverage(decay=0.99)
mean_loss_ema_op = ema.apply([mean_loss])
with tf.control_dependencies([self.targets]): # update only when train targets passed
train_op_group = tf.group(train_op, mean_loss_ema_op)
self.probs_op = probs
self.train_loss_op = ema.average(mean_loss)
self.train_op = train_op_group
self.train_summaries = tf.summary.merge([tf.summary.scalar("loss", mean_loss),
tf.summary.scalar("learning_rate", self.learning_rate)])
self.val_probs, self.val_summary = self.validation_accuracy(probs, self.val_targets, mean_loss)
def inference(self):
W_emb = get_embeddings(self.hparams)
context_emb = tf.nn.embedding_lookup(W_emb, self.context, name="ContextEmbedding")
utterance_emb = tf.nn.embedding_lookup(W_emb, self.utterance, name="UtteranceEmbedding")
with tf.variable_scope("BidirectionalLSTM"):
argsdict = {"forget_bias": 2.0, "use_peepholes": True, "state_is_tuple": True}
fw_cell = tf.contrib.rnn.LSTMCell(self.hparams.rnn_dim, **argsdict)
bw_cell = tf.contrib.rnn.LSTMCell(self.hparams.rnn_dim, **argsdict)
seq = tf.concat([context_emb, utterance_emb],axis=0)
seqlen = tf.concat([self.context_len,self.utterance_len], axis=0)
_, rnn_states = tf.nn.bidirectional_dynamic_rnn(fw_cell, bw_cell,
inputs=seq,
sequence_length=seqlen,
dtype=tf.float32)
fw_encoding_context, fw_encoding_utter = tf.split(rnn_states[0].h, 2, axis=0)
bw_encoding_context, bw_encoding_utter = tf.split(rnn_states[1].h, 2, axis=0)
encoding_context = tf.concat([fw_encoding_context, bw_encoding_context], axis=1)
encoding_utterance = tf.concat([fw_encoding_utter, bw_encoding_utter], axis=1)
with tf.variable_scope("Prediction"):
M = tf.get_variable(name="M", shape=[2 * self.hparams.rnn_dim, 2 * self.hparams.rnn_dim],
initializer=tf.random_uniform_initializer(-0.25, 0.25))
generated_response = tf.matmul(encoding_context, M)
generated_response = tf.expand_dims(generated_response, 2)
encoding_utterance = tf.expand_dims(encoding_utterance, 2)
logits = tf.matmul(generated_response, encoding_utterance, True)
logits = tf.reshape(logits, [-1])
return logits
def validation_accuracy(self, pred_labels, val_labels, val_loss):
shaped_probs = tf.reshape(pred_labels, [-1, 10])
def get_top(k):
return tf.reduce_mean(tf.cast(tf.nn.in_top_k(shaped_probs, val_labels, k=k), tf.float32))
ema = tf.train.ExponentialMovingAverage(decay=0.99)
top1, top2, top3, top5 = [get_top(k) for k in [1, 2, 3, 5]]
maintain_averages = ema.apply([top1, top2, top3, top5, val_loss])
with tf.control_dependencies([self.val_targets]): # update only when validation targets passed
self.update_averages = tf.group(maintain_averages)
# TODO reset shadow variables between validation sessions
self.val_loss = ema.average(val_loss)
self.top1_av = ema.average(top1)
self.top2_av = ema.average(top2)
self.top3_av = ema.average(top3)
self.top5_av = ema.average(top5)
val_summary = tf.summary.merge([tf.summary.scalar("validation_loss", self.val_loss),
tf.summary.scalar("top1", self.top1_av),
tf.summary.scalar("top2", self.top2_av),
tf.summary.scalar("top3", self.top3_av),
tf.summary.scalar("top5", self.top5_av),
tf.summary.histogram("correct_probs_distribution", shaped_probs[:, 0]),
tf.summary.histogram("incorrect_probs_distribution", shaped_probs[:, 1:])])
return shaped_probs, val_summary
def setSession(self, session):
self._sess = session
def save_model(self, saver, location, step):
saver.save(self._sess, location, global_step=step)
def load_model(self, saver, location):
print("Variable initializaion")
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
self._sess.run(init_op)
ckpt = tf.train.get_checkpoint_state(location)
if ckpt and ckpt.model_checkpoint_path:
print('Restoring model')
saver.restore(self._sess, ckpt.model_checkpoint_path)
def batch_fit(self, batch_dict):
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"],
self.targets: batch_dict["label"]}
train_summary, step, _, loss = self._sess.run([self.train_summaries, self.global_step,
self.train_op, self.train_loss_op], feed_dict=feed_dict)
return loss, step, train_summary
def predict(self, batch_dict):
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"]}
return self._sess.run([self.probs_op], feed_dict=feed_dict)
def validate(self, batch_dict):
val_targets = np.zeros([len(batch_dict["label"]) / 10], dtype=np.int64)
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"],
self.targets: batch_dict["label"],
self.val_targets: val_targets}
_, val_loss, t1, t2, t3, t5, val_probs, val_summary = self._sess.run([self.update_averages, self.val_loss,
self.top1_av, self.top2_av,
self.top3_av, self.top5_av,
self.val_probs,
self.val_summary],
feed_dict=feed_dict)
return [t1, t2, t3, t5], val_loss, val_summary
| dct[word] = idx | conditional_block |
dual_encoder.py | import array
import numpy as np
import tensorflow as tf
from collections import defaultdict
import codecs
from tensorflow.contrib.metrics import streaming_sparse_recall_at_k as recall_at_k
def loadVOCAB(filename):
vocab = None
with open(filename) as f:
vocab = f.read().splitlines()
dct = defaultdict(int)
for idx, word in enumerate(vocab):
dct[word] = idx
return [vocab, dct]
def loadGLOVE(filename, vocab):
"""
Load glove vectors from a .txt file.
Optionally limit the vocabulary to save memory. `vocab` should be a set.
"""
dct = {}
vectors = array.array('d')
current_idx = 0
with codecs.open(filename, "r", encoding="utf-8") as f:
for _, line in enumerate(f):
tokens = line.split(" ")
word = tokens[0]
entries = tokens[1:]
if not vocab or word in vocab:
dct[word] = current_idx
vectors.extend(float(x) for x in entries)
current_idx += 1
word_dim = len(entries)
num_vectors = len(dct)
tf.logging.info("Found {} out of {} vectors in Glove".format(num_vectors, len(vocab)))
return [np.array(vectors).reshape(num_vectors, word_dim), dct]
def buildEMBMatrix(vocab_dict, glove_dict, glove_vectors, embedding_dim):
initial_embeddings = np.random.uniform(-0.25, 0.25, (len(vocab_dict), embedding_dim)).astype("float32")
for word, glove_word_idx in glove_dict.items():
word_idx = vocab_dict.get(word)
initial_embeddings[word_idx, :] = glove_vectors[glove_word_idx]
return initial_embeddings
FLAGS = tf.flags.FLAGS
def get_embeddings(hparams):
if hparams.glove_path and hparams.vocab_path:
tf.logging.info("Loading Glove embeddings...")
vocab_array, vocab_dict = loadVOCAB(hparams.vocab_path)
glove_vectors, glove_dict = loadGLOVE(hparams.glove_path, vocab=set(vocab_array))
initializer = buildEMBMatrix(vocab_dict, glove_dict, glove_vectors, hparams.embedding_dim)
else:
tf.logging.info("No glove/vocab path specificed, starting with random embeddings.")
initializer = tf.random_uniform_initializer(-0.25, 0.25)
return tf.get_variable("word_embeddings", shape=[hparams.vocab_size, hparams.embedding_dim],
initializer=initializer)
class DualEncoders:
def __init__(self, hparams):
self.hparams = hparams
self.global_step = tf.Variable(0, trainable=False, name='global_step')
self.learning_rate = tf.train.exponential_decay(
self.hparams.learning_rate, # Base learning rate.
self.global_step, # Current index into the dataset.
self.hparams.decay_step, # Decay step.
self.hparams.decay_rate, # Decay rate.
staircase=self.hparams.staircase, name="learning_rate_decay")
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
self.context = tf.placeholder(tf.int64, [None, hparams.max_context_len], name="Context")
self.context_len = tf.placeholder(tf.int64, [None], name="ContextLenValue")
self.utterance = tf.placeholder(tf.int64, [None, hparams.max_context_len], name="Utterance")
self.utterance_len = tf.placeholder(tf.int64, [None], name="UtteranceLenValue")
self.targets = tf.placeholder(tf.int64, [None], name="TargetLabels")
self.val_targets = tf.placeholder(tf.int64, [None], name="ValidationLabels")
logits = self.inference()
probs = tf.sigmoid(logits, name="probs_op")
losses = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=tf.to_float(self.targets), name="CrossEntropy")
mean_loss = tf.reduce_mean(losses, name="Mean_CE_Loss")
train_op = tf.contrib.layers.optimize_loss(loss=mean_loss,
global_step=self.global_step,
learning_rate=self.learning_rate,
clip_gradients=self.hparams.max_grad_norm,
optimizer=hparams.optimizer)
ema = tf.train.ExponentialMovingAverage(decay=0.99)
mean_loss_ema_op = ema.apply([mean_loss])
with tf.control_dependencies([self.targets]): # update only when train targets passed
train_op_group = tf.group(train_op, mean_loss_ema_op)
self.probs_op = probs
self.train_loss_op = ema.average(mean_loss)
self.train_op = train_op_group
self.train_summaries = tf.summary.merge([tf.summary.scalar("loss", mean_loss),
tf.summary.scalar("learning_rate", self.learning_rate)])
self.val_probs, self.val_summary = self.validation_accuracy(probs, self.val_targets, mean_loss)
def inference(self):
W_emb = get_embeddings(self.hparams)
context_emb = tf.nn.embedding_lookup(W_emb, self.context, name="ContextEmbedding")
utterance_emb = tf.nn.embedding_lookup(W_emb, self.utterance, name="UtteranceEmbedding")
with tf.variable_scope("BidirectionalLSTM"):
argsdict = {"forget_bias": 2.0, "use_peepholes": True, "state_is_tuple": True}
fw_cell = tf.contrib.rnn.LSTMCell(self.hparams.rnn_dim, **argsdict)
bw_cell = tf.contrib.rnn.LSTMCell(self.hparams.rnn_dim, **argsdict)
seq = tf.concat([context_emb, utterance_emb],axis=0)
seqlen = tf.concat([self.context_len,self.utterance_len], axis=0)
_, rnn_states = tf.nn.bidirectional_dynamic_rnn(fw_cell, bw_cell,
inputs=seq,
sequence_length=seqlen,
dtype=tf.float32)
fw_encoding_context, fw_encoding_utter = tf.split(rnn_states[0].h, 2, axis=0)
bw_encoding_context, bw_encoding_utter = tf.split(rnn_states[1].h, 2, axis=0)
encoding_context = tf.concat([fw_encoding_context, bw_encoding_context], axis=1)
encoding_utterance = tf.concat([fw_encoding_utter, bw_encoding_utter], axis=1)
with tf.variable_scope("Prediction"):
M = tf.get_variable(name="M", shape=[2 * self.hparams.rnn_dim, 2 * self.hparams.rnn_dim],
initializer=tf.random_uniform_initializer(-0.25, 0.25))
generated_response = tf.matmul(encoding_context, M)
generated_response = tf.expand_dims(generated_response, 2)
encoding_utterance = tf.expand_dims(encoding_utterance, 2)
logits = tf.matmul(generated_response, encoding_utterance, True)
logits = tf.reshape(logits, [-1])
return logits
def validation_accuracy(self, pred_labels, val_labels, val_loss):
shaped_probs = tf.reshape(pred_labels, [-1, 10])
def get_top(k):
return tf.reduce_mean(tf.cast(tf.nn.in_top_k(shaped_probs, val_labels, k=k), tf.float32))
ema = tf.train.ExponentialMovingAverage(decay=0.99)
top1, top2, top3, top5 = [get_top(k) for k in [1, 2, 3, 5]]
maintain_averages = ema.apply([top1, top2, top3, top5, val_loss])
with tf.control_dependencies([self.val_targets]): # update only when validation targets passed
self.update_averages = tf.group(maintain_averages)
# TODO reset shadow variables between validation sessions
self.val_loss = ema.average(val_loss)
self.top1_av = ema.average(top1)
self.top2_av = ema.average(top2)
self.top3_av = ema.average(top3)
self.top5_av = ema.average(top5)
val_summary = tf.summary.merge([tf.summary.scalar("validation_loss", self.val_loss),
tf.summary.scalar("top1", self.top1_av),
tf.summary.scalar("top2", self.top2_av),
tf.summary.scalar("top3", self.top3_av),
tf.summary.scalar("top5", self.top5_av),
tf.summary.histogram("correct_probs_distribution", shaped_probs[:, 0]),
tf.summary.histogram("incorrect_probs_distribution", shaped_probs[:, 1:])])
return shaped_probs, val_summary
def setSession(self, session):
self._sess = session
def save_model(self, saver, location, step):
saver.save(self._sess, location, global_step=step)
def load_model(self, saver, location):
print("Variable initializaion")
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
self._sess.run(init_op)
ckpt = tf.train.get_checkpoint_state(location)
if ckpt and ckpt.model_checkpoint_path:
print('Restoring model')
saver.restore(self._sess, ckpt.model_checkpoint_path)
def batch_fit(self, batch_dict):
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"],
self.targets: batch_dict["label"]}
train_summary, step, _, loss = self._sess.run([self.train_summaries, self.global_step,
self.train_op, self.train_loss_op], feed_dict=feed_dict)
return loss, step, train_summary
def predict(self, batch_dict):
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"]}
return self._sess.run([self.probs_op], feed_dict=feed_dict)
def | (self, batch_dict):
val_targets = np.zeros([len(batch_dict["label"]) / 10], dtype=np.int64)
feed_dict = {self.context: batch_dict["context"],
self.context_len: batch_dict["context_len"],
self.utterance: batch_dict["utterance"],
self.utterance_len: batch_dict["utterance_len"],
self.targets: batch_dict["label"],
self.val_targets: val_targets}
_, val_loss, t1, t2, t3, t5, val_probs, val_summary = self._sess.run([self.update_averages, self.val_loss,
self.top1_av, self.top2_av,
self.top3_av, self.top5_av,
self.val_probs,
self.val_summary],
feed_dict=feed_dict)
return [t1, t2, t3, t5], val_loss, val_summary
| validate | identifier_name |
list_view.rs | use std::sync::Arc;
use crate::aliases::WinResult;
use crate::co;
use crate::funcs::{GetAsyncKeyState, GetCursorPos, PostQuitMessage};
use crate::gui::base::Base;
use crate::gui::events::ListViewEvents;
use crate::gui::native_controls::list_view_columns::ListViewColumns;
use crate::gui::native_controls::list_view_items::ListViewItems;
use crate::gui::native_controls::base_native_control::{BaseNativeControl, OptsId};
use crate::gui::privs::{auto_ctrl_id, multiply_dpi};
use crate::gui::traits::{baseref_from_parent, Parent};
use crate::handles::{HIMAGELIST, HMENU, HWND};
use crate::msg::lvm;
use crate::structs::{LVHITTESTINFO, NMITEMACTIVATE, NMLVKEYDOWN, POINT, SIZE};
/// Native
/// [list view](https://docs.microsoft.com/en-us/windows/win32/controls/list-view-controls-overview)
/// control. Not to be confused with the simpler [list box](crate::gui::ListBox)
/// control.
///
/// Implements [`Child`](crate::gui::Child) trait.
#[derive(Clone)]
pub struct ListView(Arc<Obj>);
struct Obj { // actual fields of ListView
base: BaseNativeControl,
opts_id: OptsId<ListViewOpts>,
events: ListViewEvents,
columns: ListViewColumns,
items: ListViewItems,
context_menu: Option<HMENU>,
}
impl_send_sync_child!(ListView);
impl ListView {
/// Instantiates a new `ListView` object, to be created on the parent window
/// with [`HWND::CreateWindowEx`](crate::HWND::CreateWindowEx).
pub fn new(parent: &dyn Parent, opts: ListViewOpts) -> ListView {
let parent_base_ref = baseref_from_parent(parent);
let opts = ListViewOpts::define_ctrl_id(opts);
let ctrl_id = opts.ctrl_id;
let context_menu = opts.context_menu;
let new_self = Self(
Arc::new(
Obj {
base: BaseNativeControl::new(parent_base_ref),
opts_id: OptsId::Wnd(opts),
events: ListViewEvents::new(parent_base_ref, ctrl_id),
columns: ListViewColumns::new(),
items: ListViewItems::new(),
context_menu,
},
),
);
new_self.0.columns.set_hwnd_ref(new_self.0.base.hwnd_ref());
new_self.0.items.set_hwnd_ref(new_self.0.base.hwnd_ref());
parent_base_ref.privileged_events_ref().wm(parent_base_ref.creation_wm(), {
let me = new_self.clone();
move |_| { me.create(); 0 }
});
new_self.handled_events(parent_base_ref, ctrl_id);
new_self
}
/// Instantiates a new `ListView` object, to be loaded from a dialog
/// resource with [`HWND::GetDlgItem`](crate::HWND::GetDlgItem).
///
/// **Note:** The optional `context_menu` is shared: it must be destroyed
/// manually after the control is destroyed. But note that menus loaded from
/// resources don't need to be destroyed.
pub fn new_dlg(
parent: &dyn Parent,
ctrl_id: u16,
context_menu: Option<HMENU>) -> ListView
{
let parent_base_ref = baseref_from_parent(parent);
let new_self = Self(
Arc::new(
Obj {
base: BaseNativeControl::new(parent_base_ref),
opts_id: OptsId::Dlg(ctrl_id),
events: ListViewEvents::new(parent_base_ref, ctrl_id),
columns: ListViewColumns::new(),
items: ListViewItems::new(),
context_menu,
},
),
);
new_self.0.columns.set_hwnd_ref(new_self.0.base.hwnd_ref());
new_self.0.items.set_hwnd_ref(new_self.0.base.hwnd_ref());
parent_base_ref.privileged_events_ref().wm_init_dialog({
let me = new_self.clone();
move |_| { me.create(); true }
});
new_self.handled_events(parent_base_ref, ctrl_id);
new_self
}
fn create(&self) {
| match &self.0.opts_id {
OptsId::Wnd(opts) => {
let mut pos = opts.position;
let mut sz = opts.size;
multiply_dpi(Some(&mut pos), Some(&mut sz))?;
self.0.base.create_window( // may panic
"SysListView32", None, pos, sz,
opts.ctrl_id,
opts.window_ex_style,
opts.window_style | opts.list_view_style.into(),
)?;
if opts.list_view_ex_style != co::LVS_EX::NoValue {
self.toggle_extended_style(true, opts.list_view_ex_style);
}
self.columns().add(&opts.columns)?;
Ok(())
},
OptsId::Dlg(ctrl_id) => self.0.base.create_dlg(*ctrl_id).map(|_| ()), // may panic
}
}().unwrap_or_else(|err| PostQuitMessage(err))
}
fn handled_events(&self, parent_base_ref: &Base, ctrl_id: u16) {
parent_base_ref.privileged_events_ref().add_nfy(ctrl_id, co::LVN::KEYDOWN.into(), {
let me = self.clone();
move |p| {
let lvnk = unsafe { p.cast_nmhdr::<NMLVKEYDOWN>() };
let has_ctrl = GetAsyncKeyState(co::VK::CONTROL);
let has_shift = GetAsyncKeyState(co::VK::SHIFT);
if has_ctrl && lvnk.wVKey == co::VK('A' as _) { // Ctrl+A
me.items().set_selected_all(true)
.unwrap_or_else(|err| PostQuitMessage(err));
} else if lvnk.wVKey == co::VK::APPS { // context menu key
me.show_context_menu(false, has_ctrl, has_shift).unwrap();
}
None
}
});
parent_base_ref.privileged_events_ref().add_nfy(ctrl_id, co::NM::RCLICK.into(), {
let me = self.clone();
move |p| {
let nmia = unsafe { p.cast_nmhdr::<NMITEMACTIVATE>() };
let has_ctrl = nmia.uKeyFlags.has(co::LVKF::CONTROL);
let has_shift = nmia.uKeyFlags.has(co::LVKF::SHIFT);
me.show_context_menu(true, has_ctrl, has_shift).unwrap();
None
}
});
}
pub_fn_ctrlid_hwnd_on_onsubclass!(ListViewEvents);
/// Exposes the column methods.
pub fn columns(&self) -> &ListViewColumns {
&self.0.columns
}
/// Returns the context menu attached to this list view, if any.
///
/// The context menu is attached when the list view is created, either by
/// calling [`ListView::new`](crate::gui::ListView::new) or
/// [`ListView::new_dlg`](crate::gui::ListView::new_dlg).
pub fn context_menu(&self) -> Option<HMENU> {
self.0.context_menu
}
/// Retrieves one of the associated image lists by sending an
/// [`LVM_GETIMAGELIST`](crate::msg::lvm::GetImageList) message.
pub fn image_list(&self, kind: co::LVSIL) -> Option<HIMAGELIST> {
self.hwnd().SendMessage(lvm::GetImageList { kind })
}
/// Exposes the item methods.
pub fn items(&self) -> &ListViewItems {
&self.0.items
}
/// Retrieves the current view by sending an
/// [`LVM_GETVIEW`](crate::msg::lvm::GetView) message.
pub fn current_view(&self) -> co::LV_VIEW {
self.hwnd().SendMessage(lvm::GetView {})
}
/// Sets the current view by sending an
/// [`LVM_SETVIEW`](crate::msg::lvm::SetView) message.
pub fn set_current_view(&self, view: co::LV_VIEW) -> WinResult<()> {
self.hwnd().SendMessage(lvm::SetView { view })
}
/// Sets the one of the associated image lists by sending an
/// [`LVM_SETIMAGELIST`](crate::msg::lvm::SetImageList) message.
///
/// Returns the previous image list, if any.
pub fn set_image_list(&self,
kind: co::LVSIL, himagelist: HIMAGELIST) -> Option<HIMAGELIST>
{
self.hwnd().SendMessage(lvm::SetImageList { kind, himagelist })
}
/// Toggles the given extended list view styles by sending an
/// [`LVM_SETEXTENDEDLISTVIEWSTYLE`](crate::msg::lvm::SetExtendedListViewStyle)
/// message.
pub fn toggle_extended_style(&self, set: bool, ex_style: co::LVS_EX) {
self.hwnd().SendMessage(lvm::SetExtendedListViewStyle {
mask: ex_style,
style: if set { ex_style } else { co::LVS_EX::NoValue },
});
}
fn show_context_menu(&self,
follow_cursor: bool, has_ctrl: bool, has_shift: bool) -> WinResult<()>
{
let hmenu = match self.0.context_menu {
Some(h) => h,
None => return Ok(()), // no menu, nothing to do
};
let menu_pos = if follow_cursor { // usually when fired by a right-click
let mut menu_pos = GetCursorPos()?; // relative to screen
self.hwnd().ScreenToClient(&mut menu_pos)?; // now relative to list view
let mut lvhti = LVHITTESTINFO::default(); // find item below cursor, if any
lvhti.pt = menu_pos;
match self.items().hit_test(&mut lvhti) {
Some(idx) => { // an item was right-clicked
if !has_ctrl && !has_shift {
if !self.items().is_selected(idx) {
self.items().set_selected_all(false)?;
self.items().set_selected(true, &[idx])?;
}
self.items().set_focused(idx)?;
}
},
None => { // no item was right-clicked
self.items().set_selected_all(false)?;
},
}
self.hwnd().SetFocus(); // because a right-click won't set the focus by itself
menu_pos
} else { // usually fired by the context meny key
let focused_idx_opt = self.items().focused();
if focused_idx_opt.is_some() && self.items().is_visible(focused_idx_opt.unwrap()) {
let focused_idx = focused_idx_opt.unwrap();
let rc_item = self.items().rect(focused_idx, co::LVIR::BOUNDS)?;
POINT::new(rc_item.left + 16,
rc_item.top + (rc_item.bottom - rc_item.top) / 2)
} else { // no item is focused and visible
POINT::new(6, 10) // arbitrary
}
};
hmenu.TrackPopupMenuAtPoint(
menu_pos, self.hwnd().GetParent()?, self.hwnd())
}
}
//------------------------------------------------------------------------------
/// Options to create a [`ListView`](crate::gui::ListView) programmatically with
/// [`ListView::new`](crate::gui::ListView::new).
pub struct ListViewOpts {
/// Control position within parent client area, in pixels, to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Will be adjusted to match current system DPI.
///
/// Defaults to 0 x 0.
pub position: POINT,
/// Control size, in pixels, to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Will be adjusted to match current system DPI.
///
/// Defaults to 50 x 50.
pub size: SIZE,
/// List view styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `LVS::REPORT | LVS::NOSORTHEADER | LVS::SHOWSELALWAYS | LVS::SHAREIMAGELISTS`.
pub list_view_style: co::LVS,
/// Extended list view styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `LVS_EX::NoValue`.
pub list_view_ex_style: co::LVS_EX,
/// Window styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `WS::CHILD | WS::VISIBLE | WS::TABSTOP | WS::GROUP`.
pub window_style: co::WS,
/// Extended window styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `WS_EX::LEFT | WS_EX::CLIENTEDGE`.
pub window_ex_style: co::WS_EX,
/// The control ID.
///
/// Defaults to an auto-generated ID.
pub ctrl_id: u16,
/// Context popup menu.
///
/// This menu is shared: it must be destroyed manually after the control is
/// destroyed. But note that menus loaded from resources don't need to be
/// destroyed.
///
/// Defaults to `None`.
pub context_menu: Option<HMENU>,
/// Text and width of columns to be added right away. The columns only show
/// in report mode.
///
/// Defaults to none.
pub columns: Vec<(String, u32)>,
}
impl Default for ListViewOpts {
fn default() -> Self {
Self {
position: POINT::new(0, 0),
size: SIZE::new(50, 50),
list_view_style: co::LVS::REPORT | co::LVS::NOSORTHEADER | co::LVS::SHOWSELALWAYS | co::LVS::SHAREIMAGELISTS,
list_view_ex_style: co::LVS_EX::NoValue,
window_style: co::WS::CHILD | co::WS::VISIBLE | co::WS::TABSTOP | co::WS::GROUP,
window_ex_style: co::WS_EX::LEFT | co::WS_EX::CLIENTEDGE,
ctrl_id: 0,
context_menu: None,
columns: Vec::default(),
}
}
}
impl ListViewOpts {
fn define_ctrl_id(mut self) -> Self {
if self.ctrl_id == 0 {
self.ctrl_id = auto_ctrl_id();
}
self
}
} | || -> WinResult<()> {
| random_line_split |
list_view.rs | use std::sync::Arc;
use crate::aliases::WinResult;
use crate::co;
use crate::funcs::{GetAsyncKeyState, GetCursorPos, PostQuitMessage};
use crate::gui::base::Base;
use crate::gui::events::ListViewEvents;
use crate::gui::native_controls::list_view_columns::ListViewColumns;
use crate::gui::native_controls::list_view_items::ListViewItems;
use crate::gui::native_controls::base_native_control::{BaseNativeControl, OptsId};
use crate::gui::privs::{auto_ctrl_id, multiply_dpi};
use crate::gui::traits::{baseref_from_parent, Parent};
use crate::handles::{HIMAGELIST, HMENU, HWND};
use crate::msg::lvm;
use crate::structs::{LVHITTESTINFO, NMITEMACTIVATE, NMLVKEYDOWN, POINT, SIZE};
/// Native
/// [list view](https://docs.microsoft.com/en-us/windows/win32/controls/list-view-controls-overview)
/// control. Not to be confused with the simpler [list box](crate::gui::ListBox)
/// control.
///
/// Implements [`Child`](crate::gui::Child) trait.
#[derive(Clone)]
pub struct ListView(Arc<Obj>);
struct Obj { // actual fields of ListView
base: BaseNativeControl,
opts_id: OptsId<ListViewOpts>,
events: ListViewEvents,
columns: ListViewColumns,
items: ListViewItems,
context_menu: Option<HMENU>,
}
impl_send_sync_child!(ListView);
impl ListView {
/// Instantiates a new `ListView` object, to be created on the parent window
/// with [`HWND::CreateWindowEx`](crate::HWND::CreateWindowEx).
pub fn new(parent: &dyn Parent, opts: ListViewOpts) -> ListView {
let parent_base_ref = baseref_from_parent(parent);
let opts = ListViewOpts::define_ctrl_id(opts);
let ctrl_id = opts.ctrl_id;
let context_menu = opts.context_menu;
let new_self = Self(
Arc::new(
Obj {
base: BaseNativeControl::new(parent_base_ref),
opts_id: OptsId::Wnd(opts),
events: ListViewEvents::new(parent_base_ref, ctrl_id),
columns: ListViewColumns::new(),
items: ListViewItems::new(),
context_menu,
},
),
);
new_self.0.columns.set_hwnd_ref(new_self.0.base.hwnd_ref());
new_self.0.items.set_hwnd_ref(new_self.0.base.hwnd_ref());
parent_base_ref.privileged_events_ref().wm(parent_base_ref.creation_wm(), {
let me = new_self.clone();
move |_| { me.create(); 0 }
});
new_self.handled_events(parent_base_ref, ctrl_id);
new_self
}
/// Instantiates a new `ListView` object, to be loaded from a dialog
/// resource with [`HWND::GetDlgItem`](crate::HWND::GetDlgItem).
///
/// **Note:** The optional `context_menu` is shared: it must be destroyed
/// manually after the control is destroyed. But note that menus loaded from
/// resources don't need to be destroyed.
pub fn new_dlg(
parent: &dyn Parent,
ctrl_id: u16,
context_menu: Option<HMENU>) -> ListView
{
let parent_base_ref = baseref_from_parent(parent);
let new_self = Self(
Arc::new(
Obj {
base: BaseNativeControl::new(parent_base_ref),
opts_id: OptsId::Dlg(ctrl_id),
events: ListViewEvents::new(parent_base_ref, ctrl_id),
columns: ListViewColumns::new(),
items: ListViewItems::new(),
context_menu,
},
),
);
new_self.0.columns.set_hwnd_ref(new_self.0.base.hwnd_ref());
new_self.0.items.set_hwnd_ref(new_self.0.base.hwnd_ref());
parent_base_ref.privileged_events_ref().wm_init_dialog({
let me = new_self.clone();
move |_| { me.create(); true }
});
new_self.handled_events(parent_base_ref, ctrl_id);
new_self
}
fn create(&self) {
|| -> WinResult<()> {
match &self.0.opts_id {
OptsId::Wnd(opts) => {
let mut pos = opts.position;
let mut sz = opts.size;
multiply_dpi(Some(&mut pos), Some(&mut sz))?;
self.0.base.create_window( // may panic
"SysListView32", None, pos, sz,
opts.ctrl_id,
opts.window_ex_style,
opts.window_style | opts.list_view_style.into(),
)?;
if opts.list_view_ex_style != co::LVS_EX::NoValue {
self.toggle_extended_style(true, opts.list_view_ex_style);
}
self.columns().add(&opts.columns)?;
Ok(())
},
OptsId::Dlg(ctrl_id) => self.0.base.create_dlg(*ctrl_id).map(|_| ()), // may panic
}
}().unwrap_or_else(|err| PostQuitMessage(err))
}
fn handled_events(&self, parent_base_ref: &Base, ctrl_id: u16) {
parent_base_ref.privileged_events_ref().add_nfy(ctrl_id, co::LVN::KEYDOWN.into(), {
let me = self.clone();
move |p| {
let lvnk = unsafe { p.cast_nmhdr::<NMLVKEYDOWN>() };
let has_ctrl = GetAsyncKeyState(co::VK::CONTROL);
let has_shift = GetAsyncKeyState(co::VK::SHIFT);
if has_ctrl && lvnk.wVKey == co::VK('A' as _) { // Ctrl+A
me.items().set_selected_all(true)
.unwrap_or_else(|err| PostQuitMessage(err));
} else if lvnk.wVKey == co::VK::APPS { // context menu key
me.show_context_menu(false, has_ctrl, has_shift).unwrap();
}
None
}
});
parent_base_ref.privileged_events_ref().add_nfy(ctrl_id, co::NM::RCLICK.into(), {
let me = self.clone();
move |p| {
let nmia = unsafe { p.cast_nmhdr::<NMITEMACTIVATE>() };
let has_ctrl = nmia.uKeyFlags.has(co::LVKF::CONTROL);
let has_shift = nmia.uKeyFlags.has(co::LVKF::SHIFT);
me.show_context_menu(true, has_ctrl, has_shift).unwrap();
None
}
});
}
pub_fn_ctrlid_hwnd_on_onsubclass!(ListViewEvents);
/// Exposes the column methods.
pub fn columns(&self) -> &ListViewColumns {
&self.0.columns
}
/// Returns the context menu attached to this list view, if any.
///
/// The context menu is attached when the list view is created, either by
/// calling [`ListView::new`](crate::gui::ListView::new) or
/// [`ListView::new_dlg`](crate::gui::ListView::new_dlg).
pub fn context_menu(&self) -> Option<HMENU> {
self.0.context_menu
}
/// Retrieves one of the associated image lists by sending an
/// [`LVM_GETIMAGELIST`](crate::msg::lvm::GetImageList) message.
pub fn image_list(&self, kind: co::LVSIL) -> Option<HIMAGELIST> {
self.hwnd().SendMessage(lvm::GetImageList { kind })
}
/// Exposes the item methods.
pub fn items(&self) -> &ListViewItems {
&self.0.items
}
/// Retrieves the current view by sending an
/// [`LVM_GETVIEW`](crate::msg::lvm::GetView) message.
pub fn current_view(&self) -> co::LV_VIEW {
self.hwnd().SendMessage(lvm::GetView {})
}
/// Sets the current view by sending an
/// [`LVM_SETVIEW`](crate::msg::lvm::SetView) message.
pub fn set_current_view(&self, view: co::LV_VIEW) -> WinResult<()> {
self.hwnd().SendMessage(lvm::SetView { view })
}
/// Sets the one of the associated image lists by sending an
/// [`LVM_SETIMAGELIST`](crate::msg::lvm::SetImageList) message.
///
/// Returns the previous image list, if any.
pub fn set_image_list(&self,
kind: co::LVSIL, himagelist: HIMAGELIST) -> Option<HIMAGELIST>
{
self.hwnd().SendMessage(lvm::SetImageList { kind, himagelist })
}
/// Toggles the given extended list view styles by sending an
/// [`LVM_SETEXTENDEDLISTVIEWSTYLE`](crate::msg::lvm::SetExtendedListViewStyle)
/// message.
pub fn | (&self, set: bool, ex_style: co::LVS_EX) {
self.hwnd().SendMessage(lvm::SetExtendedListViewStyle {
mask: ex_style,
style: if set { ex_style } else { co::LVS_EX::NoValue },
});
}
fn show_context_menu(&self,
follow_cursor: bool, has_ctrl: bool, has_shift: bool) -> WinResult<()>
{
let hmenu = match self.0.context_menu {
Some(h) => h,
None => return Ok(()), // no menu, nothing to do
};
let menu_pos = if follow_cursor { // usually when fired by a right-click
let mut menu_pos = GetCursorPos()?; // relative to screen
self.hwnd().ScreenToClient(&mut menu_pos)?; // now relative to list view
let mut lvhti = LVHITTESTINFO::default(); // find item below cursor, if any
lvhti.pt = menu_pos;
match self.items().hit_test(&mut lvhti) {
Some(idx) => { // an item was right-clicked
if !has_ctrl && !has_shift {
if !self.items().is_selected(idx) {
self.items().set_selected_all(false)?;
self.items().set_selected(true, &[idx])?;
}
self.items().set_focused(idx)?;
}
},
None => { // no item was right-clicked
self.items().set_selected_all(false)?;
},
}
self.hwnd().SetFocus(); // because a right-click won't set the focus by itself
menu_pos
} else { // usually fired by the context meny key
let focused_idx_opt = self.items().focused();
if focused_idx_opt.is_some() && self.items().is_visible(focused_idx_opt.unwrap()) {
let focused_idx = focused_idx_opt.unwrap();
let rc_item = self.items().rect(focused_idx, co::LVIR::BOUNDS)?;
POINT::new(rc_item.left + 16,
rc_item.top + (rc_item.bottom - rc_item.top) / 2)
} else { // no item is focused and visible
POINT::new(6, 10) // arbitrary
}
};
hmenu.TrackPopupMenuAtPoint(
menu_pos, self.hwnd().GetParent()?, self.hwnd())
}
}
//------------------------------------------------------------------------------
/// Options to create a [`ListView`](crate::gui::ListView) programmatically with
/// [`ListView::new`](crate::gui::ListView::new).
pub struct ListViewOpts {
/// Control position within parent client area, in pixels, to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Will be adjusted to match current system DPI.
///
/// Defaults to 0 x 0.
pub position: POINT,
/// Control size, in pixels, to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Will be adjusted to match current system DPI.
///
/// Defaults to 50 x 50.
pub size: SIZE,
/// List view styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `LVS::REPORT | LVS::NOSORTHEADER | LVS::SHOWSELALWAYS | LVS::SHAREIMAGELISTS`.
pub list_view_style: co::LVS,
/// Extended list view styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `LVS_EX::NoValue`.
pub list_view_ex_style: co::LVS_EX,
/// Window styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `WS::CHILD | WS::VISIBLE | WS::TABSTOP | WS::GROUP`.
pub window_style: co::WS,
/// Extended window styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `WS_EX::LEFT | WS_EX::CLIENTEDGE`.
pub window_ex_style: co::WS_EX,
/// The control ID.
///
/// Defaults to an auto-generated ID.
pub ctrl_id: u16,
/// Context popup menu.
///
/// This menu is shared: it must be destroyed manually after the control is
/// destroyed. But note that menus loaded from resources don't need to be
/// destroyed.
///
/// Defaults to `None`.
pub context_menu: Option<HMENU>,
/// Text and width of columns to be added right away. The columns only show
/// in report mode.
///
/// Defaults to none.
pub columns: Vec<(String, u32)>,
}
impl Default for ListViewOpts {
fn default() -> Self {
Self {
position: POINT::new(0, 0),
size: SIZE::new(50, 50),
list_view_style: co::LVS::REPORT | co::LVS::NOSORTHEADER | co::LVS::SHOWSELALWAYS | co::LVS::SHAREIMAGELISTS,
list_view_ex_style: co::LVS_EX::NoValue,
window_style: co::WS::CHILD | co::WS::VISIBLE | co::WS::TABSTOP | co::WS::GROUP,
window_ex_style: co::WS_EX::LEFT | co::WS_EX::CLIENTEDGE,
ctrl_id: 0,
context_menu: None,
columns: Vec::default(),
}
}
}
impl ListViewOpts {
fn define_ctrl_id(mut self) -> Self {
if self.ctrl_id == 0 {
self.ctrl_id = auto_ctrl_id();
}
self
}
}
| toggle_extended_style | identifier_name |
list_view.rs | use std::sync::Arc;
use crate::aliases::WinResult;
use crate::co;
use crate::funcs::{GetAsyncKeyState, GetCursorPos, PostQuitMessage};
use crate::gui::base::Base;
use crate::gui::events::ListViewEvents;
use crate::gui::native_controls::list_view_columns::ListViewColumns;
use crate::gui::native_controls::list_view_items::ListViewItems;
use crate::gui::native_controls::base_native_control::{BaseNativeControl, OptsId};
use crate::gui::privs::{auto_ctrl_id, multiply_dpi};
use crate::gui::traits::{baseref_from_parent, Parent};
use crate::handles::{HIMAGELIST, HMENU, HWND};
use crate::msg::lvm;
use crate::structs::{LVHITTESTINFO, NMITEMACTIVATE, NMLVKEYDOWN, POINT, SIZE};
/// Native
/// [list view](https://docs.microsoft.com/en-us/windows/win32/controls/list-view-controls-overview)
/// control. Not to be confused with the simpler [list box](crate::gui::ListBox)
/// control.
///
/// Implements [`Child`](crate::gui::Child) trait.
#[derive(Clone)]
pub struct ListView(Arc<Obj>);
struct Obj { // actual fields of ListView
base: BaseNativeControl,
opts_id: OptsId<ListViewOpts>,
events: ListViewEvents,
columns: ListViewColumns,
items: ListViewItems,
context_menu: Option<HMENU>,
}
impl_send_sync_child!(ListView);
impl ListView {
/// Instantiates a new `ListView` object, to be created on the parent window
/// with [`HWND::CreateWindowEx`](crate::HWND::CreateWindowEx).
pub fn new(parent: &dyn Parent, opts: ListViewOpts) -> ListView {
let parent_base_ref = baseref_from_parent(parent);
let opts = ListViewOpts::define_ctrl_id(opts);
let ctrl_id = opts.ctrl_id;
let context_menu = opts.context_menu;
let new_self = Self(
Arc::new(
Obj {
base: BaseNativeControl::new(parent_base_ref),
opts_id: OptsId::Wnd(opts),
events: ListViewEvents::new(parent_base_ref, ctrl_id),
columns: ListViewColumns::new(),
items: ListViewItems::new(),
context_menu,
},
),
);
new_self.0.columns.set_hwnd_ref(new_self.0.base.hwnd_ref());
new_self.0.items.set_hwnd_ref(new_self.0.base.hwnd_ref());
parent_base_ref.privileged_events_ref().wm(parent_base_ref.creation_wm(), {
let me = new_self.clone();
move |_| { me.create(); 0 }
});
new_self.handled_events(parent_base_ref, ctrl_id);
new_self
}
/// Instantiates a new `ListView` object, to be loaded from a dialog
/// resource with [`HWND::GetDlgItem`](crate::HWND::GetDlgItem).
///
/// **Note:** The optional `context_menu` is shared: it must be destroyed
/// manually after the control is destroyed. But note that menus loaded from
/// resources don't need to be destroyed.
pub fn new_dlg(
parent: &dyn Parent,
ctrl_id: u16,
context_menu: Option<HMENU>) -> ListView
{
let parent_base_ref = baseref_from_parent(parent);
let new_self = Self(
Arc::new(
Obj {
base: BaseNativeControl::new(parent_base_ref),
opts_id: OptsId::Dlg(ctrl_id),
events: ListViewEvents::new(parent_base_ref, ctrl_id),
columns: ListViewColumns::new(),
items: ListViewItems::new(),
context_menu,
},
),
);
new_self.0.columns.set_hwnd_ref(new_self.0.base.hwnd_ref());
new_self.0.items.set_hwnd_ref(new_self.0.base.hwnd_ref());
parent_base_ref.privileged_events_ref().wm_init_dialog({
let me = new_self.clone();
move |_| { me.create(); true }
});
new_self.handled_events(parent_base_ref, ctrl_id);
new_self
}
fn create(&self) {
|| -> WinResult<()> {
match &self.0.opts_id {
OptsId::Wnd(opts) => {
let mut pos = opts.position;
let mut sz = opts.size;
multiply_dpi(Some(&mut pos), Some(&mut sz))?;
self.0.base.create_window( // may panic
"SysListView32", None, pos, sz,
opts.ctrl_id,
opts.window_ex_style,
opts.window_style | opts.list_view_style.into(),
)?;
if opts.list_view_ex_style != co::LVS_EX::NoValue {
self.toggle_extended_style(true, opts.list_view_ex_style);
}
self.columns().add(&opts.columns)?;
Ok(())
},
OptsId::Dlg(ctrl_id) => self.0.base.create_dlg(*ctrl_id).map(|_| ()), // may panic
}
}().unwrap_or_else(|err| PostQuitMessage(err))
}
fn handled_events(&self, parent_base_ref: &Base, ctrl_id: u16) {
parent_base_ref.privileged_events_ref().add_nfy(ctrl_id, co::LVN::KEYDOWN.into(), {
let me = self.clone();
move |p| {
let lvnk = unsafe { p.cast_nmhdr::<NMLVKEYDOWN>() };
let has_ctrl = GetAsyncKeyState(co::VK::CONTROL);
let has_shift = GetAsyncKeyState(co::VK::SHIFT);
if has_ctrl && lvnk.wVKey == co::VK('A' as _) { // Ctrl+A
me.items().set_selected_all(true)
.unwrap_or_else(|err| PostQuitMessage(err));
} else if lvnk.wVKey == co::VK::APPS { // context menu key
me.show_context_menu(false, has_ctrl, has_shift).unwrap();
}
None
}
});
parent_base_ref.privileged_events_ref().add_nfy(ctrl_id, co::NM::RCLICK.into(), {
let me = self.clone();
move |p| {
let nmia = unsafe { p.cast_nmhdr::<NMITEMACTIVATE>() };
let has_ctrl = nmia.uKeyFlags.has(co::LVKF::CONTROL);
let has_shift = nmia.uKeyFlags.has(co::LVKF::SHIFT);
me.show_context_menu(true, has_ctrl, has_shift).unwrap();
None
}
});
}
pub_fn_ctrlid_hwnd_on_onsubclass!(ListViewEvents);
/// Exposes the column methods.
pub fn columns(&self) -> &ListViewColumns {
&self.0.columns
}
/// Returns the context menu attached to this list view, if any.
///
/// The context menu is attached when the list view is created, either by
/// calling [`ListView::new`](crate::gui::ListView::new) or
/// [`ListView::new_dlg`](crate::gui::ListView::new_dlg).
pub fn context_menu(&self) -> Option<HMENU> {
self.0.context_menu
}
/// Retrieves one of the associated image lists by sending an
/// [`LVM_GETIMAGELIST`](crate::msg::lvm::GetImageList) message.
pub fn image_list(&self, kind: co::LVSIL) -> Option<HIMAGELIST> {
self.hwnd().SendMessage(lvm::GetImageList { kind })
}
/// Exposes the item methods.
pub fn items(&self) -> &ListViewItems {
&self.0.items
}
/// Retrieves the current view by sending an
/// [`LVM_GETVIEW`](crate::msg::lvm::GetView) message.
pub fn current_view(&self) -> co::LV_VIEW {
self.hwnd().SendMessage(lvm::GetView {})
}
/// Sets the current view by sending an
/// [`LVM_SETVIEW`](crate::msg::lvm::SetView) message.
pub fn set_current_view(&self, view: co::LV_VIEW) -> WinResult<()> {
self.hwnd().SendMessage(lvm::SetView { view })
}
/// Sets the one of the associated image lists by sending an
/// [`LVM_SETIMAGELIST`](crate::msg::lvm::SetImageList) message.
///
/// Returns the previous image list, if any.
pub fn set_image_list(&self,
kind: co::LVSIL, himagelist: HIMAGELIST) -> Option<HIMAGELIST>
{
self.hwnd().SendMessage(lvm::SetImageList { kind, himagelist })
}
/// Toggles the given extended list view styles by sending an
/// [`LVM_SETEXTENDEDLISTVIEWSTYLE`](crate::msg::lvm::SetExtendedListViewStyle)
/// message.
pub fn toggle_extended_style(&self, set: bool, ex_style: co::LVS_EX) {
self.hwnd().SendMessage(lvm::SetExtendedListViewStyle {
mask: ex_style,
style: if set { ex_style } else { co::LVS_EX::NoValue },
});
}
fn show_context_menu(&self,
follow_cursor: bool, has_ctrl: bool, has_shift: bool) -> WinResult<()>
{
let hmenu = match self.0.context_menu {
Some(h) => h,
None => return Ok(()), // no menu, nothing to do
};
let menu_pos = if follow_cursor { // usually when fired by a right-click
let mut menu_pos = GetCursorPos()?; // relative to screen
self.hwnd().ScreenToClient(&mut menu_pos)?; // now relative to list view
let mut lvhti = LVHITTESTINFO::default(); // find item below cursor, if any
lvhti.pt = menu_pos;
match self.items().hit_test(&mut lvhti) {
Some(idx) => { // an item was right-clicked
if !has_ctrl && !has_shift {
if !self.items().is_selected(idx) {
self.items().set_selected_all(false)?;
self.items().set_selected(true, &[idx])?;
}
self.items().set_focused(idx)?;
}
},
None => { // no item was right-clicked
self.items().set_selected_all(false)?;
},
}
self.hwnd().SetFocus(); // because a right-click won't set the focus by itself
menu_pos
} else { // usually fired by the context meny key
let focused_idx_opt = self.items().focused();
if focused_idx_opt.is_some() && self.items().is_visible(focused_idx_opt.unwrap()) {
let focused_idx = focused_idx_opt.unwrap();
let rc_item = self.items().rect(focused_idx, co::LVIR::BOUNDS)?;
POINT::new(rc_item.left + 16,
rc_item.top + (rc_item.bottom - rc_item.top) / 2)
} else { // no item is focused and visible
POINT::new(6, 10) // arbitrary
}
};
hmenu.TrackPopupMenuAtPoint(
menu_pos, self.hwnd().GetParent()?, self.hwnd())
}
}
//------------------------------------------------------------------------------
/// Options to create a [`ListView`](crate::gui::ListView) programmatically with
/// [`ListView::new`](crate::gui::ListView::new).
pub struct ListViewOpts {
/// Control position within parent client area, in pixels, to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Will be adjusted to match current system DPI.
///
/// Defaults to 0 x 0.
pub position: POINT,
/// Control size, in pixels, to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Will be adjusted to match current system DPI.
///
/// Defaults to 50 x 50.
pub size: SIZE,
/// List view styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `LVS::REPORT | LVS::NOSORTHEADER | LVS::SHOWSELALWAYS | LVS::SHAREIMAGELISTS`.
pub list_view_style: co::LVS,
/// Extended list view styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `LVS_EX::NoValue`.
pub list_view_ex_style: co::LVS_EX,
/// Window styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `WS::CHILD | WS::VISIBLE | WS::TABSTOP | WS::GROUP`.
pub window_style: co::WS,
/// Extended window styles to be
/// [created](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-createwindowexw).
///
/// Defaults to `WS_EX::LEFT | WS_EX::CLIENTEDGE`.
pub window_ex_style: co::WS_EX,
/// The control ID.
///
/// Defaults to an auto-generated ID.
pub ctrl_id: u16,
/// Context popup menu.
///
/// This menu is shared: it must be destroyed manually after the control is
/// destroyed. But note that menus loaded from resources don't need to be
/// destroyed.
///
/// Defaults to `None`.
pub context_menu: Option<HMENU>,
/// Text and width of columns to be added right away. The columns only show
/// in report mode.
///
/// Defaults to none.
pub columns: Vec<(String, u32)>,
}
impl Default for ListViewOpts {
fn default() -> Self {
Self {
position: POINT::new(0, 0),
size: SIZE::new(50, 50),
list_view_style: co::LVS::REPORT | co::LVS::NOSORTHEADER | co::LVS::SHOWSELALWAYS | co::LVS::SHAREIMAGELISTS,
list_view_ex_style: co::LVS_EX::NoValue,
window_style: co::WS::CHILD | co::WS::VISIBLE | co::WS::TABSTOP | co::WS::GROUP,
window_ex_style: co::WS_EX::LEFT | co::WS_EX::CLIENTEDGE,
ctrl_id: 0,
context_menu: None,
columns: Vec::default(),
}
}
}
impl ListViewOpts {
fn define_ctrl_id(mut self) -> Self {
if self.ctrl_id == 0 |
self
}
}
| {
self.ctrl_id = auto_ctrl_id();
} | conditional_block |
main.rs | //
extern crate bio;
extern crate itertools;
use std::collections::HashMap;
use std::cmp;
use std::env;
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use bio::io::fastq;
use bio::alignment::pairwise::*;
use bio::alignment::AlignmentOperation;
// fn check(rec: &fastq::Record, read: &str) -> (u16, Vec<(usize, char, char)>) {
// let mut distance : u16 = 0;
// let qual = rec.qual();
// let mut dif : Vec<(usize, char, char)> = vec![];
// let mut index : usize = 0;
// for (i, j) in String::from_utf8_lossy(rec.seq()).chars().dropping(8).zip(read.chars()) {
// if qual[index] > 63 {
// if i != j {
// dif.push((index, i, j));
// distance += 1;
// }
// }
// else {
// distance += 1;
// }
// index += 1;
// }
// (distance, dif)
// }
fn hamming(seq1: &str, seq2: &str) -> u32 {
let mut score = 0;
for (i, j) in seq1.chars().zip(seq2.chars()) {
if i != j {
score += 1;
}
}
score
}
fn ham_mutations(seq1: &str, seq2: &str) -> (u32, String) {
let mut score = 0;
let mut mutations = "".to_string();
let mut n = 1;
for (i, j) in seq1.chars().zip(seq2.chars()) {
if i != j {
score += 1;
if score == 1 {
mutations = mutations + &format!("{}{}", n, i);
} else {
mutations = mutations + &format!(" {}{}", n, i);
}
}
n += 1;
}
(score, mutations)
}
fn reverse_complement(seq: &str) -> String {
seq.chars()
.map(|t| match t {
'A' => 'T',
'T' => 'A',
'G' => 'C',
'C' => 'G',
_ => 'N',
}).rev().collect::<String>()
}
fn qual_check(a: &[u8], b: &[u8]) -> bool {
for (i, j) in a.iter().zip(b.iter()) {
if i < j {
continue;
}
return false;
}
return true
}
fn data_stat(results: &HashMap<String, (String, Vec<u8>)>, output_file: &str) -> Result<String, Box<Error>> {
// statistics on the datasets
let wt_pac = "AGAGAAGATTTATCTGAAGTCGTTACGCGAG";
let mut diff_counts : [usize; 31] = [0; 31];
let mut diff_freq : [usize; 31] = [0; 31];
let mut output = try!(File::create(output_file));
let mut pac_stat = HashMap::new();
for (_, pac_info) in results {
let ref pac = pac_info.0;
let ref qual = pac_info.1;
// mutation statistics
let mut index = 0;
let mut distance = 0;
for (i, j) in pac.chars().zip(wt_pac.chars()) {
if qual[index] > 63 && i != j {
diff_freq[index] += 1;
distance += 1;
}
index += 1;
}
diff_counts[distance] += 1;
if distance > 8 {
println!("# {} {}", distance, pac);
}
// pac sites statistics
if pac_stat.contains_key(pac) {
*pac_stat.get_mut(pac).unwrap() += 1;
}
else {
pac_stat.insert(pac, 1);
}
}
println!("# Overall statistics:");
for i in 0..31 {
println!("# {}\t{}", i, diff_counts[i]);
}
println!("# Per-base statistics:");
for i in 0..31 {
println!("# {}\t{}", i, diff_freq[i]);
}
//try!(write!(output, "{}", "# pac counts:\n"));
for (pac, counts) in &pac_stat {
try!(write!(output, "{} {} {}\n", pac, hamming(&pac, &wt_pac), counts));
}
Ok("Done".into())
}
fn | () {
let args : Vec<String> = env::args().collect();
let file1 = fastq::Reader::from_file(&args[1]).unwrap();
let file2 = fastq::Reader::from_file(&args[2]).unwrap();
let mut num_records = 0;
let mut num_duplicates = 0;
let mut num_qual_skip = 0;
let mut results : HashMap<String, (String, Vec<u8>)>= HashMap::new();
let wt_read1 = if &args[3] == "M" {b"ACTAAGTGAGATGAATATGGCGGCACCAAAGGGCAACCGATTTTGGGAGGCCCGCAGTAGTCATGGGCGAAATCCTAAATTCGAATCGCCTGAGGCGCTGTGGGCTGCTTGTTGTGAA"}
else {b"AAGTGAGATGAATATGGCGGCACCAAAGGGCAACCGATTTTGGGAGGCCCGCAGTAGTCATGGGCGAAATCCTAAATTCGAATCGCCTGAGGCGCTGTGGGCTGCTTGTTGTGAATAC"};
for (record1, record2) in file1.records().zip(file2.records()) {
// take read1, filter low quality reads
let read1 = record1.unwrap();
let desc = read1.id().unwrap().split(":").skip(5).collect::<Vec<&str>>();
let description = desc[0].to_string() + ":" + desc[1];
let mut trim = 124;
let mut am = " ".to_string();
for i in 0..120 {
if qual_check(&read1.qual()[i .. i+5], &[63, 63, 63, 63, 63]) {
trim = i+1;
println!("# {} {}: Read 1 trimmed at {}.", num_records, description, trim);
break;
}
}
if trim < 18 {
println!("# {}: Useful read too short. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
// check if the read is the right read
let seq1 = String::from_utf8_lossy(&read1.seq()[0 .. trim]);
let score = |a: u8, b: u8| if a == b {1i32} else {-1i32};
let mut aligner = Aligner::with_capacity(seq1.len(), wt_read1.len(), -5, -1, &score);
let alignment = aligner.global(&seq1[8..seq1.len()].as_bytes(), wt_read1);
if alignment.score < (2 * trim as i32 - 133 - 30) {
println!("# {} {}: wrong read 1 skipping", num_records, description);
println!("# {} {}", &seq1[8..seq1.len()], alignment.score);
num_records += 1;
num_qual_skip += 1;
continue;
}
// identifying AM/WT
if &args[3] == "M" {
if trim < 33 {
println!("# {}: Useful read too short for M. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
// Allowing 1 mismatch
if hamming(&seq1[27 .. 32], "GCGGC") < 2 {
match &seq1[32 .. 33] {
"A" => am = "WT".to_string(),
"G" => am = "AM".to_string(),
_ => am = " ".to_string(),
}
println!("# 1 am_codon = {}", &seq1[27 .. 33]);
}
if am == " " {
for i in 0 .. trim-6 {
if &seq1[i .. i+5] == "GCGGC" {
match &seq1[i+5 .. i+6] {
"A" => am = "WT".to_string(),
"G" => am = "AM".to_string(),
_ => am = " ".to_string(),
}
println!("# 2 am_codon = {}", &seq1[i .. i+6]);
break;
}
}
}
}
// average quality filtering
//let avg_qual = read1.qual().iter().fold(0, |a, &b| a as u32 + b as u32);
//if avg_qual < (125 * 30) { // corresponding to an average quality of 20
// println!("# low quality read 1 skipping: {}", avg_qual);
// continue;
//}
// now deal with read2
let read2 = record2.unwrap();
// average quality filtering
//let avg_qual = read2.qual().iter().fold(0, |a, &b| a as u32 + b as u32);
//if avg_qual < 125*30 {
// println!("# {}: low quality read 2 skipping: {}", num_records, avg_qual);
// num_qual_skip += 1;
// continue;
//}
trim = 124;
for i in 0..119 {
if qual_check(&read2.qual()[i .. i+5], &[63, 63, 63, 63, 63]) {
trim = i+1;
println!("# {} {}: Read 2 trimmed at {}.", num_records, description, i);
break;
}
}
if trim < 80 {
println!("# {}: Useful read too short. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
let seq2 =String::from_utf8_lossy(&read2.seq()[0 .. trim]);
// extract barcodes
let bc1 = &seq1[0..8];
let bc2 = &seq2[0..8];
let bc = bc1.to_string() + bc2;
// check the pac sequences
let wt_pac = "AGAGAAGATTTATCTGAAGTCGTTACGCGAG";
let seq2_rc = reverse_complement(&seq2);
let qual : Vec<u8> = read2.qual().iter().cloned().rev().collect();
let mut pac_start = 0;
let mut min_score = 31;
for i in 0 .. trim - 31 {
let score = hamming(&seq2_rc[i .. i+31], &wt_pac);
if score < min_score {
min_score = score;
pac_start = i;
}
}
let pac_end = cmp::min(trim, pac_start+31);
if pac_end - pac_start < 25 {
println!("# {} {}: pac too short ({}).", num_records, description, pac_end-pac_start);
num_records += 1;
num_qual_skip += 1;
continue;
}
let pac = String::from_utf8_lossy(&seq2_rc[pac_start .. pac_end]
.as_bytes()).into_owned();
if min_score > 4 {
let mut aligner = Aligner::with_capacity(wt_pac.len(), seq2.len(), -1, -1, &score);
let alignment = aligner.local(wt_pac.as_bytes(), &seq2_rc.as_bytes());
if alignment.operations.iter().any(|&x| x == AlignmentOperation::Ins || x == AlignmentOperation::Del) {
println!("# {} {}: pac contain indels.", num_records, description);
println!("{}", alignment.pretty(wt_pac.as_bytes(), &seq2_rc.as_bytes() ));
num_records += 1;
num_qual_skip += 1;
continue;
}
}
let pac_qual_avg : f32 = qual[pac_start .. pac_end].iter().cloned().map(|x| x as f32).sum::<f32>() / (pac_end - pac_start) as f32;
if pac_qual_avg < 63.0 || pac.chars().any(|x| x == 'N') {
println!("# {} {}: pac quality too low ({}) or contains N.", num_records, description, pac_qual_avg);
if &args[3] == "M" {
println!("# {} {} {} {}", num_records, bc, pac, am);
} else {
println!("# {} {} {}", num_records, bc, pac);
}
num_records += 1;
num_qual_skip += 1;
continue;
}
if &args[3] == "M" {
let ham_mut = ham_mutations(&pac, &wt_pac);
println!("{},{},{},{},{},{}", num_records, bc, pac, am, ham_mut.0, ham_mut.1);
} else {
println!("{} {} {}", num_records, bc, pac);
}
if results.contains_key(&bc) {
if results[&bc].0 == pac {
println!("# {}: duplicate found", num_records);
num_duplicates += 1;
}
else {
println!("# {}: possible sequencing error? {} {}", num_records, &pac, results[&bc].0);
}
}
else {
results.insert(bc, (pac, qual.clone()));
}
num_records += 1;
}
println!("# {} records processed;", num_records);
println!("# {} low quality reads;", num_qual_skip);
println!("# {} possible duplicates.", num_duplicates);
data_stat(&results, &args[4]);
}
| main | identifier_name |
main.rs | //
extern crate bio;
extern crate itertools;
use std::collections::HashMap;
use std::cmp;
use std::env;
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use bio::io::fastq;
use bio::alignment::pairwise::*;
use bio::alignment::AlignmentOperation;
// fn check(rec: &fastq::Record, read: &str) -> (u16, Vec<(usize, char, char)>) {
// let mut distance : u16 = 0;
// let qual = rec.qual();
// let mut dif : Vec<(usize, char, char)> = vec![];
// let mut index : usize = 0;
// for (i, j) in String::from_utf8_lossy(rec.seq()).chars().dropping(8).zip(read.chars()) {
// if qual[index] > 63 { | // distance += 1;
// }
// }
// else {
// distance += 1;
// }
// index += 1;
// }
// (distance, dif)
// }
fn hamming(seq1: &str, seq2: &str) -> u32 {
let mut score = 0;
for (i, j) in seq1.chars().zip(seq2.chars()) {
if i != j {
score += 1;
}
}
score
}
fn ham_mutations(seq1: &str, seq2: &str) -> (u32, String) {
let mut score = 0;
let mut mutations = "".to_string();
let mut n = 1;
for (i, j) in seq1.chars().zip(seq2.chars()) {
if i != j {
score += 1;
if score == 1 {
mutations = mutations + &format!("{}{}", n, i);
} else {
mutations = mutations + &format!(" {}{}", n, i);
}
}
n += 1;
}
(score, mutations)
}
fn reverse_complement(seq: &str) -> String {
seq.chars()
.map(|t| match t {
'A' => 'T',
'T' => 'A',
'G' => 'C',
'C' => 'G',
_ => 'N',
}).rev().collect::<String>()
}
fn qual_check(a: &[u8], b: &[u8]) -> bool {
for (i, j) in a.iter().zip(b.iter()) {
if i < j {
continue;
}
return false;
}
return true
}
fn data_stat(results: &HashMap<String, (String, Vec<u8>)>, output_file: &str) -> Result<String, Box<Error>> {
// statistics on the datasets
let wt_pac = "AGAGAAGATTTATCTGAAGTCGTTACGCGAG";
let mut diff_counts : [usize; 31] = [0; 31];
let mut diff_freq : [usize; 31] = [0; 31];
let mut output = try!(File::create(output_file));
let mut pac_stat = HashMap::new();
for (_, pac_info) in results {
let ref pac = pac_info.0;
let ref qual = pac_info.1;
// mutation statistics
let mut index = 0;
let mut distance = 0;
for (i, j) in pac.chars().zip(wt_pac.chars()) {
if qual[index] > 63 && i != j {
diff_freq[index] += 1;
distance += 1;
}
index += 1;
}
diff_counts[distance] += 1;
if distance > 8 {
println!("# {} {}", distance, pac);
}
// pac sites statistics
if pac_stat.contains_key(pac) {
*pac_stat.get_mut(pac).unwrap() += 1;
}
else {
pac_stat.insert(pac, 1);
}
}
println!("# Overall statistics:");
for i in 0..31 {
println!("# {}\t{}", i, diff_counts[i]);
}
println!("# Per-base statistics:");
for i in 0..31 {
println!("# {}\t{}", i, diff_freq[i]);
}
//try!(write!(output, "{}", "# pac counts:\n"));
for (pac, counts) in &pac_stat {
try!(write!(output, "{} {} {}\n", pac, hamming(&pac, &wt_pac), counts));
}
Ok("Done".into())
}
fn main() {
let args : Vec<String> = env::args().collect();
let file1 = fastq::Reader::from_file(&args[1]).unwrap();
let file2 = fastq::Reader::from_file(&args[2]).unwrap();
let mut num_records = 0;
let mut num_duplicates = 0;
let mut num_qual_skip = 0;
let mut results : HashMap<String, (String, Vec<u8>)>= HashMap::new();
let wt_read1 = if &args[3] == "M" {b"ACTAAGTGAGATGAATATGGCGGCACCAAAGGGCAACCGATTTTGGGAGGCCCGCAGTAGTCATGGGCGAAATCCTAAATTCGAATCGCCTGAGGCGCTGTGGGCTGCTTGTTGTGAA"}
else {b"AAGTGAGATGAATATGGCGGCACCAAAGGGCAACCGATTTTGGGAGGCCCGCAGTAGTCATGGGCGAAATCCTAAATTCGAATCGCCTGAGGCGCTGTGGGCTGCTTGTTGTGAATAC"};
for (record1, record2) in file1.records().zip(file2.records()) {
// take read1, filter low quality reads
let read1 = record1.unwrap();
let desc = read1.id().unwrap().split(":").skip(5).collect::<Vec<&str>>();
let description = desc[0].to_string() + ":" + desc[1];
let mut trim = 124;
let mut am = " ".to_string();
for i in 0..120 {
if qual_check(&read1.qual()[i .. i+5], &[63, 63, 63, 63, 63]) {
trim = i+1;
println!("# {} {}: Read 1 trimmed at {}.", num_records, description, trim);
break;
}
}
if trim < 18 {
println!("# {}: Useful read too short. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
// check if the read is the right read
let seq1 = String::from_utf8_lossy(&read1.seq()[0 .. trim]);
let score = |a: u8, b: u8| if a == b {1i32} else {-1i32};
let mut aligner = Aligner::with_capacity(seq1.len(), wt_read1.len(), -5, -1, &score);
let alignment = aligner.global(&seq1[8..seq1.len()].as_bytes(), wt_read1);
if alignment.score < (2 * trim as i32 - 133 - 30) {
println!("# {} {}: wrong read 1 skipping", num_records, description);
println!("# {} {}", &seq1[8..seq1.len()], alignment.score);
num_records += 1;
num_qual_skip += 1;
continue;
}
// identifying AM/WT
if &args[3] == "M" {
if trim < 33 {
println!("# {}: Useful read too short for M. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
// Allowing 1 mismatch
if hamming(&seq1[27 .. 32], "GCGGC") < 2 {
match &seq1[32 .. 33] {
"A" => am = "WT".to_string(),
"G" => am = "AM".to_string(),
_ => am = " ".to_string(),
}
println!("# 1 am_codon = {}", &seq1[27 .. 33]);
}
if am == " " {
for i in 0 .. trim-6 {
if &seq1[i .. i+5] == "GCGGC" {
match &seq1[i+5 .. i+6] {
"A" => am = "WT".to_string(),
"G" => am = "AM".to_string(),
_ => am = " ".to_string(),
}
println!("# 2 am_codon = {}", &seq1[i .. i+6]);
break;
}
}
}
}
// average quality filtering
//let avg_qual = read1.qual().iter().fold(0, |a, &b| a as u32 + b as u32);
//if avg_qual < (125 * 30) { // corresponding to an average quality of 20
// println!("# low quality read 1 skipping: {}", avg_qual);
// continue;
//}
// now deal with read2
let read2 = record2.unwrap();
// average quality filtering
//let avg_qual = read2.qual().iter().fold(0, |a, &b| a as u32 + b as u32);
//if avg_qual < 125*30 {
// println!("# {}: low quality read 2 skipping: {}", num_records, avg_qual);
// num_qual_skip += 1;
// continue;
//}
trim = 124;
for i in 0..119 {
if qual_check(&read2.qual()[i .. i+5], &[63, 63, 63, 63, 63]) {
trim = i+1;
println!("# {} {}: Read 2 trimmed at {}.", num_records, description, i);
break;
}
}
if trim < 80 {
println!("# {}: Useful read too short. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
let seq2 =String::from_utf8_lossy(&read2.seq()[0 .. trim]);
// extract barcodes
let bc1 = &seq1[0..8];
let bc2 = &seq2[0..8];
let bc = bc1.to_string() + bc2;
// check the pac sequences
let wt_pac = "AGAGAAGATTTATCTGAAGTCGTTACGCGAG";
let seq2_rc = reverse_complement(&seq2);
let qual : Vec<u8> = read2.qual().iter().cloned().rev().collect();
let mut pac_start = 0;
let mut min_score = 31;
for i in 0 .. trim - 31 {
let score = hamming(&seq2_rc[i .. i+31], &wt_pac);
if score < min_score {
min_score = score;
pac_start = i;
}
}
let pac_end = cmp::min(trim, pac_start+31);
if pac_end - pac_start < 25 {
println!("# {} {}: pac too short ({}).", num_records, description, pac_end-pac_start);
num_records += 1;
num_qual_skip += 1;
continue;
}
let pac = String::from_utf8_lossy(&seq2_rc[pac_start .. pac_end]
.as_bytes()).into_owned();
if min_score > 4 {
let mut aligner = Aligner::with_capacity(wt_pac.len(), seq2.len(), -1, -1, &score);
let alignment = aligner.local(wt_pac.as_bytes(), &seq2_rc.as_bytes());
if alignment.operations.iter().any(|&x| x == AlignmentOperation::Ins || x == AlignmentOperation::Del) {
println!("# {} {}: pac contain indels.", num_records, description);
println!("{}", alignment.pretty(wt_pac.as_bytes(), &seq2_rc.as_bytes() ));
num_records += 1;
num_qual_skip += 1;
continue;
}
}
let pac_qual_avg : f32 = qual[pac_start .. pac_end].iter().cloned().map(|x| x as f32).sum::<f32>() / (pac_end - pac_start) as f32;
if pac_qual_avg < 63.0 || pac.chars().any(|x| x == 'N') {
println!("# {} {}: pac quality too low ({}) or contains N.", num_records, description, pac_qual_avg);
if &args[3] == "M" {
println!("# {} {} {} {}", num_records, bc, pac, am);
} else {
println!("# {} {} {}", num_records, bc, pac);
}
num_records += 1;
num_qual_skip += 1;
continue;
}
if &args[3] == "M" {
let ham_mut = ham_mutations(&pac, &wt_pac);
println!("{},{},{},{},{},{}", num_records, bc, pac, am, ham_mut.0, ham_mut.1);
} else {
println!("{} {} {}", num_records, bc, pac);
}
if results.contains_key(&bc) {
if results[&bc].0 == pac {
println!("# {}: duplicate found", num_records);
num_duplicates += 1;
}
else {
println!("# {}: possible sequencing error? {} {}", num_records, &pac, results[&bc].0);
}
}
else {
results.insert(bc, (pac, qual.clone()));
}
num_records += 1;
}
println!("# {} records processed;", num_records);
println!("# {} low quality reads;", num_qual_skip);
println!("# {} possible duplicates.", num_duplicates);
data_stat(&results, &args[4]);
} | // if i != j {
// dif.push((index, i, j)); | random_line_split |
main.rs | //
extern crate bio;
extern crate itertools;
use std::collections::HashMap;
use std::cmp;
use std::env;
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use bio::io::fastq;
use bio::alignment::pairwise::*;
use bio::alignment::AlignmentOperation;
// fn check(rec: &fastq::Record, read: &str) -> (u16, Vec<(usize, char, char)>) {
// let mut distance : u16 = 0;
// let qual = rec.qual();
// let mut dif : Vec<(usize, char, char)> = vec![];
// let mut index : usize = 0;
// for (i, j) in String::from_utf8_lossy(rec.seq()).chars().dropping(8).zip(read.chars()) {
// if qual[index] > 63 {
// if i != j {
// dif.push((index, i, j));
// distance += 1;
// }
// }
// else {
// distance += 1;
// }
// index += 1;
// }
// (distance, dif)
// }
fn hamming(seq1: &str, seq2: &str) -> u32 |
fn ham_mutations(seq1: &str, seq2: &str) -> (u32, String) {
let mut score = 0;
let mut mutations = "".to_string();
let mut n = 1;
for (i, j) in seq1.chars().zip(seq2.chars()) {
if i != j {
score += 1;
if score == 1 {
mutations = mutations + &format!("{}{}", n, i);
} else {
mutations = mutations + &format!(" {}{}", n, i);
}
}
n += 1;
}
(score, mutations)
}
fn reverse_complement(seq: &str) -> String {
seq.chars()
.map(|t| match t {
'A' => 'T',
'T' => 'A',
'G' => 'C',
'C' => 'G',
_ => 'N',
}).rev().collect::<String>()
}
fn qual_check(a: &[u8], b: &[u8]) -> bool {
for (i, j) in a.iter().zip(b.iter()) {
if i < j {
continue;
}
return false;
}
return true
}
fn data_stat(results: &HashMap<String, (String, Vec<u8>)>, output_file: &str) -> Result<String, Box<Error>> {
// statistics on the datasets
let wt_pac = "AGAGAAGATTTATCTGAAGTCGTTACGCGAG";
let mut diff_counts : [usize; 31] = [0; 31];
let mut diff_freq : [usize; 31] = [0; 31];
let mut output = try!(File::create(output_file));
let mut pac_stat = HashMap::new();
for (_, pac_info) in results {
let ref pac = pac_info.0;
let ref qual = pac_info.1;
// mutation statistics
let mut index = 0;
let mut distance = 0;
for (i, j) in pac.chars().zip(wt_pac.chars()) {
if qual[index] > 63 && i != j {
diff_freq[index] += 1;
distance += 1;
}
index += 1;
}
diff_counts[distance] += 1;
if distance > 8 {
println!("# {} {}", distance, pac);
}
// pac sites statistics
if pac_stat.contains_key(pac) {
*pac_stat.get_mut(pac).unwrap() += 1;
}
else {
pac_stat.insert(pac, 1);
}
}
println!("# Overall statistics:");
for i in 0..31 {
println!("# {}\t{}", i, diff_counts[i]);
}
println!("# Per-base statistics:");
for i in 0..31 {
println!("# {}\t{}", i, diff_freq[i]);
}
//try!(write!(output, "{}", "# pac counts:\n"));
for (pac, counts) in &pac_stat {
try!(write!(output, "{} {} {}\n", pac, hamming(&pac, &wt_pac), counts));
}
Ok("Done".into())
}
fn main() {
let args : Vec<String> = env::args().collect();
let file1 = fastq::Reader::from_file(&args[1]).unwrap();
let file2 = fastq::Reader::from_file(&args[2]).unwrap();
let mut num_records = 0;
let mut num_duplicates = 0;
let mut num_qual_skip = 0;
let mut results : HashMap<String, (String, Vec<u8>)>= HashMap::new();
let wt_read1 = if &args[3] == "M" {b"ACTAAGTGAGATGAATATGGCGGCACCAAAGGGCAACCGATTTTGGGAGGCCCGCAGTAGTCATGGGCGAAATCCTAAATTCGAATCGCCTGAGGCGCTGTGGGCTGCTTGTTGTGAA"}
else {b"AAGTGAGATGAATATGGCGGCACCAAAGGGCAACCGATTTTGGGAGGCCCGCAGTAGTCATGGGCGAAATCCTAAATTCGAATCGCCTGAGGCGCTGTGGGCTGCTTGTTGTGAATAC"};
for (record1, record2) in file1.records().zip(file2.records()) {
// take read1, filter low quality reads
let read1 = record1.unwrap();
let desc = read1.id().unwrap().split(":").skip(5).collect::<Vec<&str>>();
let description = desc[0].to_string() + ":" + desc[1];
let mut trim = 124;
let mut am = " ".to_string();
for i in 0..120 {
if qual_check(&read1.qual()[i .. i+5], &[63, 63, 63, 63, 63]) {
trim = i+1;
println!("# {} {}: Read 1 trimmed at {}.", num_records, description, trim);
break;
}
}
if trim < 18 {
println!("# {}: Useful read too short. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
// check if the read is the right read
let seq1 = String::from_utf8_lossy(&read1.seq()[0 .. trim]);
let score = |a: u8, b: u8| if a == b {1i32} else {-1i32};
let mut aligner = Aligner::with_capacity(seq1.len(), wt_read1.len(), -5, -1, &score);
let alignment = aligner.global(&seq1[8..seq1.len()].as_bytes(), wt_read1);
if alignment.score < (2 * trim as i32 - 133 - 30) {
println!("# {} {}: wrong read 1 skipping", num_records, description);
println!("# {} {}", &seq1[8..seq1.len()], alignment.score);
num_records += 1;
num_qual_skip += 1;
continue;
}
// identifying AM/WT
if &args[3] == "M" {
if trim < 33 {
println!("# {}: Useful read too short for M. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
// Allowing 1 mismatch
if hamming(&seq1[27 .. 32], "GCGGC") < 2 {
match &seq1[32 .. 33] {
"A" => am = "WT".to_string(),
"G" => am = "AM".to_string(),
_ => am = " ".to_string(),
}
println!("# 1 am_codon = {}", &seq1[27 .. 33]);
}
if am == " " {
for i in 0 .. trim-6 {
if &seq1[i .. i+5] == "GCGGC" {
match &seq1[i+5 .. i+6] {
"A" => am = "WT".to_string(),
"G" => am = "AM".to_string(),
_ => am = " ".to_string(),
}
println!("# 2 am_codon = {}", &seq1[i .. i+6]);
break;
}
}
}
}
// average quality filtering
//let avg_qual = read1.qual().iter().fold(0, |a, &b| a as u32 + b as u32);
//if avg_qual < (125 * 30) { // corresponding to an average quality of 20
// println!("# low quality read 1 skipping: {}", avg_qual);
// continue;
//}
// now deal with read2
let read2 = record2.unwrap();
// average quality filtering
//let avg_qual = read2.qual().iter().fold(0, |a, &b| a as u32 + b as u32);
//if avg_qual < 125*30 {
// println!("# {}: low quality read 2 skipping: {}", num_records, avg_qual);
// num_qual_skip += 1;
// continue;
//}
trim = 124;
for i in 0..119 {
if qual_check(&read2.qual()[i .. i+5], &[63, 63, 63, 63, 63]) {
trim = i+1;
println!("# {} {}: Read 2 trimmed at {}.", num_records, description, i);
break;
}
}
if trim < 80 {
println!("# {}: Useful read too short. Skipping. L = {}", num_records, trim);
num_qual_skip += 1;
num_records += 1;
continue;
}
let seq2 =String::from_utf8_lossy(&read2.seq()[0 .. trim]);
// extract barcodes
let bc1 = &seq1[0..8];
let bc2 = &seq2[0..8];
let bc = bc1.to_string() + bc2;
// check the pac sequences
let wt_pac = "AGAGAAGATTTATCTGAAGTCGTTACGCGAG";
let seq2_rc = reverse_complement(&seq2);
let qual : Vec<u8> = read2.qual().iter().cloned().rev().collect();
let mut pac_start = 0;
let mut min_score = 31;
for i in 0 .. trim - 31 {
let score = hamming(&seq2_rc[i .. i+31], &wt_pac);
if score < min_score {
min_score = score;
pac_start = i;
}
}
let pac_end = cmp::min(trim, pac_start+31);
if pac_end - pac_start < 25 {
println!("# {} {}: pac too short ({}).", num_records, description, pac_end-pac_start);
num_records += 1;
num_qual_skip += 1;
continue;
}
let pac = String::from_utf8_lossy(&seq2_rc[pac_start .. pac_end]
.as_bytes()).into_owned();
if min_score > 4 {
let mut aligner = Aligner::with_capacity(wt_pac.len(), seq2.len(), -1, -1, &score);
let alignment = aligner.local(wt_pac.as_bytes(), &seq2_rc.as_bytes());
if alignment.operations.iter().any(|&x| x == AlignmentOperation::Ins || x == AlignmentOperation::Del) {
println!("# {} {}: pac contain indels.", num_records, description);
println!("{}", alignment.pretty(wt_pac.as_bytes(), &seq2_rc.as_bytes() ));
num_records += 1;
num_qual_skip += 1;
continue;
}
}
let pac_qual_avg : f32 = qual[pac_start .. pac_end].iter().cloned().map(|x| x as f32).sum::<f32>() / (pac_end - pac_start) as f32;
if pac_qual_avg < 63.0 || pac.chars().any(|x| x == 'N') {
println!("# {} {}: pac quality too low ({}) or contains N.", num_records, description, pac_qual_avg);
if &args[3] == "M" {
println!("# {} {} {} {}", num_records, bc, pac, am);
} else {
println!("# {} {} {}", num_records, bc, pac);
}
num_records += 1;
num_qual_skip += 1;
continue;
}
if &args[3] == "M" {
let ham_mut = ham_mutations(&pac, &wt_pac);
println!("{},{},{},{},{},{}", num_records, bc, pac, am, ham_mut.0, ham_mut.1);
} else {
println!("{} {} {}", num_records, bc, pac);
}
if results.contains_key(&bc) {
if results[&bc].0 == pac {
println!("# {}: duplicate found", num_records);
num_duplicates += 1;
}
else {
println!("# {}: possible sequencing error? {} {}", num_records, &pac, results[&bc].0);
}
}
else {
results.insert(bc, (pac, qual.clone()));
}
num_records += 1;
}
println!("# {} records processed;", num_records);
println!("# {} low quality reads;", num_qual_skip);
println!("# {} possible duplicates.", num_duplicates);
data_stat(&results, &args[4]);
}
| {
let mut score = 0;
for (i, j) in seq1.chars().zip(seq2.chars()) {
if i != j {
score += 1;
}
}
score
} | identifier_body |
get.go | package configutil
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sync"
homedir "github.com/mitchellh/go-homedir"
"github.com/pkg/errors"
yaml "gopkg.in/yaml.v2"
"github.com/devspace-cloud/devspace/pkg/util/log"
"github.com/devspace-cloud/devspace/pkg/devspace/config/constants"
"github.com/devspace-cloud/devspace/pkg/devspace/config/generated"
"github.com/devspace-cloud/devspace/pkg/devspace/config/versions/latest"
"github.com/devspace-cloud/devspace/pkg/devspace/deploy/helm/merge"
"github.com/devspace-cloud/devspace/pkg/util/yamlutil"
)
// Global config vars
var config *latest.Config // merged config
// Thread-safety helper
var getConfigOnce sync.Once
var getConfigOnceErr error
var getConfigOnceMutex sync.Mutex
// ConfigExists checks whether the yaml file for the config exists or the configs.yaml exists
func ConfigExists() bool {
return configExistsInPath(".")
}
// configExistsInPath checks wheter a devspace configuration exists at a certain path
func configExistsInPath(path string) bool {
// Needed for testing
if config != nil {
return true
}
// Check devspace.yaml
_, err := os.Stat(filepath.Join(path, constants.DefaultConfigPath))
if err == nil {
return true
}
// Check devspace-configs.yaml
_, err = os.Stat(filepath.Join(path, constants.DefaultConfigsPath))
if err == nil {
return true
}
return false // Normal config file found
}
// ResetConfig resets the current config
func ResetConfig() {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce = sync.Once{}
getConfigOnceErr = nil
}
// InitConfig initializes the config objects
func InitConfig() *latest.Config {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce.Do(func() {
config = latest.New().(*latest.Config)
})
return config
}
// ConfigOptions defines options to load the config
type ConfigOptions struct {
Profile string
KubeContext string
LoadedVars map[string]string
Vars []string
}
// Clone clones the config options
func (co *ConfigOptions) Clone() (*ConfigOptions, error) {
out, err := yaml.Marshal(co)
if err != nil {
return nil, err
}
newCo := &ConfigOptions{}
err = yaml.Unmarshal(out, newCo)
if err != nil {
return nil, err
}
return newCo, nil
}
// GetBaseConfig returns the config
func GetBaseConfig(options *ConfigOptions) (*latest.Config, error) {
return loadConfigOnce(options, false)
}
// GetConfig returns the config merged with all potential overwrite files
func GetConfig(options *ConfigOptions) (*latest.Config, error) {
return loadConfigOnce(options, true)
}
// GetRawConfig loads the raw config from a given path
func GetRawConfig(configPath string) (map[interface{}]interface{}, error) {
fileContent, err := ioutil.ReadFile(configPath)
if err != nil {
return nil, err
}
rawMap := map[interface{}]interface{}{}
err = yaml.Unmarshal(fileContent, &rawMap)
if err != nil {
return nil, err
}
return rawMap, nil
}
// GetConfigFromPath loads the config from a given base path
func GetConfigFromPath(generatedConfig *generated.Config, basePath string, options *ConfigOptions, log log.Logger) (*latest.Config, error) {
if options == nil {
options = &ConfigOptions{}
}
configPath := filepath.Join(basePath, constants.DefaultConfigPath)
// Check devspace.yaml
_, err := os.Stat(configPath)
if err != nil {
// Check for legacy devspace-configs.yaml
_, configErr := os.Stat(filepath.Join(basePath, constants.DefaultConfigsPath))
if configErr == nil {
return nil, errors.Errorf("devspace-configs.yaml is not supported anymore in devspace v4. Please use 'profiles' in 'devspace.yaml' instead")
}
return nil, errors.Errorf("Couldn't find '%s': %v", configPath, err)
}
rawMap, err := GetRawConfig(configPath)
if err != nil {
return nil, err
}
loadedConfig, err := ParseConfig(generatedConfig, rawMap, options, log)
if err != nil {
return nil, err
}
// Now we validate the config
err = validate(loadedConfig)
if err != nil {
return nil, err
}
return loadedConfig, nil
}
// loadConfigOnce loads the config globally once
func loadConfigOnce(options *ConfigOptions, allowProfile bool) (*latest.Config, error) {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce.Do(func() {
if options == nil {
options = &ConfigOptions{}
}
// Get generated config
generatedConfig, err := generated.LoadConfig(options.Profile)
if err != nil {
getConfigOnceErr = err
return
}
// Check if we should load a specific config
if allowProfile && generatedConfig.ActiveProfile != "" && options.Profile == "" {
options.Profile = generatedConfig.ActiveProfile
} else if !allowProfile {
options.Profile = ""
}
// Set loaded vars for this
options.LoadedVars = LoadedVars
// Load base config
config, err = GetConfigFromPath(generatedConfig, ".", options, log.GetInstance())
if err != nil {
getConfigOnceErr = err
return
}
// Save generated config
err = generated.SaveConfig(generatedConfig)
if err != nil {
getConfigOnceErr = err
return
}
})
return config, getConfigOnceErr
}
func validate(config *latest.Config) error {
if config.Dev != nil {
if config.Dev.Ports != nil {
for index, port := range config.Dev.Ports {
if port.ImageName == "" && port.LabelSelector == nil {
return errors.Errorf("Error in config: imageName and label selector are nil in port config at index %d", index)
}
if port.PortMappings == nil {
return errors.Errorf("Error in config: portMappings is empty in port config at index %d", index)
}
}
}
if config.Dev.Sync != nil {
for index, sync := range config.Dev.Sync {
if sync.ImageName == "" && sync.LabelSelector == nil {
return errors.Errorf("Error in config: imageName and label selector are nil in sync config at index %d", index)
}
}
}
if config.Dev.Interactive != nil {
for index, imageConf := range config.Dev.Interactive.Images {
if imageConf.Name == "" {
return errors.Errorf("Error in config: Unnamed interactive image config at index %d", index)
}
}
}
}
if config.Commands != nil {
for index, command := range config.Commands {
if command.Name == "" {
return errors.Errorf("commands[%d].name is required", index)
}
if command.Command == "" {
return errors.Errorf("commands[%d].command is required", index)
}
}
}
if config.Hooks != nil {
for index, hookConfig := range config.Hooks {
if hookConfig.Command == "" {
return errors.Errorf("hooks[%d].command is required", index)
}
}
}
if config.Images != nil {
for imageConfigName, imageConf := range config.Images {
if imageConfigName == "" {
return errors.Errorf("images keys cannot be an empty string")
}
if imageConf.Image == "" {
return errors.Errorf("images.%s.image is required", imageConfigName)
}
if imageConf.Build != nil && imageConf.Build.Custom != nil && imageConf.Build.Custom.Command == "" {
return errors.Errorf("images.%s.build.custom.command is required", imageConfigName)
}
if imageConf.Image == "" {
return fmt.Errorf("images.%s.image is required", imageConfigName)
}
}
}
if config.Deployments != nil {
for index, deployConfig := range config.Deployments {
if deployConfig.Name == "" {
return errors.Errorf("deployments[%d].name is required", index)
}
if deployConfig.Helm == nil && deployConfig.Kubectl == nil {
return errors.Errorf("Please specify either helm or kubectl as deployment type in deployment %s", deployConfig.Name)
}
if deployConfig.Helm != nil && (deployConfig.Helm.Chart == nil || deployConfig.Helm.Chart.Name == "") && (deployConfig.Helm.ComponentChart == nil || *deployConfig.Helm.ComponentChart == false) {
return errors.Errorf("deployments[%d].helm.chart and deployments[%d].helm.chart.name or deployments[%d].helm.componentChart is required", index, index, index)
}
if deployConfig.Kubectl != nil && deployConfig.Kubectl.Manifests == nil {
return errors.Errorf("deployments[%d].kubectl.manifests is required", index)
}
if deployConfig.Helm != nil && deployConfig.Helm.ComponentChart != nil && *deployConfig.Helm.ComponentChart == true {
// Load override values from path
overwriteValues := map[interface{}]interface{}{}
if deployConfig.Helm.ValuesFiles != nil {
for _, overridePath := range deployConfig.Helm.ValuesFiles {
overwriteValuesPath, err := filepath.Abs(overridePath)
if err != nil {
return errors.Errorf("deployments[%d].helm.valuesFiles: Error retrieving absolute path from %s: %v", index, overridePath, err)
}
overwriteValuesFromPath := map[interface{}]interface{}{}
err = yamlutil.ReadYamlFromFile(overwriteValuesPath, overwriteValuesFromPath)
if err == nil {
merge.Values(overwriteValues).MergeInto(overwriteValuesFromPath)
}
}
}
// Load override values from data and merge them
if deployConfig.Helm.Values != nil {
merge.Values(overwriteValues).MergeInto(deployConfig.Helm.Values)
}
bytes, err := yaml.Marshal(overwriteValues)
if err != nil {
return errors.Errorf("deployments[%d].helm: Error marshaling overwrite values: %v", index, err)
}
componentValues := &latest.ComponentConfig{}
err = yaml.UnmarshalStrict(bytes, componentValues)
if err != nil {
return errors.Errorf("deployments[%d].helm.componentChart: component values are incorrect: %v", index, err)
}
}
}
}
return nil
}
// SetDevSpaceRoot checks the current directory and all parent directories for a .devspace folder with a config and sets the current working directory accordingly
func SetDevSpaceRoot(log log.Logger) (bool, error) | {
cwd, err := os.Getwd()
if err != nil {
return false, err
}
originalCwd := cwd
homedir, err := homedir.Dir()
if err != nil {
return false, err
}
lastLength := 0
for len(cwd) != lastLength {
if cwd != homedir {
configExists := configExistsInPath(cwd)
if configExists {
// Change working directory
err = os.Chdir(cwd)
if err != nil {
return false, err
}
// Notify user that we are not using the current working directory
if originalCwd != cwd {
log.Infof("Using devspace config in %s", filepath.ToSlash(cwd))
}
return true, nil
}
}
lastLength = len(cwd)
cwd = filepath.Dir(cwd)
}
return false, nil
} | identifier_body | |
get.go | package configutil
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sync"
homedir "github.com/mitchellh/go-homedir"
"github.com/pkg/errors"
yaml "gopkg.in/yaml.v2"
"github.com/devspace-cloud/devspace/pkg/util/log"
"github.com/devspace-cloud/devspace/pkg/devspace/config/constants"
"github.com/devspace-cloud/devspace/pkg/devspace/config/generated"
"github.com/devspace-cloud/devspace/pkg/devspace/config/versions/latest"
"github.com/devspace-cloud/devspace/pkg/devspace/deploy/helm/merge"
"github.com/devspace-cloud/devspace/pkg/util/yamlutil"
)
// Global config vars
var config *latest.Config // merged config
// Thread-safety helper
var getConfigOnce sync.Once
var getConfigOnceErr error
var getConfigOnceMutex sync.Mutex
// ConfigExists checks whether the yaml file for the config exists or the configs.yaml exists
func ConfigExists() bool {
return configExistsInPath(".")
}
// configExistsInPath checks wheter a devspace configuration exists at a certain path
func configExistsInPath(path string) bool {
// Needed for testing
if config != nil {
return true
}
// Check devspace.yaml
_, err := os.Stat(filepath.Join(path, constants.DefaultConfigPath))
if err == nil {
return true
}
// Check devspace-configs.yaml
_, err = os.Stat(filepath.Join(path, constants.DefaultConfigsPath))
if err == nil {
return true
}
return false // Normal config file found
}
// ResetConfig resets the current config
func ResetConfig() {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce = sync.Once{}
getConfigOnceErr = nil
}
// InitConfig initializes the config objects
func InitConfig() *latest.Config {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce.Do(func() {
config = latest.New().(*latest.Config)
})
return config
}
// ConfigOptions defines options to load the config
type ConfigOptions struct {
Profile string
KubeContext string
LoadedVars map[string]string
Vars []string
}
// Clone clones the config options
func (co *ConfigOptions) Clone() (*ConfigOptions, error) {
out, err := yaml.Marshal(co)
if err != nil {
return nil, err
}
newCo := &ConfigOptions{}
err = yaml.Unmarshal(out, newCo)
if err != nil {
return nil, err
}
return newCo, nil
}
// GetBaseConfig returns the config
func GetBaseConfig(options *ConfigOptions) (*latest.Config, error) {
return loadConfigOnce(options, false)
}
// GetConfig returns the config merged with all potential overwrite files
func GetConfig(options *ConfigOptions) (*latest.Config, error) {
return loadConfigOnce(options, true)
}
// GetRawConfig loads the raw config from a given path
func GetRawConfig(configPath string) (map[interface{}]interface{}, error) {
fileContent, err := ioutil.ReadFile(configPath)
if err != nil {
return nil, err
}
rawMap := map[interface{}]interface{}{}
err = yaml.Unmarshal(fileContent, &rawMap)
if err != nil {
return nil, err
}
return rawMap, nil
}
// GetConfigFromPath loads the config from a given base path
func GetConfigFromPath(generatedConfig *generated.Config, basePath string, options *ConfigOptions, log log.Logger) (*latest.Config, error) {
if options == nil {
options = &ConfigOptions{}
}
configPath := filepath.Join(basePath, constants.DefaultConfigPath)
// Check devspace.yaml
_, err := os.Stat(configPath)
if err != nil {
// Check for legacy devspace-configs.yaml
_, configErr := os.Stat(filepath.Join(basePath, constants.DefaultConfigsPath))
if configErr == nil {
return nil, errors.Errorf("devspace-configs.yaml is not supported anymore in devspace v4. Please use 'profiles' in 'devspace.yaml' instead")
}
return nil, errors.Errorf("Couldn't find '%s': %v", configPath, err)
}
rawMap, err := GetRawConfig(configPath)
if err != nil {
return nil, err
}
loadedConfig, err := ParseConfig(generatedConfig, rawMap, options, log)
if err != nil {
return nil, err
}
// Now we validate the config
err = validate(loadedConfig)
if err != nil {
return nil, err
}
return loadedConfig, nil
}
// loadConfigOnce loads the config globally once
func loadConfigOnce(options *ConfigOptions, allowProfile bool) (*latest.Config, error) {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce.Do(func() {
if options == nil {
options = &ConfigOptions{}
}
// Get generated config
generatedConfig, err := generated.LoadConfig(options.Profile)
if err != nil {
getConfigOnceErr = err
return
}
// Check if we should load a specific config
if allowProfile && generatedConfig.ActiveProfile != "" && options.Profile == "" {
options.Profile = generatedConfig.ActiveProfile
} else if !allowProfile {
options.Profile = ""
}
// Set loaded vars for this
options.LoadedVars = LoadedVars
// Load base config
config, err = GetConfigFromPath(generatedConfig, ".", options, log.GetInstance())
if err != nil {
getConfigOnceErr = err
return
}
// Save generated config
err = generated.SaveConfig(generatedConfig)
if err != nil {
getConfigOnceErr = err
return
}
})
return config, getConfigOnceErr
}
func validate(config *latest.Config) error {
if config.Dev != nil {
if config.Dev.Ports != nil {
for index, port := range config.Dev.Ports {
if port.ImageName == "" && port.LabelSelector == nil {
return errors.Errorf("Error in config: imageName and label selector are nil in port config at index %d", index)
}
if port.PortMappings == nil {
return errors.Errorf("Error in config: portMappings is empty in port config at index %d", index)
}
}
}
if config.Dev.Sync != nil {
for index, sync := range config.Dev.Sync {
if sync.ImageName == "" && sync.LabelSelector == nil {
return errors.Errorf("Error in config: imageName and label selector are nil in sync config at index %d", index)
}
}
}
if config.Dev.Interactive != nil {
for index, imageConf := range config.Dev.Interactive.Images {
if imageConf.Name == "" {
return errors.Errorf("Error in config: Unnamed interactive image config at index %d", index)
}
}
}
}
if config.Commands != nil {
for index, command := range config.Commands {
if command.Name == "" {
return errors.Errorf("commands[%d].name is required", index)
}
if command.Command == "" {
return errors.Errorf("commands[%d].command is required", index)
}
}
}
if config.Hooks != nil {
for index, hookConfig := range config.Hooks {
if hookConfig.Command == "" {
return errors.Errorf("hooks[%d].command is required", index)
}
}
}
if config.Images != nil {
for imageConfigName, imageConf := range config.Images {
if imageConfigName == "" {
return errors.Errorf("images keys cannot be an empty string")
}
if imageConf.Image == "" {
return errors.Errorf("images.%s.image is required", imageConfigName)
}
if imageConf.Build != nil && imageConf.Build.Custom != nil && imageConf.Build.Custom.Command == "" {
return errors.Errorf("images.%s.build.custom.command is required", imageConfigName)
}
if imageConf.Image == "" {
return fmt.Errorf("images.%s.image is required", imageConfigName)
}
}
}
if config.Deployments != nil {
for index, deployConfig := range config.Deployments {
if deployConfig.Name == "" {
return errors.Errorf("deployments[%d].name is required", index)
}
if deployConfig.Helm == nil && deployConfig.Kubectl == nil {
return errors.Errorf("Please specify either helm or kubectl as deployment type in deployment %s", deployConfig.Name)
}
if deployConfig.Helm != nil && (deployConfig.Helm.Chart == nil || deployConfig.Helm.Chart.Name == "") && (deployConfig.Helm.ComponentChart == nil || *deployConfig.Helm.ComponentChart == false) {
return errors.Errorf("deployments[%d].helm.chart and deployments[%d].helm.chart.name or deployments[%d].helm.componentChart is required", index, index, index)
}
if deployConfig.Kubectl != nil && deployConfig.Kubectl.Manifests == nil {
return errors.Errorf("deployments[%d].kubectl.manifests is required", index)
}
if deployConfig.Helm != nil && deployConfig.Helm.ComponentChart != nil && *deployConfig.Helm.ComponentChart == true {
// Load override values from path
overwriteValues := map[interface{}]interface{}{}
if deployConfig.Helm.ValuesFiles != nil {
for _, overridePath := range deployConfig.Helm.ValuesFiles {
overwriteValuesPath, err := filepath.Abs(overridePath)
if err != nil {
return errors.Errorf("deployments[%d].helm.valuesFiles: Error retrieving absolute path from %s: %v", index, overridePath, err)
}
overwriteValuesFromPath := map[interface{}]interface{}{}
err = yamlutil.ReadYamlFromFile(overwriteValuesPath, overwriteValuesFromPath)
if err == nil {
merge.Values(overwriteValues).MergeInto(overwriteValuesFromPath)
}
}
}
// Load override values from data and merge them
if deployConfig.Helm.Values != nil {
merge.Values(overwriteValues).MergeInto(deployConfig.Helm.Values)
}
bytes, err := yaml.Marshal(overwriteValues)
if err != nil {
return errors.Errorf("deployments[%d].helm: Error marshaling overwrite values: %v", index, err)
}
componentValues := &latest.ComponentConfig{}
err = yaml.UnmarshalStrict(bytes, componentValues)
if err != nil {
return errors.Errorf("deployments[%d].helm.componentChart: component values are incorrect: %v", index, err)
}
}
}
}
return nil
}
// SetDevSpaceRoot checks the current directory and all parent directories for a .devspace folder with a config and sets the current working directory accordingly
func SetDevSpaceRoot(log log.Logger) (bool, error) {
cwd, err := os.Getwd()
if err != nil {
return false, err
}
originalCwd := cwd
homedir, err := homedir.Dir()
if err != nil {
return false, err
}
lastLength := 0
for len(cwd) != lastLength {
if cwd != homedir |
lastLength = len(cwd)
cwd = filepath.Dir(cwd)
}
return false, nil
}
| {
configExists := configExistsInPath(cwd)
if configExists {
// Change working directory
err = os.Chdir(cwd)
if err != nil {
return false, err
}
// Notify user that we are not using the current working directory
if originalCwd != cwd {
log.Infof("Using devspace config in %s", filepath.ToSlash(cwd))
}
return true, nil
}
} | conditional_block |
get.go | package configutil
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sync"
homedir "github.com/mitchellh/go-homedir"
"github.com/pkg/errors"
yaml "gopkg.in/yaml.v2"
"github.com/devspace-cloud/devspace/pkg/util/log"
"github.com/devspace-cloud/devspace/pkg/devspace/config/constants"
"github.com/devspace-cloud/devspace/pkg/devspace/config/generated"
"github.com/devspace-cloud/devspace/pkg/devspace/config/versions/latest"
"github.com/devspace-cloud/devspace/pkg/devspace/deploy/helm/merge"
"github.com/devspace-cloud/devspace/pkg/util/yamlutil"
)
// Global config vars
var config *latest.Config // merged config
// Thread-safety helper
var getConfigOnce sync.Once
var getConfigOnceErr error
var getConfigOnceMutex sync.Mutex
// ConfigExists checks whether the yaml file for the config exists or the configs.yaml exists
func ConfigExists() bool {
return configExistsInPath(".")
}
// configExistsInPath checks wheter a devspace configuration exists at a certain path
func configExistsInPath(path string) bool {
// Needed for testing
if config != nil {
return true
}
// Check devspace.yaml
_, err := os.Stat(filepath.Join(path, constants.DefaultConfigPath))
if err == nil {
return true
}
// Check devspace-configs.yaml
_, err = os.Stat(filepath.Join(path, constants.DefaultConfigsPath))
if err == nil {
return true
}
return false // Normal config file found
}
// ResetConfig resets the current config
func ResetConfig() {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce = sync.Once{}
getConfigOnceErr = nil
}
// InitConfig initializes the config objects
func InitConfig() *latest.Config {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce.Do(func() {
config = latest.New().(*latest.Config)
})
return config
}
// ConfigOptions defines options to load the config
type ConfigOptions struct {
Profile string
KubeContext string
LoadedVars map[string]string
Vars []string
}
// Clone clones the config options
func (co *ConfigOptions) Clone() (*ConfigOptions, error) {
out, err := yaml.Marshal(co)
if err != nil {
return nil, err
}
newCo := &ConfigOptions{}
err = yaml.Unmarshal(out, newCo)
if err != nil {
return nil, err
}
return newCo, nil
}
// GetBaseConfig returns the config
func GetBaseConfig(options *ConfigOptions) (*latest.Config, error) {
return loadConfigOnce(options, false)
}
// GetConfig returns the config merged with all potential overwrite files
func GetConfig(options *ConfigOptions) (*latest.Config, error) {
return loadConfigOnce(options, true)
}
// GetRawConfig loads the raw config from a given path
func GetRawConfig(configPath string) (map[interface{}]interface{}, error) {
fileContent, err := ioutil.ReadFile(configPath)
if err != nil {
return nil, err
}
rawMap := map[interface{}]interface{}{}
err = yaml.Unmarshal(fileContent, &rawMap)
if err != nil {
return nil, err
}
return rawMap, nil
}
// GetConfigFromPath loads the config from a given base path
func | (generatedConfig *generated.Config, basePath string, options *ConfigOptions, log log.Logger) (*latest.Config, error) {
if options == nil {
options = &ConfigOptions{}
}
configPath := filepath.Join(basePath, constants.DefaultConfigPath)
// Check devspace.yaml
_, err := os.Stat(configPath)
if err != nil {
// Check for legacy devspace-configs.yaml
_, configErr := os.Stat(filepath.Join(basePath, constants.DefaultConfigsPath))
if configErr == nil {
return nil, errors.Errorf("devspace-configs.yaml is not supported anymore in devspace v4. Please use 'profiles' in 'devspace.yaml' instead")
}
return nil, errors.Errorf("Couldn't find '%s': %v", configPath, err)
}
rawMap, err := GetRawConfig(configPath)
if err != nil {
return nil, err
}
loadedConfig, err := ParseConfig(generatedConfig, rawMap, options, log)
if err != nil {
return nil, err
}
// Now we validate the config
err = validate(loadedConfig)
if err != nil {
return nil, err
}
return loadedConfig, nil
}
// loadConfigOnce loads the config globally once
func loadConfigOnce(options *ConfigOptions, allowProfile bool) (*latest.Config, error) {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce.Do(func() {
if options == nil {
options = &ConfigOptions{}
}
// Get generated config
generatedConfig, err := generated.LoadConfig(options.Profile)
if err != nil {
getConfigOnceErr = err
return
}
// Check if we should load a specific config
if allowProfile && generatedConfig.ActiveProfile != "" && options.Profile == "" {
options.Profile = generatedConfig.ActiveProfile
} else if !allowProfile {
options.Profile = ""
}
// Set loaded vars for this
options.LoadedVars = LoadedVars
// Load base config
config, err = GetConfigFromPath(generatedConfig, ".", options, log.GetInstance())
if err != nil {
getConfigOnceErr = err
return
}
// Save generated config
err = generated.SaveConfig(generatedConfig)
if err != nil {
getConfigOnceErr = err
return
}
})
return config, getConfigOnceErr
}
func validate(config *latest.Config) error {
if config.Dev != nil {
if config.Dev.Ports != nil {
for index, port := range config.Dev.Ports {
if port.ImageName == "" && port.LabelSelector == nil {
return errors.Errorf("Error in config: imageName and label selector are nil in port config at index %d", index)
}
if port.PortMappings == nil {
return errors.Errorf("Error in config: portMappings is empty in port config at index %d", index)
}
}
}
if config.Dev.Sync != nil {
for index, sync := range config.Dev.Sync {
if sync.ImageName == "" && sync.LabelSelector == nil {
return errors.Errorf("Error in config: imageName and label selector are nil in sync config at index %d", index)
}
}
}
if config.Dev.Interactive != nil {
for index, imageConf := range config.Dev.Interactive.Images {
if imageConf.Name == "" {
return errors.Errorf("Error in config: Unnamed interactive image config at index %d", index)
}
}
}
}
if config.Commands != nil {
for index, command := range config.Commands {
if command.Name == "" {
return errors.Errorf("commands[%d].name is required", index)
}
if command.Command == "" {
return errors.Errorf("commands[%d].command is required", index)
}
}
}
if config.Hooks != nil {
for index, hookConfig := range config.Hooks {
if hookConfig.Command == "" {
return errors.Errorf("hooks[%d].command is required", index)
}
}
}
if config.Images != nil {
for imageConfigName, imageConf := range config.Images {
if imageConfigName == "" {
return errors.Errorf("images keys cannot be an empty string")
}
if imageConf.Image == "" {
return errors.Errorf("images.%s.image is required", imageConfigName)
}
if imageConf.Build != nil && imageConf.Build.Custom != nil && imageConf.Build.Custom.Command == "" {
return errors.Errorf("images.%s.build.custom.command is required", imageConfigName)
}
if imageConf.Image == "" {
return fmt.Errorf("images.%s.image is required", imageConfigName)
}
}
}
if config.Deployments != nil {
for index, deployConfig := range config.Deployments {
if deployConfig.Name == "" {
return errors.Errorf("deployments[%d].name is required", index)
}
if deployConfig.Helm == nil && deployConfig.Kubectl == nil {
return errors.Errorf("Please specify either helm or kubectl as deployment type in deployment %s", deployConfig.Name)
}
if deployConfig.Helm != nil && (deployConfig.Helm.Chart == nil || deployConfig.Helm.Chart.Name == "") && (deployConfig.Helm.ComponentChart == nil || *deployConfig.Helm.ComponentChart == false) {
return errors.Errorf("deployments[%d].helm.chart and deployments[%d].helm.chart.name or deployments[%d].helm.componentChart is required", index, index, index)
}
if deployConfig.Kubectl != nil && deployConfig.Kubectl.Manifests == nil {
return errors.Errorf("deployments[%d].kubectl.manifests is required", index)
}
if deployConfig.Helm != nil && deployConfig.Helm.ComponentChart != nil && *deployConfig.Helm.ComponentChart == true {
// Load override values from path
overwriteValues := map[interface{}]interface{}{}
if deployConfig.Helm.ValuesFiles != nil {
for _, overridePath := range deployConfig.Helm.ValuesFiles {
overwriteValuesPath, err := filepath.Abs(overridePath)
if err != nil {
return errors.Errorf("deployments[%d].helm.valuesFiles: Error retrieving absolute path from %s: %v", index, overridePath, err)
}
overwriteValuesFromPath := map[interface{}]interface{}{}
err = yamlutil.ReadYamlFromFile(overwriteValuesPath, overwriteValuesFromPath)
if err == nil {
merge.Values(overwriteValues).MergeInto(overwriteValuesFromPath)
}
}
}
// Load override values from data and merge them
if deployConfig.Helm.Values != nil {
merge.Values(overwriteValues).MergeInto(deployConfig.Helm.Values)
}
bytes, err := yaml.Marshal(overwriteValues)
if err != nil {
return errors.Errorf("deployments[%d].helm: Error marshaling overwrite values: %v", index, err)
}
componentValues := &latest.ComponentConfig{}
err = yaml.UnmarshalStrict(bytes, componentValues)
if err != nil {
return errors.Errorf("deployments[%d].helm.componentChart: component values are incorrect: %v", index, err)
}
}
}
}
return nil
}
// SetDevSpaceRoot checks the current directory and all parent directories for a .devspace folder with a config and sets the current working directory accordingly
func SetDevSpaceRoot(log log.Logger) (bool, error) {
cwd, err := os.Getwd()
if err != nil {
return false, err
}
originalCwd := cwd
homedir, err := homedir.Dir()
if err != nil {
return false, err
}
lastLength := 0
for len(cwd) != lastLength {
if cwd != homedir {
configExists := configExistsInPath(cwd)
if configExists {
// Change working directory
err = os.Chdir(cwd)
if err != nil {
return false, err
}
// Notify user that we are not using the current working directory
if originalCwd != cwd {
log.Infof("Using devspace config in %s", filepath.ToSlash(cwd))
}
return true, nil
}
}
lastLength = len(cwd)
cwd = filepath.Dir(cwd)
}
return false, nil
}
| GetConfigFromPath | identifier_name |
get.go | package configutil
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"sync"
homedir "github.com/mitchellh/go-homedir"
"github.com/pkg/errors"
yaml "gopkg.in/yaml.v2"
"github.com/devspace-cloud/devspace/pkg/util/log"
"github.com/devspace-cloud/devspace/pkg/devspace/config/constants"
"github.com/devspace-cloud/devspace/pkg/devspace/config/generated"
"github.com/devspace-cloud/devspace/pkg/devspace/config/versions/latest"
"github.com/devspace-cloud/devspace/pkg/devspace/deploy/helm/merge"
"github.com/devspace-cloud/devspace/pkg/util/yamlutil"
)
// Global config vars
var config *latest.Config // merged config
// Thread-safety helper
var getConfigOnce sync.Once
var getConfigOnceErr error
var getConfigOnceMutex sync.Mutex
// ConfigExists checks whether the yaml file for the config exists or the configs.yaml exists
func ConfigExists() bool {
return configExistsInPath(".")
}
// configExistsInPath checks wheter a devspace configuration exists at a certain path
func configExistsInPath(path string) bool {
// Needed for testing
if config != nil {
return true
}
// Check devspace.yaml
_, err := os.Stat(filepath.Join(path, constants.DefaultConfigPath))
if err == nil {
return true
}
// Check devspace-configs.yaml
_, err = os.Stat(filepath.Join(path, constants.DefaultConfigsPath))
if err == nil {
return true
}
return false // Normal config file found
}
// ResetConfig resets the current config
func ResetConfig() {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce = sync.Once{}
getConfigOnceErr = nil
}
// InitConfig initializes the config objects
func InitConfig() *latest.Config {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce.Do(func() {
config = latest.New().(*latest.Config)
})
return config
}
// ConfigOptions defines options to load the config
type ConfigOptions struct {
Profile string
KubeContext string
LoadedVars map[string]string
Vars []string
}
// Clone clones the config options
func (co *ConfigOptions) Clone() (*ConfigOptions, error) {
out, err := yaml.Marshal(co)
if err != nil {
return nil, err
}
newCo := &ConfigOptions{}
err = yaml.Unmarshal(out, newCo)
if err != nil {
return nil, err
}
return newCo, nil
}
// GetBaseConfig returns the config
func GetBaseConfig(options *ConfigOptions) (*latest.Config, error) {
return loadConfigOnce(options, false)
}
// GetConfig returns the config merged with all potential overwrite files
func GetConfig(options *ConfigOptions) (*latest.Config, error) {
return loadConfigOnce(options, true)
}
// GetRawConfig loads the raw config from a given path | if err != nil {
return nil, err
}
rawMap := map[interface{}]interface{}{}
err = yaml.Unmarshal(fileContent, &rawMap)
if err != nil {
return nil, err
}
return rawMap, nil
}
// GetConfigFromPath loads the config from a given base path
func GetConfigFromPath(generatedConfig *generated.Config, basePath string, options *ConfigOptions, log log.Logger) (*latest.Config, error) {
if options == nil {
options = &ConfigOptions{}
}
configPath := filepath.Join(basePath, constants.DefaultConfigPath)
// Check devspace.yaml
_, err := os.Stat(configPath)
if err != nil {
// Check for legacy devspace-configs.yaml
_, configErr := os.Stat(filepath.Join(basePath, constants.DefaultConfigsPath))
if configErr == nil {
return nil, errors.Errorf("devspace-configs.yaml is not supported anymore in devspace v4. Please use 'profiles' in 'devspace.yaml' instead")
}
return nil, errors.Errorf("Couldn't find '%s': %v", configPath, err)
}
rawMap, err := GetRawConfig(configPath)
if err != nil {
return nil, err
}
loadedConfig, err := ParseConfig(generatedConfig, rawMap, options, log)
if err != nil {
return nil, err
}
// Now we validate the config
err = validate(loadedConfig)
if err != nil {
return nil, err
}
return loadedConfig, nil
}
// loadConfigOnce loads the config globally once
func loadConfigOnce(options *ConfigOptions, allowProfile bool) (*latest.Config, error) {
getConfigOnceMutex.Lock()
defer getConfigOnceMutex.Unlock()
getConfigOnce.Do(func() {
if options == nil {
options = &ConfigOptions{}
}
// Get generated config
generatedConfig, err := generated.LoadConfig(options.Profile)
if err != nil {
getConfigOnceErr = err
return
}
// Check if we should load a specific config
if allowProfile && generatedConfig.ActiveProfile != "" && options.Profile == "" {
options.Profile = generatedConfig.ActiveProfile
} else if !allowProfile {
options.Profile = ""
}
// Set loaded vars for this
options.LoadedVars = LoadedVars
// Load base config
config, err = GetConfigFromPath(generatedConfig, ".", options, log.GetInstance())
if err != nil {
getConfigOnceErr = err
return
}
// Save generated config
err = generated.SaveConfig(generatedConfig)
if err != nil {
getConfigOnceErr = err
return
}
})
return config, getConfigOnceErr
}
func validate(config *latest.Config) error {
if config.Dev != nil {
if config.Dev.Ports != nil {
for index, port := range config.Dev.Ports {
if port.ImageName == "" && port.LabelSelector == nil {
return errors.Errorf("Error in config: imageName and label selector are nil in port config at index %d", index)
}
if port.PortMappings == nil {
return errors.Errorf("Error in config: portMappings is empty in port config at index %d", index)
}
}
}
if config.Dev.Sync != nil {
for index, sync := range config.Dev.Sync {
if sync.ImageName == "" && sync.LabelSelector == nil {
return errors.Errorf("Error in config: imageName and label selector are nil in sync config at index %d", index)
}
}
}
if config.Dev.Interactive != nil {
for index, imageConf := range config.Dev.Interactive.Images {
if imageConf.Name == "" {
return errors.Errorf("Error in config: Unnamed interactive image config at index %d", index)
}
}
}
}
if config.Commands != nil {
for index, command := range config.Commands {
if command.Name == "" {
return errors.Errorf("commands[%d].name is required", index)
}
if command.Command == "" {
return errors.Errorf("commands[%d].command is required", index)
}
}
}
if config.Hooks != nil {
for index, hookConfig := range config.Hooks {
if hookConfig.Command == "" {
return errors.Errorf("hooks[%d].command is required", index)
}
}
}
if config.Images != nil {
for imageConfigName, imageConf := range config.Images {
if imageConfigName == "" {
return errors.Errorf("images keys cannot be an empty string")
}
if imageConf.Image == "" {
return errors.Errorf("images.%s.image is required", imageConfigName)
}
if imageConf.Build != nil && imageConf.Build.Custom != nil && imageConf.Build.Custom.Command == "" {
return errors.Errorf("images.%s.build.custom.command is required", imageConfigName)
}
if imageConf.Image == "" {
return fmt.Errorf("images.%s.image is required", imageConfigName)
}
}
}
if config.Deployments != nil {
for index, deployConfig := range config.Deployments {
if deployConfig.Name == "" {
return errors.Errorf("deployments[%d].name is required", index)
}
if deployConfig.Helm == nil && deployConfig.Kubectl == nil {
return errors.Errorf("Please specify either helm or kubectl as deployment type in deployment %s", deployConfig.Name)
}
if deployConfig.Helm != nil && (deployConfig.Helm.Chart == nil || deployConfig.Helm.Chart.Name == "") && (deployConfig.Helm.ComponentChart == nil || *deployConfig.Helm.ComponentChart == false) {
return errors.Errorf("deployments[%d].helm.chart and deployments[%d].helm.chart.name or deployments[%d].helm.componentChart is required", index, index, index)
}
if deployConfig.Kubectl != nil && deployConfig.Kubectl.Manifests == nil {
return errors.Errorf("deployments[%d].kubectl.manifests is required", index)
}
if deployConfig.Helm != nil && deployConfig.Helm.ComponentChart != nil && *deployConfig.Helm.ComponentChart == true {
// Load override values from path
overwriteValues := map[interface{}]interface{}{}
if deployConfig.Helm.ValuesFiles != nil {
for _, overridePath := range deployConfig.Helm.ValuesFiles {
overwriteValuesPath, err := filepath.Abs(overridePath)
if err != nil {
return errors.Errorf("deployments[%d].helm.valuesFiles: Error retrieving absolute path from %s: %v", index, overridePath, err)
}
overwriteValuesFromPath := map[interface{}]interface{}{}
err = yamlutil.ReadYamlFromFile(overwriteValuesPath, overwriteValuesFromPath)
if err == nil {
merge.Values(overwriteValues).MergeInto(overwriteValuesFromPath)
}
}
}
// Load override values from data and merge them
if deployConfig.Helm.Values != nil {
merge.Values(overwriteValues).MergeInto(deployConfig.Helm.Values)
}
bytes, err := yaml.Marshal(overwriteValues)
if err != nil {
return errors.Errorf("deployments[%d].helm: Error marshaling overwrite values: %v", index, err)
}
componentValues := &latest.ComponentConfig{}
err = yaml.UnmarshalStrict(bytes, componentValues)
if err != nil {
return errors.Errorf("deployments[%d].helm.componentChart: component values are incorrect: %v", index, err)
}
}
}
}
return nil
}
// SetDevSpaceRoot checks the current directory and all parent directories for a .devspace folder with a config and sets the current working directory accordingly
func SetDevSpaceRoot(log log.Logger) (bool, error) {
cwd, err := os.Getwd()
if err != nil {
return false, err
}
originalCwd := cwd
homedir, err := homedir.Dir()
if err != nil {
return false, err
}
lastLength := 0
for len(cwd) != lastLength {
if cwd != homedir {
configExists := configExistsInPath(cwd)
if configExists {
// Change working directory
err = os.Chdir(cwd)
if err != nil {
return false, err
}
// Notify user that we are not using the current working directory
if originalCwd != cwd {
log.Infof("Using devspace config in %s", filepath.ToSlash(cwd))
}
return true, nil
}
}
lastLength = len(cwd)
cwd = filepath.Dir(cwd)
}
return false, nil
} | func GetRawConfig(configPath string) (map[interface{}]interface{}, error) {
fileContent, err := ioutil.ReadFile(configPath) | random_line_split |
charm.py | #!/usr/bin/env python3
# Copyright 2021 Canonical
# See LICENSE file for licensing details.
""" Module defining the Charmed operator for the FINOS Legend SDLC Server. """
import logging
import yaml
from charms.finos_legend_libs.v0 import legend_operator_base
from ops import charm, main, model
logger = logging.getLogger(__name__)
SDLC_SERVICE_NAME = "sdlc"
SDLC_CONTAINER_NAME = "sdlc"
LEGEND_DB_RELATION_NAME = "legend-db"
LEGEND_GITLAB_RELATION_NAME = "legend-sdlc-gitlab"
LEGEND_STUDIO_RELATION_NAME = "legend-sdlc"
SDLC_SERVICE_URL_FORMAT = "%(schema)s://%(host)s:%(port)s%(path)s"
SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH = "/sdlc-config.yaml"
SDLC_MAIN_GITLAB_REDIRECT_URL = "%(base_url)s/auth/callback"
SDLC_GITLAB_REDIRECT_URI_FORMATS = [
SDLC_MAIN_GITLAB_REDIRECT_URL,
"%(base_url)s/pac4j/login/callback",
]
TRUSTSTORE_PASSPHRASE = "Legend SDLC"
TRUSTSTORE_CONTAINER_LOCAL_PATH = "/truststore.jks"
APPLICATION_CONNECTOR_PORT_HTTP = 7070
APPLICATION_ADMIN_CONNECTOR_PORT_HTTP = 7076
APPLICATION_ROOT_PATH = "/api"
APPLICATION_LOGGING_FORMAT = "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%thread] %c - %m%n"
GITLAB_PROJECT_VISIBILITY_PUBLIC = "public"
GITLAB_PROJECT_VISIBILITY_PRIVATE = "private"
GITLAB_REQUIRED_SCOPES = ["openid", "profile", "api"]
class LegendSDLCServerCharm(legend_operator_base.BaseFinosLegendCoreServiceCharm):
"""Charmed operator for the FINOS Legend SDLC Server."""
def __init__(self, *args):
super().__init__(*args)
# Studio relation events:
self.framework.observe(
self.on[LEGEND_STUDIO_RELATION_NAME].relation_joined, self._on_studio_relation_joined
)
self.framework.observe(
self.on[LEGEND_STUDIO_RELATION_NAME].relation_changed, self._on_studio_relation_changed
)
@classmethod
def _get_application_connector_port(cls):
return APPLICATION_CONNECTOR_PORT_HTTP
@classmethod
def _get_workload_container_name(cls):
return SDLC_CONTAINER_NAME
@classmethod
def _get_workload_service_names(cls):
return [SDLC_SERVICE_NAME]
@classmethod
def _get_workload_pebble_layers(cls):
return {
"sdlc": {
"summary": "SDLC layer.",
"description": "Pebble config layer for FINOS Legend SDLC.",
"services": {
"sdlc": {
"override": "replace",
"summary": "sdlc",
"command": (
# NOTE(aznashwan): starting through bash is needed
# for the classpath glob (-cp ...) to be expanded:
"/bin/sh -c 'java -XX:+ExitOnOutOfMemoryError "
"-XX:MaxRAMPercentage=60 -Xss4M -cp /app/bin/*.jar"
" -Dfile.encoding=UTF8 "
'-Djavax.net.ssl.trustStore="%s" '
'-Djavax.net.ssl.trustStorePassword="%s" '
"org.finos.legend.sdlc.server.LegendSDLCServer "
'server "%s"\''
% (
TRUSTSTORE_CONTAINER_LOCAL_PATH,
TRUSTSTORE_PASSPHRASE,
SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH,
)
),
# NOTE(aznashwan): considering the SDLC service expects
# a singular config file which already contains all
# relevant options in it (some of which will require
# the relation with DB/Gitlab to have already been
# established), we do not auto-start:
"startup": "disabled",
# TODO(aznashwan): determine any env vars we could pass
# (most notably, things like the RAM percentage etc...)
"environment": {},
}
},
}
}
def _get_jks_truststore_preferences(self):
jks_prefs = {
"truststore_path": TRUSTSTORE_CONTAINER_LOCAL_PATH,
"truststore_passphrase": TRUSTSTORE_PASSPHRASE,
"trusted_certificates": {},
}
cert = self._get_legend_gitlab_certificate()
if cert:
# NOTE(aznashwan): cert label 'gitlab-sdlc' is arbitrary:
jks_prefs["trusted_certificates"]["gitlab-sdlc"] = cert
return jks_prefs
@classmethod
def _get_legend_gitlab_relation_name(cls):
return LEGEND_GITLAB_RELATION_NAME
@classmethod
def _get_legend_db_relation_name(cls):
return LEGEND_DB_RELATION_NAME
def _get_sdlc_service_url(self):
ip_address = legend_operator_base.get_ip_address()
return SDLC_SERVICE_URL_FORMAT % (
{
# NOTE(aznashwan): we always return the plain HTTP endpoint:
"schema": "http",
"host": ip_address,
"port": APPLICATION_CONNECTOR_PORT_HTTP,
"path": APPLICATION_ROOT_PATH,
}
)
def _get_legend_gitlab_redirect_uris(self):
base_url = self._get_sdlc_service_url()
redirect_uris = [fmt % {"base_url": base_url} for fmt in SDLC_GITLAB_REDIRECT_URI_FORMATS]
return redirect_uris
def _get_core_legend_service_configs(self, legend_db_credentials, legend_gitlab_credentials):
# Check DB-related options:
if not legend_db_credentials:
|
legend_db_uri = legend_db_credentials["uri"]
legend_db = legend_db_credentials["database"]
# Check gitlab-related options:
gitlab_project_visibility = GITLAB_PROJECT_VISIBILITY_PRIVATE
if self.model.config["gitlab-create-new-projects-as-public"]:
gitlab_project_visibility = GITLAB_PROJECT_VISIBILITY_PUBLIC
if not legend_gitlab_credentials:
return model.WaitingStatus("no legend gitlab info present in relation yet")
gitlab_client_id = legend_gitlab_credentials["client_id"]
gitlab_client_secret = legend_gitlab_credentials["client_secret"]
gitlab_openid_discovery_url = legend_gitlab_credentials["openid_discovery_url"]
gitlab_project_tag = self.model.config["gitlab-project-tag"]
gitlab_project_creation_group_pattern = self.model.config[
"gitlab-project-creation-group-pattern"
]
# Check Java logging options:
request_logging_level = self._get_logging_level_from_config(
"server-requests-logging-level"
)
server_logging_level = self._get_logging_level_from_config("server-logging-level")
if not all([server_logging_level, request_logging_level]):
return model.BlockedStatus(
"one or more logging config options are improperly formatted "
"or missing, please review the debug-log for more details"
)
# Compile base config:
sdlc_config = {
"applicationName": "Legend SDLC",
"server": {
"rootPath": APPLICATION_ROOT_PATH,
"applicationConnectors": [
{
"type": legend_operator_base.APPLICATION_CONNECTOR_TYPE_HTTP,
"port": APPLICATION_CONNECTOR_PORT_HTTP,
"maxRequestHeaderSize": "128KiB",
}
],
"adminConnectors": [
{
"type": legend_operator_base.APPLICATION_CONNECTOR_TYPE_HTTP,
"port": APPLICATION_ADMIN_CONNECTOR_PORT_HTTP,
}
],
"gzip": {"includedMethods": ["GET", "POST"]},
"requestLog": {
"type": "classic",
"level": request_logging_level,
"appenders": [{"type": "console", "logFormat": "OFF"}],
},
},
"filterPriorities": {
"GitLab": 1,
"org.pac4j.j2e.filter.CallbackFilter": 2,
"org.pac4j.j2e.filter.SecurityFilter": 3,
"CORS": 4,
},
"pac4j": {
"callbackPrefix": "/api/pac4j/login",
"mongoUri": legend_db_uri,
"mongoDb": legend_db,
"clients": [
{
"org.finos.legend.server.pac4j.gitlab.GitlabClient": {
"name": "gitlab",
"clientId": gitlab_client_id,
"secret": gitlab_client_secret,
"discoveryUri": gitlab_openid_discovery_url,
# NOTE(aznashwan): needs to be a space-separated str:
"scope": " ".join(GITLAB_REQUIRED_SCOPES),
}
}
],
"mongoSession": {"enabled": True, "collection": "userSessions"},
"bypassPaths": ["/api/info"],
},
"gitLab": {
"newProjectVisibility": gitlab_project_visibility,
"projectTag": gitlab_project_tag,
"uat": {
"server": {
"scheme": legend_gitlab_credentials["gitlab_scheme"],
"host": "%s:%s"
% (
legend_gitlab_credentials["gitlab_host"],
legend_gitlab_credentials["gitlab_port"],
),
},
"app": {
"id": gitlab_client_id,
"secret": gitlab_client_secret,
"redirectURI": (
SDLC_MAIN_GITLAB_REDIRECT_URL
% {"base_url": self._get_sdlc_service_url()}
),
},
},
},
"projectStructure": {
"projectCreation": {"groupIdPattern": gitlab_project_creation_group_pattern},
"extensionProvider": {
"org.finos.legend.sdlc.server.gitlab.finos."
"FinosGitlabProjectStructureExtensionProvider": {}
},
},
"logging": {
"level": server_logging_level,
"appenders": [
{
"type": "console",
"logFormat": APPLICATION_LOGGING_FORMAT,
}
],
},
"swagger": {
"title": "Legend SDLC",
"resourcePackage": "org.finos.legend.sdlc.server.resources",
"version": "local-snapshot",
"schemes": [],
},
}
return {SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH: yaml.dump(sdlc_config)}
def _on_studio_relation_joined(self, event: charm.RelationJoinedEvent) -> None:
rel = event.relation
sdlc_url = self._get_sdlc_service_url()
logger.info("Providing following SDLC URL to Studio: %s", sdlc_url)
rel.data[self.app]["legend-sdlc-url"] = sdlc_url
def _on_studio_relation_changed(self, event: charm.RelationChangedEvent) -> None:
pass
if __name__ == "__main__":
main.main(LegendSDLCServerCharm)
| return model.WaitingStatus("no legend db info present in relation yet") | conditional_block |
charm.py | #!/usr/bin/env python3
# Copyright 2021 Canonical
# See LICENSE file for licensing details.
""" Module defining the Charmed operator for the FINOS Legend SDLC Server. """
import logging
import yaml
from charms.finos_legend_libs.v0 import legend_operator_base
from ops import charm, main, model
logger = logging.getLogger(__name__)
SDLC_SERVICE_NAME = "sdlc"
SDLC_CONTAINER_NAME = "sdlc"
LEGEND_DB_RELATION_NAME = "legend-db"
LEGEND_GITLAB_RELATION_NAME = "legend-sdlc-gitlab"
LEGEND_STUDIO_RELATION_NAME = "legend-sdlc"
SDLC_SERVICE_URL_FORMAT = "%(schema)s://%(host)s:%(port)s%(path)s"
SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH = "/sdlc-config.yaml"
SDLC_MAIN_GITLAB_REDIRECT_URL = "%(base_url)s/auth/callback"
SDLC_GITLAB_REDIRECT_URI_FORMATS = [
SDLC_MAIN_GITLAB_REDIRECT_URL,
"%(base_url)s/pac4j/login/callback",
]
TRUSTSTORE_PASSPHRASE = "Legend SDLC"
TRUSTSTORE_CONTAINER_LOCAL_PATH = "/truststore.jks"
APPLICATION_CONNECTOR_PORT_HTTP = 7070
APPLICATION_ADMIN_CONNECTOR_PORT_HTTP = 7076
APPLICATION_ROOT_PATH = "/api"
APPLICATION_LOGGING_FORMAT = "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%thread] %c - %m%n"
GITLAB_PROJECT_VISIBILITY_PUBLIC = "public"
GITLAB_PROJECT_VISIBILITY_PRIVATE = "private"
GITLAB_REQUIRED_SCOPES = ["openid", "profile", "api"]
class LegendSDLCServerCharm(legend_operator_base.BaseFinosLegendCoreServiceCharm):
"""Charmed operator for the FINOS Legend SDLC Server."""
def __init__(self, *args):
super().__init__(*args)
# Studio relation events:
self.framework.observe(
self.on[LEGEND_STUDIO_RELATION_NAME].relation_joined, self._on_studio_relation_joined
)
self.framework.observe(
self.on[LEGEND_STUDIO_RELATION_NAME].relation_changed, self._on_studio_relation_changed
)
@classmethod
def _get_application_connector_port(cls):
return APPLICATION_CONNECTOR_PORT_HTTP
@classmethod
def _get_workload_container_name(cls):
return SDLC_CONTAINER_NAME
@classmethod
def _get_workload_service_names(cls):
return [SDLC_SERVICE_NAME]
@classmethod
def _get_workload_pebble_layers(cls):
return {
"sdlc": {
"summary": "SDLC layer.",
"description": "Pebble config layer for FINOS Legend SDLC.",
"services": {
"sdlc": {
"override": "replace",
"summary": "sdlc",
"command": (
# NOTE(aznashwan): starting through bash is needed
# for the classpath glob (-cp ...) to be expanded:
"/bin/sh -c 'java -XX:+ExitOnOutOfMemoryError "
"-XX:MaxRAMPercentage=60 -Xss4M -cp /app/bin/*.jar"
" -Dfile.encoding=UTF8 "
'-Djavax.net.ssl.trustStore="%s" '
'-Djavax.net.ssl.trustStorePassword="%s" '
"org.finos.legend.sdlc.server.LegendSDLCServer "
'server "%s"\''
% (
TRUSTSTORE_CONTAINER_LOCAL_PATH,
TRUSTSTORE_PASSPHRASE,
SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH,
)
),
# NOTE(aznashwan): considering the SDLC service expects
# a singular config file which already contains all
# relevant options in it (some of which will require
# the relation with DB/Gitlab to have already been
# established), we do not auto-start:
"startup": "disabled",
# TODO(aznashwan): determine any env vars we could pass
# (most notably, things like the RAM percentage etc...)
"environment": {},
}
},
}
}
def _get_jks_truststore_preferences(self):
jks_prefs = {
"truststore_path": TRUSTSTORE_CONTAINER_LOCAL_PATH,
"truststore_passphrase": TRUSTSTORE_PASSPHRASE,
"trusted_certificates": {},
}
cert = self._get_legend_gitlab_certificate()
if cert:
# NOTE(aznashwan): cert label 'gitlab-sdlc' is arbitrary:
jks_prefs["trusted_certificates"]["gitlab-sdlc"] = cert
return jks_prefs
@classmethod
def _get_legend_gitlab_relation_name(cls):
return LEGEND_GITLAB_RELATION_NAME
@classmethod
def _get_legend_db_relation_name(cls):
return LEGEND_DB_RELATION_NAME
def _get_sdlc_service_url(self):
ip_address = legend_operator_base.get_ip_address()
return SDLC_SERVICE_URL_FORMAT % (
{
# NOTE(aznashwan): we always return the plain HTTP endpoint:
"schema": "http",
"host": ip_address,
"port": APPLICATION_CONNECTOR_PORT_HTTP,
"path": APPLICATION_ROOT_PATH,
}
)
def _get_legend_gitlab_redirect_uris(self):
base_url = self._get_sdlc_service_url()
redirect_uris = [fmt % {"base_url": base_url} for fmt in SDLC_GITLAB_REDIRECT_URI_FORMATS]
return redirect_uris
def _get_core_legend_service_configs(self, legend_db_credentials, legend_gitlab_credentials):
# Check DB-related options:
if not legend_db_credentials:
return model.WaitingStatus("no legend db info present in relation yet")
legend_db_uri = legend_db_credentials["uri"]
legend_db = legend_db_credentials["database"]
# Check gitlab-related options:
gitlab_project_visibility = GITLAB_PROJECT_VISIBILITY_PRIVATE
if self.model.config["gitlab-create-new-projects-as-public"]:
gitlab_project_visibility = GITLAB_PROJECT_VISIBILITY_PUBLIC
if not legend_gitlab_credentials:
return model.WaitingStatus("no legend gitlab info present in relation yet")
gitlab_client_id = legend_gitlab_credentials["client_id"]
gitlab_client_secret = legend_gitlab_credentials["client_secret"]
gitlab_openid_discovery_url = legend_gitlab_credentials["openid_discovery_url"]
gitlab_project_tag = self.model.config["gitlab-project-tag"]
gitlab_project_creation_group_pattern = self.model.config[
"gitlab-project-creation-group-pattern"
]
# Check Java logging options:
request_logging_level = self._get_logging_level_from_config(
"server-requests-logging-level"
)
server_logging_level = self._get_logging_level_from_config("server-logging-level")
if not all([server_logging_level, request_logging_level]):
return model.BlockedStatus(
"one or more logging config options are improperly formatted "
"or missing, please review the debug-log for more details"
)
# Compile base config:
sdlc_config = {
"applicationName": "Legend SDLC",
"server": {
"rootPath": APPLICATION_ROOT_PATH,
"applicationConnectors": [
{
"type": legend_operator_base.APPLICATION_CONNECTOR_TYPE_HTTP,
"port": APPLICATION_CONNECTOR_PORT_HTTP,
"maxRequestHeaderSize": "128KiB",
}
],
"adminConnectors": [
{
"type": legend_operator_base.APPLICATION_CONNECTOR_TYPE_HTTP,
"port": APPLICATION_ADMIN_CONNECTOR_PORT_HTTP,
}
],
"gzip": {"includedMethods": ["GET", "POST"]},
"requestLog": {
"type": "classic",
"level": request_logging_level,
"appenders": [{"type": "console", "logFormat": "OFF"}],
},
},
"filterPriorities": {
"GitLab": 1,
"org.pac4j.j2e.filter.CallbackFilter": 2,
"org.pac4j.j2e.filter.SecurityFilter": 3,
"CORS": 4,
},
"pac4j": {
"callbackPrefix": "/api/pac4j/login",
"mongoUri": legend_db_uri,
"mongoDb": legend_db,
"clients": [
{
"org.finos.legend.server.pac4j.gitlab.GitlabClient": {
"name": "gitlab",
"clientId": gitlab_client_id,
"secret": gitlab_client_secret,
"discoveryUri": gitlab_openid_discovery_url,
# NOTE(aznashwan): needs to be a space-separated str:
"scope": " ".join(GITLAB_REQUIRED_SCOPES),
}
}
],
"mongoSession": {"enabled": True, "collection": "userSessions"},
"bypassPaths": ["/api/info"],
},
"gitLab": {
"newProjectVisibility": gitlab_project_visibility,
"projectTag": gitlab_project_tag,
"uat": {
"server": {
"scheme": legend_gitlab_credentials["gitlab_scheme"],
"host": "%s:%s"
% (
legend_gitlab_credentials["gitlab_host"],
legend_gitlab_credentials["gitlab_port"],
),
},
"app": {
"id": gitlab_client_id,
"secret": gitlab_client_secret,
"redirectURI": (
SDLC_MAIN_GITLAB_REDIRECT_URL
% {"base_url": self._get_sdlc_service_url()}
),
},
},
},
"projectStructure": {
"projectCreation": {"groupIdPattern": gitlab_project_creation_group_pattern},
"extensionProvider": {
"org.finos.legend.sdlc.server.gitlab.finos."
"FinosGitlabProjectStructureExtensionProvider": {}
},
},
"logging": {
"level": server_logging_level,
"appenders": [
{
"type": "console",
"logFormat": APPLICATION_LOGGING_FORMAT,
}
],
},
"swagger": {
"title": "Legend SDLC",
"resourcePackage": "org.finos.legend.sdlc.server.resources",
"version": "local-snapshot",
"schemes": [],
},
}
return {SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH: yaml.dump(sdlc_config)}
def | (self, event: charm.RelationJoinedEvent) -> None:
rel = event.relation
sdlc_url = self._get_sdlc_service_url()
logger.info("Providing following SDLC URL to Studio: %s", sdlc_url)
rel.data[self.app]["legend-sdlc-url"] = sdlc_url
def _on_studio_relation_changed(self, event: charm.RelationChangedEvent) -> None:
pass
if __name__ == "__main__":
main.main(LegendSDLCServerCharm)
| _on_studio_relation_joined | identifier_name |
charm.py | #!/usr/bin/env python3
# Copyright 2021 Canonical
# See LICENSE file for licensing details.
""" Module defining the Charmed operator for the FINOS Legend SDLC Server. """
import logging
import yaml
from charms.finos_legend_libs.v0 import legend_operator_base
from ops import charm, main, model
logger = logging.getLogger(__name__)
SDLC_SERVICE_NAME = "sdlc"
SDLC_CONTAINER_NAME = "sdlc"
LEGEND_DB_RELATION_NAME = "legend-db"
LEGEND_GITLAB_RELATION_NAME = "legend-sdlc-gitlab"
LEGEND_STUDIO_RELATION_NAME = "legend-sdlc"
SDLC_SERVICE_URL_FORMAT = "%(schema)s://%(host)s:%(port)s%(path)s"
SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH = "/sdlc-config.yaml"
SDLC_MAIN_GITLAB_REDIRECT_URL = "%(base_url)s/auth/callback"
SDLC_GITLAB_REDIRECT_URI_FORMATS = [
SDLC_MAIN_GITLAB_REDIRECT_URL,
"%(base_url)s/pac4j/login/callback",
]
TRUSTSTORE_PASSPHRASE = "Legend SDLC"
TRUSTSTORE_CONTAINER_LOCAL_PATH = "/truststore.jks"
APPLICATION_CONNECTOR_PORT_HTTP = 7070
APPLICATION_ADMIN_CONNECTOR_PORT_HTTP = 7076
APPLICATION_ROOT_PATH = "/api"
APPLICATION_LOGGING_FORMAT = "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%thread] %c - %m%n"
GITLAB_PROJECT_VISIBILITY_PUBLIC = "public"
GITLAB_PROJECT_VISIBILITY_PRIVATE = "private"
GITLAB_REQUIRED_SCOPES = ["openid", "profile", "api"]
class LegendSDLCServerCharm(legend_operator_base.BaseFinosLegendCoreServiceCharm):
"""Charmed operator for the FINOS Legend SDLC Server."""
def __init__(self, *args):
super().__init__(*args)
# Studio relation events:
self.framework.observe(
self.on[LEGEND_STUDIO_RELATION_NAME].relation_joined, self._on_studio_relation_joined
)
self.framework.observe(
self.on[LEGEND_STUDIO_RELATION_NAME].relation_changed, self._on_studio_relation_changed
)
@classmethod
def _get_application_connector_port(cls):
return APPLICATION_CONNECTOR_PORT_HTTP
@classmethod
def _get_workload_container_name(cls):
return SDLC_CONTAINER_NAME
@classmethod | def _get_workload_service_names(cls):
return [SDLC_SERVICE_NAME]
@classmethod
def _get_workload_pebble_layers(cls):
return {
"sdlc": {
"summary": "SDLC layer.",
"description": "Pebble config layer for FINOS Legend SDLC.",
"services": {
"sdlc": {
"override": "replace",
"summary": "sdlc",
"command": (
# NOTE(aznashwan): starting through bash is needed
# for the classpath glob (-cp ...) to be expanded:
"/bin/sh -c 'java -XX:+ExitOnOutOfMemoryError "
"-XX:MaxRAMPercentage=60 -Xss4M -cp /app/bin/*.jar"
" -Dfile.encoding=UTF8 "
'-Djavax.net.ssl.trustStore="%s" '
'-Djavax.net.ssl.trustStorePassword="%s" '
"org.finos.legend.sdlc.server.LegendSDLCServer "
'server "%s"\''
% (
TRUSTSTORE_CONTAINER_LOCAL_PATH,
TRUSTSTORE_PASSPHRASE,
SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH,
)
),
# NOTE(aznashwan): considering the SDLC service expects
# a singular config file which already contains all
# relevant options in it (some of which will require
# the relation with DB/Gitlab to have already been
# established), we do not auto-start:
"startup": "disabled",
# TODO(aznashwan): determine any env vars we could pass
# (most notably, things like the RAM percentage etc...)
"environment": {},
}
},
}
}
def _get_jks_truststore_preferences(self):
jks_prefs = {
"truststore_path": TRUSTSTORE_CONTAINER_LOCAL_PATH,
"truststore_passphrase": TRUSTSTORE_PASSPHRASE,
"trusted_certificates": {},
}
cert = self._get_legend_gitlab_certificate()
if cert:
# NOTE(aznashwan): cert label 'gitlab-sdlc' is arbitrary:
jks_prefs["trusted_certificates"]["gitlab-sdlc"] = cert
return jks_prefs
@classmethod
def _get_legend_gitlab_relation_name(cls):
return LEGEND_GITLAB_RELATION_NAME
@classmethod
def _get_legend_db_relation_name(cls):
return LEGEND_DB_RELATION_NAME
def _get_sdlc_service_url(self):
ip_address = legend_operator_base.get_ip_address()
return SDLC_SERVICE_URL_FORMAT % (
{
# NOTE(aznashwan): we always return the plain HTTP endpoint:
"schema": "http",
"host": ip_address,
"port": APPLICATION_CONNECTOR_PORT_HTTP,
"path": APPLICATION_ROOT_PATH,
}
)
def _get_legend_gitlab_redirect_uris(self):
base_url = self._get_sdlc_service_url()
redirect_uris = [fmt % {"base_url": base_url} for fmt in SDLC_GITLAB_REDIRECT_URI_FORMATS]
return redirect_uris
def _get_core_legend_service_configs(self, legend_db_credentials, legend_gitlab_credentials):
# Check DB-related options:
if not legend_db_credentials:
return model.WaitingStatus("no legend db info present in relation yet")
legend_db_uri = legend_db_credentials["uri"]
legend_db = legend_db_credentials["database"]
# Check gitlab-related options:
gitlab_project_visibility = GITLAB_PROJECT_VISIBILITY_PRIVATE
if self.model.config["gitlab-create-new-projects-as-public"]:
gitlab_project_visibility = GITLAB_PROJECT_VISIBILITY_PUBLIC
if not legend_gitlab_credentials:
return model.WaitingStatus("no legend gitlab info present in relation yet")
gitlab_client_id = legend_gitlab_credentials["client_id"]
gitlab_client_secret = legend_gitlab_credentials["client_secret"]
gitlab_openid_discovery_url = legend_gitlab_credentials["openid_discovery_url"]
gitlab_project_tag = self.model.config["gitlab-project-tag"]
gitlab_project_creation_group_pattern = self.model.config[
"gitlab-project-creation-group-pattern"
]
# Check Java logging options:
request_logging_level = self._get_logging_level_from_config(
"server-requests-logging-level"
)
server_logging_level = self._get_logging_level_from_config("server-logging-level")
if not all([server_logging_level, request_logging_level]):
return model.BlockedStatus(
"one or more logging config options are improperly formatted "
"or missing, please review the debug-log for more details"
)
# Compile base config:
sdlc_config = {
"applicationName": "Legend SDLC",
"server": {
"rootPath": APPLICATION_ROOT_PATH,
"applicationConnectors": [
{
"type": legend_operator_base.APPLICATION_CONNECTOR_TYPE_HTTP,
"port": APPLICATION_CONNECTOR_PORT_HTTP,
"maxRequestHeaderSize": "128KiB",
}
],
"adminConnectors": [
{
"type": legend_operator_base.APPLICATION_CONNECTOR_TYPE_HTTP,
"port": APPLICATION_ADMIN_CONNECTOR_PORT_HTTP,
}
],
"gzip": {"includedMethods": ["GET", "POST"]},
"requestLog": {
"type": "classic",
"level": request_logging_level,
"appenders": [{"type": "console", "logFormat": "OFF"}],
},
},
"filterPriorities": {
"GitLab": 1,
"org.pac4j.j2e.filter.CallbackFilter": 2,
"org.pac4j.j2e.filter.SecurityFilter": 3,
"CORS": 4,
},
"pac4j": {
"callbackPrefix": "/api/pac4j/login",
"mongoUri": legend_db_uri,
"mongoDb": legend_db,
"clients": [
{
"org.finos.legend.server.pac4j.gitlab.GitlabClient": {
"name": "gitlab",
"clientId": gitlab_client_id,
"secret": gitlab_client_secret,
"discoveryUri": gitlab_openid_discovery_url,
# NOTE(aznashwan): needs to be a space-separated str:
"scope": " ".join(GITLAB_REQUIRED_SCOPES),
}
}
],
"mongoSession": {"enabled": True, "collection": "userSessions"},
"bypassPaths": ["/api/info"],
},
"gitLab": {
"newProjectVisibility": gitlab_project_visibility,
"projectTag": gitlab_project_tag,
"uat": {
"server": {
"scheme": legend_gitlab_credentials["gitlab_scheme"],
"host": "%s:%s"
% (
legend_gitlab_credentials["gitlab_host"],
legend_gitlab_credentials["gitlab_port"],
),
},
"app": {
"id": gitlab_client_id,
"secret": gitlab_client_secret,
"redirectURI": (
SDLC_MAIN_GITLAB_REDIRECT_URL
% {"base_url": self._get_sdlc_service_url()}
),
},
},
},
"projectStructure": {
"projectCreation": {"groupIdPattern": gitlab_project_creation_group_pattern},
"extensionProvider": {
"org.finos.legend.sdlc.server.gitlab.finos."
"FinosGitlabProjectStructureExtensionProvider": {}
},
},
"logging": {
"level": server_logging_level,
"appenders": [
{
"type": "console",
"logFormat": APPLICATION_LOGGING_FORMAT,
}
],
},
"swagger": {
"title": "Legend SDLC",
"resourcePackage": "org.finos.legend.sdlc.server.resources",
"version": "local-snapshot",
"schemes": [],
},
}
return {SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH: yaml.dump(sdlc_config)}
def _on_studio_relation_joined(self, event: charm.RelationJoinedEvent) -> None:
rel = event.relation
sdlc_url = self._get_sdlc_service_url()
logger.info("Providing following SDLC URL to Studio: %s", sdlc_url)
rel.data[self.app]["legend-sdlc-url"] = sdlc_url
def _on_studio_relation_changed(self, event: charm.RelationChangedEvent) -> None:
pass
if __name__ == "__main__":
main.main(LegendSDLCServerCharm) | random_line_split | |
charm.py | #!/usr/bin/env python3
# Copyright 2021 Canonical
# See LICENSE file for licensing details.
""" Module defining the Charmed operator for the FINOS Legend SDLC Server. """
import logging
import yaml
from charms.finos_legend_libs.v0 import legend_operator_base
from ops import charm, main, model
logger = logging.getLogger(__name__)
SDLC_SERVICE_NAME = "sdlc"
SDLC_CONTAINER_NAME = "sdlc"
LEGEND_DB_RELATION_NAME = "legend-db"
LEGEND_GITLAB_RELATION_NAME = "legend-sdlc-gitlab"
LEGEND_STUDIO_RELATION_NAME = "legend-sdlc"
SDLC_SERVICE_URL_FORMAT = "%(schema)s://%(host)s:%(port)s%(path)s"
SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH = "/sdlc-config.yaml"
SDLC_MAIN_GITLAB_REDIRECT_URL = "%(base_url)s/auth/callback"
SDLC_GITLAB_REDIRECT_URI_FORMATS = [
SDLC_MAIN_GITLAB_REDIRECT_URL,
"%(base_url)s/pac4j/login/callback",
]
TRUSTSTORE_PASSPHRASE = "Legend SDLC"
TRUSTSTORE_CONTAINER_LOCAL_PATH = "/truststore.jks"
APPLICATION_CONNECTOR_PORT_HTTP = 7070
APPLICATION_ADMIN_CONNECTOR_PORT_HTTP = 7076
APPLICATION_ROOT_PATH = "/api"
APPLICATION_LOGGING_FORMAT = "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%thread] %c - %m%n"
GITLAB_PROJECT_VISIBILITY_PUBLIC = "public"
GITLAB_PROJECT_VISIBILITY_PRIVATE = "private"
GITLAB_REQUIRED_SCOPES = ["openid", "profile", "api"]
class LegendSDLCServerCharm(legend_operator_base.BaseFinosLegendCoreServiceCharm):
"""Charmed operator for the FINOS Legend SDLC Server."""
def __init__(self, *args):
super().__init__(*args)
# Studio relation events:
self.framework.observe(
self.on[LEGEND_STUDIO_RELATION_NAME].relation_joined, self._on_studio_relation_joined
)
self.framework.observe(
self.on[LEGEND_STUDIO_RELATION_NAME].relation_changed, self._on_studio_relation_changed
)
@classmethod
def _get_application_connector_port(cls):
return APPLICATION_CONNECTOR_PORT_HTTP
@classmethod
def _get_workload_container_name(cls):
return SDLC_CONTAINER_NAME
@classmethod
def _get_workload_service_names(cls):
return [SDLC_SERVICE_NAME]
@classmethod
def _get_workload_pebble_layers(cls):
return {
"sdlc": {
"summary": "SDLC layer.",
"description": "Pebble config layer for FINOS Legend SDLC.",
"services": {
"sdlc": {
"override": "replace",
"summary": "sdlc",
"command": (
# NOTE(aznashwan): starting through bash is needed
# for the classpath glob (-cp ...) to be expanded:
"/bin/sh -c 'java -XX:+ExitOnOutOfMemoryError "
"-XX:MaxRAMPercentage=60 -Xss4M -cp /app/bin/*.jar"
" -Dfile.encoding=UTF8 "
'-Djavax.net.ssl.trustStore="%s" '
'-Djavax.net.ssl.trustStorePassword="%s" '
"org.finos.legend.sdlc.server.LegendSDLCServer "
'server "%s"\''
% (
TRUSTSTORE_CONTAINER_LOCAL_PATH,
TRUSTSTORE_PASSPHRASE,
SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH,
)
),
# NOTE(aznashwan): considering the SDLC service expects
# a singular config file which already contains all
# relevant options in it (some of which will require
# the relation with DB/Gitlab to have already been
# established), we do not auto-start:
"startup": "disabled",
# TODO(aznashwan): determine any env vars we could pass
# (most notably, things like the RAM percentage etc...)
"environment": {},
}
},
}
}
def _get_jks_truststore_preferences(self):
jks_prefs = {
"truststore_path": TRUSTSTORE_CONTAINER_LOCAL_PATH,
"truststore_passphrase": TRUSTSTORE_PASSPHRASE,
"trusted_certificates": {},
}
cert = self._get_legend_gitlab_certificate()
if cert:
# NOTE(aznashwan): cert label 'gitlab-sdlc' is arbitrary:
jks_prefs["trusted_certificates"]["gitlab-sdlc"] = cert
return jks_prefs
@classmethod
def _get_legend_gitlab_relation_name(cls):
return LEGEND_GITLAB_RELATION_NAME
@classmethod
def _get_legend_db_relation_name(cls):
|
def _get_sdlc_service_url(self):
ip_address = legend_operator_base.get_ip_address()
return SDLC_SERVICE_URL_FORMAT % (
{
# NOTE(aznashwan): we always return the plain HTTP endpoint:
"schema": "http",
"host": ip_address,
"port": APPLICATION_CONNECTOR_PORT_HTTP,
"path": APPLICATION_ROOT_PATH,
}
)
def _get_legend_gitlab_redirect_uris(self):
base_url = self._get_sdlc_service_url()
redirect_uris = [fmt % {"base_url": base_url} for fmt in SDLC_GITLAB_REDIRECT_URI_FORMATS]
return redirect_uris
def _get_core_legend_service_configs(self, legend_db_credentials, legend_gitlab_credentials):
# Check DB-related options:
if not legend_db_credentials:
return model.WaitingStatus("no legend db info present in relation yet")
legend_db_uri = legend_db_credentials["uri"]
legend_db = legend_db_credentials["database"]
# Check gitlab-related options:
gitlab_project_visibility = GITLAB_PROJECT_VISIBILITY_PRIVATE
if self.model.config["gitlab-create-new-projects-as-public"]:
gitlab_project_visibility = GITLAB_PROJECT_VISIBILITY_PUBLIC
if not legend_gitlab_credentials:
return model.WaitingStatus("no legend gitlab info present in relation yet")
gitlab_client_id = legend_gitlab_credentials["client_id"]
gitlab_client_secret = legend_gitlab_credentials["client_secret"]
gitlab_openid_discovery_url = legend_gitlab_credentials["openid_discovery_url"]
gitlab_project_tag = self.model.config["gitlab-project-tag"]
gitlab_project_creation_group_pattern = self.model.config[
"gitlab-project-creation-group-pattern"
]
# Check Java logging options:
request_logging_level = self._get_logging_level_from_config(
"server-requests-logging-level"
)
server_logging_level = self._get_logging_level_from_config("server-logging-level")
if not all([server_logging_level, request_logging_level]):
return model.BlockedStatus(
"one or more logging config options are improperly formatted "
"or missing, please review the debug-log for more details"
)
# Compile base config:
sdlc_config = {
"applicationName": "Legend SDLC",
"server": {
"rootPath": APPLICATION_ROOT_PATH,
"applicationConnectors": [
{
"type": legend_operator_base.APPLICATION_CONNECTOR_TYPE_HTTP,
"port": APPLICATION_CONNECTOR_PORT_HTTP,
"maxRequestHeaderSize": "128KiB",
}
],
"adminConnectors": [
{
"type": legend_operator_base.APPLICATION_CONNECTOR_TYPE_HTTP,
"port": APPLICATION_ADMIN_CONNECTOR_PORT_HTTP,
}
],
"gzip": {"includedMethods": ["GET", "POST"]},
"requestLog": {
"type": "classic",
"level": request_logging_level,
"appenders": [{"type": "console", "logFormat": "OFF"}],
},
},
"filterPriorities": {
"GitLab": 1,
"org.pac4j.j2e.filter.CallbackFilter": 2,
"org.pac4j.j2e.filter.SecurityFilter": 3,
"CORS": 4,
},
"pac4j": {
"callbackPrefix": "/api/pac4j/login",
"mongoUri": legend_db_uri,
"mongoDb": legend_db,
"clients": [
{
"org.finos.legend.server.pac4j.gitlab.GitlabClient": {
"name": "gitlab",
"clientId": gitlab_client_id,
"secret": gitlab_client_secret,
"discoveryUri": gitlab_openid_discovery_url,
# NOTE(aznashwan): needs to be a space-separated str:
"scope": " ".join(GITLAB_REQUIRED_SCOPES),
}
}
],
"mongoSession": {"enabled": True, "collection": "userSessions"},
"bypassPaths": ["/api/info"],
},
"gitLab": {
"newProjectVisibility": gitlab_project_visibility,
"projectTag": gitlab_project_tag,
"uat": {
"server": {
"scheme": legend_gitlab_credentials["gitlab_scheme"],
"host": "%s:%s"
% (
legend_gitlab_credentials["gitlab_host"],
legend_gitlab_credentials["gitlab_port"],
),
},
"app": {
"id": gitlab_client_id,
"secret": gitlab_client_secret,
"redirectURI": (
SDLC_MAIN_GITLAB_REDIRECT_URL
% {"base_url": self._get_sdlc_service_url()}
),
},
},
},
"projectStructure": {
"projectCreation": {"groupIdPattern": gitlab_project_creation_group_pattern},
"extensionProvider": {
"org.finos.legend.sdlc.server.gitlab.finos."
"FinosGitlabProjectStructureExtensionProvider": {}
},
},
"logging": {
"level": server_logging_level,
"appenders": [
{
"type": "console",
"logFormat": APPLICATION_LOGGING_FORMAT,
}
],
},
"swagger": {
"title": "Legend SDLC",
"resourcePackage": "org.finos.legend.sdlc.server.resources",
"version": "local-snapshot",
"schemes": [],
},
}
return {SDLC_CONFIG_FILE_CONTAINER_LOCAL_PATH: yaml.dump(sdlc_config)}
def _on_studio_relation_joined(self, event: charm.RelationJoinedEvent) -> None:
rel = event.relation
sdlc_url = self._get_sdlc_service_url()
logger.info("Providing following SDLC URL to Studio: %s", sdlc_url)
rel.data[self.app]["legend-sdlc-url"] = sdlc_url
def _on_studio_relation_changed(self, event: charm.RelationChangedEvent) -> None:
pass
if __name__ == "__main__":
main.main(LegendSDLCServerCharm)
| return LEGEND_DB_RELATION_NAME | identifier_body |
run_test.go | // Copyright © 2017-2018 Ricardo Aravena <raravena@branch.io>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package exec
import (
"fmt"
glssh "github.com/gliderlabs/ssh"
"golang.org/x/crypto/ssh"
"golang.org/x/crypto/ssh/agent"
"golang.org/x/crypto/ssh/testdata"
"io"
"io/ioutil"
"math/rand"
"net"
"os"
"reflect"
"testing"
"time"
)
type mockSSHKey struct {
keyname string
content []byte
privkey agent.AddedKey
pubkey ssh.PublicKey
}
var (
testPrivateKeys map[string]interface{}
testSigners map[string]ssh.Signer
testPublicKeys map[string]ssh.PublicKey
sshAgentSocket string
)
func init() {
var err error
n := len(testdata.PEMBytes)
testSigners = make(map[string]ssh.Signer, n)
testPrivateKeys = make(map[string]interface{}, n)
testPublicKeys = make(map[string]ssh.PublicKey, n)
for t, k := range testdata.PEMBytes {
testPrivateKeys[t], err = ssh.ParseRawPrivateKey(k)
if err != nil {
panic(fmt.Sprintf("Unable to parse test key %s: %v", t, err))
}
testSigners[t], err = ssh.NewSignerFromKey(testPrivateKeys[t])
if err != nil {
panic(fmt.Sprintf("Unable to create signer for test key %s: %v", t, err))
}
testPublicKeys[t] = testSigners[t].PublicKey()
}
randomStr := fmt.Sprintf("%v", rand.Intn(5000))
socketFile := "/tmp/gosocket" + randomStr + ".sock"
setupSSHAgent(socketFile)
time.Sleep(2 * time.Second)
startSSHServer()
}
func setupSSHAgent(socketFile string) {
done := make(chan string, 1)
a := agent.NewKeyring()
go func(done chan<- string) {
ln, err := net.Listen("unix", socketFile)
if err != nil {
panic(fmt.Sprintf("Couldn't create socket for tests %v", err))
}
defer ln.Close()
// Need to wait until the socket is setup
firstTime := true
for {
if firstTime == true {
done <- socketFile
firstTime = false
}
c, err := ln.Accept()
if err != nil {
panic(fmt.Sprintf("Couldn't accept connection to agent tests %v", err))
}
defer c.Close()
go func(c io.ReadWriter) {
err = agent.ServeAgent(a, c)
if err != nil {
fmt.Sprintf("Couldn't serve ssh agent for tests %v", err)
}
}(c)
}
}(done)
sshAgentSocket = <-done
}
func addKeytoSSHAgent(key agent.AddedKey) {
aConn, _ := net.Dial("unix", sshAgentSocket)
sshAgent := agent.NewClient(aConn)
sshAgent.Add(key)
}
func removeKeyfromSSHAgent(key ssh.PublicKey) {
aConn, _ := net.Dial("unix", sshAgentSocket)
sshAgent := agent.NewClient(aConn)
sshAgent.Remove(key)
}
func startSSHServer() {
done := make(chan bool, 1)
go func(done chan<- bool) {
glssh.Handle(func(s glssh.Session) {
authorizedKey := ssh.MarshalAuthorizedKey(s.PublicKey())
io.WriteString(s, fmt.Sprintf("public key used by %s:\n", s.User()))
s.Write(authorizedKey)
s.Exit(0)
})
publicKeyOption := glssh.PublicKeyAuth(func(ctx glssh.Context, key glssh.PublicKey) bool {
for _, pubk := range testPublicKeys {
if glssh.KeysEqual(key, pubk) {
return true
}
}
return false
})
fmt.Println("starting ssh server on port 2222...")
done <- true
panic(glssh.ListenAndServe(":2222", nil, publicKeyOption))
}(done)
<-done
}
func TestMakeSigner(t *testing.T) {
tests := []struct {
name string
key mockSSHKey
expected ssh.Signer
}{
{name: "Basic key signer with valid rsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["rsa"],
},
expected: testSigners["rsa"],
},
{name: "Basic key signer with valid dsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["dsa"],
},
expected: testSigners["dsa"],
},
{name: "Basic key signer with valid ecdsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["ecdsa"],
},
expected: testSigners["ecdsa"],
},
{name: "Basic key signer with valid user key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["user"],
},
expected: testSigners["user"],
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Write content of the key to the keyname file
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
returned, _ := makeSigner(tt.key.keyname)
if !reflect.DeepEqual(returned, tt.expected) {
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
os.Remove(tt.key.keyname)
})
}
}
func TestMakeKeyring(t *testing.T) {
tests := []struct {
name string
useagent bool
key mockSSHKey
expected ssh.AuthMethod
}{
{name: "Basic key ring with valid rsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["rsa"],
},
expected: ssh.PublicKeys(testSigners["rsa"]),
},
{name: "Basic key ring with valid dsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey11",
content: testdata.PEMBytes["dsa"],
},
expected: ssh.PublicKeys(testSigners["dsa"]),
},
{name: "Basic key ring with valid ecdsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey12",
content: testdata.PEMBytes["ecdsa"],
},
expected: ssh.PublicKeys(testSigners["ecdsa"]),
},
{name: "Basic key ring with valid user key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey13",
content: testdata.PEMBytes["user"],
},
expected: ssh.PublicKeys(testSigners["user"]),
},
{name: "Basic key ring agent with valid rsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["rsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["rsa"]},
pubkey: testPublicKeys["rsa"],
},
expected: ssh.PublicKeys(testSigners["rsa"]),
},
{name: "Basic key ring agent with valid dsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["dsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["dsa"]},
pubkey: testPublicKeys["dsa"],
},
expected: ssh.PublicKeys(testSigners["dsa"]),
},
{name: "Basic key ring agent with valid ecdsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["ecdsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["ecdsa"]},
pubkey: testPublicKeys["ecdsa"],
},
expected: ssh.PublicKeys(testSigners["ecdsa"]),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.useagent == true {
addKeytoSSHAgent(tt.key.privkey)
}
// Write content of the key to the keyname file
if tt.key.keyname != "" {
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
}
returned := makeKeyring(tt.key.keyname, sshAgentSocket, tt.useagent)
// DeepEqual always returns false for functions unless nil
// hence converting to string to compare
check1 := reflect.ValueOf(returned).String()
check2 := reflect.ValueOf(tt.expected).String()
if !reflect.DeepEqual(check1, check2) {
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
if tt.useagent == true {
removeKeyfromSSHAgent(tt.key.pubkey)
}
if tt.key.keyname != "" {
os.Remove(tt.key.keyname)
}
})
}
}
func TestRun(t *testing.T) {
tests := []struct {
name string
machines []string
user string
cmd string
key mockSSHKey
port int
useagent bool
expected bool
}{
{name: "Basic with valid rsa key",
machines: []string{"localhost"},
port: 2222,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey21",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: true,
},
{name: "Basic with valid rsa key wrong hostname",
machines: []string{"bogushost"},
port: 2222,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey22",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
{name: "Basic with valid rsa key wrong port",
machines: []string{"localhost"},
port: 2223,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey23",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
{name: "Basic with valid rsa key Google endpoint",
machines: []string{"www.google.com"},
port: 22,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey24",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.useagent == true {
addKeytoSSHAgent(tt.key.privkey)
}
// Write content of the key to the keyname file
if tt.key.keyname != "" {
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
}
returned := Run(Machines(tt.machines),
User(tt.user),
Port(tt.port),
Cmd(tt.cmd),
Key(tt.key.keyname),
UseAgent(tt.useagent),
AgentSocket(sshAgentSocket))
if !(returned == tt.expected) { |
if tt.useagent == true {
removeKeyfromSSHAgent(tt.key.pubkey)
}
if tt.key.keyname != "" {
os.Remove(tt.key.keyname)
}
})
}
}
func TestTearDown(t *testing.T) {
tests := []struct {
name string
id string
}{
{name: "Teardown SSH Agent",
id: "sshAgentTdown"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.id == "sshAgentTdown" {
os.Remove(sshAgentSocket)
}
})
}
}
|
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
| conditional_block |
run_test.go | // Copyright © 2017-2018 Ricardo Aravena <raravena@branch.io>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package exec
import (
"fmt"
glssh "github.com/gliderlabs/ssh"
"golang.org/x/crypto/ssh"
"golang.org/x/crypto/ssh/agent"
"golang.org/x/crypto/ssh/testdata"
"io"
"io/ioutil"
"math/rand"
"net"
"os"
"reflect"
"testing"
"time"
)
type mockSSHKey struct {
keyname string
content []byte
privkey agent.AddedKey
pubkey ssh.PublicKey
}
var (
testPrivateKeys map[string]interface{}
testSigners map[string]ssh.Signer
testPublicKeys map[string]ssh.PublicKey
sshAgentSocket string
)
func init() {
var err error
n := len(testdata.PEMBytes)
testSigners = make(map[string]ssh.Signer, n)
testPrivateKeys = make(map[string]interface{}, n)
testPublicKeys = make(map[string]ssh.PublicKey, n)
for t, k := range testdata.PEMBytes {
testPrivateKeys[t], err = ssh.ParseRawPrivateKey(k)
if err != nil {
panic(fmt.Sprintf("Unable to parse test key %s: %v", t, err))
}
testSigners[t], err = ssh.NewSignerFromKey(testPrivateKeys[t])
if err != nil {
panic(fmt.Sprintf("Unable to create signer for test key %s: %v", t, err))
}
testPublicKeys[t] = testSigners[t].PublicKey()
}
randomStr := fmt.Sprintf("%v", rand.Intn(5000))
socketFile := "/tmp/gosocket" + randomStr + ".sock"
setupSSHAgent(socketFile)
time.Sleep(2 * time.Second)
startSSHServer()
}
func setupSSHAgent(socketFile string) {
done := make(chan string, 1)
a := agent.NewKeyring()
go func(done chan<- string) {
ln, err := net.Listen("unix", socketFile)
if err != nil {
panic(fmt.Sprintf("Couldn't create socket for tests %v", err))
}
defer ln.Close()
// Need to wait until the socket is setup
firstTime := true
for {
if firstTime == true {
done <- socketFile
firstTime = false
}
c, err := ln.Accept()
if err != nil {
panic(fmt.Sprintf("Couldn't accept connection to agent tests %v", err))
}
defer c.Close()
go func(c io.ReadWriter) {
err = agent.ServeAgent(a, c)
if err != nil {
fmt.Sprintf("Couldn't serve ssh agent for tests %v", err)
}
}(c)
}
}(done)
sshAgentSocket = <-done
}
func addKeytoSSHAgent(key agent.AddedKey) {
aConn, _ := net.Dial("unix", sshAgentSocket)
sshAgent := agent.NewClient(aConn)
sshAgent.Add(key)
}
func removeKeyfromSSHAgent(key ssh.PublicKey) {
aConn, _ := net.Dial("unix", sshAgentSocket)
sshAgent := agent.NewClient(aConn)
sshAgent.Remove(key)
}
func startSSHServer() {
done := make(chan bool, 1)
go func(done chan<- bool) {
glssh.Handle(func(s glssh.Session) {
authorizedKey := ssh.MarshalAuthorizedKey(s.PublicKey())
io.WriteString(s, fmt.Sprintf("public key used by %s:\n", s.User()))
s.Write(authorizedKey)
s.Exit(0)
})
publicKeyOption := glssh.PublicKeyAuth(func(ctx glssh.Context, key glssh.PublicKey) bool {
for _, pubk := range testPublicKeys {
if glssh.KeysEqual(key, pubk) {
return true
}
}
return false
})
fmt.Println("starting ssh server on port 2222...")
done <- true
panic(glssh.ListenAndServe(":2222", nil, publicKeyOption))
}(done)
<-done
}
func TestMakeSigner(t *testing.T) {
tests := []struct {
name string
key mockSSHKey
expected ssh.Signer
}{
{name: "Basic key signer with valid rsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["rsa"],
},
expected: testSigners["rsa"],
},
{name: "Basic key signer with valid dsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["dsa"],
},
expected: testSigners["dsa"],
},
{name: "Basic key signer with valid ecdsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["ecdsa"],
},
expected: testSigners["ecdsa"],
},
{name: "Basic key signer with valid user key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["user"],
},
expected: testSigners["user"],
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Write content of the key to the keyname file
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
returned, _ := makeSigner(tt.key.keyname)
if !reflect.DeepEqual(returned, tt.expected) {
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
os.Remove(tt.key.keyname)
})
}
}
func TestMakeKeyring(t *testing.T) {
tests := []struct {
name string
useagent bool
key mockSSHKey
expected ssh.AuthMethod
}{
{name: "Basic key ring with valid rsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["rsa"],
},
expected: ssh.PublicKeys(testSigners["rsa"]),
},
{name: "Basic key ring with valid dsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey11",
content: testdata.PEMBytes["dsa"],
},
expected: ssh.PublicKeys(testSigners["dsa"]),
},
{name: "Basic key ring with valid ecdsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey12",
content: testdata.PEMBytes["ecdsa"],
},
expected: ssh.PublicKeys(testSigners["ecdsa"]),
},
{name: "Basic key ring with valid user key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey13",
content: testdata.PEMBytes["user"],
},
expected: ssh.PublicKeys(testSigners["user"]),
},
{name: "Basic key ring agent with valid rsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["rsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["rsa"]},
pubkey: testPublicKeys["rsa"],
},
expected: ssh.PublicKeys(testSigners["rsa"]),
},
{name: "Basic key ring agent with valid dsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["dsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["dsa"]},
pubkey: testPublicKeys["dsa"],
},
expected: ssh.PublicKeys(testSigners["dsa"]),
},
{name: "Basic key ring agent with valid ecdsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["ecdsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["ecdsa"]},
pubkey: testPublicKeys["ecdsa"],
},
expected: ssh.PublicKeys(testSigners["ecdsa"]),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.useagent == true {
addKeytoSSHAgent(tt.key.privkey)
}
// Write content of the key to the keyname file
if tt.key.keyname != "" {
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
}
returned := makeKeyring(tt.key.keyname, sshAgentSocket, tt.useagent)
// DeepEqual always returns false for functions unless nil
// hence converting to string to compare
check1 := reflect.ValueOf(returned).String()
check2 := reflect.ValueOf(tt.expected).String()
if !reflect.DeepEqual(check1, check2) {
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
if tt.useagent == true {
removeKeyfromSSHAgent(tt.key.pubkey)
}
if tt.key.keyname != "" {
os.Remove(tt.key.keyname)
}
})
}
}
func TestRun(t *testing.T) {
tests := []struct {
name string
machines []string
user string
cmd string
key mockSSHKey
port int
useagent bool
expected bool
}{
{name: "Basic with valid rsa key",
machines: []string{"localhost"},
port: 2222,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey21",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: true,
},
{name: "Basic with valid rsa key wrong hostname",
machines: []string{"bogushost"},
port: 2222,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey22",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
{name: "Basic with valid rsa key wrong port",
machines: []string{"localhost"},
port: 2223,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey23",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
{name: "Basic with valid rsa key Google endpoint",
machines: []string{"www.google.com"},
port: 22,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey24",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.useagent == true {
addKeytoSSHAgent(tt.key.privkey)
}
// Write content of the key to the keyname file
if tt.key.keyname != "" {
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
}
returned := Run(Machines(tt.machines),
User(tt.user),
Port(tt.port),
Cmd(tt.cmd),
Key(tt.key.keyname),
UseAgent(tt.useagent),
AgentSocket(sshAgentSocket))
if !(returned == tt.expected) {
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
if tt.useagent == true {
removeKeyfromSSHAgent(tt.key.pubkey) | if tt.key.keyname != "" {
os.Remove(tt.key.keyname)
}
})
}
}
func TestTearDown(t *testing.T) {
tests := []struct {
name string
id string
}{
{name: "Teardown SSH Agent",
id: "sshAgentTdown"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.id == "sshAgentTdown" {
os.Remove(sshAgentSocket)
}
})
}
} | } | random_line_split |
run_test.go | // Copyright © 2017-2018 Ricardo Aravena <raravena@branch.io>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package exec
import (
"fmt"
glssh "github.com/gliderlabs/ssh"
"golang.org/x/crypto/ssh"
"golang.org/x/crypto/ssh/agent"
"golang.org/x/crypto/ssh/testdata"
"io"
"io/ioutil"
"math/rand"
"net"
"os"
"reflect"
"testing"
"time"
)
type mockSSHKey struct {
keyname string
content []byte
privkey agent.AddedKey
pubkey ssh.PublicKey
}
var (
testPrivateKeys map[string]interface{}
testSigners map[string]ssh.Signer
testPublicKeys map[string]ssh.PublicKey
sshAgentSocket string
)
func init() {
var err error
n := len(testdata.PEMBytes)
testSigners = make(map[string]ssh.Signer, n)
testPrivateKeys = make(map[string]interface{}, n)
testPublicKeys = make(map[string]ssh.PublicKey, n)
for t, k := range testdata.PEMBytes {
testPrivateKeys[t], err = ssh.ParseRawPrivateKey(k)
if err != nil {
panic(fmt.Sprintf("Unable to parse test key %s: %v", t, err))
}
testSigners[t], err = ssh.NewSignerFromKey(testPrivateKeys[t])
if err != nil {
panic(fmt.Sprintf("Unable to create signer for test key %s: %v", t, err))
}
testPublicKeys[t] = testSigners[t].PublicKey()
}
randomStr := fmt.Sprintf("%v", rand.Intn(5000))
socketFile := "/tmp/gosocket" + randomStr + ".sock"
setupSSHAgent(socketFile)
time.Sleep(2 * time.Second)
startSSHServer()
}
func setupSSHAgent(socketFile string) {
done := make(chan string, 1)
a := agent.NewKeyring()
go func(done chan<- string) {
ln, err := net.Listen("unix", socketFile)
if err != nil {
panic(fmt.Sprintf("Couldn't create socket for tests %v", err))
}
defer ln.Close()
// Need to wait until the socket is setup
firstTime := true
for {
if firstTime == true {
done <- socketFile
firstTime = false
}
c, err := ln.Accept()
if err != nil {
panic(fmt.Sprintf("Couldn't accept connection to agent tests %v", err))
}
defer c.Close()
go func(c io.ReadWriter) {
err = agent.ServeAgent(a, c)
if err != nil {
fmt.Sprintf("Couldn't serve ssh agent for tests %v", err)
}
}(c)
}
}(done)
sshAgentSocket = <-done
}
func addKeytoSSHAgent(key agent.AddedKey) {
aConn, _ := net.Dial("unix", sshAgentSocket)
sshAgent := agent.NewClient(aConn)
sshAgent.Add(key)
}
func removeKeyfromSSHAgent(key ssh.PublicKey) {
aConn, _ := net.Dial("unix", sshAgentSocket)
sshAgent := agent.NewClient(aConn)
sshAgent.Remove(key)
}
func startSSHServer() {
done := make(chan bool, 1)
go func(done chan<- bool) {
glssh.Handle(func(s glssh.Session) {
authorizedKey := ssh.MarshalAuthorizedKey(s.PublicKey())
io.WriteString(s, fmt.Sprintf("public key used by %s:\n", s.User()))
s.Write(authorizedKey)
s.Exit(0)
})
publicKeyOption := glssh.PublicKeyAuth(func(ctx glssh.Context, key glssh.PublicKey) bool {
for _, pubk := range testPublicKeys {
if glssh.KeysEqual(key, pubk) {
return true
}
}
return false
})
fmt.Println("starting ssh server on port 2222...")
done <- true
panic(glssh.ListenAndServe(":2222", nil, publicKeyOption))
}(done)
<-done
}
func T | t *testing.T) {
tests := []struct {
name string
key mockSSHKey
expected ssh.Signer
}{
{name: "Basic key signer with valid rsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["rsa"],
},
expected: testSigners["rsa"],
},
{name: "Basic key signer with valid dsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["dsa"],
},
expected: testSigners["dsa"],
},
{name: "Basic key signer with valid ecdsa key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["ecdsa"],
},
expected: testSigners["ecdsa"],
},
{name: "Basic key signer with valid user key",
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["user"],
},
expected: testSigners["user"],
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Write content of the key to the keyname file
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
returned, _ := makeSigner(tt.key.keyname)
if !reflect.DeepEqual(returned, tt.expected) {
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
os.Remove(tt.key.keyname)
})
}
}
func TestMakeKeyring(t *testing.T) {
tests := []struct {
name string
useagent bool
key mockSSHKey
expected ssh.AuthMethod
}{
{name: "Basic key ring with valid rsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey",
content: testdata.PEMBytes["rsa"],
},
expected: ssh.PublicKeys(testSigners["rsa"]),
},
{name: "Basic key ring with valid dsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey11",
content: testdata.PEMBytes["dsa"],
},
expected: ssh.PublicKeys(testSigners["dsa"]),
},
{name: "Basic key ring with valid ecdsa key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey12",
content: testdata.PEMBytes["ecdsa"],
},
expected: ssh.PublicKeys(testSigners["ecdsa"]),
},
{name: "Basic key ring with valid user key",
useagent: false,
key: mockSSHKey{
keyname: "/tmp/mockkey13",
content: testdata.PEMBytes["user"],
},
expected: ssh.PublicKeys(testSigners["user"]),
},
{name: "Basic key ring agent with valid rsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["rsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["rsa"]},
pubkey: testPublicKeys["rsa"],
},
expected: ssh.PublicKeys(testSigners["rsa"]),
},
{name: "Basic key ring agent with valid dsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["dsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["dsa"]},
pubkey: testPublicKeys["dsa"],
},
expected: ssh.PublicKeys(testSigners["dsa"]),
},
{name: "Basic key ring agent with valid ecdsa key",
useagent: true,
key: mockSSHKey{
keyname: "",
content: testdata.PEMBytes["ecdsa"],
privkey: agent.AddedKey{PrivateKey: testPrivateKeys["ecdsa"]},
pubkey: testPublicKeys["ecdsa"],
},
expected: ssh.PublicKeys(testSigners["ecdsa"]),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.useagent == true {
addKeytoSSHAgent(tt.key.privkey)
}
// Write content of the key to the keyname file
if tt.key.keyname != "" {
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
}
returned := makeKeyring(tt.key.keyname, sshAgentSocket, tt.useagent)
// DeepEqual always returns false for functions unless nil
// hence converting to string to compare
check1 := reflect.ValueOf(returned).String()
check2 := reflect.ValueOf(tt.expected).String()
if !reflect.DeepEqual(check1, check2) {
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
if tt.useagent == true {
removeKeyfromSSHAgent(tt.key.pubkey)
}
if tt.key.keyname != "" {
os.Remove(tt.key.keyname)
}
})
}
}
func TestRun(t *testing.T) {
tests := []struct {
name string
machines []string
user string
cmd string
key mockSSHKey
port int
useagent bool
expected bool
}{
{name: "Basic with valid rsa key",
machines: []string{"localhost"},
port: 2222,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey21",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: true,
},
{name: "Basic with valid rsa key wrong hostname",
machines: []string{"bogushost"},
port: 2222,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey22",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
{name: "Basic with valid rsa key wrong port",
machines: []string{"localhost"},
port: 2223,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey23",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
{name: "Basic with valid rsa key Google endpoint",
machines: []string{"www.google.com"},
port: 22,
cmd: "ls",
user: "testuser",
key: mockSSHKey{
keyname: "/tmp/mockkey24",
content: testdata.PEMBytes["rsa"],
},
useagent: false,
expected: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.useagent == true {
addKeytoSSHAgent(tt.key.privkey)
}
// Write content of the key to the keyname file
if tt.key.keyname != "" {
ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
}
returned := Run(Machines(tt.machines),
User(tt.user),
Port(tt.port),
Cmd(tt.cmd),
Key(tt.key.keyname),
UseAgent(tt.useagent),
AgentSocket(sshAgentSocket))
if !(returned == tt.expected) {
t.Errorf("Value received: %v expected %v", returned, tt.expected)
}
if tt.useagent == true {
removeKeyfromSSHAgent(tt.key.pubkey)
}
if tt.key.keyname != "" {
os.Remove(tt.key.keyname)
}
})
}
}
func TestTearDown(t *testing.T) {
tests := []struct {
name string
id string
}{
{name: "Teardown SSH Agent",
id: "sshAgentTdown"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.id == "sshAgentTdown" {
os.Remove(sshAgentSocket)
}
})
}
}
| estMakeSigner( | identifier_name |
run_test.go | // Copyright © 2017-2018 Ricardo Aravena <raravena@branch.io>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package exec
import (
"fmt"
glssh "github.com/gliderlabs/ssh"
"golang.org/x/crypto/ssh"
"golang.org/x/crypto/ssh/agent"
"golang.org/x/crypto/ssh/testdata"
"io"
"io/ioutil"
"math/rand"
"net"
"os"
"reflect"
"testing"
"time"
)
// mockSSHKey bundles the different representations of one test SSH key:
// an on-disk path, the raw PEM bytes, and the parsed private/public forms
// used when loading the key into the ssh-agent.
type mockSSHKey struct {
	keyname string          // file path the key is written to ("" = agent-only key)
	content []byte          // PEM-encoded private key bytes
	privkey agent.AddedKey  // parsed private key, for agent.Add
	pubkey  ssh.PublicKey   // matching public key, for agent.Remove
}
// Package-level fixtures shared by every test in this file; populated in init().
var (
	testPrivateKeys map[string]interface{}   // parsed private keys, by algorithm name
	testSigners     map[string]ssh.Signer    // signers derived from the private keys
	testPublicKeys  map[string]ssh.PublicKey // public halves of the signers
	sshAgentSocket  string                   // unix socket path of the in-process ssh-agent
)
// init parses every key in golang.org/x/crypto/ssh/testdata into the
// private-key, signer and public-key fixture maps, then starts an in-process
// ssh-agent on a randomly named unix socket and the test SSH server used by
// TestRun.
func init() {
	var err error
	n := len(testdata.PEMBytes)
	testSigners = make(map[string]ssh.Signer, n)
	testPrivateKeys = make(map[string]interface{}, n)
	testPublicKeys = make(map[string]ssh.PublicKey, n)
	for t, k := range testdata.PEMBytes {
		testPrivateKeys[t], err = ssh.ParseRawPrivateKey(k)
		if err != nil {
			panic(fmt.Sprintf("Unable to parse test key %s: %v", t, err))
		}
		testSigners[t], err = ssh.NewSignerFromKey(testPrivateKeys[t])
		if err != nil {
			panic(fmt.Sprintf("Unable to create signer for test key %s: %v", t, err))
		}
		testPublicKeys[t] = testSigners[t].PublicKey()
	}
	// Random socket name so concurrent test runs don't collide on the path.
	randomStr := fmt.Sprintf("%v", rand.Intn(5000))
	socketFile := "/tmp/gosocket" + randomStr + ".sock"
	setupSSHAgent(socketFile)
	// NOTE(review): setupSSHAgent already blocks until the listener exists,
	// so this fixed 2s sleep looks redundant — confirm before removing.
	time.Sleep(2 * time.Second)
	startSSHServer()
}
// setupSSHAgent starts an in-process ssh-agent keyring listening on the given
// unix socket path, stores the path in the package-level sshAgentSocket, and
// returns once the listener is accepting connections.
//
// Fixes over the previous version:
//   - the result of fmt.Sprintf in the serve goroutine was discarded, so
//     ServeAgent errors vanished silently (and `go vet` flags it); errors are
//     now printed, with io.EOF (normal client disconnect) ignored;
//   - `defer c.Close()` inside the infinite accept loop never ran, leaking
//     one fd per connection; the connection is now closed by its handler;
//   - the handler goroutine assigned the accept-loop's `err`, racing with the
//     next Accept; it now uses its own local error.
func setupSSHAgent(socketFile string) {
	done := make(chan string, 1)
	a := agent.NewKeyring()
	go func(done chan<- string) {
		ln, err := net.Listen("unix", socketFile)
		if err != nil {
			panic(fmt.Sprintf("Couldn't create socket for tests %v", err))
		}
		defer ln.Close()
		// Need to wait until the socket is setup
		firstTime := true
		for {
			if firstTime {
				done <- socketFile
				firstTime = false
			}
			c, err := ln.Accept()
			if err != nil {
				panic(fmt.Sprintf("Couldn't accept connection to agent tests %v", err))
			}
			go func(c io.ReadWriteCloser) {
				defer c.Close()
				if serveErr := agent.ServeAgent(a, c); serveErr != nil && serveErr != io.EOF {
					fmt.Printf("Couldn't serve ssh agent for tests %v\n", serveErr)
				}
			}(c)
		}
	}(done)
	sshAgentSocket = <-done
}
// addKeytoSSHAgent adds the given private key to the test ssh-agent reached
// via the package-level sshAgentSocket.
//
// The previous version ignored both the Dial and the Add errors: a failed
// dial produced a nil connection and an opaque nil-pointer panic later.
// Failures are now reported with context.
func addKeytoSSHAgent(key agent.AddedKey) {
	aConn, err := net.Dial("unix", sshAgentSocket)
	if err != nil {
		panic(fmt.Sprintf("Couldn't dial ssh agent socket %s: %v", sshAgentSocket, err))
	}
	defer aConn.Close()
	sshAgent := agent.NewClient(aConn)
	if err := sshAgent.Add(key); err != nil {
		panic(fmt.Sprintf("Couldn't add key to ssh agent: %v", err))
	}
}
// removeKeyfromSSHAgent removes the key identified by the given public key
// from the test ssh-agent reached via the package-level sshAgentSocket.
//
// The previous version ignored both the Dial and the Remove errors and
// leaked the connection; failures are now reported and the connection is
// closed.
func removeKeyfromSSHAgent(key ssh.PublicKey) {
	aConn, err := net.Dial("unix", sshAgentSocket)
	if err != nil {
		panic(fmt.Sprintf("Couldn't dial ssh agent socket %s: %v", sshAgentSocket, err))
	}
	defer aConn.Close()
	sshAgent := agent.NewClient(aConn)
	if err := sshAgent.Remove(key); err != nil {
		panic(fmt.Sprintf("Couldn't remove key from ssh agent: %v", err))
	}
}
// startSSHServer starts an in-process SSH server (gliderlabs/ssh) on port
// 2222 that accepts public-key auth for any of the generated test keys.
// The session handler echoes the client's authorized key back and exits 0.
func startSSHServer() {
	done := make(chan bool, 1)
	go func(done chan<- bool) {
		glssh.Handle(func(s glssh.Session) {
			authorizedKey := ssh.MarshalAuthorizedKey(s.PublicKey())
			io.WriteString(s, fmt.Sprintf("public key used by %s:\n", s.User()))
			s.Write(authorizedKey)
			s.Exit(0)
		})
		// Accept any key equal to one of the fixture public keys.
		publicKeyOption := glssh.PublicKeyAuth(func(ctx glssh.Context, key glssh.PublicKey) bool {
			for _, pubk := range testPublicKeys {
				if glssh.KeysEqual(key, pubk) {
					return true
				}
			}
			return false
		})
		fmt.Println("starting ssh server on port 2222...")
		// NOTE(review): done is signalled *before* ListenAndServe runs, so the
		// caller only knows the goroutine started, not that the listener is
		// ready; init()'s 2s sleep currently papers over this.
		done <- true
		panic(glssh.ListenAndServe(":2222", nil, publicKeyOption))
	}(done)
	<-done
}
// TestMakeSigner verifies that makeSigner reconstructs the expected
// ssh.Signer from a private-key file on disk, for each test key algorithm.
//
// Fixes over the previous version: the ioutil.WriteFile and makeSigner
// errors were silently discarded (a failed write would produce a confusing
// signer mismatch instead of a clear failure), and the key file was only
// removed on the success path; cleanup is now deferred.
func TestMakeSigner(t *testing.T) {
	tests := []struct {
		name     string
		key      mockSSHKey
		expected ssh.Signer
	}{
		{name: "Basic key signer with valid rsa key",
			key: mockSSHKey{
				keyname: "/tmp/mockkey",
				content: testdata.PEMBytes["rsa"],
			},
			expected: testSigners["rsa"],
		},
		{name: "Basic key signer with valid dsa key",
			key: mockSSHKey{
				keyname: "/tmp/mockkey",
				content: testdata.PEMBytes["dsa"],
			},
			expected: testSigners["dsa"],
		},
		{name: "Basic key signer with valid ecdsa key",
			key: mockSSHKey{
				keyname: "/tmp/mockkey",
				content: testdata.PEMBytes["ecdsa"],
			},
			expected: testSigners["ecdsa"],
		},
		{name: "Basic key signer with valid user key",
			key: mockSSHKey{
				keyname: "/tmp/mockkey",
				content: testdata.PEMBytes["user"],
			},
			expected: testSigners["user"],
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Write content of the key to the keyname file
			if err := ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644); err != nil {
				t.Fatalf("failed to write key file %s: %v", tt.key.keyname, err)
			}
			defer os.Remove(tt.key.keyname)
			returned, err := makeSigner(tt.key.keyname)
			if err != nil {
				t.Fatalf("makeSigner(%s) returned error: %v", tt.key.keyname, err)
			}
			if !reflect.DeepEqual(returned, tt.expected) {
				t.Errorf("Value received: %v expected %v", returned, tt.expected)
			}
		})
	}
}
// TestMakeKeyring checks that makeKeyring produces an ssh.AuthMethod both
// from a key file on disk (useagent=false, keyname set) and from a key
// previously loaded into the shared ssh-agent (useagent=true, keyname "").
func TestMakeKeyring(t *testing.T) {
	tests := []struct {
		name     string
		useagent bool
		key      mockSSHKey
		expected ssh.AuthMethod
	}{
		{name: "Basic key ring with valid rsa key",
			useagent: false,
			key: mockSSHKey{
				keyname: "/tmp/mockkey",
				content: testdata.PEMBytes["rsa"],
			},
			expected: ssh.PublicKeys(testSigners["rsa"]),
		},
		{name: "Basic key ring with valid dsa key",
			useagent: false,
			key: mockSSHKey{
				keyname: "/tmp/mockkey11",
				content: testdata.PEMBytes["dsa"],
			},
			expected: ssh.PublicKeys(testSigners["dsa"]),
		},
		{name: "Basic key ring with valid ecdsa key",
			useagent: false,
			key: mockSSHKey{
				keyname: "/tmp/mockkey12",
				content: testdata.PEMBytes["ecdsa"],
			},
			expected: ssh.PublicKeys(testSigners["ecdsa"]),
		},
		{name: "Basic key ring with valid user key",
			useagent: false,
			key: mockSSHKey{
				keyname: "/tmp/mockkey13",
				content: testdata.PEMBytes["user"],
			},
			expected: ssh.PublicKeys(testSigners["user"]),
		},
		{name: "Basic key ring agent with valid rsa key",
			useagent: true,
			key: mockSSHKey{
				keyname: "",
				content: testdata.PEMBytes["rsa"],
				privkey: agent.AddedKey{PrivateKey: testPrivateKeys["rsa"]},
				pubkey:  testPublicKeys["rsa"],
			},
			expected: ssh.PublicKeys(testSigners["rsa"]),
		},
		{name: "Basic key ring agent with valid dsa key",
			useagent: true,
			key: mockSSHKey{
				keyname: "",
				content: testdata.PEMBytes["dsa"],
				privkey: agent.AddedKey{PrivateKey: testPrivateKeys["dsa"]},
				pubkey:  testPublicKeys["dsa"],
			},
			expected: ssh.PublicKeys(testSigners["dsa"]),
		},
		{name: "Basic key ring agent with valid ecdsa key",
			useagent: true,
			key: mockSSHKey{
				keyname: "",
				content: testdata.PEMBytes["ecdsa"],
				privkey: agent.AddedKey{PrivateKey: testPrivateKeys["ecdsa"]},
				pubkey:  testPublicKeys["ecdsa"],
			},
			expected: ssh.PublicKeys(testSigners["ecdsa"]),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Agent-backed cases load the key into the shared agent first.
			if tt.useagent == true {
				addKeytoSSHAgent(tt.key.privkey)
			}
			// Write content of the key to the keyname file
			if tt.key.keyname != "" {
				ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
			}
			returned := makeKeyring(tt.key.keyname, sshAgentSocket, tt.useagent)
			// DeepEqual always returns false for functions unless nil
			// hence converting to string to compare
			// NOTE(review): reflect.Value.String() on a func value yields only
			// its type, so this assertion cannot distinguish *which* key the
			// AuthMethod wraps — it only checks both sides are AuthMethod funcs.
			check1 := reflect.ValueOf(returned).String()
			check2 := reflect.ValueOf(tt.expected).String()
			if !reflect.DeepEqual(check1, check2) {
				t.Errorf("Value received: %v expected %v", returned, tt.expected)
			}
			// Undo the per-case fixtures (agent key and/or temp key file).
			if tt.useagent == true {
				removeKeyfromSSHAgent(tt.key.pubkey)
			}
			if tt.key.keyname != "" {
				os.Remove(tt.key.keyname)
			}
		})
	}
}
// TestRun exercises the exported Run entry point end-to-end against the
// in-process SSH server started by init() on port 2222: a good host/port
// succeeds, while a bogus host, a wrong port, or an external endpoint that
// rejects the test key must fail.
// NOTE(review): the www.google.com case depends on outbound network access;
// it is expected to fail either way, but will be slow without connectivity.
func TestRun(t *testing.T) {
	tests := []struct {
		name     string
		machines []string
		user     string
		cmd      string
		key      mockSSHKey
		port     int
		useagent bool
		expected bool
	}{
		{name: "Basic with valid rsa key",
			machines: []string{"localhost"},
			port:     2222,
			cmd:      "ls",
			user:     "testuser",
			key: mockSSHKey{
				keyname: "/tmp/mockkey21",
				content: testdata.PEMBytes["rsa"],
			},
			useagent: false,
			expected: true,
		},
		{name: "Basic with valid rsa key wrong hostname",
			machines: []string{"bogushost"},
			port:     2222,
			cmd:      "ls",
			user:     "testuser",
			key: mockSSHKey{
				keyname: "/tmp/mockkey22",
				content: testdata.PEMBytes["rsa"],
			},
			useagent: false,
			expected: false,
		},
		{name: "Basic with valid rsa key wrong port",
			machines: []string{"localhost"},
			port:     2223,
			cmd:      "ls",
			user:     "testuser",
			key: mockSSHKey{
				keyname: "/tmp/mockkey23",
				content: testdata.PEMBytes["rsa"],
			},
			useagent: false,
			expected: false,
		},
		{name: "Basic with valid rsa key Google endpoint",
			machines: []string{"www.google.com"},
			port:     22,
			cmd:      "ls",
			user:     "testuser",
			key: mockSSHKey{
				keyname: "/tmp/mockkey24",
				content: testdata.PEMBytes["rsa"],
			},
			useagent: false,
			expected: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if tt.useagent == true {
				addKeytoSSHAgent(tt.key.privkey)
			}
			// Write content of the key to the keyname file
			if tt.key.keyname != "" {
				ioutil.WriteFile(tt.key.keyname, tt.key.content, 0644)
			}
			// Run is configured via functional options.
			returned := Run(Machines(tt.machines),
				User(tt.user),
				Port(tt.port),
				Cmd(tt.cmd),
				Key(tt.key.keyname),
				UseAgent(tt.useagent),
				AgentSocket(sshAgentSocket))
			if !(returned == tt.expected) {
				t.Errorf("Value received: %v expected %v", returned, tt.expected)
			}
			// Undo the per-case fixtures.
			if tt.useagent == true {
				removeKeyfromSSHAgent(tt.key.pubkey)
			}
			if tt.key.keyname != "" {
				os.Remove(tt.key.keyname)
			}
		})
	}
}
func TestTearDown(t *testing.T) { |
tests := []struct {
name string
id string
}{
{name: "Teardown SSH Agent",
id: "sshAgentTdown"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.id == "sshAgentTdown" {
os.Remove(sshAgentSocket)
}
})
}
}
| identifier_body | |
main.rs | #[macro_use]
extern crate microprofile;
//
use rand::{distributions as distr, distributions::Distribution};
use starframe::{
self as sf,
game::{self, Game},
graph, graphics as gx,
input::{Key, MouseButton},
math::{self as m, uv},
physics as phys,
};
mod mousegrab;
use mousegrab::MouseGrabber;
mod player;
mod recipes;
use recipes::Recipe;
fn main() {
    // Start the profiler first so setup work is captured too.
    microprofile::init!();
    microprofile::set_enable_all_groups!(true);
    // 60 ticks per second; the size is the logical (pre-DPI-scaling) size.
    let game = Game::init(
        60,
        winit::window::WindowBuilder::new()
            .with_title("starframe test")
            .with_inner_size(winit::dpi::LogicalSize {
                width: 800.0,
                height: 600.0,
            }),
    );
    let state = State::init(&game.renderer.device);
    // run consumes the loop; shutdown! flushes the profiler afterwards.
    game.run(state);
    microprofile::shutdown!();
}
//
// Types
//
/// Whether the simulation is advancing every tick or halted (step-on-Space).
pub enum StateEnum {
    Playing,
    Paused,
}
/// Top-level game state: the loaded scene description, the live entity
/// graph, input helpers, physics, and rendering resources.
pub struct State {
    scene: Scene,
    state: StateEnum,
    graph: MyGraph,
    player: player::PlayerController,
    mouse_mode: MouseMode,
    mouse_grabber: MouseGrabber,
    physics: phys::Physics,
    camera: gx::camera::MouseDragCamera,
    shape_renderer: gx::ShapeRenderer,
}
impl State {
    /// Builds the initial state: empty default scene, fresh graph, physics
    /// with 10 substeps per tick, and a camera holding a 20x10 world-unit
    /// display area.
    fn init(device: &wgpu::Device) -> Self {
        State {
            scene: Scene::default(),
            state: StateEnum::Playing,
            graph: MyGraph::new(),
            player: player::PlayerController::new(),
            mouse_mode: MouseMode::Grab,
            mouse_grabber: MouseGrabber::new(),
            physics: phys::Physics::with_substeps(10),
            camera: gx::camera::MouseDragCamera::new(
                gx::camera::ScalingStrategy::ConstantDisplayArea {
                    width: 20.0,
                    height: 10.0,
                },
            ),
            shape_renderer: gx::ShapeRenderer::new(device),
        }
    }
    /// Drops all physics constraints and replaces the entity graph with an
    /// empty one; the loaded scene description and camera are kept.
    fn reset(&mut self) {
        self.physics.clear_constraints();
        self.graph = MyGraph::new();
    }
    /// Loads the `file_idx`-th entry of ./examples/testgame/scenes into
    /// `self.scene`. Any error is printed to stderr and leaves the current
    /// scene untouched.
    /// NOTE(review): read_dir iteration order is platform-dependent, so the
    /// number-key-to-file mapping may vary between systems.
    fn read_scene(&mut self, file_idx: usize) {
        let dir = std::fs::read_dir("./examples/testgame/scenes");
        match dir {
            Err(err) => eprintln!("Scenes dir not found: {}", err),
            Ok(mut dir) => {
                if let Some(Ok(entry)) = dir.nth(file_idx) {
                    let file = std::fs::File::open(entry.path());
                    match file {
                        Ok(file) => {
                            let scene = Scene::read_from_file(file);
                            match scene {
                                Err(err) => eprintln!("Failed to parse file: {}", err),
                                Ok(scene) => self.scene = scene,
                            }
                        }
                        Err(err) => eprintln!("Failed to open file: {}", err),
                    }
                }
            }
        }
    }
    /// Spawns every recipe of the current scene into the graph and physics
    /// world (additive; call `reset` first for a clean reload).
    fn instantiate_scene(&mut self) {
        self.scene.instantiate(&mut self.graph, &mut self.physics);
    }
}
/// What the mouse currently controls; toggled with the V key in `tick`.
#[derive(Clone, Copy, Debug)]
pub enum MouseMode {
    /// Grab objects with the mouse
    Grab,
    /// Move the camera with the mouse
    Camera,
}
/// The recipes in a scene plus some adjustable parameters.
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(default)]
pub struct Scene {
    // World gravity; serde falls back to Default ([0.0, -9.81]) per field.
    gravity: [f64; 2],
    recipes: Vec<Recipe>,
}
impl Default for Scene {
    fn default() -> Self {
        Self {
            gravity: [0.0, -9.81],
            recipes: vec![],
        }
    }
}
impl Scene {
    /// Deserializes a scene from a RON file.
    pub fn read_from_file(file: std::fs::File) -> Result<Self, ron::de::Error> {
        use serde::Deserialize;
        use std::io::Read;
        let mut reader = std::io::BufReader::new(file);
        let mut bytes = Vec::new();
        reader.read_to_end(&mut bytes)?;
        let mut deser = ron::de::Deserializer::from_bytes(bytes.as_slice())?;
        Scene::deserialize(&mut deser)
    }
    /// Spawns every recipe into the given graph and physics world.
    pub fn instantiate(&self, graph: &mut crate::MyGraph, physics: &mut phys::Physics) {
        for recipe in &self.recipes {
            recipe.spawn(graph, physics);
        }
    }
}
/// The entity graph.
pub struct | {
graph: graph::Graph,
l_pose: graph::Layer<m::Pose>,
l_collider: graph::Layer<phys::Collider>,
l_body: graph::Layer<phys::RigidBody>,
l_shape: graph::Layer<gx::Shape>,
l_player: graph::Layer<player::Player>,
l_evt_sink: sf::event::EventSinkLayer<MyGraph>,
}
impl MyGraph {
    /// Creates an empty entity graph with one layer per component type
    /// (pose, collider, rigid body, shape, player, event sinks).
    pub fn new() -> Self {
        let mut graph = graph::Graph::new();
        let l_pose = graph.create_layer();
        let l_collider = graph.create_layer();
        let l_body = graph.create_layer();
        let l_shape = graph.create_layer();
        let l_player = graph.create_layer();
        let l_evt_sinks = graph.create_layer();
        MyGraph {
            graph,
            l_pose,
            l_collider,
            l_body,
            l_shape,
            l_player,
            l_evt_sink: l_evt_sinks,
        }
    }
}
//
// State updates
//
impl game::GameState for State {
    /// Advances the game by one fixed timestep. Returns None to request
    /// shutdown (Escape pressed), Some(()) to keep running.
    fn tick(&mut self, dt: f64, game: &Game) -> Option<()> {
        microprofile::flip();
        microprofile::scope!("update", "all");
        //
        // State-independent stuff
        //
        // exit on esc
        if game.input.is_key_pressed(Key::Escape, None) {
            return None;
        }
        // adjust physics substeps
        if game.input.is_key_pressed(Key::NumpadAdd, Some(0)) {
            self.physics.substeps += 1;
            println!("Substeps: {}", self.physics.substeps);
        } else if game.input.is_key_pressed(Key::NumpadSubtract, Some(0))
            && self.physics.substeps > 1
        {
            self.physics.substeps -= 1;
            println!("Substeps: {}", self.physics.substeps);
        }
        // mouse controls: V toggles between grabbing bodies and dragging the camera
        if game.input.is_key_pressed(Key::V, Some(0)) {
            self.mouse_mode = match self.mouse_mode {
                MouseMode::Grab => MouseMode::Camera,
                MouseMode::Camera => MouseMode::Grab,
            };
            println!("Mouse mode: {:?}", self.mouse_mode);
        }
        match self.mouse_mode {
            MouseMode::Grab => {
                self.mouse_grabber.update(
                    &game.input,
                    &self.camera,
                    game.renderer.window_size().into(),
                    &mut self.physics,
                    &self.graph,
                );
            }
            MouseMode::Camera => {
                self.camera
                    .update(&game.input, game.renderer.window_size().into());
                // middle click resets the camera transform
                if (game.input).is_mouse_button_pressed(MouseButton::Middle, Some(0)) {
                    self.camera.pose = uv::DSimilarity2::identity();
                }
            }
        }
        // reload: number key N loads the N-th file in the scenes directory
        for (idx, num_key) in [
            Key::Key1,
            Key::Key2,
            Key::Key3,
            Key::Key4,
            Key::Key5,
            Key::Key6,
            Key::Key7,
            Key::Key8,
            Key::Key9,
        ]
        .iter()
        .enumerate()
        {
            if game.input.is_key_pressed(*num_key, Some(0)) {
                self.reset();
                self.read_scene(idx);
                self.instantiate_scene();
            }
        }
        // reload current scene
        if game.input.is_key_pressed(Key::Return, Some(0)) {
            self.reset();
            self.instantiate_scene();
        }
        // spawn stuff also when paused
        let random_pos = || {
            let mut rng = rand::thread_rng();
            m::Vec2::new(
                distr::Uniform::from(-5.0..5.0).sample(&mut rng),
                distr::Uniform::from(1.0..4.0).sample(&mut rng),
            )
        };
        let random_angle =
            || m::Angle::Deg(distr::Uniform::from(0.0..360.0).sample(&mut rand::thread_rng()));
        let random_vel = || {
            let mut rng = rand::thread_rng();
            [
                distr::Uniform::from(-5.0..5.0).sample(&mut rng),
                distr::Uniform::from(-5.0..5.0).sample(&mut rng),
            ]
        };
        let mut rng = rand::thread_rng();
        // S spawns a random box, T spawns a random bouncy ball
        if game.input.is_key_pressed(Key::S, Some(0)) {
            Recipe::DynamicBlock(recipes::Block {
                pose: m::IsometryBuilder::new()
                    .with_position(random_pos())
                    .with_rotation(random_angle()),
                width: distr::Uniform::from(0.6..1.0).sample(&mut rng),
                height: distr::Uniform::from(0.5..0.8).sample(&mut rng),
            })
            .spawn(&mut self.graph, &mut self.physics);
        }
        if game.input.is_key_pressed(Key::T, Some(0)) {
            Recipe::Ball(recipes::Ball {
                position: random_pos().into(),
                radius: distr::Uniform::from(0.1..0.4).sample(&mut rng),
                restitution: 1.0,
                start_velocity: random_vel(),
            })
            .spawn(&mut self.graph, &mut self.physics);
        }
        // P toggles pause; while paused, Space steps a single tick.
        match (&self.state, game.input.is_key_pressed(Key::Space, Some(0))) {
            //
            // Playing or stepping manually
            //
            (StateEnum::Playing, _) | (StateEnum::Paused, true) => {
                if game.input.is_key_pressed(Key::P, Some(0)) {
                    self.state = StateEnum::Paused;
                    return Some(());
                }
                {
                    microprofile::scope!("update", "physics");
                    let grav = phys::forcefield::Gravity(self.scene.gravity.into());
                    self.physics.tick(
                        &self.graph.graph,
                        &mut self.graph.l_pose,
                        &mut self.graph.l_body,
                        &self.graph.l_collider,
                        &mut self.graph.l_evt_sink,
                        dt,
                        &grav,
                    );
                }
                {
                    microprofile::scope!("update", "player");
                    self.player.tick(&mut self.graph, &game.input);
                }
                // flush returns a closure that applies queued events to the graph
                self.graph.l_evt_sink.flush(&self.graph.graph)(&mut self.graph);
                Some(())
            }
            //
            // Paused
            //
            (StateEnum::Paused, false) => {
                if game.input.is_key_pressed(Key::P, Some(0)) {
                    self.state = StateEnum::Playing;
                    return Some(());
                }
                Some(())
            }
        }
    }
    /// Clears the window and draws every Shape at its current Pose through
    /// the shape renderer.
    fn draw(&mut self, renderer: &mut gx::Renderer) {
        microprofile::scope!("render", "all");
        let mut ctx = renderer.draw_to_window();
        ctx.clear(wgpu::Color {
            r: 0.1,
            g: 0.1,
            b: 0.1,
            a: 1.0,
        });
        self.shape_renderer.draw(
            &self.graph.l_shape,
            &self.graph.l_pose,
            &self.graph.graph,
            &self.camera,
            &mut ctx,
        );
        ctx.submit();
    }
}
| MyGraph | identifier_name |
main.rs | #[macro_use]
extern crate microprofile;
//
use rand::{distributions as distr, distributions::Distribution};
use starframe::{
self as sf,
game::{self, Game},
graph, graphics as gx,
input::{Key, MouseButton},
math::{self as m, uv},
physics as phys,
};
mod mousegrab;
use mousegrab::MouseGrabber;
mod player;
mod recipes;
use recipes::Recipe;
fn main() {
microprofile::init!();
microprofile::set_enable_all_groups!(true);
let game = Game::init(
60,
winit::window::WindowBuilder::new()
.with_title("starframe test")
.with_inner_size(winit::dpi::LogicalSize {
width: 800.0,
height: 600.0,
}),
);
let state = State::init(&game.renderer.device);
game.run(state);
microprofile::shutdown!();
}
//
// Types
//
pub enum StateEnum {
Playing,
Paused,
}
pub struct State {
scene: Scene,
state: StateEnum,
graph: MyGraph,
player: player::PlayerController,
mouse_mode: MouseMode,
mouse_grabber: MouseGrabber,
physics: phys::Physics,
camera: gx::camera::MouseDragCamera,
shape_renderer: gx::ShapeRenderer,
}
impl State {
fn init(device: &wgpu::Device) -> Self {
State {
scene: Scene::default(),
state: StateEnum::Playing,
graph: MyGraph::new(),
player: player::PlayerController::new(),
mouse_mode: MouseMode::Grab,
mouse_grabber: MouseGrabber::new(),
physics: phys::Physics::with_substeps(10),
camera: gx::camera::MouseDragCamera::new(
gx::camera::ScalingStrategy::ConstantDisplayArea {
width: 20.0,
height: 10.0,
},
),
shape_renderer: gx::ShapeRenderer::new(device),
}
}
fn reset(&mut self) {
self.physics.clear_constraints();
self.graph = MyGraph::new();
}
fn read_scene(&mut self, file_idx: usize) {
let dir = std::fs::read_dir("./examples/testgame/scenes");
match dir {
Err(err) => eprintln!("Scenes dir not found: {}", err),
Ok(mut dir) => {
if let Some(Ok(entry)) = dir.nth(file_idx) {
let file = std::fs::File::open(entry.path());
match file {
Ok(file) => {
let scene = Scene::read_from_file(file);
match scene {
Err(err) => eprintln!("Failed to parse file: {}", err),
Ok(scene) => self.scene = scene,
}
}
Err(err) => eprintln!("Failed to open file: {}", err),
}
}
}
}
}
fn instantiate_scene(&mut self) {
self.scene.instantiate(&mut self.graph, &mut self.physics);
}
}
#[derive(Clone, Copy, Debug)]
pub enum MouseMode {
/// Grab objects with the mouse
Grab,
/// Move the camera with the mouse
Camera,
}
/// The recipes in a scene plus some adjustable parameters.
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(default)]
pub struct Scene {
gravity: [f64; 2],
recipes: Vec<Recipe>,
}
impl Default for Scene {
fn default() -> Self {
Self {
gravity: [0.0, -9.81],
recipes: vec![],
}
}
}
impl Scene {
pub fn read_from_file(file: std::fs::File) -> Result<Self, ron::de::Error> {
use serde::Deserialize;
use std::io::Read;
let mut reader = std::io::BufReader::new(file);
let mut bytes = Vec::new();
reader.read_to_end(&mut bytes)?;
let mut deser = ron::de::Deserializer::from_bytes(bytes.as_slice())?;
Scene::deserialize(&mut deser)
}
pub fn instantiate(&self, graph: &mut crate::MyGraph, physics: &mut phys::Physics) {
for recipe in &self.recipes {
recipe.spawn(graph, physics);
}
}
}
/// The entity graph.
pub struct MyGraph {
graph: graph::Graph,
l_pose: graph::Layer<m::Pose>,
l_collider: graph::Layer<phys::Collider>,
l_body: graph::Layer<phys::RigidBody>,
l_shape: graph::Layer<gx::Shape>,
l_player: graph::Layer<player::Player>,
l_evt_sink: sf::event::EventSinkLayer<MyGraph>,
}
impl MyGraph {
pub fn new() -> Self {
let mut graph = graph::Graph::new();
let l_pose = graph.create_layer();
let l_collider = graph.create_layer();
let l_body = graph.create_layer();
let l_shape = graph.create_layer();
let l_player = graph.create_layer();
let l_evt_sinks = graph.create_layer();
MyGraph {
graph,
l_pose,
l_collider,
l_body,
l_shape,
l_player,
l_evt_sink: l_evt_sinks,
}
}
}
//
// State updates
//
impl game::GameState for State {
fn tick(&mut self, dt: f64, game: &Game) -> Option<()> {
microprofile::flip();
microprofile::scope!("update", "all");
//
// State-independent stuff
//
// exit on esc
if game.input.is_key_pressed(Key::Escape, None) {
return None;
}
// adjust physics substeps
if game.input.is_key_pressed(Key::NumpadAdd, Some(0)) {
self.physics.substeps += 1;
println!("Substeps: {}", self.physics.substeps);
} else if game.input.is_key_pressed(Key::NumpadSubtract, Some(0))
&& self.physics.substeps > 1
{
self.physics.substeps -= 1;
println!("Substeps: {}", self.physics.substeps);
}
// mouse controls
if game.input.is_key_pressed(Key::V, Some(0)) {
self.mouse_mode = match self.mouse_mode {
MouseMode::Grab => MouseMode::Camera,
MouseMode::Camera => MouseMode::Grab,
};
println!("Mouse mode: {:?}", self.mouse_mode);
}
match self.mouse_mode {
MouseMode::Grab => {
self.mouse_grabber.update(
&game.input,
&self.camera,
game.renderer.window_size().into(),
&mut self.physics,
&self.graph,
);
}
MouseMode::Camera => {
self.camera
.update(&game.input, game.renderer.window_size().into());
if (game.input).is_mouse_button_pressed(MouseButton::Middle, Some(0)) {
self.camera.pose = uv::DSimilarity2::identity();
}
}
}
// reload
for (idx, num_key) in [
Key::Key1,
Key::Key2,
Key::Key3,
Key::Key4,
Key::Key5,
Key::Key6,
Key::Key7,
Key::Key8,
Key::Key9,
]
.iter()
.enumerate()
{
if game.input.is_key_pressed(*num_key, Some(0)) {
self.reset();
self.read_scene(idx);
self.instantiate_scene();
}
}
// reload current scene
if game.input.is_key_pressed(Key::Return, Some(0)) {
self.reset();
self.instantiate_scene();
}
// spawn stuff also when paused
let random_pos = || {
let mut rng = rand::thread_rng();
m::Vec2::new(
distr::Uniform::from(-5.0..5.0).sample(&mut rng),
distr::Uniform::from(1.0..4.0).sample(&mut rng),
)
};
let random_angle =
|| m::Angle::Deg(distr::Uniform::from(0.0..360.0).sample(&mut rand::thread_rng())); | distr::Uniform::from(-5.0..5.0).sample(&mut rng),
distr::Uniform::from(-5.0..5.0).sample(&mut rng),
]
};
let mut rng = rand::thread_rng();
if game.input.is_key_pressed(Key::S, Some(0)) {
Recipe::DynamicBlock(recipes::Block {
pose: m::IsometryBuilder::new()
.with_position(random_pos())
.with_rotation(random_angle()),
width: distr::Uniform::from(0.6..1.0).sample(&mut rng),
height: distr::Uniform::from(0.5..0.8).sample(&mut rng),
})
.spawn(&mut self.graph, &mut self.physics);
}
if game.input.is_key_pressed(Key::T, Some(0)) {
Recipe::Ball(recipes::Ball {
position: random_pos().into(),
radius: distr::Uniform::from(0.1..0.4).sample(&mut rng),
restitution: 1.0,
start_velocity: random_vel(),
})
.spawn(&mut self.graph, &mut self.physics);
}
match (&self.state, game.input.is_key_pressed(Key::Space, Some(0))) {
//
// Playing or stepping manually
//
(StateEnum::Playing, _) | (StateEnum::Paused, true) => {
if game.input.is_key_pressed(Key::P, Some(0)) {
self.state = StateEnum::Paused;
return Some(());
}
{
microprofile::scope!("update", "physics");
let grav = phys::forcefield::Gravity(self.scene.gravity.into());
self.physics.tick(
&self.graph.graph,
&mut self.graph.l_pose,
&mut self.graph.l_body,
&self.graph.l_collider,
&mut self.graph.l_evt_sink,
dt,
&grav,
);
}
{
microprofile::scope!("update", "player");
self.player.tick(&mut self.graph, &game.input);
}
self.graph.l_evt_sink.flush(&self.graph.graph)(&mut self.graph);
Some(())
}
//
// Paused
//
(StateEnum::Paused, false) => {
if game.input.is_key_pressed(Key::P, Some(0)) {
self.state = StateEnum::Playing;
return Some(());
}
Some(())
}
}
}
fn draw(&mut self, renderer: &mut gx::Renderer) {
microprofile::scope!("render", "all");
let mut ctx = renderer.draw_to_window();
ctx.clear(wgpu::Color {
r: 0.1,
g: 0.1,
b: 0.1,
a: 1.0,
});
self.shape_renderer.draw(
&self.graph.l_shape,
&self.graph.l_pose,
&self.graph.graph,
&self.camera,
&mut ctx,
);
ctx.submit();
}
} | let random_vel = || {
let mut rng = rand::thread_rng();
[ | random_line_split |
main.rs | use bzip2::bufread::BzDecoder;
use bzip2::write::BzEncoder;
use bzip2::Compression;
use clap::Parser;
use crossbeam_channel::{bounded, unbounded, Receiver, Sender};
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Read, Write};
use std::process::exit;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Instant;
/// Number of input lines batched into one `Work::LINES` message.
const BATCH_SIZE: u64 = 100;
/// A progress line is written to stderr every this many input lines.
const PROGRESS_COUNT: u64 = 100000;
#[macro_use]
extern crate lazy_static_include;
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
struct | {
#[clap(long)]
labels: bool,
#[clap(long)]
statement_counts: bool,
#[clap(short, long, default_value = "0")]
skip: u64,
#[clap(short, long)]
threads: Option<usize>,
#[clap(required = true)]
paths: Vec<String>,
}
/// Optional trailing part of an RDF literal: nothing, a `^^<type>` datatype
/// IRI, or an `@lang` language tag. Borrows from the input line.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Extra<'a> {
    None,
    Type(&'a str),
    Lang(&'a str),
}
/// Subject of an N-Triples statement: an IRI or a blank node label.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Subject<'a> {
    IRI(&'a str),
    Blank(&'a str),
}
/// Object of an N-Triples statement: IRI, blank node, or literal plus Extra.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Object<'a> {
    IRI(&'a str),
    Blank(&'a str),
    Literal(&'a str, Extra<'a>),
}
/// One parsed N-Triples statement; every part borrows from the source line.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub struct Statement<'a> {
    subject: Subject<'a>,
    predicate: &'a str,
    object: Object<'a>,
}
/// Unit of work sent to consumer threads: a batch of input lines tagged with
/// the producer's running line count at send time, or the shutdown signal.
pub enum Work {
    LINES(u64, Vec<String>),
    DONE,
}
/// Per-thread result returned when a consumer shuts down.
pub struct WorkResult {
    statement_counts: Option<HashMap<String, u64>>,
}
lazy_static! {
    // Single-pass N-Triples line matcher ((?x) verbose mode). Capture groups:
    // 1 subject IRI, 2 subject blank node, 3 predicate IRI,
    // 4 object IRI, 5 object blank node, 6 literal body,
    // 7 language tag, 8 datatype IRI.
    static ref RE: Regex = Regex::new(
        r#"(?x)
        ^
        \s*
        # subject
        (?:
        # IRI
        (?:<([^>]*)>)
        |
        # Blank
        (?:_:([^\s]+))
        )
        \s*
        # predicate IRI
        <([^>]*)>
        \s*
        # object
        (?:
        # IRI
        (?:<([^>]*)>)
        |
        # Blank
        (?:_:([^\s]+))
        |
        # literal
        (?:
        "([^"]*)"
        # optional extra
        (?:
        # language
        (?:@([a-zA-Z]+(?:-[a-zA-Z0-9]+)*))
        |
        # data type
        (?:\^\^<([^>]*)>)
        )?
        )
        )
        "#
    )
    .unwrap();
}
/// Parses one N-Triples line into a borrowed `Statement` using the capture
/// groups of `RE` (passed in as `regex` so each thread can use its own clone).
///
/// # Panics
/// Panics with the given line number when the line does not match the regex,
/// or when a required capture group is missing.
pub fn parse<'a>(line: u64, input: &'a str, regex: &Regex) -> Statement<'a> {
    let captures = regex
        .captures(input)
        .unwrap_or_else(|| panic!("Invalid line: {}: {:?}", line, input));
    // Group 1: subject IRI; group 2: subject blank node.
    let subject = captures
        .get(1)
        .map(|object| Subject::IRI(object.as_str()))
        .or_else(|| captures.get(2).map(|blank| Subject::Blank(blank.as_str())))
        .expect("failed to parse subject");
    let predicate = captures.get(3).expect("failed to parse predicate").as_str();
    // Groups 4/5: object IRI / blank node; otherwise a literal (group 6)
    // with an optional language tag (7) or datatype (8).
    let object = captures
        .get(4)
        .map(|object| Object::IRI(object.as_str()))
        .or_else(|| captures.get(5).map(|blank| Object::Blank(blank.as_str())))
        .unwrap_or_else(|| {
            let literal = captures.get(6).expect("failed to parse object").as_str();
            let extra = captures
                .get(7)
                .map(|lang| Extra::Lang(lang.as_str()))
                .or_else(|| {
                    captures
                        .get(8)
                        .map(|data_type| Extra::Type(data_type.as_str()))
                })
                .unwrap_or(Extra::None);
            Object::Literal(literal, extra)
        });
    Statement {
        subject,
        predicate,
        object,
    }
}
// Data files bundled into the binary at compile time; each file holds one
// entry per line (see line_set below).
lazy_static_include_str! {
    PROPERTIES_DATA => "properties",
    IDENTIFIER_PROPERTIES_DATA => "identifier-properties",
    LANGUAGES_DATA => "languages",
    LABELS_DATA => "labels",
}
lazy_static! {
    // Property IRIs kept by the filter (contents of the "properties" file).
    static ref PROPERTIES: HashSet<&'static str> = line_set(&PROPERTIES_DATA);
}
lazy_static! {
    // External-identifier property ids, expanded to both the direct and the
    // direct-normalized Wikidata property IRI forms.
    static ref IDENTIFIER_PROPERTIES: HashSet<String> = line_set(&IDENTIFIER_PROPERTIES_DATA)
        .iter()
        .flat_map(|id| vec![
            format!("http://www.wikidata.org/prop/direct/P{}", id),
            format!("http://www.wikidata.org/prop/direct-normalized/P{}", id)
        ])
        .collect();
}
lazy_static! {
    // Accepted language tags (contents of the "languages" file).
    static ref LANGUAGES: HashSet<&'static str> = line_set(&LANGUAGES_DATA);
}
lazy_static! {
    // Label predicate IRIs (contents of the "labels" file).
    static ref LABELS: HashSet<&'static str> = line_set(&LABELS_DATA);
}
/// Splits `data` into lines and collects them into a set of borrowed
/// string slices (duplicate lines collapse to a single entry).
fn line_set(data: &str) -> HashSet<&str> {
    let mut set = HashSet::new();
    for line in data.lines() {
        set.insert(line);
    }
    set
}
/// Returns true for subjects in the Special:EntityData namespace, which the
/// processing pipeline drops entirely.
fn ignored_subject(iri: &str) -> bool {
    const IGNORED_PREFIX: &str = "https://www.wikidata.org/wiki/Special:EntityData";
    iri.strip_prefix(IGNORED_PREFIX).is_some()
}
/// Reads lines from `reader`, batching them into `Work::LINES` messages of
/// `BATCH_SIZE` lines sent on `s`. Stops early when `running` is cleared
/// (e.g. by a signal handler). Returns `(completed, total_lines_read)`
/// where `completed` is false on interruption.
fn produce<T: Read>(
    running: Arc<AtomicBool>,
    skip: u64,
    reader: T,
    s: &Sender<Work>,
) -> (bool, u64) {
    let mut total = 0;
    let mut buf_reader = BufReader::new(reader);
    let mut lines = Vec::new();
    if skip > 0 {
        eprintln!("# skipping {}", skip)
    }
    loop {
        if !running.load(Ordering::SeqCst) {
            eprintln!("# interrupted after {}", total);
            return (false, total);
        }
        let mut line = String::new();
        // read_line returns Ok(0) only at EOF.
        if buf_reader.read_line(&mut line).unwrap() == 0 {
            break;
        }
        total += 1;
        // NOTE(review): `total < skip` skips lines 1..skip-1, i.e. skip-1
        // lines rather than skip — confirm whether this off-by-one is the
        // intended resume semantics.
        let skipped = total < skip;
        if !skipped {
            lines.push(line);
            if total % BATCH_SIZE == 0 {
                s.send(Work::LINES(total, lines)).unwrap();
                lines = Vec::new();
            }
        }
        if total % PROGRESS_COUNT == 0 {
            let status = if skipped { "skipped" } else { "" };
            eprintln!("# {} {}", status, total);
        }
    }
    // Flush the final partial batch, if any.
    if !lines.is_empty() {
        s.send(Work::LINES(total, lines)).unwrap();
    }
    (true, total)
}
/// Consumer-thread body: receives `Work` batches, filters/processes each line
/// via `handle`, and writes bzip2-compressed output to `<name>.nt.bz2` (and
/// `labels_<name>.bz2` when label extraction is enabled). On `Work::DONE` it
/// finishes the compressed streams and sends its accumulated statement
/// counts back on `result_sender`.
///
/// NOTE(review): the batch tag `number` is the producer's line count when
/// the batch was sent, and is passed to `handle` for every line in the
/// batch, so parse-error line numbers are approximate.
fn consume(
    name: String,
    work_receiver: Receiver<Work>,
    result_sender: Sender<WorkResult>,
    labels: bool,
    statement_counts: bool,
) {
    // Each thread gets its own copy of the compiled regex.
    let regex = RE.clone();
    let lines_path = format!("{}.nt.bz2", name);
    let lines_file = File::create(&lines_path)
        .unwrap_or_else(|_| panic!("unable to create file: {}", &lines_path));
    let mut lines_encoder = BzEncoder::new(BufWriter::new(lines_file), Compression::best());
    // Label output stream is only opened when requested.
    let mut labels_encoder = if labels {
        let labels_path = format!("labels_{}.bz2", name);
        let labels_file = File::create(&labels_path)
            .unwrap_or_else(|_| panic!("unable to create file: {}", &labels_path));
        Some(BzEncoder::new(
            BufWriter::new(labels_file),
            Compression::best(),
        ))
    } else {
        None
    };
    let mut statement_counter = if statement_counts {
        Some(HashMap::new())
    } else {
        None
    };
    loop {
        match work_receiver.recv().unwrap() {
            Work::LINES(number, lines) => {
                for line in lines {
                    handle(
                        &mut lines_encoder,
                        labels_encoder.as_mut(),
                        statement_counter.as_mut(),
                        number,
                        line,
                        &regex,
                    );
                }
                lines_encoder.flush().unwrap();
                if let Some(labels_encoder) = labels_encoder.as_mut() {
                    labels_encoder.flush().unwrap()
                }
            }
            Work::DONE => {
                eprintln!("# stopping thread {}", name);
                // Finalize the bzip2 streams before reporting results.
                lines_encoder.try_finish().unwrap();
                if let Some(labels_encoder) = labels_encoder.as_mut() {
                    labels_encoder.try_finish().unwrap()
                }
                result_sender
                    .send(WorkResult {
                        statement_counts: statement_counter,
                    })
                    .unwrap();
                return;
            }
        }
    }
}
/// Processes one raw N-Triples line: writes it through if acceptable, then —
/// for Q-entity subjects only — updates statement counts and label output.
/// `number` is the producer's batch tag, used only in parse panic messages.
fn handle<T: Write, U: Write>(
    lines_writer: &mut T,
    labels_writer: Option<&mut U>,
    statement_counter: Option<&mut HashMap<String, u64>>,
    number: u64,
    line: String,
    regex: &Regex,
) -> Option<()> {
    let statement = parse(number, &line, regex);
    maybe_write_line(lines_writer, &line, statement);
    // Entity-scoped bookkeeping applies only to Wikidata Q-entity subjects.
    if let Some(id) = entity(statement.subject) {
        maybe_count_statement(statement_counter, id, statement);
        maybe_write_label(labels_writer, id, statement);
    }
    None
}
/// Copies `line` to the output only when the parsed statement passes the
/// acceptance filter.
fn maybe_write_line<T: Write>(lines_writer: &mut T, line: &str, statement: Statement) {
    if is_acceptable(statement) {
        lines_writer.write_all(line.as_bytes()).unwrap();
    }
}
/// Writes "<id> <label>" to the labels output when a labels writer is
/// configured and the statement carries an accepted label.
fn maybe_write_label<T: Write>(
    labels_writer: Option<&mut T>,
    id: &str,
    statement: Statement,
) -> Option<()> {
    let writer = labels_writer?;
    let text = label(statement)?;
    writeln!(writer, "{} {}", id, text).unwrap();
    None
}
/// Increments the per-entity counter when counting is enabled and the
/// predicate is a Wikidata direct property.
fn maybe_count_statement(
    statement_counter: Option<&mut HashMap<String, u64>>,
    id: &str,
    statement: Statement,
) -> Option<()> {
    let counter = statement_counter?;
    if direct_property(statement.predicate).is_some() {
        *counter.entry(id.to_owned()).or_default() += 1;
    }
    None
}
/// Acceptance filter for output statements. Rejects: blocklisted and
/// external-identifier predicates, blank-node subjects and objects, subjects
/// under Special:EntityData, literals with language tags outside the kept
/// set, and wktLiteral coordinates that name another globe.
fn is_acceptable(statement: Statement) -> bool {
    // Blocklisted or external-identifier predicates are dropped outright.
    if PROPERTIES.contains(statement.predicate)
        || IDENTIFIER_PROPERTIES.contains(statement.predicate)
    {
        return false;
    }
    let subject_ok = match statement.subject {
        Subject::Blank(_) => false,
        Subject::IRI(iri) => !ignored_subject(iri),
    };
    if !subject_ok {
        return false;
    }
    match statement.object {
        Object::Blank(_) => false,
        Object::Literal(_, Extra::Lang(lang)) => LANGUAGES.contains(lang),
        // Coordinates prefixed with a globe IRI ("<...> Point(...)") are
        // non-Earth and unsupported by some triple stores.
        Object::Literal(literal, Extra::Type(ty)) => {
            !(ty == "http://www.opengis.net/ont/geosparql#wktLiteral"
                && literal.starts_with('<'))
        }
        _ => true,
    }
}
/// Returns the unescaped label text when the statement is a label predicate
/// carrying a literal in one of the kept languages; otherwise `None`.
fn label(statement: Statement) -> Option<String> {
    if !LABELS.contains(statement.predicate) {
        return None;
    }
    match statement.object {
        Object::Literal(text, Extra::Lang(lang)) if LANGUAGES.contains(lang) => {
            Some(unescape(text))
        }
        _ => None,
    }
}
static ENTITY_IRI_PREFIX: &str = "http://www.wikidata.org/entity/Q";
/// Extracts the part of a Q-entity IRI after "…/entity/Q" (e.g. "42" from
/// ".../entity/Q42"); `None` for blank nodes or other IRIs.
fn entity(subject: Subject) -> Option<&str> {
    match subject {
        Subject::IRI(iri) => iri.strip_prefix(ENTITY_IRI_PREFIX),
        Subject::Blank(_) => None,
    }
}
static DIRECT_PROPERTY_IRI_PREFIX: &str = "http://www.wikidata.org/prop/direct/";
/// Returns the suffix after the direct-property prefix (e.g. "P31"), or
/// `None` when `predicate` is not a Wikidata direct property.
fn direct_property(predicate: &str) -> Option<&str> {
    if predicate.starts_with(DIRECT_PROPERTY_IRI_PREFIX) {
        Some(&predicate[DIRECT_PROPERTY_IRI_PREFIX.len()..])
    } else {
        None
    }
}
/// Undoes N-Triples string escapes: \t \b \n \r \f \\ \uXXXX \UXXXXXXXX.
///
/// Panics on malformed escapes (trailing backslash, unknown escape letter,
/// bad hex); input is expected to be machine-generated and well-formed.
/// NOTE(review): \" and \' are not handled — presumably they never reach
/// here because the matcher stops literals at the first quote; confirm.
pub fn unescape(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut iter = s.chars().enumerate();
    while let Some((pos, ch)) = iter.next() {
        if ch != '\\' {
            out.push(ch);
            continue;
        }
        let (pos, esc) = iter
            .next()
            .unwrap_or_else(|| panic!("invalid escape at {} in {}", pos, s));
        let decoded = match esc {
            't' => '\t',
            'b' => '\u{08}',
            'n' => '\n',
            'r' => '\r',
            'f' => '\u{0C}',
            '\\' => '\\',
            'u' => parse_unicode(&mut iter, 4).unwrap_or_else(|err| {
                panic!("invalid escape {}{} at {} in {}: {}", ch, esc, pos, s, err)
            }),
            'U' => parse_unicode(&mut iter, 8).unwrap_or_else(|err| {
                panic!("invalid escape {}{} at {} in {}: {}", ch, esc, pos, s, err)
            }),
            other => panic!("invalid escape {}{} at {} in {}", ch, other, pos, s),
        };
        out.push(decoded);
    }
    out
}
/// Consumes `count` items from `chars` and decodes them as a hexadecimal
/// Unicode scalar value; errors describe bad hex or an invalid code point.
fn parse_unicode<I>(chars: &mut I, count: usize) -> Result<char, String>
where
    I: Iterator<Item = (usize, char)>,
{
    let hex: String = chars.take(count).map(|(_, c)| c).collect();
    let code = u32::from_str_radix(&hex, 16)
        .map_err(|e| format!("could not parse {} as u32 hex: {}", hex, e))?;
    std::char::from_u32(code)
        .ok_or_else(|| format!("could not parse {} as a unicode char", code))
}
/// Entry point: spawns consumer threads, streams each bzip2-compressed
/// N-Triples dump through `produce`, collects per-thread results, and
/// optionally writes aggregated statement counts to `statement_counts.bz2`.
fn main() {
    let opts: Opts = Opts::parse();
    let labels = opts.labels;
    let statement_counts = opts.statement_counts;
    let running = Arc::new(AtomicBool::new(true));
    let r = running.clone();
    ctrlc::set_handler(move || {
        // FIX: the condition was inverted (`if r.load(...)`), so the FIRST
        // Ctrl-C called exit(1) and the graceful-stop path in `produce` was
        // unreachable. Now the first Ctrl-C requests a graceful stop and a
        // second Ctrl-C aborts immediately.
        if !r.load(Ordering::SeqCst) {
            exit(1);
        }
        r.store(false, Ordering::SeqCst);
    })
    .expect("failed to set Ctrl-C handler");
    let start = Instant::now();
    // Rendezvous channel (capacity 0): the producer blocks until a worker is
    // ready, bounding memory while reading the dump.
    let (work_sender, work_receiver) = bounded::<Work>(0);
    let (result_sender, result_receiver) = unbounded();
    let mut threads = Vec::new();
    let thread_count = opts.threads.unwrap_or_else(|| num_cpus::get() * 2);
    for id in 1..=thread_count {
        let work_receiver = work_receiver.clone();
        let result_sender = result_sender.clone();
        threads.push(thread::spawn(move || {
            consume(
                id.to_string(),
                work_receiver,
                result_sender,
                labels,
                statement_counts,
            )
        }));
    }
    let mut exit_code = 0;
    for path in opts.paths {
        let file = File::open(&path).expect("can't open file");
        let decoder = BzDecoder::new(BufReader::new(file));
        eprintln!("# processing {}", path);
        let (finished, count) = produce(running.clone(), opts.skip, decoder, &work_sender);
        eprintln!("# processed {}: {}", path, count);
        if !finished {
            exit_code = 1;
            break;
        }
    }
    // One DONE marker per worker shuts them all down.
    for _ in &threads {
        work_sender.send(Work::DONE).unwrap();
    }
    // Merge per-worker statement counts; stop after all workers reported.
    let mut statement_counter = HashMap::new();
    let mut result_count = 0;
    for result in result_receiver.iter() {
        if let Some(statement_counts) = result.statement_counts {
            for (id, count) in statement_counts.iter() {
                *statement_counter.entry(id.to_string()).or_insert(0) += count;
            }
        }
        result_count += 1;
        if result_count == thread_count {
            break;
        }
    }
    if statement_counts {
        eprintln!("# entities: {}", statement_counter.len());
        let path = "statement_counts.bz2";
        let file = File::create(path).unwrap_or_else(|_| panic!("unable to create file: {}", path));
        let mut encoder = BzEncoder::new(BufWriter::new(file), Compression::best());
        for (id, count) in statement_counter.iter() {
            encoder
                .write_fmt(format_args!("{} {}\n", id, count))
                .unwrap();
        }
        encoder.try_finish().unwrap();
    }
    let duration = start.elapsed();
    eprintln!("# took {:?}", duration);
    exit(exit_code);
}
// Unit tests for the parser, filters, and counters, plus an end-to-end run
// against the checked-in fixtures test.in.rdf / test.out.rdf.
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use std::fs::read_to_string;
    use std::io::{self, Lines};
    use std::path::{Path, PathBuf};
    // A typed literal keeps its datatype IRI in Extra::Type.
    #[test]
    fn test_literal_with_type() {
        let line = r#"<http://www.wikidata.org/entity/Q1644> <http://www.wikidata.org/prop/direct/P2043> "+1094.26"^^<http://www.w3.org/2001/XMLSchema#decimal> ."#;
        assert_eq!(
            parse(1, line, &RE),
            Statement {
                subject: Subject::IRI("http://www.wikidata.org/entity/Q1644"),
                predicate: "http://www.wikidata.org/prop/direct/P2043",
                object: Object::Literal(
                    "+1094.26",
                    Extra::Type("http://www.w3.org/2001/XMLSchema#decimal")
                )
            }
        );
    }
    // A language-tagged literal keeps its tag in Extra::Lang.
    #[test]
    fn test_literal_with_lang() {
        let line = r#"<http://www.wikidata.org/entity/Q177> <http://schema.org/name> "pizza"@en ."#;
        assert_eq!(
            parse(1, line, &RE),
            Statement {
                subject: Subject::IRI("http://www.wikidata.org/entity/Q177"),
                predicate: "http://schema.org/name",
                object: Object::Literal("pizza", Extra::Lang("en"))
            }
        );
    }
    // A bare literal gets Extra::None.
    #[test]
    fn test_literal() {
        let line = r#"<http://www.wikidata.org/entity/Q177> <http://www.wikidata.org/prop/direct/P373> "Pizzas" ."#;
        assert_eq!(
            parse(1, line, &RE),
            Statement {
                subject: Subject::IRI("http://www.wikidata.org/entity/Q177"),
                predicate: "http://www.wikidata.org/prop/direct/P373",
                object: Object::Literal("Pizzas", Extra::None)
            }
        );
    }
    // Blank-node subjects parse into Subject::Blank.
    #[test]
    fn test_blank_subject() {
        let line = r#"_:foo <bar> <baz>"#;
        assert_eq!(
            parse(1, line, &RE),
            Statement {
                subject: Subject::Blank("foo"),
                predicate: "bar",
                object: Object::IRI("baz")
            }
        );
    }
    // Blank-node objects parse into Object::Blank.
    #[test]
    fn test_blank_object() {
        let line = r#"<foo> <bar> _:baz"#;
        assert_eq!(
            parse(1, line, &RE),
            Statement {
                subject: Subject::IRI("foo"),
                predicate: "bar",
                object: Object::Blank("baz")
            }
        );
    }
    // Only direct-property predicates are counted: "second" lacks the
    // direct-property prefix, so entity "b" never gets an entry.
    #[test]
    fn test_statement_count() {
        let a = format!("{}a", ENTITY_IRI_PREFIX);
        let b = format!("{}b", ENTITY_IRI_PREFIX);
        let first_predicate = format!("{}first", DIRECT_PROPERTY_IRI_PREFIX);
        let second_predicate = "second";
        let third_predicate = format!("{}third", DIRECT_PROPERTY_IRI_PREFIX);
        let first = Statement {
            subject: Subject::IRI(a.as_str()),
            predicate: first_predicate.as_str(),
            object: Object::IRI(""),
        };
        let second = Statement {
            subject: Subject::IRI(b.as_str()),
            predicate: second_predicate,
            object: Object::IRI(""),
        };
        let third = Statement {
            subject: Subject::IRI(a.as_str()),
            predicate: third_predicate.as_str(),
            object: Object::IRI(""),
        };
        let mut counter = HashMap::new();
        maybe_count_statement(Some(&mut counter), "a", first);
        maybe_count_statement(Some(&mut counter), "b", second);
        maybe_count_statement(Some(&mut counter), "a", third);
        assert_eq!(counter.len(), 1);
        assert_eq!(counter.get("a"), Some(&2));
        assert_eq!(counter.get("b"), None);
    }
    // Earth coordinates pass the filter; literals naming another globe
    // (leading '<IRI>') are rejected.
    #[test]
    fn test_geo_literals() {
        assert!(is_acceptable(parse(
            1,
            r#"<foo> <bar> "Point(4.6681 50.6411)"^^<http://www.opengis.net/ont/geosparql#wktLiteral> ."#,
            &RE,
        )));
        assert!(!is_acceptable(parse(
            1,
            r#"<foo> <bar> "<http://www.wikidata.org/entity/Q405> Point(-141.6 42.6)"^^<http://www.opengis.net/ont/geosparql#wktLiteral> ."#,
            &RE,
        )));
    }
    // Helper: buffered per-line iterator over a file.
    fn read_lines<P>(filename: P) -> io::Result<Lines<BufReader<File>>>
    where
        P: AsRef<Path>,
    {
        let file = File::open(filename)?;
        Ok(BufReader::new(file).lines())
    }
    // End-to-end: run `handle` over every fixture line and compare the
    // filtered output byte-for-byte with the expected file.
    #[test]
    fn test_full() -> Result<(), ()> {
        let dir = env!("CARGO_MANIFEST_DIR");
        let mut in_path = PathBuf::from(dir);
        in_path.push("test.in.rdf");
        let in_path = in_path.as_os_str().to_str().unwrap();
        let mut out_path = PathBuf::from(dir);
        out_path.push("test.out.rdf");
        let out_path = out_path.as_os_str().to_str().unwrap();
        let mut lines_writer = Vec::new();
        let mut labels_writer = Vec::new();
        for (line, number) in read_lines(in_path).unwrap().zip(1u64..) {
            let mut line = line.unwrap();
            line.push('\n');
            handle(
                &mut lines_writer,
                Some(&mut labels_writer),
                None,
                number,
                line,
                &RE,
            );
        }
        let expected = read_to_string(out_path).unwrap();
        assert_eq!(String::from_utf8(lines_writer).unwrap(), expected);
        Ok(())
    }
}
| Opts | identifier_name |
main.rs | use bzip2::bufread::BzDecoder;
use bzip2::write::BzEncoder;
use bzip2::Compression;
use clap::Parser;
use crossbeam_channel::{bounded, unbounded, Receiver, Sender};
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Read, Write};
use std::process::exit;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Instant;
// Lines per message handed to a worker thread.
const BATCH_SIZE: u64 = 100;
// Emit a progress line on stderr every this many input lines.
const PROGRESS_COUNT: u64 = 100000;
#[macro_use]
extern crate lazy_static_include;
// Command-line interface (clap derive). Field comments are deliberately
// plain `//` so they do not become --help text.
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
struct Opts {
    // Also write per-thread label files (labels_<n>.bz2).
    #[clap(long)]
    labels: bool,
    // Accumulate per-entity direct-statement counts (statement_counts.bz2).
    #[clap(long)]
    statement_counts: bool,
    // Number of leading input lines to skip (see `produce`).
    #[clap(short, long, default_value = "0")]
    skip: u64,
    // Worker thread count; defaults to 2 x logical CPUs.
    #[clap(short, long)]
    threads: Option<usize>,
    // One or more bzip2-compressed N-Triples dump files.
    #[clap(required = true)]
    paths: Vec<String>,
}
/// Extra qualifier on a literal object: none, a datatype IRI, or a language tag.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Extra<'a> {
    None,
    Type(&'a str),
    Lang(&'a str),
}
/// Statement subject: an IRI or a blank-node label, borrowed from the input line.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Subject<'a> {
    IRI(&'a str),
    Blank(&'a str),
}
/// Statement object: an IRI, a blank-node label, or a literal with its qualifier.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Object<'a> {
    IRI(&'a str),
    Blank(&'a str),
    Literal(&'a str, Extra<'a>),
}
/// One parsed N-Triples statement; every field borrows from the source line.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub struct Statement<'a> {
    subject: Subject<'a>,
    predicate: &'a str,
    object: Object<'a>,
}
/// Message to a worker: a batch of raw lines tagged with the producer's
/// running line count, or a shutdown marker.
pub enum Work {
    LINES(u64, Vec<String>),
    DONE,
}
/// Per-worker result: statement counts when --statement-counts was given.
pub struct WorkResult {
    statement_counts: Option<HashMap<String, u64>>,
}
// Single-regex N-Triples line matcher. Capture groups (relied on by `parse`):
// 1 subject IRI, 2 subject blank, 3 predicate IRI, 4 object IRI,
// 5 object blank, 6 literal value, 7 language tag, 8 datatype IRI.
// NOTE(review): literals match `[^"]*`, so a literal containing an escaped
// quote (\") would be cut short — presumably such lines never survive the
// pipeline; confirm against the dump format.
lazy_static! {
    static ref RE: Regex = Regex::new(
        r#"(?x)
        ^
        \s*
        # subject
        (?:
            # IRI
            (?:<([^>]*)>)
            |
            # Blank
            (?:_:([^\s]+))
        )
        \s*
        # predicate IRI
        <([^>]*)>
        \s*
        # object
        (?:
            # IRI
            (?:<([^>]*)>)
            |
            # Blank
            (?:_:([^\s]+))
            |
            # literal
            (?:
                "([^"]*)"
                # optional extra
                (?:
                    # language
                    (?:@([a-zA-Z]+(?:-[a-zA-Z0-9]+)*))
                    |
                    # data type
                    (?:\^\^<([^>]*)>)
                )?
            )
        )
        "#
    )
    .unwrap();
}
/// Parses a single N-Triples line into a borrowed `Statement`.
///
/// Capture-group numbering follows `RE` exactly: 1 subject IRI, 2 subject
/// blank node, 3 predicate IRI, 4 object IRI, 5 object blank node,
/// 6 literal value, 7 language tag, 8 datatype IRI.
///
/// Panics (reporting the caller-supplied `line` tag) when the line does not
/// match; the input is expected to be a well-formed machine-generated dump.
pub fn parse<'a>(line: u64, input: &'a str, regex: &Regex) -> Statement<'a> {
    let captures = regex
        .captures(input)
        .unwrap_or_else(|| panic!("Invalid line: {}: {:?}", line, input));
    let subject = captures
        .get(1)
        .map(|object| Subject::IRI(object.as_str()))
        .or_else(|| captures.get(2).map(|blank| Subject::Blank(blank.as_str())))
        .expect("failed to parse subject");
    let predicate = captures.get(3).expect("failed to parse predicate").as_str();
    let object = captures
        .get(4)
        .map(|object| Object::IRI(object.as_str()))
        .or_else(|| captures.get(5).map(|blank| Object::Blank(blank.as_str())))
        .unwrap_or_else(|| {
            // Neither IRI nor blank: must be a literal (group 6), with an
            // optional language tag (7) or datatype (8).
            let literal = captures.get(6).expect("failed to parse object").as_str();
            let extra = captures
                .get(7)
                .map(|lang| Extra::Lang(lang.as_str()))
                .or_else(|| {
                    captures
                        .get(8)
                        .map(|data_type| Extra::Type(data_type.as_str()))
                })
                .unwrap_or(Extra::None);
            Object::Literal(literal, extra)
        });
    Statement {
        subject,
        predicate,
        object,
    }
}
lazy_static_include_str! {
PROPERTIES_DATA => "properties",
IDENTIFIER_PROPERTIES_DATA => "identifier-properties",
LANGUAGES_DATA => "languages",
LABELS_DATA => "labels",
}
lazy_static! {
static ref PROPERTIES: HashSet<&'static str> = line_set(&PROPERTIES_DATA);
}
lazy_static! {
static ref IDENTIFIER_PROPERTIES: HashSet<String> = line_set(&IDENTIFIER_PROPERTIES_DATA)
.iter()
.flat_map(|id| vec![
format!("http://www.wikidata.org/prop/direct/P{}", id),
format!("http://www.wikidata.org/prop/direct-normalized/P{}", id)
])
.collect();
}
lazy_static! {
static ref LANGUAGES: HashSet<&'static str> = line_set(&LANGUAGES_DATA);
}
lazy_static! {
static ref LABELS: HashSet<&'static str> = line_set(&LABELS_DATA);
}
fn line_set(data: &str) -> HashSet<&str> {
data.lines().collect()
}
fn ignored_subject(iri: &str) -> bool {
iri.starts_with("https://www.wikidata.org/wiki/Special:EntityData")
}
fn produce<T: Read>(
running: Arc<AtomicBool>,
skip: u64,
reader: T,
s: &Sender<Work>,
) -> (bool, u64) {
let mut total = 0;
let mut buf_reader = BufReader::new(reader);
let mut lines = Vec::new();
if skip > 0 {
eprintln!("# skipping {}", skip)
}
loop {
if !running.load(Ordering::SeqCst) {
eprintln!("# interrupted after {}", total);
return (false, total);
}
let mut line = String::new();
if buf_reader.read_line(&mut line).unwrap() == 0 {
break;
}
total += 1;
let skipped = total < skip;
if !skipped {
lines.push(line);
if total % BATCH_SIZE == 0 {
s.send(Work::LINES(total, lines)).unwrap();
lines = Vec::new();
}
}
if total % PROGRESS_COUNT == 0 {
let status = if skipped { "skipped" } else { "" };
eprintln!("# {} {}", status, total);
}
}
if !lines.is_empty() {
s.send(Work::LINES(total, lines)).unwrap();
}
(true, total)
}
/// Worker loop: receives line batches, filters them, and appends accepted
/// statements to `<name>.nt.bz2` (bzip2, best compression).
///
/// With `labels`, accepted labels also go to `labels_<name>.bz2`; with
/// `statement_counts`, per-entity direct-statement counts are accumulated
/// and shipped back through `result_sender` once `Work::DONE` arrives.
fn consume(
    name: String,
    work_receiver: Receiver<Work>,
    result_sender: Sender<WorkResult>,
    labels: bool,
    statement_counts: bool,
) {
    // Each worker gets its own copy of the compiled regex.
    let regex = RE.clone();
    let lines_path = format!("{}.nt.bz2", name);
    let lines_file = File::create(&lines_path)
        .unwrap_or_else(|_| panic!("unable to create file: {}", &lines_path));
    let mut lines_encoder = BzEncoder::new(BufWriter::new(lines_file), Compression::best());
    let mut labels_encoder = if labels {
        let labels_path = format!("labels_{}.bz2", name);
        let labels_file = File::create(&labels_path)
            .unwrap_or_else(|_| panic!("unable to create file: {}", &labels_path));
        Some(BzEncoder::new(
            BufWriter::new(labels_file),
            Compression::best(),
        ))
    } else {
        None
    };
    let mut statement_counter = if statement_counts {
        Some(HashMap::new())
    } else {
        None
    };
    loop {
        match work_receiver.recv().unwrap() {
            Work::LINES(number, lines) => {
                for line in lines {
                    handle(
                        &mut lines_encoder,
                        labels_encoder.as_mut(),
                        statement_counter.as_mut(),
                        number,
                        line,
                        // FIX: this argument was corrupted to `®ex` (an HTML
                        // entity mangling of `&regex`) and did not compile.
                        &regex,
                    );
                }
                lines_encoder.flush().unwrap();
                if let Some(labels_encoder) = labels_encoder.as_mut() {
                    labels_encoder.flush().unwrap()
                }
            }
            Work::DONE => {
                eprintln!("# stopping thread {}", name);
                lines_encoder.try_finish().unwrap();
                if let Some(labels_encoder) = labels_encoder.as_mut() {
                    labels_encoder.try_finish().unwrap()
                }
                result_sender
                    .send(WorkResult {
                        statement_counts: statement_counter,
                    })
                    .unwrap();
                return;
            }
        }
    }
}
fn handle<T: Write, U: Write>(
lines_writer: &mut T,
labels_writer: Option<&mut U>,
statement_counter: Option<&mut HashMap<String, u64>>,
number: u64,
line: String,
regex: &Regex,
) -> Option<()> {
let statement = parse(number, &line, regex);
maybe_write_line(lines_writer, &line, statement);
let id = entity(statement.subject)?;
maybe_count_statement(statement_counter, id, statement);
maybe_write_label(labels_writer, id, statement);
None
}
fn maybe_write_line<T: Write>(lines_writer: &mut T, line: &str, statement: Statement) {
if !is_acceptable(statement) {
return;
}
lines_writer.write_all(line.as_bytes()).unwrap();
}
fn maybe_write_label<T: Write>(
labels_writer: Option<&mut T>,
id: &str,
statement: Statement,
) -> Option<()> {
let labels_writer = labels_writer?;
let label = label(statement)?;
labels_writer
.write_fmt(format_args!("{} {}\n", id, label))
.unwrap();
None
}
fn maybe_count_statement(
statement_counter: Option<&mut HashMap<String, u64>>,
id: &str,
statement: Statement,
) -> Option<()> {
let statement_counter = statement_counter?;
direct_property(statement.predicate)?;
*statement_counter.entry(id.to_string()).or_insert(0) += 1;
None
}
fn is_acceptable(statement: Statement) -> bool {
if PROPERTIES.contains(statement.predicate)
|| IDENTIFIER_PROPERTIES.contains(statement.predicate)
{
return false;
}
match statement.subject {
Subject::Blank(_) => return false,
Subject::IRI(iri) if ignored_subject(iri) => return false,
_ => (),
}
match statement.object {
Object::Blank(_) => return false,
Object::Literal(_, Extra::Lang(lang)) if !LANGUAGES.contains(lang) => return false,
// non-Earth geo coordinates are not supported by some triple stores
Object::Literal(
literal,
Extra::Type("http://www.opengis.net/ont/geosparql#wktLiteral"),
) if literal.starts_with('<') => return false,
_ => (),
}
true
}
fn label(statement: Statement) -> Option<String> {
if !LABELS.contains(statement.predicate) {
return None;
}
if let Object::Literal(label, Extra::Lang(lang)) = statement.object {
if !LANGUAGES.contains(lang) {
return None;
}
return Some(unescape(label));
}
None
}
static ENTITY_IRI_PREFIX: &str = "http://www.wikidata.org/entity/Q";
fn entity(subject: Subject) -> Option<&str> {
if let Subject::IRI(iri) = subject {
iri.strip_prefix(ENTITY_IRI_PREFIX)
} else {
None
}
}
static DIRECT_PROPERTY_IRI_PREFIX: &str = "http://www.wikidata.org/prop/direct/";
fn direct_property(predicate: &str) -> Option<&str> {
predicate.strip_prefix(DIRECT_PROPERTY_IRI_PREFIX)
}
pub fn unescape(s: &str) -> String {
let mut chars = s.chars().enumerate();
let mut res = String::with_capacity(s.len());
while let Some((idx, c)) = chars.next() {
if c == '\\' {
match chars.next() {
None => {
panic!("invalid escape at {} in {}", idx, s);
}
Some((idx, c2)) => {
res.push(match c2 {
't' => '\t',
'b' => '\u{08}',
'n' => '\n',
'r' => '\r',
'f' => '\u{0C}',
'\\' => '\\',
'u' => match parse_unicode(&mut chars, 4) {
Ok(c3) => c3,
Err(err) => {
panic!("invalid escape {}{} at {} in {}: {}", c, c2, idx, s, err);
} | }
},
_ => {
panic!("invalid escape {}{} at {} in {}", c, c2, idx, s);
}
});
continue;
}
};
}
res.push(c);
}
res
}
fn parse_unicode<I>(chars: &mut I, count: usize) -> Result<char, String>
where
I: Iterator<Item = (usize, char)>,
{
let unicode_seq: String = chars.take(count).map(|(_, c)| c).collect();
u32::from_str_radix(&unicode_seq, 16)
.map_err(|e| format!("could not parse {} as u32 hex: {}", unicode_seq, e))
.and_then(|u| {
std::char::from_u32(u).ok_or_else(|| format!("could not parse {} as a unicode char", u))
})
}
/// Entry point: spawns consumer threads, streams each bzip2-compressed
/// N-Triples dump through `produce`, collects per-thread results, and
/// optionally writes aggregated statement counts to `statement_counts.bz2`.
fn main() {
    let opts: Opts = Opts::parse();
    let labels = opts.labels;
    let statement_counts = opts.statement_counts;
    let running = Arc::new(AtomicBool::new(true));
    let r = running.clone();
    ctrlc::set_handler(move || {
        // FIX: the condition was inverted (`if r.load(...)`), so the FIRST
        // Ctrl-C called exit(1) and the graceful-stop path in `produce` was
        // unreachable. Now the first Ctrl-C requests a graceful stop and a
        // second Ctrl-C aborts immediately.
        if !r.load(Ordering::SeqCst) {
            exit(1);
        }
        r.store(false, Ordering::SeqCst);
    })
    .expect("failed to set Ctrl-C handler");
    let start = Instant::now();
    // Rendezvous channel (capacity 0): the producer blocks until a worker is
    // ready, bounding memory while reading the dump.
    let (work_sender, work_receiver) = bounded::<Work>(0);
    let (result_sender, result_receiver) = unbounded();
    let mut threads = Vec::new();
    let thread_count = opts.threads.unwrap_or_else(|| num_cpus::get() * 2);
    for id in 1..=thread_count {
        let work_receiver = work_receiver.clone();
        let result_sender = result_sender.clone();
        threads.push(thread::spawn(move || {
            consume(
                id.to_string(),
                work_receiver,
                result_sender,
                labels,
                statement_counts,
            )
        }));
    }
    let mut exit_code = 0;
    for path in opts.paths {
        let file = File::open(&path).expect("can't open file");
        let decoder = BzDecoder::new(BufReader::new(file));
        eprintln!("# processing {}", path);
        let (finished, count) = produce(running.clone(), opts.skip, decoder, &work_sender);
        eprintln!("# processed {}: {}", path, count);
        if !finished {
            exit_code = 1;
            break;
        }
    }
    // One DONE marker per worker shuts them all down.
    for _ in &threads {
        work_sender.send(Work::DONE).unwrap();
    }
    // Merge per-worker statement counts; stop after all workers reported.
    let mut statement_counter = HashMap::new();
    let mut result_count = 0;
    for result in result_receiver.iter() {
        if let Some(statement_counts) = result.statement_counts {
            for (id, count) in statement_counts.iter() {
                *statement_counter.entry(id.to_string()).or_insert(0) += count;
            }
        }
        result_count += 1;
        if result_count == thread_count {
            break;
        }
    }
    if statement_counts {
        eprintln!("# entities: {}", statement_counter.len());
        let path = "statement_counts.bz2";
        let file = File::create(path).unwrap_or_else(|_| panic!("unable to create file: {}", path));
        let mut encoder = BzEncoder::new(BufWriter::new(file), Compression::best());
        for (id, count) in statement_counter.iter() {
            encoder
                .write_fmt(format_args!("{} {}\n", id, count))
                .unwrap();
        }
        encoder.try_finish().unwrap();
    }
    let duration = start.elapsed();
    eprintln!("# took {:?}", duration);
    exit(exit_code);
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
use std::fs::read_to_string;
use std::io::{self, Lines};
use std::path::{Path, PathBuf};
#[test]
fn test_literal_with_type() {
let line = r#"<http://www.wikidata.org/entity/Q1644> <http://www.wikidata.org/prop/direct/P2043> "+1094.26"^^<http://www.w3.org/2001/XMLSchema#decimal> ."#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::IRI("http://www.wikidata.org/entity/Q1644"),
predicate: "http://www.wikidata.org/prop/direct/P2043",
object: Object::Literal(
"+1094.26",
Extra::Type("http://www.w3.org/2001/XMLSchema#decimal")
)
}
);
}
#[test]
fn test_literal_with_lang() {
let line = r#"<http://www.wikidata.org/entity/Q177> <http://schema.org/name> "pizza"@en ."#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::IRI("http://www.wikidata.org/entity/Q177"),
predicate: "http://schema.org/name",
object: Object::Literal("pizza", Extra::Lang("en"))
}
);
}
#[test]
fn test_literal() {
let line = r#"<http://www.wikidata.org/entity/Q177> <http://www.wikidata.org/prop/direct/P373> "Pizzas" ."#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::IRI("http://www.wikidata.org/entity/Q177"),
predicate: "http://www.wikidata.org/prop/direct/P373",
object: Object::Literal("Pizzas", Extra::None)
}
);
}
#[test]
fn test_blank_subject() {
let line = r#"_:foo <bar> <baz>"#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::Blank("foo"),
predicate: "bar",
object: Object::IRI("baz")
}
);
}
#[test]
fn test_blank_object() {
let line = r#"<foo> <bar> _:baz"#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::IRI("foo"),
predicate: "bar",
object: Object::Blank("baz")
}
);
}
#[test]
fn test_statement_count() {
let a = format!("{}a", ENTITY_IRI_PREFIX);
let b = format!("{}b", ENTITY_IRI_PREFIX);
let first_predicate = format!("{}first", DIRECT_PROPERTY_IRI_PREFIX);
let second_predicate = "second";
let third_predicate = format!("{}third", DIRECT_PROPERTY_IRI_PREFIX);
let first = Statement {
subject: Subject::IRI(a.as_str()),
predicate: first_predicate.as_str(),
object: Object::IRI(""),
};
let second = Statement {
subject: Subject::IRI(b.as_str()),
predicate: second_predicate,
object: Object::IRI(""),
};
let third = Statement {
subject: Subject::IRI(a.as_str()),
predicate: third_predicate.as_str(),
object: Object::IRI(""),
};
let mut counter = HashMap::new();
maybe_count_statement(Some(&mut counter), "a", first);
maybe_count_statement(Some(&mut counter), "b", second);
maybe_count_statement(Some(&mut counter), "a", third);
assert_eq!(counter.len(), 1);
assert_eq!(counter.get("a"), Some(&2));
assert_eq!(counter.get("b"), None);
}
#[test]
fn test_geo_literals() {
assert!(is_acceptable(parse(
1,
r#"<foo> <bar> "Point(4.6681 50.6411)"^^<http://www.opengis.net/ont/geosparql#wktLiteral> ."#,
&RE,
)));
assert!(!is_acceptable(parse(
1,
r#"<foo> <bar> "<http://www.wikidata.org/entity/Q405> Point(-141.6 42.6)"^^<http://www.opengis.net/ont/geosparql#wktLiteral> ."#,
&RE,
)));
}
fn read_lines<P>(filename: P) -> io::Result<Lines<BufReader<File>>>
where
P: AsRef<Path>,
{
let file = File::open(filename)?;
Ok(BufReader::new(file).lines())
}
#[test]
fn test_full() -> Result<(), ()> {
let dir = env!("CARGO_MANIFEST_DIR");
let mut in_path = PathBuf::from(dir);
in_path.push("test.in.rdf");
let in_path = in_path.as_os_str().to_str().unwrap();
let mut out_path = PathBuf::from(dir);
out_path.push("test.out.rdf");
let out_path = out_path.as_os_str().to_str().unwrap();
let mut lines_writer = Vec::new();
let mut labels_writer = Vec::new();
for (line, number) in read_lines(in_path).unwrap().zip(1u64..) {
let mut line = line.unwrap();
line.push('\n');
handle(
&mut lines_writer,
Some(&mut labels_writer),
None,
number,
line,
&RE,
);
}
let expected = read_to_string(out_path).unwrap();
assert_eq!(String::from_utf8(lines_writer).unwrap(), expected);
Ok(())
}
} | },
'U' => match parse_unicode(&mut chars, 8) {
Ok(c3) => c3,
Err(err) => {
panic!("invalid escape {}{} at {} in {}: {}", c, c2, idx, s, err); | random_line_split |
main.rs | use bzip2::bufread::BzDecoder;
use bzip2::write::BzEncoder;
use bzip2::Compression;
use clap::Parser;
use crossbeam_channel::{bounded, unbounded, Receiver, Sender};
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Read, Write};
use std::process::exit;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Instant;
const BATCH_SIZE: u64 = 100;
const PROGRESS_COUNT: u64 = 100000;
#[macro_use]
extern crate lazy_static_include;
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
struct Opts {
#[clap(long)]
labels: bool,
#[clap(long)]
statement_counts: bool,
#[clap(short, long, default_value = "0")]
skip: u64,
#[clap(short, long)]
threads: Option<usize>,
#[clap(required = true)]
paths: Vec<String>,
}
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Extra<'a> {
None,
Type(&'a str),
Lang(&'a str),
}
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Subject<'a> {
IRI(&'a str),
Blank(&'a str),
}
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Object<'a> {
IRI(&'a str),
Blank(&'a str),
Literal(&'a str, Extra<'a>),
}
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub struct Statement<'a> {
subject: Subject<'a>,
predicate: &'a str,
object: Object<'a>,
}
pub enum Work {
LINES(u64, Vec<String>),
DONE,
}
pub struct WorkResult {
statement_counts: Option<HashMap<String, u64>>,
}
lazy_static! {
static ref RE: Regex = Regex::new(
r#"(?x)
^
\s*
# subject
(?:
# IRI
(?:<([^>]*)>)
|
# Blank
(?:_:([^\s]+))
)
\s*
# predicate IRI
<([^>]*)>
\s*
# object
(?:
# IRI
(?:<([^>]*)>)
|
# Blank
(?:_:([^\s]+))
|
# literal
(?:
"([^"]*)"
# optional extra
(?:
# language
(?:@([a-zA-Z]+(?:-[a-zA-Z0-9]+)*))
|
# data type
(?:\^\^<([^>]*)>)
)?
)
)
"#
)
.unwrap();
}
pub fn parse<'a>(line: u64, input: &'a str, regex: &Regex) -> Statement<'a> {
let captures = regex
.captures(input)
.unwrap_or_else(|| panic!("Invalid line: {}: {:?}", line, input));
let subject = captures
.get(1)
.map(|object| Subject::IRI(object.as_str()))
.or_else(|| captures.get(2).map(|blank| Subject::Blank(blank.as_str())))
.expect("failed to parse subject");
let predicate = captures.get(3).expect("failed to parse predicate").as_str();
let object = captures
.get(4)
.map(|object| Object::IRI(object.as_str()))
.or_else(|| captures.get(5).map(|blank| Object::Blank(blank.as_str())))
.unwrap_or_else(|| {
let literal = captures.get(6).expect("failed to parse object").as_str();
let extra = captures
.get(7)
.map(|lang| Extra::Lang(lang.as_str()))
.or_else(|| {
captures
.get(8)
.map(|data_type| Extra::Type(data_type.as_str()))
})
.unwrap_or(Extra::None);
Object::Literal(literal, extra)
});
Statement {
subject,
predicate,
object,
}
}
lazy_static_include_str! {
PROPERTIES_DATA => "properties",
IDENTIFIER_PROPERTIES_DATA => "identifier-properties",
LANGUAGES_DATA => "languages",
LABELS_DATA => "labels",
}
lazy_static! {
static ref PROPERTIES: HashSet<&'static str> = line_set(&PROPERTIES_DATA);
}
lazy_static! {
static ref IDENTIFIER_PROPERTIES: HashSet<String> = line_set(&IDENTIFIER_PROPERTIES_DATA)
.iter()
.flat_map(|id| vec![
format!("http://www.wikidata.org/prop/direct/P{}", id),
format!("http://www.wikidata.org/prop/direct-normalized/P{}", id)
])
.collect();
}
lazy_static! {
static ref LANGUAGES: HashSet<&'static str> = line_set(&LANGUAGES_DATA);
}
lazy_static! {
static ref LABELS: HashSet<&'static str> = line_set(&LABELS_DATA);
}
fn line_set(data: &str) -> HashSet<&str> {
data.lines().collect()
}
fn ignored_subject(iri: &str) -> bool {
iri.starts_with("https://www.wikidata.org/wiki/Special:EntityData")
}
fn produce<T: Read>(
running: Arc<AtomicBool>,
skip: u64,
reader: T,
s: &Sender<Work>,
) -> (bool, u64) {
let mut total = 0;
let mut buf_reader = BufReader::new(reader);
let mut lines = Vec::new();
if skip > 0 {
eprintln!("# skipping {}", skip)
}
loop {
if !running.load(Ordering::SeqCst) {
eprintln!("# interrupted after {}", total);
return (false, total);
}
let mut line = String::new();
if buf_reader.read_line(&mut line).unwrap() == 0 {
break;
}
total += 1;
let skipped = total < skip;
if !skipped {
lines.push(line);
if total % BATCH_SIZE == 0 {
s.send(Work::LINES(total, lines)).unwrap();
lines = Vec::new();
}
}
if total % PROGRESS_COUNT == 0 {
let status = if skipped { "skipped" } else { "" };
eprintln!("# {} {}", status, total);
}
}
if !lines.is_empty() {
s.send(Work::LINES(total, lines)).unwrap();
}
(true, total)
}
/// Worker loop: receives line batches, filters them, and appends accepted
/// statements to `<name>.nt.bz2` (bzip2, best compression).
///
/// With `labels`, accepted labels also go to `labels_<name>.bz2`; with
/// `statement_counts`, per-entity direct-statement counts are accumulated
/// and shipped back through `result_sender` once `Work::DONE` arrives.
fn consume(
    name: String,
    work_receiver: Receiver<Work>,
    result_sender: Sender<WorkResult>,
    labels: bool,
    statement_counts: bool,
) {
    // Each worker gets its own copy of the compiled regex.
    let regex = RE.clone();
    let lines_path = format!("{}.nt.bz2", name);
    let lines_file = File::create(&lines_path)
        .unwrap_or_else(|_| panic!("unable to create file: {}", &lines_path));
    let mut lines_encoder = BzEncoder::new(BufWriter::new(lines_file), Compression::best());
    let mut labels_encoder = if labels {
        let labels_path = format!("labels_{}.bz2", name);
        let labels_file = File::create(&labels_path)
            .unwrap_or_else(|_| panic!("unable to create file: {}", &labels_path));
        Some(BzEncoder::new(
            BufWriter::new(labels_file),
            Compression::best(),
        ))
    } else {
        None
    };
    let mut statement_counter = if statement_counts {
        Some(HashMap::new())
    } else {
        None
    };
    loop {
        match work_receiver.recv().unwrap() {
            Work::LINES(number, lines) => {
                for line in lines {
                    handle(
                        &mut lines_encoder,
                        labels_encoder.as_mut(),
                        statement_counter.as_mut(),
                        number,
                        line,
                        // FIX: this argument was corrupted to `®ex` (an HTML
                        // entity mangling of `&regex`) and did not compile.
                        &regex,
                    );
                }
                lines_encoder.flush().unwrap();
                if let Some(labels_encoder) = labels_encoder.as_mut() {
                    labels_encoder.flush().unwrap()
                }
            }
            Work::DONE => {
                eprintln!("# stopping thread {}", name);
                lines_encoder.try_finish().unwrap();
                if let Some(labels_encoder) = labels_encoder.as_mut() {
                    labels_encoder.try_finish().unwrap()
                }
                result_sender
                    .send(WorkResult {
                        statement_counts: statement_counter,
                    })
                    .unwrap();
                return;
            }
        }
    }
}
fn handle<T: Write, U: Write>(
lines_writer: &mut T,
labels_writer: Option<&mut U>,
statement_counter: Option<&mut HashMap<String, u64>>,
number: u64,
line: String,
regex: &Regex,
) -> Option<()> {
let statement = parse(number, &line, regex);
maybe_write_line(lines_writer, &line, statement);
let id = entity(statement.subject)?;
maybe_count_statement(statement_counter, id, statement);
maybe_write_label(labels_writer, id, statement);
None
}
fn maybe_write_line<T: Write>(lines_writer: &mut T, line: &str, statement: Statement) {
if !is_acceptable(statement) {
return;
}
lines_writer.write_all(line.as_bytes()).unwrap();
}
fn maybe_write_label<T: Write>(
labels_writer: Option<&mut T>,
id: &str,
statement: Statement,
) -> Option<()> {
let labels_writer = labels_writer?;
let label = label(statement)?;
labels_writer
.write_fmt(format_args!("{} {}\n", id, label))
.unwrap();
None
}
fn maybe_count_statement(
statement_counter: Option<&mut HashMap<String, u64>>,
id: &str,
statement: Statement,
) -> Option<()> {
let statement_counter = statement_counter?;
direct_property(statement.predicate)?;
*statement_counter.entry(id.to_string()).or_insert(0) += 1;
None
}
fn is_acceptable(statement: Statement) -> bool {
if PROPERTIES.contains(statement.predicate)
|| IDENTIFIER_PROPERTIES.contains(statement.predicate)
{
return false;
}
match statement.subject {
Subject::Blank(_) => return false,
Subject::IRI(iri) if ignored_subject(iri) => return false,
_ => (),
}
match statement.object {
Object::Blank(_) => return false,
Object::Literal(_, Extra::Lang(lang)) if !LANGUAGES.contains(lang) => return false,
// non-Earth geo coordinates are not supported by some triple stores
Object::Literal(
literal,
Extra::Type("http://www.opengis.net/ont/geosparql#wktLiteral"),
) if literal.starts_with('<') => return false,
_ => (),
}
true
}
fn label(statement: Statement) -> Option<String> {
if !LABELS.contains(statement.predicate) {
return None;
}
if let Object::Literal(label, Extra::Lang(lang)) = statement.object {
if !LANGUAGES.contains(lang) {
return None;
}
return Some(unescape(label));
}
None
}
static ENTITY_IRI_PREFIX: &str = "http://www.wikidata.org/entity/Q";
fn entity(subject: Subject) -> Option<&str> {
if let Subject::IRI(iri) = subject {
iri.strip_prefix(ENTITY_IRI_PREFIX)
} else {
None
}
}
static DIRECT_PROPERTY_IRI_PREFIX: &str = "http://www.wikidata.org/prop/direct/";
fn direct_property(predicate: &str) -> Option<&str> {
predicate.strip_prefix(DIRECT_PROPERTY_IRI_PREFIX)
}
pub fn unescape(s: &str) -> String {
let mut chars = s.chars().enumerate();
let mut res = String::with_capacity(s.len());
while let Some((idx, c)) = chars.next() {
if c == '\\' {
match chars.next() {
None => {
panic!("invalid escape at {} in {}", idx, s);
}
Some((idx, c2)) => {
res.push(match c2 {
't' => '\t',
'b' => '\u{08}',
'n' => '\n',
'r' => '\r',
'f' => '\u{0C}',
'\\' => '\\',
'u' => match parse_unicode(&mut chars, 4) {
Ok(c3) => c3,
Err(err) => {
panic!("invalid escape {}{} at {} in {}: {}", c, c2, idx, s, err);
}
},
'U' => match parse_unicode(&mut chars, 8) {
Ok(c3) => c3,
Err(err) => {
panic!("invalid escape {}{} at {} in {}: {}", c, c2, idx, s, err);
}
},
_ => {
panic!("invalid escape {}{} at {} in {}", c, c2, idx, s);
}
});
continue;
}
};
}
res.push(c);
}
res
}
fn parse_unicode<I>(chars: &mut I, count: usize) -> Result<char, String>
where
I: Iterator<Item = (usize, char)>,
{
let unicode_seq: String = chars.take(count).map(|(_, c)| c).collect();
u32::from_str_radix(&unicode_seq, 16)
.map_err(|e| format!("could not parse {} as u32 hex: {}", unicode_seq, e))
.and_then(|u| {
std::char::from_u32(u).ok_or_else(|| format!("could not parse {} as a unicode char", u))
})
}
fn main() {
let opts: Opts = Opts::parse();
let labels = opts.labels;
let statement_counts = opts.statement_counts;
let running = Arc::new(AtomicBool::new(true));
let r = running.clone();
ctrlc::set_handler(move || {
if r.load(Ordering::SeqCst) {
exit(1);
}
r.store(false, Ordering::SeqCst);
})
.expect("failed to set Ctrl-C handler");
let start = Instant::now();
let (work_sender, work_receiver) = bounded::<Work>(0);
let (result_sender, result_receiver) = unbounded();
let mut threads = Vec::new();
let thread_count = opts.threads.unwrap_or_else(|| num_cpus::get() * 2);
for id in 1..=thread_count {
let work_receiver = work_receiver.clone();
let result_sender = result_sender.clone();
threads.push(thread::spawn(move || {
consume(
id.to_string(),
work_receiver,
result_sender,
labels,
statement_counts,
)
}));
}
let mut exit_code = 0;
for path in opts.paths {
let file = File::open(&path).expect("can't open file");
let decoder = BzDecoder::new(BufReader::new(file));
eprintln!("# processing {}", path);
let (finished, count) = produce(running.clone(), opts.skip, decoder, &work_sender);
eprintln!("# processed {}: {}", path, count);
if !finished {
exit_code = 1;
break;
}
}
for _ in &threads {
work_sender.send(Work::DONE).unwrap();
}
let mut statement_counter = HashMap::new();
let mut result_count = 0;
for result in result_receiver.iter() {
if let Some(statement_counts) = result.statement_counts {
for (id, count) in statement_counts.iter() {
*statement_counter.entry(id.to_string()).or_insert(0) += count;
}
}
result_count += 1;
if result_count == thread_count {
break;
}
}
if statement_counts {
eprintln!("# entities: {}", statement_counter.len());
let path = "statement_counts.bz2";
let file = File::create(path).unwrap_or_else(|_| panic!("unable to create file: {}", path));
let mut encoder = BzEncoder::new(BufWriter::new(file), Compression::best());
for (id, count) in statement_counter.iter() {
encoder
.write_fmt(format_args!("{} {}\n", id, count))
.unwrap();
}
encoder.try_finish().unwrap();
}
let duration = start.elapsed();
eprintln!("# took {:?}", duration);
exit(exit_code);
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
use std::fs::read_to_string;
use std::io::{self, Lines};
use std::path::{Path, PathBuf};
#[test]
fn test_literal_with_type() {
let line = r#"<http://www.wikidata.org/entity/Q1644> <http://www.wikidata.org/prop/direct/P2043> "+1094.26"^^<http://www.w3.org/2001/XMLSchema#decimal> ."#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::IRI("http://www.wikidata.org/entity/Q1644"),
predicate: "http://www.wikidata.org/prop/direct/P2043",
object: Object::Literal(
"+1094.26",
Extra::Type("http://www.w3.org/2001/XMLSchema#decimal")
)
}
);
}
#[test]
fn test_literal_with_lang() {
let line = r#"<http://www.wikidata.org/entity/Q177> <http://schema.org/name> "pizza"@en ."#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::IRI("http://www.wikidata.org/entity/Q177"),
predicate: "http://schema.org/name",
object: Object::Literal("pizza", Extra::Lang("en"))
}
);
}
#[test]
fn test_literal() {
let line = r#"<http://www.wikidata.org/entity/Q177> <http://www.wikidata.org/prop/direct/P373> "Pizzas" ."#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::IRI("http://www.wikidata.org/entity/Q177"),
predicate: "http://www.wikidata.org/prop/direct/P373",
object: Object::Literal("Pizzas", Extra::None)
}
);
}
#[test]
fn test_blank_subject() {
let line = r#"_:foo <bar> <baz>"#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::Blank("foo"),
predicate: "bar",
object: Object::IRI("baz")
}
);
}
#[test]
fn test_blank_object() {
let line = r#"<foo> <bar> _:baz"#;
assert_eq!(
parse(1, line, &RE),
Statement {
subject: Subject::IRI("foo"),
predicate: "bar",
object: Object::Blank("baz")
}
);
}
#[test]
fn test_statement_count() {
let a = format!("{}a", ENTITY_IRI_PREFIX);
let b = format!("{}b", ENTITY_IRI_PREFIX);
let first_predicate = format!("{}first", DIRECT_PROPERTY_IRI_PREFIX);
let second_predicate = "second";
let third_predicate = format!("{}third", DIRECT_PROPERTY_IRI_PREFIX);
let first = Statement {
subject: Subject::IRI(a.as_str()),
predicate: first_predicate.as_str(),
object: Object::IRI(""),
};
let second = Statement {
subject: Subject::IRI(b.as_str()),
predicate: second_predicate,
object: Object::IRI(""),
};
let third = Statement {
subject: Subject::IRI(a.as_str()),
predicate: third_predicate.as_str(),
object: Object::IRI(""),
};
let mut counter = HashMap::new();
maybe_count_statement(Some(&mut counter), "a", first);
maybe_count_statement(Some(&mut counter), "b", second);
maybe_count_statement(Some(&mut counter), "a", third);
assert_eq!(counter.len(), 1);
assert_eq!(counter.get("a"), Some(&2));
assert_eq!(counter.get("b"), None);
}
#[test]
fn test_geo_literals() {
assert!(is_acceptable(parse(
1,
r#"<foo> <bar> "Point(4.6681 50.6411)"^^<http://www.opengis.net/ont/geosparql#wktLiteral> ."#,
&RE,
)));
assert!(!is_acceptable(parse(
1,
r#"<foo> <bar> "<http://www.wikidata.org/entity/Q405> Point(-141.6 42.6)"^^<http://www.opengis.net/ont/geosparql#wktLiteral> ."#,
&RE,
)));
}
fn read_lines<P>(filename: P) -> io::Result<Lines<BufReader<File>>>
where
P: AsRef<Path>,
{
let file = File::open(filename)?;
Ok(BufReader::new(file).lines())
}
#[test]
fn test_full() -> Result<(), ()> |
}
| {
let dir = env!("CARGO_MANIFEST_DIR");
let mut in_path = PathBuf::from(dir);
in_path.push("test.in.rdf");
let in_path = in_path.as_os_str().to_str().unwrap();
let mut out_path = PathBuf::from(dir);
out_path.push("test.out.rdf");
let out_path = out_path.as_os_str().to_str().unwrap();
let mut lines_writer = Vec::new();
let mut labels_writer = Vec::new();
for (line, number) in read_lines(in_path).unwrap().zip(1u64..) {
let mut line = line.unwrap();
line.push('\n');
handle(
&mut lines_writer,
Some(&mut labels_writer),
None,
number,
line,
&RE,
);
}
let expected = read_to_string(out_path).unwrap();
assert_eq!(String::from_utf8(lines_writer).unwrap(), expected);
Ok(())
} | identifier_body |
lease_status.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: storage/lease_status.proto
package storage
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import cockroach_roachpb2 "github.com/cockroachdb/cockroach/pkg/roachpb"
import cockroach_util_hlc "github.com/cockroachdb/cockroach/pkg/util/hlc"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
type LeaseState int32
const (
// ERROR indicates that the lease can't be used or acquired.
LeaseState_ERROR LeaseState = 0
// VALID indicates that the lease can be used.
LeaseState_VALID LeaseState = 1
// STASIS indicates that the lease has not expired, but can't be used.
LeaseState_STASIS LeaseState = 2
// EXPIRED indicates that the lease can't be used.
LeaseState_EXPIRED LeaseState = 3
// PROSCRIBED indicates that the lease's proposed timestamp is earlier than
// allowed.
LeaseState_PROSCRIBED LeaseState = 4
)
var LeaseState_name = map[int32]string{
0: "ERROR",
1: "VALID",
2: "STASIS",
3: "EXPIRED",
4: "PROSCRIBED",
}
var LeaseState_value = map[string]int32{
"ERROR": 0,
"VALID": 1,
"STASIS": 2,
"EXPIRED": 3,
"PROSCRIBED": 4,
}
func (x LeaseState) String() string {
return proto.EnumName(LeaseState_name, int32(x))
}
func (LeaseState) EnumDescriptor() ([]byte, []int) { return fileDescriptorLeaseStatus, []int{0} }
// LeaseStatus holds the lease state, the timestamp at which the state
// is accurate, the lease and optionally the liveness if the lease is
// epoch-based.
type LeaseStatus struct {
// Lease which this status describes.
Lease cockroach_roachpb2.Lease `protobuf:"bytes,1,opt,name=lease" json:"lease"`
// Timestamp that the lease was evaluated at.
Timestamp cockroach_util_hlc.Timestamp `protobuf:"bytes,2,opt,name=timestamp" json:"timestamp"`
// State of the lease at timestamp.
State LeaseState `protobuf:"varint,3,opt,name=state,proto3,enum=cockroach.storage.LeaseState" json:"state,omitempty"`
// Liveness if this is an epoch-based lease.
Liveness *Liveness `protobuf:"bytes,4,opt,name=liveness" json:"liveness,omitempty"`
}
func (m *LeaseStatus) Reset() { *m = LeaseStatus{} }
func (m *LeaseStatus) String() string { return proto.CompactTextString(m) }
func (*LeaseStatus) ProtoMessage() {}
func (*LeaseStatus) Descriptor() ([]byte, []int) { return fileDescriptorLeaseStatus, []int{0} }
func init() {
proto.RegisterType((*LeaseStatus)(nil), "cockroach.storage.LeaseStatus")
proto.RegisterEnum("cockroach.storage.LeaseState", LeaseState_name, LeaseState_value)
}
func (m *LeaseStatus) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *LeaseStatus) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i | i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Lease.Size()))
n1, err := m.Lease.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n1
dAtA[i] = 0x12
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Timestamp.Size()))
n2, err := m.Timestamp.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n2
if m.State != 0 {
dAtA[i] = 0x18
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.State))
}
if m.Liveness != nil {
dAtA[i] = 0x22
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Liveness.Size()))
n3, err := m.Liveness.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n3
}
return i, nil
}
func encodeVarintLeaseStatus(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *LeaseStatus) Size() (n int) {
var l int
_ = l
l = m.Lease.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
l = m.Timestamp.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
if m.State != 0 {
n += 1 + sovLeaseStatus(uint64(m.State))
}
if m.Liveness != nil {
l = m.Liveness.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
}
return n
}
func sovLeaseStatus(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozLeaseStatus(x uint64) (n int) {
return sovLeaseStatus(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (m *LeaseStatus) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: LeaseStatus: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: LeaseStatus: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Lease", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Lease.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Timestamp", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Timestamp.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field State", wireType)
}
m.State = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.State |= (LeaseState(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 4:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Liveness", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Liveness == nil {
m.Liveness = &Liveness{}
}
if err := m.Liveness.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipLeaseStatus(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthLeaseStatus
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipLeaseStatus(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthLeaseStatus
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipLeaseStatus(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthLeaseStatus = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowLeaseStatus = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("storage/lease_status.proto", fileDescriptorLeaseStatus) }
var fileDescriptorLeaseStatus = []byte{
// 339 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0xcf, 0x4a, 0xfb, 0x40,
0x10, 0xc7, 0xb3, 0xfd, 0xfb, 0xeb, 0x14, 0x4a, 0x7e, 0x8b, 0x48, 0x88, 0x18, 0xab, 0xa7, 0xe2,
0x61, 0x03, 0x56, 0xf0, 0xdc, 0xda, 0x1c, 0x82, 0x85, 0x96, 0x4d, 0x11, 0xf1, 0x22, 0xdb, 0xb8,
0xb4, 0xc5, 0xd4, 0x2d, 0xcd, 0xd6, 0xe7, 0xf0, 0xb1, 0x7a, 0xf4, 0xe8, 0x49, 0x34, 0xbe, 0x82,
0x0f, 0x20, 0xd9, 0x6c, 0x13, 0x05, 0x6f, 0x93, 0xec, 0xe7, 0x3b, 0xf3, 0x99, 0x01, 0x3b, 0x96,
0x62, 0xcd, 0x66, 0xdc, 0x8d, 0x38, 0x8b, 0xf9, 0x5d, 0x2c, 0x99, 0xdc, 0xc4, 0x64, 0xb5, 0x16,
0x52, 0xe0, 0xff, 0xa1, 0x08, 0x1f, 0xd6, 0x82, 0x85, 0x73, 0xa2, 0x29, 0x1b, 0xab, 0xcf, 0xd5,
0xd4, 0xbd, 0x67, 0x92, 0x65, 0x98, 0xbd, 0x9f, 0xb7, 0x58, 0x3c, 0xf1, 0x47, 0x1e, 0xeb, 0xb8,
0x6d, 0x6d, 0xe4, 0x22, 0x72, 0xe7, 0x51, 0xe8, 0xca, 0xc5, 0x92, 0xc7, 0x92, 0x2d, 0x57, 0xfa,
0x65, 0x6f, 0x26, 0x66, 0x42, 0x95, 0x6e, 0x5a, 0x65, 0x7f, 0x4f, 0xbe, 0x10, 0x34, 0x87, 0xa9,
0x45, 0xa0, 0x24, 0xf0, 0x39, 0x54, 0x95, 0x94, 0x85, 0xda, 0xa8, 0xd3, 0x3c, 0xb3, 0x48, 0xa1,
0xa3, 0x2d, 0x88, 0xc2, 0xfb, 0x95, 0xed, 0xdb, 0x91, 0x41, 0x33, 0x18, 0xf7, 0xa0, 0x91, 0x8f,
0xb3, 0x4a, 0x2a, 0x79, 0xf8, 0x23, 0x99, 0x3a, 0x91, 0x79, 0x14, 0x92, 0xc9, 0x0e, 0xd2, 0xf1,
0x22, 0x85, 0xbb, 0x50, 0x4d, 0xef, 0xc0, 0xad, 0x72, 0x1b, 0x75, 0x5a, 0xbf, 0xe2, 0x7a, 0x55,
0x92, 0x7b, 0x72, 0x9a, 0xb1, 0xf8, 0x02, 0xfe, 0xed, 0xf6, 0xb7, 0x2a, 0x6a, 0xec, 0xc1, 0x5f,
0x39, 0x8d, 0xd0, 0x1c, 0x3e, 0xbd, 0x02, 0x28, 0xba, 0xe1, 0x06, 0x54, 0x3d, 0x4a, 0x47, 0xd4,
0x34, 0xd2, 0xf2, 0xba, 0x37, 0xf4, 0x07, 0x26, 0xc2, 0x00, 0xb5, 0x60, 0xd2, 0x0b, 0xfc, 0xc0,
0x2c, 0xe1, 0x26, 0xd4, 0xbd, 0x9b, 0xb1, 0x4f, 0xbd, 0x81, 0x59, 0xc6, 0x2d, 0x80, 0x31, 0x1d,
0x05, 0x97, 0xd4, 0xef, 0x7b, 0x03, 0xb3, 0xd2, 0x3f, 0xde, 0x7e, 0x38, 0xc6, 0x36, 0x71, 0xd0,
0x4b, 0xe2, 0xa0, 0xd7, 0xc4, 0x41, 0xef, 0x89, 0x83, 0x9e, 0x3f, 0x1d, 0xe3, 0xb6, 0xae, 0x15,
0xa6, 0x35, 0x75, 0xed, 0xee, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x96, 0xe1, 0xb7, 0x3c, 0xfa,
0x01, 0x00, 0x00,
} | var l int
_ = l
dAtA[i] = 0xa
i++ | random_line_split |
lease_status.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: storage/lease_status.proto
package storage
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import cockroach_roachpb2 "github.com/cockroachdb/cockroach/pkg/roachpb"
import cockroach_util_hlc "github.com/cockroachdb/cockroach/pkg/util/hlc"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
type LeaseState int32
const (
// ERROR indicates that the lease can't be used or acquired.
LeaseState_ERROR LeaseState = 0
// VALID indicates that the lease can be used.
LeaseState_VALID LeaseState = 1
// STASIS indicates that the lease has not expired, but can't be used.
LeaseState_STASIS LeaseState = 2
// EXPIRED indicates that the lease can't be used.
LeaseState_EXPIRED LeaseState = 3
// PROSCRIBED indicates that the lease's proposed timestamp is earlier than
// allowed.
LeaseState_PROSCRIBED LeaseState = 4
)
var LeaseState_name = map[int32]string{
0: "ERROR",
1: "VALID",
2: "STASIS",
3: "EXPIRED",
4: "PROSCRIBED",
}
var LeaseState_value = map[string]int32{
"ERROR": 0,
"VALID": 1,
"STASIS": 2,
"EXPIRED": 3,
"PROSCRIBED": 4,
}
func (x LeaseState) String() string {
return proto.EnumName(LeaseState_name, int32(x))
}
func (LeaseState) EnumDescriptor() ([]byte, []int) { return fileDescriptorLeaseStatus, []int{0} }
// LeaseStatus holds the lease state, the timestamp at which the state
// is accurate, the lease and optionally the liveness if the lease is
// epoch-based.
type LeaseStatus struct {
// Lease which this status describes.
Lease cockroach_roachpb2.Lease `protobuf:"bytes,1,opt,name=lease" json:"lease"`
// Timestamp that the lease was evaluated at.
Timestamp cockroach_util_hlc.Timestamp `protobuf:"bytes,2,opt,name=timestamp" json:"timestamp"`
// State of the lease at timestamp.
State LeaseState `protobuf:"varint,3,opt,name=state,proto3,enum=cockroach.storage.LeaseState" json:"state,omitempty"`
// Liveness if this is an epoch-based lease.
Liveness *Liveness `protobuf:"bytes,4,opt,name=liveness" json:"liveness,omitempty"`
}
func (m *LeaseStatus) Reset() { *m = LeaseStatus{} }
func (m *LeaseStatus) String() string { return proto.CompactTextString(m) }
func (*LeaseStatus) ProtoMessage() {}
func (*LeaseStatus) Descriptor() ([]byte, []int) { return fileDescriptorLeaseStatus, []int{0} }
func init() {
proto.RegisterType((*LeaseStatus)(nil), "cockroach.storage.LeaseStatus")
proto.RegisterEnum("cockroach.storage.LeaseState", LeaseState_name, LeaseState_value)
}
func (m *LeaseStatus) | () (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *LeaseStatus) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Lease.Size()))
n1, err := m.Lease.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n1
dAtA[i] = 0x12
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Timestamp.Size()))
n2, err := m.Timestamp.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n2
if m.State != 0 {
dAtA[i] = 0x18
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.State))
}
if m.Liveness != nil {
dAtA[i] = 0x22
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Liveness.Size()))
n3, err := m.Liveness.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n3
}
return i, nil
}
func encodeVarintLeaseStatus(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *LeaseStatus) Size() (n int) {
var l int
_ = l
l = m.Lease.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
l = m.Timestamp.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
if m.State != 0 {
n += 1 + sovLeaseStatus(uint64(m.State))
}
if m.Liveness != nil {
l = m.Liveness.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
}
return n
}
func sovLeaseStatus(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozLeaseStatus(x uint64) (n int) {
return sovLeaseStatus(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (m *LeaseStatus) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: LeaseStatus: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: LeaseStatus: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Lease", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Lease.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Timestamp", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Timestamp.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field State", wireType)
}
m.State = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.State |= (LeaseState(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 4:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Liveness", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Liveness == nil {
m.Liveness = &Liveness{}
}
if err := m.Liveness.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipLeaseStatus(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthLeaseStatus
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipLeaseStatus(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthLeaseStatus
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipLeaseStatus(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthLeaseStatus = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowLeaseStatus = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("storage/lease_status.proto", fileDescriptorLeaseStatus) }
var fileDescriptorLeaseStatus = []byte{
// 339 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0xcf, 0x4a, 0xfb, 0x40,
0x10, 0xc7, 0xb3, 0xfd, 0xfb, 0xeb, 0x14, 0x4a, 0x7e, 0x8b, 0x48, 0x88, 0x18, 0xab, 0xa7, 0xe2,
0x61, 0x03, 0x56, 0xf0, 0xdc, 0xda, 0x1c, 0x82, 0x85, 0x96, 0x4d, 0x11, 0xf1, 0x22, 0xdb, 0xb8,
0xb4, 0xc5, 0xd4, 0x2d, 0xcd, 0xd6, 0xe7, 0xf0, 0xb1, 0x7a, 0xf4, 0xe8, 0x49, 0x34, 0xbe, 0x82,
0x0f, 0x20, 0xd9, 0x6c, 0x13, 0x05, 0x6f, 0x93, 0xec, 0xe7, 0x3b, 0xf3, 0x99, 0x01, 0x3b, 0x96,
0x62, 0xcd, 0x66, 0xdc, 0x8d, 0x38, 0x8b, 0xf9, 0x5d, 0x2c, 0x99, 0xdc, 0xc4, 0x64, 0xb5, 0x16,
0x52, 0xe0, 0xff, 0xa1, 0x08, 0x1f, 0xd6, 0x82, 0x85, 0x73, 0xa2, 0x29, 0x1b, 0xab, 0xcf, 0xd5,
0xd4, 0xbd, 0x67, 0x92, 0x65, 0x98, 0xbd, 0x9f, 0xb7, 0x58, 0x3c, 0xf1, 0x47, 0x1e, 0xeb, 0xb8,
0x6d, 0x6d, 0xe4, 0x22, 0x72, 0xe7, 0x51, 0xe8, 0xca, 0xc5, 0x92, 0xc7, 0x92, 0x2d, 0x57, 0xfa,
0x65, 0x6f, 0x26, 0x66, 0x42, 0x95, 0x6e, 0x5a, 0x65, 0x7f, 0x4f, 0xbe, 0x10, 0x34, 0x87, 0xa9,
0x45, 0xa0, 0x24, 0xf0, 0x39, 0x54, 0x95, 0x94, 0x85, 0xda, 0xa8, 0xd3, 0x3c, 0xb3, 0x48, 0xa1,
0xa3, 0x2d, 0x88, 0xc2, 0xfb, 0x95, 0xed, 0xdb, 0x91, 0x41, 0x33, 0x18, 0xf7, 0xa0, 0x91, 0x8f,
0xb3, 0x4a, 0x2a, 0x79, 0xf8, 0x23, 0x99, 0x3a, 0x91, 0x79, 0x14, 0x92, 0xc9, 0x0e, 0xd2, 0xf1,
0x22, 0x85, 0xbb, 0x50, 0x4d, 0xef, 0xc0, 0xad, 0x72, 0x1b, 0x75, 0x5a, 0xbf, 0xe2, 0x7a, 0x55,
0x92, 0x7b, 0x72, 0x9a, 0xb1, 0xf8, 0x02, 0xfe, 0xed, 0xf6, 0xb7, 0x2a, 0x6a, 0xec, 0xc1, 0x5f,
0x39, 0x8d, 0xd0, 0x1c, 0x3e, 0xbd, 0x02, 0x28, 0xba, 0xe1, 0x06, 0x54, 0x3d, 0x4a, 0x47, 0xd4,
0x34, 0xd2, 0xf2, 0xba, 0x37, 0xf4, 0x07, 0x26, 0xc2, 0x00, 0xb5, 0x60, 0xd2, 0x0b, 0xfc, 0xc0,
0x2c, 0xe1, 0x26, 0xd4, 0xbd, 0x9b, 0xb1, 0x4f, 0xbd, 0x81, 0x59, 0xc6, 0x2d, 0x80, 0x31, 0x1d,
0x05, 0x97, 0xd4, 0xef, 0x7b, 0x03, 0xb3, 0xd2, 0x3f, 0xde, 0x7e, 0x38, 0xc6, 0x36, 0x71, 0xd0,
0x4b, 0xe2, 0xa0, 0xd7, 0xc4, 0x41, 0xef, 0x89, 0x83, 0x9e, 0x3f, 0x1d, 0xe3, 0xb6, 0xae, 0x15,
0xa6, 0x35, 0x75, 0xed, 0xee, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x96, 0xe1, 0xb7, 0x3c, 0xfa,
0x01, 0x00, 0x00,
}
| Marshal | identifier_name |
lease_status.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: storage/lease_status.proto
package storage
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import cockroach_roachpb2 "github.com/cockroachdb/cockroach/pkg/roachpb"
import cockroach_util_hlc "github.com/cockroachdb/cockroach/pkg/util/hlc"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
type LeaseState int32
const (
// ERROR indicates that the lease can't be used or acquired.
LeaseState_ERROR LeaseState = 0
// VALID indicates that the lease can be used.
LeaseState_VALID LeaseState = 1
// STASIS indicates that the lease has not expired, but can't be used.
LeaseState_STASIS LeaseState = 2
// EXPIRED indicates that the lease can't be used.
LeaseState_EXPIRED LeaseState = 3
// PROSCRIBED indicates that the lease's proposed timestamp is earlier than
// allowed.
LeaseState_PROSCRIBED LeaseState = 4
)
var LeaseState_name = map[int32]string{
0: "ERROR",
1: "VALID",
2: "STASIS",
3: "EXPIRED",
4: "PROSCRIBED",
}
var LeaseState_value = map[string]int32{
"ERROR": 0,
"VALID": 1,
"STASIS": 2,
"EXPIRED": 3,
"PROSCRIBED": 4,
}
func (x LeaseState) String() string {
return proto.EnumName(LeaseState_name, int32(x))
}
func (LeaseState) EnumDescriptor() ([]byte, []int) { return fileDescriptorLeaseStatus, []int{0} }
// LeaseStatus holds the lease state, the timestamp at which the state
// is accurate, the lease and optionally the liveness if the lease is
// epoch-based.
type LeaseStatus struct {
// Lease which this status describes.
Lease cockroach_roachpb2.Lease `protobuf:"bytes,1,opt,name=lease" json:"lease"`
// Timestamp that the lease was evaluated at.
Timestamp cockroach_util_hlc.Timestamp `protobuf:"bytes,2,opt,name=timestamp" json:"timestamp"`
// State of the lease at timestamp.
State LeaseState `protobuf:"varint,3,opt,name=state,proto3,enum=cockroach.storage.LeaseState" json:"state,omitempty"`
// Liveness if this is an epoch-based lease.
Liveness *Liveness `protobuf:"bytes,4,opt,name=liveness" json:"liveness,omitempty"`
}
func (m *LeaseStatus) Reset() { *m = LeaseStatus{} }
func (m *LeaseStatus) String() string { return proto.CompactTextString(m) }
func (*LeaseStatus) ProtoMessage() {}
func (*LeaseStatus) Descriptor() ([]byte, []int) { return fileDescriptorLeaseStatus, []int{0} }
func init() {
proto.RegisterType((*LeaseStatus)(nil), "cockroach.storage.LeaseStatus")
proto.RegisterEnum("cockroach.storage.LeaseState", LeaseState_name, LeaseState_value)
}
func (m *LeaseStatus) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *LeaseStatus) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Lease.Size()))
n1, err := m.Lease.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n1
dAtA[i] = 0x12
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Timestamp.Size()))
n2, err := m.Timestamp.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n2
if m.State != 0 {
dAtA[i] = 0x18
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.State))
}
if m.Liveness != nil {
dAtA[i] = 0x22
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Liveness.Size()))
n3, err := m.Liveness.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n3
}
return i, nil
}
func encodeVarintLeaseStatus(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *LeaseStatus) Size() (n int) {
var l int
_ = l
l = m.Lease.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
l = m.Timestamp.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
if m.State != 0 {
n += 1 + sovLeaseStatus(uint64(m.State))
}
if m.Liveness != nil {
l = m.Liveness.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
}
return n
}
func sovLeaseStatus(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozLeaseStatus(x uint64) (n int) {
return sovLeaseStatus(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (m *LeaseStatus) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: LeaseStatus: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: LeaseStatus: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Lease", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Lease.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Timestamp", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Timestamp.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field State", wireType)
}
m.State = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.State |= (LeaseState(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 4:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Liveness", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Liveness == nil {
m.Liveness = &Liveness{}
}
if err := m.Liveness.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipLeaseStatus(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthLeaseStatus
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipLeaseStatus(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l |
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthLeaseStatus
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipLeaseStatus(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthLeaseStatus = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowLeaseStatus = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("storage/lease_status.proto", fileDescriptorLeaseStatus) }
var fileDescriptorLeaseStatus = []byte{
// 339 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0xcf, 0x4a, 0xfb, 0x40,
0x10, 0xc7, 0xb3, 0xfd, 0xfb, 0xeb, 0x14, 0x4a, 0x7e, 0x8b, 0x48, 0x88, 0x18, 0xab, 0xa7, 0xe2,
0x61, 0x03, 0x56, 0xf0, 0xdc, 0xda, 0x1c, 0x82, 0x85, 0x96, 0x4d, 0x11, 0xf1, 0x22, 0xdb, 0xb8,
0xb4, 0xc5, 0xd4, 0x2d, 0xcd, 0xd6, 0xe7, 0xf0, 0xb1, 0x7a, 0xf4, 0xe8, 0x49, 0x34, 0xbe, 0x82,
0x0f, 0x20, 0xd9, 0x6c, 0x13, 0x05, 0x6f, 0x93, 0xec, 0xe7, 0x3b, 0xf3, 0x99, 0x01, 0x3b, 0x96,
0x62, 0xcd, 0x66, 0xdc, 0x8d, 0x38, 0x8b, 0xf9, 0x5d, 0x2c, 0x99, 0xdc, 0xc4, 0x64, 0xb5, 0x16,
0x52, 0xe0, 0xff, 0xa1, 0x08, 0x1f, 0xd6, 0x82, 0x85, 0x73, 0xa2, 0x29, 0x1b, 0xab, 0xcf, 0xd5,
0xd4, 0xbd, 0x67, 0x92, 0x65, 0x98, 0xbd, 0x9f, 0xb7, 0x58, 0x3c, 0xf1, 0x47, 0x1e, 0xeb, 0xb8,
0x6d, 0x6d, 0xe4, 0x22, 0x72, 0xe7, 0x51, 0xe8, 0xca, 0xc5, 0x92, 0xc7, 0x92, 0x2d, 0x57, 0xfa,
0x65, 0x6f, 0x26, 0x66, 0x42, 0x95, 0x6e, 0x5a, 0x65, 0x7f, 0x4f, 0xbe, 0x10, 0x34, 0x87, 0xa9,
0x45, 0xa0, 0x24, 0xf0, 0x39, 0x54, 0x95, 0x94, 0x85, 0xda, 0xa8, 0xd3, 0x3c, 0xb3, 0x48, 0xa1,
0xa3, 0x2d, 0x88, 0xc2, 0xfb, 0x95, 0xed, 0xdb, 0x91, 0x41, 0x33, 0x18, 0xf7, 0xa0, 0x91, 0x8f,
0xb3, 0x4a, 0x2a, 0x79, 0xf8, 0x23, 0x99, 0x3a, 0x91, 0x79, 0x14, 0x92, 0xc9, 0x0e, 0xd2, 0xf1,
0x22, 0x85, 0xbb, 0x50, 0x4d, 0xef, 0xc0, 0xad, 0x72, 0x1b, 0x75, 0x5a, 0xbf, 0xe2, 0x7a, 0x55,
0x92, 0x7b, 0x72, 0x9a, 0xb1, 0xf8, 0x02, 0xfe, 0xed, 0xf6, 0xb7, 0x2a, 0x6a, 0xec, 0xc1, 0x5f,
0x39, 0x8d, 0xd0, 0x1c, 0x3e, 0xbd, 0x02, 0x28, 0xba, 0xe1, 0x06, 0x54, 0x3d, 0x4a, 0x47, 0xd4,
0x34, 0xd2, 0xf2, 0xba, 0x37, 0xf4, 0x07, 0x26, 0xc2, 0x00, 0xb5, 0x60, 0xd2, 0x0b, 0xfc, 0xc0,
0x2c, 0xe1, 0x26, 0xd4, 0xbd, 0x9b, 0xb1, 0x4f, 0xbd, 0x81, 0x59, 0xc6, 0x2d, 0x80, 0x31, 0x1d,
0x05, 0x97, 0xd4, 0xef, 0x7b, 0x03, 0xb3, 0xd2, 0x3f, 0xde, 0x7e, 0x38, 0xc6, 0x36, 0x71, 0xd0,
0x4b, 0xe2, 0xa0, 0xd7, 0xc4, 0x41, 0xef, 0x89, 0x83, 0x9e, 0x3f, 0x1d, 0xe3, 0xb6, 0xae, 0x15,
0xa6, 0x35, 0x75, 0xed, 0xee, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x96, 0xe1, 0xb7, 0x3c, 0xfa,
0x01, 0x00, 0x00,
}
| {
return 0, io.ErrUnexpectedEOF
} | conditional_block |
lease_status.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: storage/lease_status.proto
package storage
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import cockroach_roachpb2 "github.com/cockroachdb/cockroach/pkg/roachpb"
import cockroach_util_hlc "github.com/cockroachdb/cockroach/pkg/util/hlc"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
type LeaseState int32
const (
// ERROR indicates that the lease can't be used or acquired.
LeaseState_ERROR LeaseState = 0
// VALID indicates that the lease can be used.
LeaseState_VALID LeaseState = 1
// STASIS indicates that the lease has not expired, but can't be used.
LeaseState_STASIS LeaseState = 2
// EXPIRED indicates that the lease can't be used.
LeaseState_EXPIRED LeaseState = 3
// PROSCRIBED indicates that the lease's proposed timestamp is earlier than
// allowed.
LeaseState_PROSCRIBED LeaseState = 4
)
var LeaseState_name = map[int32]string{
0: "ERROR",
1: "VALID",
2: "STASIS",
3: "EXPIRED",
4: "PROSCRIBED",
}
var LeaseState_value = map[string]int32{
"ERROR": 0,
"VALID": 1,
"STASIS": 2,
"EXPIRED": 3,
"PROSCRIBED": 4,
}
func (x LeaseState) String() string {
return proto.EnumName(LeaseState_name, int32(x))
}
func (LeaseState) EnumDescriptor() ([]byte, []int) { return fileDescriptorLeaseStatus, []int{0} }
// LeaseStatus holds the lease state, the timestamp at which the state
// is accurate, the lease and optionally the liveness if the lease is
// epoch-based.
type LeaseStatus struct {
// Lease which this status describes.
Lease cockroach_roachpb2.Lease `protobuf:"bytes,1,opt,name=lease" json:"lease"`
// Timestamp that the lease was evaluated at.
Timestamp cockroach_util_hlc.Timestamp `protobuf:"bytes,2,opt,name=timestamp" json:"timestamp"`
// State of the lease at timestamp.
State LeaseState `protobuf:"varint,3,opt,name=state,proto3,enum=cockroach.storage.LeaseState" json:"state,omitempty"`
// Liveness if this is an epoch-based lease.
Liveness *Liveness `protobuf:"bytes,4,opt,name=liveness" json:"liveness,omitempty"`
}
func (m *LeaseStatus) Reset() { *m = LeaseStatus{} }
func (m *LeaseStatus) String() string { return proto.CompactTextString(m) }
func (*LeaseStatus) ProtoMessage() {}
func (*LeaseStatus) Descriptor() ([]byte, []int) { return fileDescriptorLeaseStatus, []int{0} }
func init() {
proto.RegisterType((*LeaseStatus)(nil), "cockroach.storage.LeaseStatus")
proto.RegisterEnum("cockroach.storage.LeaseState", LeaseState_name, LeaseState_value)
}
func (m *LeaseStatus) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *LeaseStatus) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
dAtA[i] = 0xa
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Lease.Size()))
n1, err := m.Lease.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n1
dAtA[i] = 0x12
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Timestamp.Size()))
n2, err := m.Timestamp.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n2
if m.State != 0 {
dAtA[i] = 0x18
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.State))
}
if m.Liveness != nil {
dAtA[i] = 0x22
i++
i = encodeVarintLeaseStatus(dAtA, i, uint64(m.Liveness.Size()))
n3, err := m.Liveness.MarshalTo(dAtA[i:])
if err != nil {
return 0, err
}
i += n3
}
return i, nil
}
func encodeVarintLeaseStatus(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *LeaseStatus) Size() (n int) {
var l int
_ = l
l = m.Lease.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
l = m.Timestamp.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
if m.State != 0 {
n += 1 + sovLeaseStatus(uint64(m.State))
}
if m.Liveness != nil {
l = m.Liveness.Size()
n += 1 + l + sovLeaseStatus(uint64(l))
}
return n
}
func sovLeaseStatus(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozLeaseStatus(x uint64) (n int) {
return sovLeaseStatus(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (m *LeaseStatus) Unmarshal(dAtA []byte) error |
func skipLeaseStatus(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthLeaseStatus
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipLeaseStatus(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthLeaseStatus = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowLeaseStatus = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("storage/lease_status.proto", fileDescriptorLeaseStatus) }
var fileDescriptorLeaseStatus = []byte{
// 339 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0xcf, 0x4a, 0xfb, 0x40,
0x10, 0xc7, 0xb3, 0xfd, 0xfb, 0xeb, 0x14, 0x4a, 0x7e, 0x8b, 0x48, 0x88, 0x18, 0xab, 0xa7, 0xe2,
0x61, 0x03, 0x56, 0xf0, 0xdc, 0xda, 0x1c, 0x82, 0x85, 0x96, 0x4d, 0x11, 0xf1, 0x22, 0xdb, 0xb8,
0xb4, 0xc5, 0xd4, 0x2d, 0xcd, 0xd6, 0xe7, 0xf0, 0xb1, 0x7a, 0xf4, 0xe8, 0x49, 0x34, 0xbe, 0x82,
0x0f, 0x20, 0xd9, 0x6c, 0x13, 0x05, 0x6f, 0x93, 0xec, 0xe7, 0x3b, 0xf3, 0x99, 0x01, 0x3b, 0x96,
0x62, 0xcd, 0x66, 0xdc, 0x8d, 0x38, 0x8b, 0xf9, 0x5d, 0x2c, 0x99, 0xdc, 0xc4, 0x64, 0xb5, 0x16,
0x52, 0xe0, 0xff, 0xa1, 0x08, 0x1f, 0xd6, 0x82, 0x85, 0x73, 0xa2, 0x29, 0x1b, 0xab, 0xcf, 0xd5,
0xd4, 0xbd, 0x67, 0x92, 0x65, 0x98, 0xbd, 0x9f, 0xb7, 0x58, 0x3c, 0xf1, 0x47, 0x1e, 0xeb, 0xb8,
0x6d, 0x6d, 0xe4, 0x22, 0x72, 0xe7, 0x51, 0xe8, 0xca, 0xc5, 0x92, 0xc7, 0x92, 0x2d, 0x57, 0xfa,
0x65, 0x6f, 0x26, 0x66, 0x42, 0x95, 0x6e, 0x5a, 0x65, 0x7f, 0x4f, 0xbe, 0x10, 0x34, 0x87, 0xa9,
0x45, 0xa0, 0x24, 0xf0, 0x39, 0x54, 0x95, 0x94, 0x85, 0xda, 0xa8, 0xd3, 0x3c, 0xb3, 0x48, 0xa1,
0xa3, 0x2d, 0x88, 0xc2, 0xfb, 0x95, 0xed, 0xdb, 0x91, 0x41, 0x33, 0x18, 0xf7, 0xa0, 0x91, 0x8f,
0xb3, 0x4a, 0x2a, 0x79, 0xf8, 0x23, 0x99, 0x3a, 0x91, 0x79, 0x14, 0x92, 0xc9, 0x0e, 0xd2, 0xf1,
0x22, 0x85, 0xbb, 0x50, 0x4d, 0xef, 0xc0, 0xad, 0x72, 0x1b, 0x75, 0x5a, 0xbf, 0xe2, 0x7a, 0x55,
0x92, 0x7b, 0x72, 0x9a, 0xb1, 0xf8, 0x02, 0xfe, 0xed, 0xf6, 0xb7, 0x2a, 0x6a, 0xec, 0xc1, 0x5f,
0x39, 0x8d, 0xd0, 0x1c, 0x3e, 0xbd, 0x02, 0x28, 0xba, 0xe1, 0x06, 0x54, 0x3d, 0x4a, 0x47, 0xd4,
0x34, 0xd2, 0xf2, 0xba, 0x37, 0xf4, 0x07, 0x26, 0xc2, 0x00, 0xb5, 0x60, 0xd2, 0x0b, 0xfc, 0xc0,
0x2c, 0xe1, 0x26, 0xd4, 0xbd, 0x9b, 0xb1, 0x4f, 0xbd, 0x81, 0x59, 0xc6, 0x2d, 0x80, 0x31, 0x1d,
0x05, 0x97, 0xd4, 0xef, 0x7b, 0x03, 0xb3, 0xd2, 0x3f, 0xde, 0x7e, 0x38, 0xc6, 0x36, 0x71, 0xd0,
0x4b, 0xe2, 0xa0, 0xd7, 0xc4, 0x41, 0xef, 0x89, 0x83, 0x9e, 0x3f, 0x1d, 0xe3, 0xb6, 0xae, 0x15,
0xa6, 0x35, 0x75, 0xed, 0xee, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x96, 0xe1, 0xb7, 0x3c, 0xfa,
0x01, 0x00, 0x00,
}
| {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: LeaseStatus: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: LeaseStatus: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Lease", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Lease.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Timestamp", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if err := m.Timestamp.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field State", wireType)
}
m.State = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.State |= (LeaseState(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
case 4:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Liveness", wireType)
}
var msglen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowLeaseStatus
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
if msglen < 0 {
return ErrInvalidLengthLeaseStatus
}
postIndex := iNdEx + msglen
if postIndex > l {
return io.ErrUnexpectedEOF
}
if m.Liveness == nil {
m.Liveness = &Liveness{}
}
if err := m.Liveness.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
return err
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipLeaseStatus(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthLeaseStatus
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
} | identifier_body |
writer.rs | //! Defines the [SymCache Converter](`SymCacheConverter`).
use std::collections::btree_map;
use std::collections::BTreeMap;
use std::io::Write;
use indexmap::IndexSet;
use symbolic_common::{Arch, DebugId};
use symbolic_debuginfo::{DebugSession, FileFormat, Function, ObjectLike, Symbol};
use watto::{Pod, StringTable, Writer};
use super::{raw, transform};
use crate::{Error, ErrorKind};
/// The SymCache Converter.
///
/// This can convert data in various source formats to an intermediate representation, which can
/// then be serialized to disk via its [`serialize`](SymCacheConverter::serialize) method.
#[derive(Debug, Default)]
pub struct SymCacheConverter<'a> {
/// Debug identifier of the object file.
debug_id: DebugId,
/// CPU architecture of the object file.
arch: Arch,
/// A flag that indicates that we are currently processing a Windows object, which
/// will inform us if we should undecorate function names.
is_windows_object: bool,
/// A list of transformers that are used to transform each function / source location.
transformers: transform::Transformers<'a>,
string_table: StringTable,
/// The set of all [`raw::File`]s that have been added to this `Converter`.
files: IndexSet<raw::File>,
/// The set of all [`raw::Function`]s that have been added to this `Converter`.
functions: IndexSet<raw::Function>,
/// The set of [`raw::SourceLocation`]s used in this `Converter` that are only used as
/// "call locations", i.e. which are only referred to from `inlined_into_idx`.
call_locations: IndexSet<raw::SourceLocation>,
/// A map from code ranges to the [`raw::SourceLocation`]s they correspond to.
///
/// Only the starting address of a range is saved, the end address is given implicitly
/// by the start address of the next range.
ranges: BTreeMap<u32, raw::SourceLocation>,
/// This is highest addr that we know is outside of a valid function.
/// Functions have an explicit end, while Symbols implicitly extend to infinity.
/// In case the highest addr belongs to a Symbol, this will be `None` and the SymCache
/// also extends to infinite, otherwise this is the end of the highest function.
last_addr: Option<u32>,
}
impl<'a> SymCacheConverter<'a> {
/// Creates a new Converter.
pub fn new() -> Self {
Self::default()
}
/// Adds a new [`transform::Transformer`] to this [`SymCacheConverter`].
///
/// Every [`transform::Function`] and [`transform::SourceLocation`] will be passed through
/// this transformer before it is written to the SymCache.
pub fn add_transformer<T>(&mut self, t: T)
where
T: transform::Transformer + 'a,
{
self.transformers.0.push(Box::new(t));
}
/// Sets the CPU architecture of this SymCache.
pub fn | (&mut self, arch: Arch) {
self.arch = arch;
}
/// Sets the debug identifier of this SymCache.
pub fn set_debug_id(&mut self, debug_id: DebugId) {
self.debug_id = debug_id;
}
// Methods processing symbolic-debuginfo [`ObjectLike`] below:
// Feel free to move these to a separate file.
/// This processes the given [`ObjectLike`] object, collecting all its functions and line
/// information into the converter.
#[tracing::instrument(skip_all, fields(object.debug_id = %object.debug_id().breakpad()))]
pub fn process_object<'d, 'o, O>(&mut self, object: &'o O) -> Result<(), Error>
where
O: ObjectLike<'d, 'o>,
O::Error: std::error::Error + Send + Sync + 'static,
{
let session = object
.debug_session()
.map_err(|e| Error::new(ErrorKind::BadDebugFile, e))?;
self.set_arch(object.arch());
self.set_debug_id(object.debug_id());
self.is_windows_object = matches!(object.file_format(), FileFormat::Pe | FileFormat::Pdb);
for function in session.functions() {
let function = function.map_err(|e| Error::new(ErrorKind::BadDebugFile, e))?;
self.process_symbolic_function(&function);
}
for symbol in object.symbols() {
self.process_symbolic_symbol(&symbol);
}
self.is_windows_object = false;
Ok(())
}
/// Processes an individual [`Function`], adding its line information to the converter.
pub fn process_symbolic_function(&mut self, function: &Function<'_>) {
self.process_symbolic_function_recursive(function, &[(0x0, u32::MAX)]);
}
/// Processes an individual [`Function`], adding its line information to the converter.
///
/// `call_locations` is a non-empty sorted list of `(address, call_location index)` pairs.
fn process_symbolic_function_recursive(
&mut self,
function: &Function<'_>,
call_locations: &[(u32, u32)],
) {
let string_table = &mut self.string_table;
// skip over empty functions or functions whose address is too large to fit in a u32
if function.size == 0 || function.address > u32::MAX as u64 {
return;
}
let comp_dir = std::str::from_utf8(function.compilation_dir).ok();
let entry_pc = if function.inline {
u32::MAX
} else {
function.address as u32
};
let function_idx = {
let language = function.name.language();
let mut function = transform::Function {
name: function.name.as_str().into(),
comp_dir: comp_dir.map(Into::into),
};
for transformer in &mut self.transformers.0 {
function = transformer.transform_function(function);
}
let function_name = if self.is_windows_object {
undecorate_win_symbol(&function.name)
} else {
&function.name
};
let name_offset = string_table.insert(function_name) as u32;
let lang = language as u32;
let (fun_idx, _) = self.functions.insert_full(raw::Function {
name_offset,
_comp_dir_offset: u32::MAX,
entry_pc,
lang,
});
fun_idx as u32
};
// We can divide the instructions in a function into two buckets:
// (1) Instructions which are part of an inlined function call, and
// (2) instructions which are *not* part of an inlined function call.
//
// Our incoming line records cover both (1) and (2) types of instructions.
//
// Let's call the address ranges of these instructions (1) inlinee ranges and (2) self ranges.
//
// We use the following strategy: For each function, only insert that function's "self ranges"
// into `self.ranges`. Then recurse into the function's inlinees. Those will insert their
// own "self ranges". Once the entire tree has been traversed, `self.ranges` will contain
// entries from all levels.
//
// In order to compute this function's "self ranges", we first gather and sort its
// "inlinee ranges". Later, when we iterate over this function's lines, we will compute the
// "self ranges" from the gaps between the "inlinee ranges".
let mut inlinee_ranges = Vec::new();
for inlinee in &function.inlinees {
for line in &inlinee.lines {
let start = line.address as u32;
let end = (line.address + line.size.unwrap_or(1)) as u32;
inlinee_ranges.push(start..end);
}
}
inlinee_ranges.sort_unstable_by_key(|range| range.start);
// Walk three iterators. All of these are already sorted by address.
let mut line_iter = function.lines.iter();
let mut call_location_iter = call_locations.iter();
let mut inline_iter = inlinee_ranges.into_iter();
// call_locations is non-empty, so the first element always exists.
let mut current_call_location = call_location_iter.next().unwrap();
let mut next_call_location = call_location_iter.next();
let mut next_line = line_iter.next();
let mut next_inline = inline_iter.next();
// This will be the list we pass to our inlinees as the call_locations argument.
// This list is ordered by address by construction.
let mut callee_call_locations = Vec::new();
// Iterate over the line records.
while let Some(line) = next_line.take() {
let line_range_start = line.address as u32;
let line_range_end = (line.address + line.size.unwrap_or(1)) as u32;
// Find the call location for this line.
while next_call_location.is_some() && next_call_location.unwrap().0 <= line_range_start
{
current_call_location = next_call_location.unwrap();
next_call_location = call_location_iter.next();
}
let inlined_into_idx = current_call_location.1;
let mut location = transform::SourceLocation {
file: transform::File {
name: line.file.name_str(),
directory: Some(line.file.dir_str()),
comp_dir: comp_dir.map(Into::into),
},
line: line.line as u32,
};
for transformer in &mut self.transformers.0 {
location = transformer.transform_source_location(location);
}
let name_offset = string_table.insert(&location.file.name) as u32;
let directory_offset = location
.file
.directory
.map_or(u32::MAX, |d| string_table.insert(&d) as u32);
let comp_dir_offset = location
.file
.comp_dir
.map_or(u32::MAX, |cd| string_table.insert(&cd) as u32);
let (file_idx, _) = self.files.insert_full(raw::File {
name_offset,
directory_offset,
comp_dir_offset,
});
let source_location = raw::SourceLocation {
file_idx: file_idx as u32,
line: location.line,
function_idx,
inlined_into_idx,
};
// The current line can be a "self line", or a "call line", or even a mixture.
//
// Examples:
//
// a) Just self line:
// Line: |==============|
// Inlinee ranges: (none)
//
// Effect: insert_range
//
// b) Just call line:
// Line: |==============|
// Inlinee ranges: |--------------|
//
// Effect: make_call_location
//
// c) Just call line, for multiple inlined calls:
// Line: |==========================|
// Inlinee ranges: |----------||--------------|
//
// Effect: make_call_location, make_call_location
//
// d) Call line and trailing self line:
// Line: |==================|
// Inlinee ranges: |-----------|
//
// Effect: make_call_location, insert_range
//
// e) Leading self line and also call line:
// Line: |==================|
// Inlinee ranges: |-----------|
//
// Effect: insert_range, make_call_location
//
// f) Interleaving
// Line: |======================================|
// Inlinee ranges: |-----------| |-------|
//
// Effect: insert_range, make_call_location, insert_range, make_call_location, insert_range
//
// g) Bad debug info
// Line: |=======|
// Inlinee ranges: |-------------|
//
// Effect: make_call_location
let mut current_address = line_range_start;
while current_address < line_range_end {
// Emit our source location at current_address if current_address is not covered by an inlinee.
if next_inline.is_none() || next_inline.as_ref().unwrap().start > current_address {
// "insert_range"
self.ranges.insert(current_address, source_location.clone());
}
// If there is an inlinee range covered by this line record, turn this line into that
// call's "call line". Make a `call_location_idx` for it and store it in `callee_call_locations`.
if next_inline.is_some() && next_inline.as_ref().unwrap().start < line_range_end {
let inline_range = next_inline.take().unwrap();
// "make_call_location"
let (call_location_idx, _) =
self.call_locations.insert_full(source_location.clone());
callee_call_locations.push((inline_range.start, call_location_idx as u32));
// Advance current_address to the end of this inlinee range.
current_address = inline_range.end;
next_inline = inline_iter.next();
} else {
// No further inlinee ranges are overlapping with this line record. Advance to the
// end of the line record.
current_address = line_range_end;
}
}
// Advance the line iterator.
next_line = line_iter.next();
// Skip any lines that start before current_address.
// Such lines can exist if the debug information is faulty, or if the compiler created
// multiple identical small "call line" records instead of one combined record
// covering the entire inlinee range. We can't have different "call lines" for a single
// inlinee range anyway, so it's fine to skip these.
while next_line.is_some()
&& (next_line.as_ref().unwrap().address as u32) < current_address
{
next_line = line_iter.next();
}
}
if !function.inline {
// add the bare minimum of information for the function if there isn't any.
self.ranges.entry(entry_pc).or_insert(raw::SourceLocation {
file_idx: u32::MAX,
line: 0,
function_idx,
inlined_into_idx: u32::MAX,
});
}
// We've processed all address ranges which are *not* covered by inlinees.
// Now it's time to recurse.
// Process our inlinees.
if !callee_call_locations.is_empty() {
for inlinee in &function.inlinees {
self.process_symbolic_function_recursive(inlinee, &callee_call_locations);
}
}
let function_end = function.end_address() as u32;
let last_addr = self.last_addr.get_or_insert(0);
if function_end > *last_addr {
*last_addr = function_end;
}
}
    /// Processes an individual [`Symbol`].
    ///
    /// Symbols carry only a name and a start address (no file/line records), so
    /// this can only register a bare function entry. If debug info already
    /// recorded a source location at the same address, the symbol is ignored.
    pub fn process_symbolic_symbol(&mut self, symbol: &Symbol<'_>) {
        let name_idx = {
            let mut function = transform::Function {
                name: match symbol.name {
                    Some(ref name) => name.clone(),
                    // Unnamed symbols carry no useful information; skip them entirely.
                    None => return,
                },
                comp_dir: None,
            };
            for transformer in &mut self.transformers.0 {
                function = transformer.transform_function(function);
            }
            // PE/PDB symbol names may carry C calling-convention decorations; strip them.
            let function_name = if self.is_windows_object {
                undecorate_win_symbol(&function.name)
            } else {
                &function.name
            };
            self.string_table.insert(function_name) as u32
        };
        match self.ranges.entry(symbol.address as u32) {
            btree_map::Entry::Vacant(entry) => {
                let function = raw::Function {
                    name_offset: name_idx,
                    _comp_dir_offset: u32::MAX,
                    entry_pc: symbol.address as u32,
                    lang: u32::MAX,
                };
                let function_idx = self.functions.insert_full(function).0 as u32;
                // No file/line information is available for plain symbols,
                // hence the `u32::MAX` / `0` placeholders.
                entry.insert(raw::SourceLocation {
                    file_idx: u32::MAX,
                    line: 0,
                    function_idx,
                    inlined_into_idx: u32::MAX,
                });
            }
            btree_map::Entry::Occupied(entry) => {
                // ASSUMPTION:
                // the `functions` iterator has already filled in this addr via debug session.
                // we could trace the caller hierarchy up to the root, and assert that it is
                // indeed the same function, and maybe update its `entry_pc`, but we don’t do
                // that for now.
                let _function_idx = entry.get().function_idx as usize;
            }
        }
        // Symbols have no explicit end and implicitly extend to infinity: if this
        // symbol sits at or beyond the highest known function end, the cache no
        // longer has a definite end address (see the `last_addr` field docs).
        let last_addr = self.last_addr.get_or_insert(0);
        if symbol.address as u32 >= *last_addr {
            self.last_addr = None;
        }
    }
    // Methods for serializing to a [`Write`] below:
    // Feel free to move these to a separate file.
    /// Serialize the converted data.
    ///
    /// This writes the SymCache binary format into the given [`Write`].
    ///
    /// Section order (each section 8-byte aligned): header, files, functions,
    /// source locations (call locations first, then range locations),
    /// range start addresses, string bytes.
    ///
    /// # Errors
    ///
    /// Returns any I/O error encountered while writing.
    pub fn serialize<W: Write>(mut self, writer: &mut W) -> std::io::Result<()> {
        let mut writer = Writer::new(writer);
        // Insert a trailing sentinel source location in case we have a definite end addr
        if let Some(last_addr) = self.last_addr {
            // TODO: to be extra safe, we might check that `last_addr` is indeed larger than
            // the largest range at some point.
            match self.ranges.entry(last_addr) {
                btree_map::Entry::Vacant(entry) => {
                    entry.insert(raw::NO_SOURCE_LOCATION);
                }
                btree_map::Entry::Occupied(_entry) => {
                    // BUG:
                    // the last addr should not map to an already defined range
                }
            }
        }
        let num_files = self.files.len() as u32;
        let num_functions = self.functions.len() as u32;
        // Call locations and range locations are written back-to-back into a
        // single source-location section, so the header counts them together.
        let num_source_locations = (self.call_locations.len() + self.ranges.len()) as u32;
        let num_ranges = self.ranges.len() as u32;
        let string_bytes = self.string_table.into_bytes();
        let header = raw::Header {
            magic: raw::SYMCACHE_MAGIC,
            version: crate::SYMCACHE_VERSION,
            debug_id: self.debug_id,
            arch: self.arch,
            num_files,
            num_functions,
            num_source_locations,
            num_ranges,
            string_bytes: string_bytes.len() as u32,
            _reserved: [0; 16],
        };
        writer.write_all(header.as_bytes())?;
        writer.align_to(8)?;
        for f in self.files {
            writer.write_all(f.as_bytes())?;
        }
        writer.align_to(8)?;
        for f in self.functions {
            writer.write_all(f.as_bytes())?;
        }
        writer.align_to(8)?;
        // NOTE: intentionally no alignment between these two loops — together
        // they form the one section counted by `num_source_locations` above.
        for s in self.call_locations {
            writer.write_all(s.as_bytes())?;
        }
        for s in self.ranges.values() {
            writer.write_all(s.as_bytes())?;
        }
        writer.align_to(8)?;
        // Range start addresses, in the same (sorted) order as the range
        // source locations written above.
        for r in self.ranges.keys() {
            writer.write_all(r.as_bytes())?;
        }
        writer.align_to(8)?;
        writer.write_all(&string_bytes)?;
        Ok(())
    }
}
/// Strips Windows C calling-convention decorations from a symbol name.
///
/// The decoration rules are explained here:
/// <https://docs.microsoft.com/en-us/cpp/build/reference/decorated-names?view=vs-2019>
///
/// - __cdecl       Leading underscore (_)
/// - __stdcall     Leading underscore (_) and a trailing at sign (@) followed by the number of bytes in the parameter list in decimal
/// - __fastcall    Leading and trailing at signs (@) followed by a decimal number representing the number of bytes in the parameter list
/// - __vectorcall  Two trailing at signs (@@) followed by a decimal number of bytes in the parameter list
/// > In a 64-bit environment, C or extern "C" functions are only decorated when using the __vectorcall calling convention."
///
/// This code is adapted from `dump_syms`:
/// See <https://github.com/mozilla/dump_syms/blob/325cf2c61b2cacc55a7f1af74081b57237c7f9de/src/symbol.rs#L169-L216>
fn undecorate_win_symbol(name: &str) -> &str {
    // MSVC-mangled C++ names start with '?'; a name containing ':', '(' or '<'
    // is already-demangled C++. Neither carries C decorations, so pass through.
    let is_cpp_like = name.starts_with('?') || name.contains([':', '(', '<']);
    if is_cpp_like {
        return name;
    }

    // __vectorcall: `name@@<bytes>` -> `name`.
    if let Some((stripped, digits)) = name.rsplit_once("@@") {
        if digits.parse::<u32>().is_ok() {
            return stripped;
        }
    }

    // The remaining three conventions all start with '@' or '_' (both ASCII,
    // so slicing off one byte below is safe).
    match name.chars().next() {
        Some('@') | Some('_') => {
            let rest = &name[1..];
            if let Some((stripped, digits)) = rest.rsplit_once('@') {
                if digits.parse::<u32>().is_ok() {
                    // __stdcall (`_name@N`) or __fastcall (`@name@N`)
                    return stripped;
                }
            }
            // __cdecl: drop the single leading underscore, if present.
            name.strip_prefix('_').unwrap_or(name)
        }
        // Empty or undecorated name: return unchanged.
        _ => name,
    }
}
| set_arch | identifier_name |
writer.rs | //! Defines the [SymCache Converter](`SymCacheConverter`).
use std::collections::btree_map;
use std::collections::BTreeMap;
use std::io::Write;
use indexmap::IndexSet;
use symbolic_common::{Arch, DebugId};
use symbolic_debuginfo::{DebugSession, FileFormat, Function, ObjectLike, Symbol};
use watto::{Pod, StringTable, Writer};
use super::{raw, transform};
use crate::{Error, ErrorKind};
/// The SymCache Converter.
///
/// This can convert data in various source formats to an intermediate representation, which can
/// then be serialized to disk via its [`serialize`](SymCacheConverter::serialize) method.
#[derive(Debug, Default)]
pub struct SymCacheConverter<'a> {
    /// Debug identifier of the object file.
    debug_id: DebugId,
    /// CPU architecture of the object file.
    arch: Arch,
    /// A flag that indicates that we are currently processing a Windows object, which
    /// will inform us if we should undecorate function names.
    is_windows_object: bool,
    /// A list of transformers that are used to transform each function / source location.
    transformers: transform::Transformers<'a>,
    /// Interned strings (function names, file paths, directories) that the
    /// `files` and `functions` records reference by byte offset.
    string_table: StringTable,
    /// The set of all [`raw::File`]s that have been added to this `Converter`.
    files: IndexSet<raw::File>,
    /// The set of all [`raw::Function`]s that have been added to this `Converter`.
    functions: IndexSet<raw::Function>,
    /// The set of [`raw::SourceLocation`]s used in this `Converter` that are only used as
    /// "call locations", i.e. which are only referred to from `inlined_into_idx`.
    call_locations: IndexSet<raw::SourceLocation>,
    /// A map from code ranges to the [`raw::SourceLocation`]s they correspond to.
    ///
    /// Only the starting address of a range is saved, the end address is given implicitly
    /// by the start address of the next range.
    ranges: BTreeMap<u32, raw::SourceLocation>,
    /// This is highest addr that we know is outside of a valid function.
    /// Functions have an explicit end, while Symbols implicitly extend to infinity.
    /// In case the highest addr belongs to a Symbol, this will be `None` and the SymCache
    /// also extends to infinite, otherwise this is the end of the highest function.
    last_addr: Option<u32>,
}
impl<'a> SymCacheConverter<'a> {
/// Creates a new Converter.
pub fn new() -> Self {
Self::default()
}
    /// Adds a new [`transform::Transformer`] to this [`SymCacheConverter`].
    ///
    /// Every [`transform::Function`] and [`transform::SourceLocation`] will be passed through
    /// this transformer before it is written to the SymCache.
    ///
    /// Transformers are applied in the order in which they were added.
    pub fn add_transformer<T>(&mut self, t: T)
    where
        T: transform::Transformer + 'a,
    {
        // Boxed so transformers of different concrete types can share one list.
        self.transformers.0.push(Box::new(t));
    }
    /// Sets the CPU architecture of this SymCache.
    ///
    /// The architecture is written verbatim into the SymCache header on
    /// serialization.
    pub fn set_arch(&mut self, arch: Arch) {
        self.arch = arch;
    }
    /// Sets the debug identifier of this SymCache.
    ///
    /// The debug id is written verbatim into the SymCache header on
    /// serialization.
    pub fn set_debug_id(&mut self, debug_id: DebugId) {
        self.debug_id = debug_id;
    }
    // Methods processing symbolic-debuginfo [`ObjectLike`] below:
    // Feel free to move these to a separate file.
    /// This processes the given [`ObjectLike`] object, collecting all its functions and line
    /// information into the converter.
    ///
    /// Debug-session functions are processed first; object symbols are merged in
    /// afterwards and only fill in addresses not already covered by debug info.
    ///
    /// # Errors
    ///
    /// Returns an [`ErrorKind::BadDebugFile`] error if the debug session cannot
    /// be opened or an individual function cannot be read from it.
    #[tracing::instrument(skip_all, fields(object.debug_id = %object.debug_id().breakpad()))]
    pub fn process_object<'d, 'o, O>(&mut self, object: &'o O) -> Result<(), Error>
    where
        O: ObjectLike<'d, 'o>,
        O::Error: std::error::Error + Send + Sync + 'static,
    {
        let session = object
            .debug_session()
            .map_err(|e| Error::new(ErrorKind::BadDebugFile, e))?;
        self.set_arch(object.arch());
        self.set_debug_id(object.debug_id());
        // Only PE/PDB symbol names carry the C calling-convention decorations
        // we strip; the flag is reset below once this object is done.
        self.is_windows_object = matches!(object.file_format(), FileFormat::Pe | FileFormat::Pdb);
        for function in session.functions() {
            let function = function.map_err(|e| Error::new(ErrorKind::BadDebugFile, e))?;
            self.process_symbolic_function(&function);
        }
        for symbol in object.symbols() {
            self.process_symbolic_symbol(&symbol);
        }
        self.is_windows_object = false;
        Ok(())
    }
/// Processes an individual [`Function`], adding its line information to the converter.
pub fn process_symbolic_function(&mut self, function: &Function<'_>) {
self.process_symbolic_function_recursive(function, &[(0x0, u32::MAX)]);
}
    /// Processes an individual [`Function`], adding its line information to the converter.
    ///
    /// `call_locations` is a non-empty sorted list of `(address, call_location index)` pairs.
    ///
    /// This inserts only the function's own ("self") ranges into `self.ranges`,
    /// then recurses into `function.inlinees` with the call locations collected
    /// at this level, so the whole inlining tree is flattened bottom-up.
    fn process_symbolic_function_recursive(
        &mut self,
        function: &Function<'_>,
        call_locations: &[(u32, u32)],
    ) {
        let string_table = &mut self.string_table;
        // skip over empty functions or functions whose address is too large to fit in a u32
        if function.size == 0 || function.address > u32::MAX as u64 {
            return;
        }
        let comp_dir = std::str::from_utf8(function.compilation_dir).ok();
        // Inlined functions have no entry point of their own.
        let entry_pc = if function.inline {
            u32::MAX
        } else {
            function.address as u32
        };
        // Intern the (transformed) function record and remember its index.
        let function_idx = {
            let language = function.name.language();
            let mut function = transform::Function {
                name: function.name.as_str().into(),
                comp_dir: comp_dir.map(Into::into),
            };
            for transformer in &mut self.transformers.0 {
                function = transformer.transform_function(function);
            }
            let function_name = if self.is_windows_object {
                undecorate_win_symbol(&function.name)
            } else {
                &function.name
            };
            let name_offset = string_table.insert(function_name) as u32;
            let lang = language as u32;
            let (fun_idx, _) = self.functions.insert_full(raw::Function {
                name_offset,
                _comp_dir_offset: u32::MAX,
                entry_pc,
                lang,
            });
            fun_idx as u32
        };
        // We can divide the instructions in a function into two buckets:
        // (1) Instructions which are part of an inlined function call, and
        // (2) instructions which are *not* part of an inlined function call.
        //
        // Our incoming line records cover both (1) and (2) types of instructions.
        //
        // Let's call the address ranges of these instructions (1) inlinee ranges and (2) self ranges.
        //
        // We use the following strategy: For each function, only insert that function's "self ranges"
        // into `self.ranges`. Then recurse into the function's inlinees. Those will insert their
        // own "self ranges". Once the entire tree has been traversed, `self.ranges` will contain
        // entries from all levels.
        //
        // In order to compute this function's "self ranges", we first gather and sort its
        // "inlinee ranges". Later, when we iterate over this function's lines, we will compute the
        // "self ranges" from the gaps between the "inlinee ranges".
        let mut inlinee_ranges = Vec::new();
        for inlinee in &function.inlinees {
            for line in &inlinee.lines {
                let start = line.address as u32;
                // A line record with no size is assumed to cover one byte.
                let end = (line.address + line.size.unwrap_or(1)) as u32;
                inlinee_ranges.push(start..end);
            }
        }
        inlinee_ranges.sort_unstable_by_key(|range| range.start);
        // Walk three iterators. All of these are already sorted by address.
        let mut line_iter = function.lines.iter();
        let mut call_location_iter = call_locations.iter();
        let mut inline_iter = inlinee_ranges.into_iter();
        // call_locations is non-empty, so the first element always exists.
        let mut current_call_location = call_location_iter.next().unwrap();
        let mut next_call_location = call_location_iter.next();
        let mut next_line = line_iter.next();
        let mut next_inline = inline_iter.next();
        // This will be the list we pass to our inlinees as the call_locations argument.
        // This list is ordered by address by construction.
        let mut callee_call_locations = Vec::new();
        // Iterate over the line records.
        while let Some(line) = next_line.take() {
            let line_range_start = line.address as u32;
            let line_range_end = (line.address + line.size.unwrap_or(1)) as u32;
            // Find the call location for this line.
            while next_call_location.is_some() && next_call_location.unwrap().0 <= line_range_start
            {
                current_call_location = next_call_location.unwrap();
                next_call_location = call_location_iter.next();
            }
            let inlined_into_idx = current_call_location.1;
            let mut location = transform::SourceLocation {
                file: transform::File {
                    name: line.file.name_str(),
                    directory: Some(line.file.dir_str()),
                    comp_dir: comp_dir.map(Into::into),
                },
                line: line.line as u32,
            };
            for transformer in &mut self.transformers.0 {
                location = transformer.transform_source_location(location);
            }
            // Intern the (possibly transformed) file parts; `u32::MAX` marks an
            // absent directory / compilation directory.
            let name_offset = string_table.insert(&location.file.name) as u32;
            let directory_offset = location
                .file
                .directory
                .map_or(u32::MAX, |d| string_table.insert(&d) as u32);
            let comp_dir_offset = location
                .file
                .comp_dir
                .map_or(u32::MAX, |cd| string_table.insert(&cd) as u32);
            let (file_idx, _) = self.files.insert_full(raw::File {
                name_offset,
                directory_offset,
                comp_dir_offset,
            });
            let source_location = raw::SourceLocation {
                file_idx: file_idx as u32,
                line: location.line,
                function_idx,
                inlined_into_idx,
            };
            // The current line can be a "self line", or a "call line", or even a mixture.
            //
            // Examples:
            //
            // a) Just self line:
            //    Line:           |==============|
            //    Inlinee ranges: (none)
            //
            //    Effect: insert_range
            //
            // b) Just call line:
            //    Line:           |==============|
            //    Inlinee ranges: |--------------|
            //
            //    Effect: make_call_location
            //
            // c) Just call line, for multiple inlined calls:
            //    Line:           |==========================|
            //    Inlinee ranges: |----------||--------------|
            //
            //    Effect: make_call_location, make_call_location
            //
            // d) Call line and trailing self line:
            //    Line:           |==================|
            //    Inlinee ranges: |-----------|
            //
            //    Effect: make_call_location, insert_range
            //
            // e) Leading self line and also call line:
            //    Line:           |==================|
            //    Inlinee ranges:       |-----------|
            //
            //    Effect: insert_range, make_call_location
            //
            // f) Interleaving
            //    Line:           |======================================|
            //    Inlinee ranges:    |-----------|     |-------|
            //
            //    Effect: insert_range, make_call_location, insert_range, make_call_location, insert_range
            //
            // g) Bad debug info
            //    Line:             |=======|
            //    Inlinee ranges: |-------------|
            //
            //    Effect: make_call_location
            let mut current_address = line_range_start;
            while current_address < line_range_end {
                // Emit our source location at current_address if current_address is not covered by an inlinee.
                if next_inline.is_none() || next_inline.as_ref().unwrap().start > current_address {
                    // "insert_range"
                    self.ranges.insert(current_address, source_location.clone());
                }
                // If there is an inlinee range covered by this line record, turn this line into that
                // call's "call line". Make a `call_location_idx` for it and store it in `callee_call_locations`.
                if next_inline.is_some() && next_inline.as_ref().unwrap().start < line_range_end {
                    let inline_range = next_inline.take().unwrap();
                    // "make_call_location"
                    let (call_location_idx, _) =
                        self.call_locations.insert_full(source_location.clone());
                    callee_call_locations.push((inline_range.start, call_location_idx as u32));
                    // Advance current_address to the end of this inlinee range.
                    current_address = inline_range.end;
                    next_inline = inline_iter.next();
                } else {
                    // No further inlinee ranges are overlapping with this line record. Advance to the
                    // end of the line record.
                    current_address = line_range_end;
                }
            }
            // Advance the line iterator.
            next_line = line_iter.next();
            // Skip any lines that start before current_address.
            // Such lines can exist if the debug information is faulty, or if the compiler created
            // multiple identical small "call line" records instead of one combined record
            // covering the entire inlinee range. We can't have different "call lines" for a single
            // inlinee range anyway, so it's fine to skip these.
            while next_line.is_some()
                && (next_line.as_ref().unwrap().address as u32) < current_address
            {
                next_line = line_iter.next();
            }
        }
        if !function.inline {
            // add the bare minimum of information for the function if there isn't any.
            self.ranges.entry(entry_pc).or_insert(raw::SourceLocation {
                file_idx: u32::MAX,
                line: 0,
                function_idx,
                inlined_into_idx: u32::MAX,
            });
        }
        // We've processed all address ranges which are *not* covered by inlinees.
        // Now it's time to recurse.
        // Process our inlinees.
        if !callee_call_locations.is_empty() {
            for inlinee in &function.inlinees {
                self.process_symbolic_function_recursive(inlinee, &callee_call_locations);
            }
        }
        // Track the highest known function end so the cache gets a definite end
        // address (unless a later symbol extends past it, see `last_addr` docs).
        let function_end = function.end_address() as u32;
        let last_addr = self.last_addr.get_or_insert(0);
        if function_end > *last_addr {
            *last_addr = function_end;
        }
    }
    /// Processes an individual [`Symbol`].
    ///
    /// Symbols carry only a name and a start address (no file/line records), so
    /// this can only register a bare function entry. If debug info already
    /// recorded a source location at the same address, the symbol is ignored.
    pub fn process_symbolic_symbol(&mut self, symbol: &Symbol<'_>) {
        let name_idx = {
            let mut function = transform::Function {
                name: match symbol.name {
                    Some(ref name) => name.clone(),
                    // Unnamed symbols carry no useful information; skip them entirely.
                    None => return,
                },
                comp_dir: None,
            };
            for transformer in &mut self.transformers.0 {
                function = transformer.transform_function(function);
            }
            // PE/PDB symbol names may carry C calling-convention decorations; strip them.
            let function_name = if self.is_windows_object {
                undecorate_win_symbol(&function.name)
            } else {
                &function.name
            };
            self.string_table.insert(function_name) as u32
        };
        match self.ranges.entry(symbol.address as u32) {
            btree_map::Entry::Vacant(entry) => {
                let function = raw::Function {
                    name_offset: name_idx,
                    _comp_dir_offset: u32::MAX,
                    entry_pc: symbol.address as u32,
                    lang: u32::MAX,
                };
                let function_idx = self.functions.insert_full(function).0 as u32;
                // No file/line information is available for plain symbols,
                // hence the `u32::MAX` / `0` placeholders.
                entry.insert(raw::SourceLocation {
                    file_idx: u32::MAX,
                    line: 0,
                    function_idx,
                    inlined_into_idx: u32::MAX,
                });
            }
            btree_map::Entry::Occupied(entry) => {
                // ASSUMPTION:
                // the `functions` iterator has already filled in this addr via debug session.
                // we could trace the caller hierarchy up to the root, and assert that it is
                // indeed the same function, and maybe update its `entry_pc`, but we don’t do
                // that for now.
                let _function_idx = entry.get().function_idx as usize;
            }
        }
        // Symbols have no explicit end and implicitly extend to infinity: if this
        // symbol sits at or beyond the highest known function end, the cache no
        // longer has a definite end address (see the `last_addr` field docs).
        let last_addr = self.last_addr.get_or_insert(0);
        if symbol.address as u32 >= *last_addr {
            self.last_addr = None;
        }
    }
// Methods for serializing to a [`Write`] below:
// Feel free to move these to a separate file.
/// Serialize the converted data.
///
/// This writes the SymCache binary format into the given [`Write`].
pub fn serialize<W: Write>(mut self, writer: &mut W) -> std::io::Result<()> {
|
/// Strips Windows C calling-convention decorations from a symbol name.
///
/// The decoration rules are explained here:
/// <https://docs.microsoft.com/en-us/cpp/build/reference/decorated-names?view=vs-2019>
///
/// - __cdecl       Leading underscore (_)
/// - __stdcall     Leading underscore (_) and a trailing at sign (@) followed by the number of bytes in the parameter list in decimal
/// - __fastcall    Leading and trailing at signs (@) followed by a decimal number representing the number of bytes in the parameter list
/// - __vectorcall  Two trailing at signs (@@) followed by a decimal number of bytes in the parameter list
/// > In a 64-bit environment, C or extern "C" functions are only decorated when using the __vectorcall calling convention."
///
/// This code is adapted from `dump_syms`:
/// See <https://github.com/mozilla/dump_syms/blob/325cf2c61b2cacc55a7f1af74081b57237c7f9de/src/symbol.rs#L169-L216>
fn undecorate_win_symbol(name: &str) -> &str {
    // MSVC-mangled C++ names start with '?'; a name containing ':', '(' or '<'
    // is already-demangled C++. Neither carries C decorations, so pass through.
    let is_cpp_like = name.starts_with('?') || name.contains([':', '(', '<']);
    if is_cpp_like {
        return name;
    }

    // __vectorcall: `name@@<bytes>` -> `name`.
    if let Some((stripped, digits)) = name.rsplit_once("@@") {
        if digits.parse::<u32>().is_ok() {
            return stripped;
        }
    }

    // The remaining three conventions all start with '@' or '_' (both ASCII,
    // so slicing off one byte below is safe).
    match name.chars().next() {
        Some('@') | Some('_') => {
            let rest = &name[1..];
            if let Some((stripped, digits)) = rest.rsplit_once('@') {
                if digits.parse::<u32>().is_ok() {
                    // __stdcall (`_name@N`) or __fastcall (`@name@N`)
                    return stripped;
                }
            }
            // __cdecl: drop the single leading underscore, if present.
            name.strip_prefix('_').unwrap_or(name)
        }
        // Empty or undecorated name: return unchanged.
        _ => name,
    }
}
| let mut writer = Writer::new(writer);
// Insert a trailing sentinel source location in case we have a definite end addr
if let Some(last_addr) = self.last_addr {
// TODO: to be extra safe, we might check that `last_addr` is indeed larger than
// the largest range at some point.
match self.ranges.entry(last_addr) {
btree_map::Entry::Vacant(entry) => {
entry.insert(raw::NO_SOURCE_LOCATION);
}
btree_map::Entry::Occupied(_entry) => {
// BUG:
// the last addr should not map to an already defined range
}
}
}
let num_files = self.files.len() as u32;
let num_functions = self.functions.len() as u32;
let num_source_locations = (self.call_locations.len() + self.ranges.len()) as u32;
let num_ranges = self.ranges.len() as u32;
let string_bytes = self.string_table.into_bytes();
let header = raw::Header {
magic: raw::SYMCACHE_MAGIC,
version: crate::SYMCACHE_VERSION,
debug_id: self.debug_id,
arch: self.arch,
num_files,
num_functions,
num_source_locations,
num_ranges,
string_bytes: string_bytes.len() as u32,
_reserved: [0; 16],
};
writer.write_all(header.as_bytes())?;
writer.align_to(8)?;
for f in self.files {
writer.write_all(f.as_bytes())?;
}
writer.align_to(8)?;
for f in self.functions {
writer.write_all(f.as_bytes())?;
}
writer.align_to(8)?;
for s in self.call_locations {
writer.write_all(s.as_bytes())?;
}
for s in self.ranges.values() {
writer.write_all(s.as_bytes())?;
}
writer.align_to(8)?;
for r in self.ranges.keys() {
writer.write_all(r.as_bytes())?;
}
writer.align_to(8)?;
writer.write_all(&string_bytes)?;
Ok(())
}
} | identifier_body |
writer.rs | //! Defines the [SymCache Converter](`SymCacheConverter`).
use std::collections::btree_map;
use std::collections::BTreeMap;
use std::io::Write;
use indexmap::IndexSet;
use symbolic_common::{Arch, DebugId};
use symbolic_debuginfo::{DebugSession, FileFormat, Function, ObjectLike, Symbol};
use watto::{Pod, StringTable, Writer};
use super::{raw, transform};
use crate::{Error, ErrorKind};
/// The SymCache Converter.
///
/// This can convert data in various source formats to an intermediate representation, which can
/// then be serialized to disk via its [`serialize`](SymCacheConverter::serialize) method.
#[derive(Debug, Default)]
pub struct SymCacheConverter<'a> {
    /// Debug identifier of the object file.
    debug_id: DebugId,
    /// CPU architecture of the object file.
    arch: Arch,
    /// A flag that indicates that we are currently processing a Windows object, which
    /// will inform us if we should undecorate function names.
    is_windows_object: bool,
    /// A list of transformers that are used to transform each function / source location.
    transformers: transform::Transformers<'a>,
    /// Interned strings (function names, file paths, directories) that the
    /// `files` and `functions` records reference by byte offset.
    string_table: StringTable,
    /// The set of all [`raw::File`]s that have been added to this `Converter`.
    files: IndexSet<raw::File>,
    /// The set of all [`raw::Function`]s that have been added to this `Converter`.
    functions: IndexSet<raw::Function>,
    /// The set of [`raw::SourceLocation`]s used in this `Converter` that are only used as
    /// "call locations", i.e. which are only referred to from `inlined_into_idx`.
    call_locations: IndexSet<raw::SourceLocation>,
    /// A map from code ranges to the [`raw::SourceLocation`]s they correspond to.
    ///
    /// Only the starting address of a range is saved, the end address is given implicitly
    /// by the start address of the next range.
    ranges: BTreeMap<u32, raw::SourceLocation>,
    /// This is highest addr that we know is outside of a valid function.
    /// Functions have an explicit end, while Symbols implicitly extend to infinity.
    /// In case the highest addr belongs to a Symbol, this will be `None` and the SymCache
    /// also extends to infinite, otherwise this is the end of the highest function.
    last_addr: Option<u32>,
}
impl<'a> SymCacheConverter<'a> {
/// Creates a new Converter.
pub fn new() -> Self {
Self::default()
}
    /// Adds a new [`transform::Transformer`] to this [`SymCacheConverter`].
    ///
    /// Every [`transform::Function`] and [`transform::SourceLocation`] will be passed through
    /// this transformer before it is written to the SymCache.
    ///
    /// Transformers are applied in the order in which they were added.
    pub fn add_transformer<T>(&mut self, t: T)
    where
        T: transform::Transformer + 'a,
    {
        // Boxed so transformers of different concrete types can share one list.
        self.transformers.0.push(Box::new(t));
    }
    /// Sets the CPU architecture of this SymCache.
    ///
    /// The architecture is written verbatim into the SymCache header on
    /// serialization.
    pub fn set_arch(&mut self, arch: Arch) {
        self.arch = arch;
    }
    /// Sets the debug identifier of this SymCache.
    ///
    /// The debug id is written verbatim into the SymCache header on
    /// serialization.
    pub fn set_debug_id(&mut self, debug_id: DebugId) {
        self.debug_id = debug_id;
    }
    // Methods processing symbolic-debuginfo [`ObjectLike`] below:
    // Feel free to move these to a separate file.
    /// This processes the given [`ObjectLike`] object, collecting all its functions and line
    /// information into the converter.
    ///
    /// Debug-session functions are processed first; object symbols are merged in
    /// afterwards and only fill in addresses not already covered by debug info.
    ///
    /// # Errors
    ///
    /// Returns an [`ErrorKind::BadDebugFile`] error if the debug session cannot
    /// be opened or an individual function cannot be read from it.
    #[tracing::instrument(skip_all, fields(object.debug_id = %object.debug_id().breakpad()))]
    pub fn process_object<'d, 'o, O>(&mut self, object: &'o O) -> Result<(), Error>
    where
        O: ObjectLike<'d, 'o>,
        O::Error: std::error::Error + Send + Sync + 'static,
    {
        let session = object
            .debug_session()
            .map_err(|e| Error::new(ErrorKind::BadDebugFile, e))?;
        self.set_arch(object.arch());
        self.set_debug_id(object.debug_id());
        // Only PE/PDB symbol names carry the C calling-convention decorations
        // we strip; the flag is reset below once this object is done.
        self.is_windows_object = matches!(object.file_format(), FileFormat::Pe | FileFormat::Pdb);
        for function in session.functions() {
            let function = function.map_err(|e| Error::new(ErrorKind::BadDebugFile, e))?;
            self.process_symbolic_function(&function);
        }
        for symbol in object.symbols() {
            self.process_symbolic_symbol(&symbol);
        }
        self.is_windows_object = false;
        Ok(())
    }
/// Processes an individual [`Function`], adding its line information to the converter.
pub fn process_symbolic_function(&mut self, function: &Function<'_>) {
self.process_symbolic_function_recursive(function, &[(0x0, u32::MAX)]);
}
/// Processes an individual [`Function`], adding its line information to the converter.
///
/// `call_locations` is a non-empty sorted list of `(address, call_location index)` pairs.
fn process_symbolic_function_recursive(
&mut self,
function: &Function<'_>,
call_locations: &[(u32, u32)],
) {
let string_table = &mut self.string_table;
// skip over empty functions or functions whose address is too large to fit in a u32
if function.size == 0 || function.address > u32::MAX as u64 {
return;
}
let comp_dir = std::str::from_utf8(function.compilation_dir).ok();
let entry_pc = if function.inline {
u32::MAX | let language = function.name.language();
let mut function = transform::Function {
name: function.name.as_str().into(),
comp_dir: comp_dir.map(Into::into),
};
for transformer in &mut self.transformers.0 {
function = transformer.transform_function(function);
}
let function_name = if self.is_windows_object {
undecorate_win_symbol(&function.name)
} else {
&function.name
};
let name_offset = string_table.insert(function_name) as u32;
let lang = language as u32;
let (fun_idx, _) = self.functions.insert_full(raw::Function {
name_offset,
_comp_dir_offset: u32::MAX,
entry_pc,
lang,
});
fun_idx as u32
};
// We can divide the instructions in a function into two buckets:
// (1) Instructions which are part of an inlined function call, and
// (2) instructions which are *not* part of an inlined function call.
//
// Our incoming line records cover both (1) and (2) types of instructions.
//
// Let's call the address ranges of these instructions (1) inlinee ranges and (2) self ranges.
//
// We use the following strategy: For each function, only insert that function's "self ranges"
// into `self.ranges`. Then recurse into the function's inlinees. Those will insert their
// own "self ranges". Once the entire tree has been traversed, `self.ranges` will contain
// entries from all levels.
//
// In order to compute this function's "self ranges", we first gather and sort its
// "inlinee ranges". Later, when we iterate over this function's lines, we will compute the
// "self ranges" from the gaps between the "inlinee ranges".
let mut inlinee_ranges = Vec::new();
for inlinee in &function.inlinees {
for line in &inlinee.lines {
let start = line.address as u32;
let end = (line.address + line.size.unwrap_or(1)) as u32;
inlinee_ranges.push(start..end);
}
}
inlinee_ranges.sort_unstable_by_key(|range| range.start);
// Walk three iterators. All of these are already sorted by address.
let mut line_iter = function.lines.iter();
let mut call_location_iter = call_locations.iter();
let mut inline_iter = inlinee_ranges.into_iter();
// call_locations is non-empty, so the first element always exists.
let mut current_call_location = call_location_iter.next().unwrap();
let mut next_call_location = call_location_iter.next();
let mut next_line = line_iter.next();
let mut next_inline = inline_iter.next();
// This will be the list we pass to our inlinees as the call_locations argument.
// This list is ordered by address by construction.
let mut callee_call_locations = Vec::new();
// Iterate over the line records.
while let Some(line) = next_line.take() {
let line_range_start = line.address as u32;
let line_range_end = (line.address + line.size.unwrap_or(1)) as u32;
// Find the call location for this line.
while next_call_location.is_some() && next_call_location.unwrap().0 <= line_range_start
{
current_call_location = next_call_location.unwrap();
next_call_location = call_location_iter.next();
}
let inlined_into_idx = current_call_location.1;
let mut location = transform::SourceLocation {
file: transform::File {
name: line.file.name_str(),
directory: Some(line.file.dir_str()),
comp_dir: comp_dir.map(Into::into),
},
line: line.line as u32,
};
for transformer in &mut self.transformers.0 {
location = transformer.transform_source_location(location);
}
let name_offset = string_table.insert(&location.file.name) as u32;
let directory_offset = location
.file
.directory
.map_or(u32::MAX, |d| string_table.insert(&d) as u32);
let comp_dir_offset = location
.file
.comp_dir
.map_or(u32::MAX, |cd| string_table.insert(&cd) as u32);
let (file_idx, _) = self.files.insert_full(raw::File {
name_offset,
directory_offset,
comp_dir_offset,
});
let source_location = raw::SourceLocation {
file_idx: file_idx as u32,
line: location.line,
function_idx,
inlined_into_idx,
};
// The current line can be a "self line", or a "call line", or even a mixture.
//
// Examples:
//
// a) Just self line:
// Line: |==============|
// Inlinee ranges: (none)
//
// Effect: insert_range
//
// b) Just call line:
// Line: |==============|
// Inlinee ranges: |--------------|
//
// Effect: make_call_location
//
// c) Just call line, for multiple inlined calls:
// Line: |==========================|
// Inlinee ranges: |----------||--------------|
//
// Effect: make_call_location, make_call_location
//
// d) Call line and trailing self line:
// Line: |==================|
// Inlinee ranges: |-----------|
//
// Effect: make_call_location, insert_range
//
// e) Leading self line and also call line:
// Line: |==================|
// Inlinee ranges: |-----------|
//
// Effect: insert_range, make_call_location
//
// f) Interleaving
// Line: |======================================|
// Inlinee ranges: |-----------| |-------|
//
// Effect: insert_range, make_call_location, insert_range, make_call_location, insert_range
//
// g) Bad debug info
// Line: |=======|
// Inlinee ranges: |-------------|
//
// Effect: make_call_location
let mut current_address = line_range_start;
while current_address < line_range_end {
// Emit our source location at current_address if current_address is not covered by an inlinee.
if next_inline.is_none() || next_inline.as_ref().unwrap().start > current_address {
// "insert_range"
self.ranges.insert(current_address, source_location.clone());
}
// If there is an inlinee range covered by this line record, turn this line into that
// call's "call line". Make a `call_location_idx` for it and store it in `callee_call_locations`.
if next_inline.is_some() && next_inline.as_ref().unwrap().start < line_range_end {
let inline_range = next_inline.take().unwrap();
// "make_call_location"
let (call_location_idx, _) =
self.call_locations.insert_full(source_location.clone());
callee_call_locations.push((inline_range.start, call_location_idx as u32));
// Advance current_address to the end of this inlinee range.
current_address = inline_range.end;
next_inline = inline_iter.next();
} else {
// No further inlinee ranges are overlapping with this line record. Advance to the
// end of the line record.
current_address = line_range_end;
}
}
// Advance the line iterator.
next_line = line_iter.next();
// Skip any lines that start before current_address.
// Such lines can exist if the debug information is faulty, or if the compiler created
// multiple identical small "call line" records instead of one combined record
// covering the entire inlinee range. We can't have different "call lines" for a single
// inlinee range anyway, so it's fine to skip these.
while next_line.is_some()
&& (next_line.as_ref().unwrap().address as u32) < current_address
{
next_line = line_iter.next();
}
}
if !function.inline {
// add the bare minimum of information for the function if there isn't any.
self.ranges.entry(entry_pc).or_insert(raw::SourceLocation {
file_idx: u32::MAX,
line: 0,
function_idx,
inlined_into_idx: u32::MAX,
});
}
// We've processed all address ranges which are *not* covered by inlinees.
// Now it's time to recurse.
// Process our inlinees.
if !callee_call_locations.is_empty() {
for inlinee in &function.inlinees {
self.process_symbolic_function_recursive(inlinee, &callee_call_locations);
}
}
let function_end = function.end_address() as u32;
let last_addr = self.last_addr.get_or_insert(0);
if function_end > *last_addr {
*last_addr = function_end;
}
}
/// Processes an individual [`Symbol`].
pub fn process_symbolic_symbol(&mut self, symbol: &Symbol<'_>) {
let name_idx = {
let mut function = transform::Function {
name: match symbol.name {
Some(ref name) => name.clone(),
None => return,
},
comp_dir: None,
};
for transformer in &mut self.transformers.0 {
function = transformer.transform_function(function);
}
let function_name = if self.is_windows_object {
undecorate_win_symbol(&function.name)
} else {
&function.name
};
self.string_table.insert(function_name) as u32
};
match self.ranges.entry(symbol.address as u32) {
btree_map::Entry::Vacant(entry) => {
let function = raw::Function {
name_offset: name_idx,
_comp_dir_offset: u32::MAX,
entry_pc: symbol.address as u32,
lang: u32::MAX,
};
let function_idx = self.functions.insert_full(function).0 as u32;
entry.insert(raw::SourceLocation {
file_idx: u32::MAX,
line: 0,
function_idx,
inlined_into_idx: u32::MAX,
});
}
btree_map::Entry::Occupied(entry) => {
// ASSUMPTION:
// the `functions` iterator has already filled in this addr via debug session.
// we could trace the caller hierarchy up to the root, and assert that it is
// indeed the same function, and maybe update its `entry_pc`, but we don’t do
// that for now.
let _function_idx = entry.get().function_idx as usize;
}
}
let last_addr = self.last_addr.get_or_insert(0);
if symbol.address as u32 >= *last_addr {
self.last_addr = None;
}
}
// Methods for serializing to a [`Write`] below:
// Feel free to move these to a separate file.
/// Serialize the converted data.
///
/// This writes the SymCache binary format into the given [`Write`].
pub fn serialize<W: Write>(mut self, writer: &mut W) -> std::io::Result<()> {
let mut writer = Writer::new(writer);
// Insert a trailing sentinel source location in case we have a definite end addr
if let Some(last_addr) = self.last_addr {
// TODO: to be extra safe, we might check that `last_addr` is indeed larger than
// the largest range at some point.
match self.ranges.entry(last_addr) {
btree_map::Entry::Vacant(entry) => {
entry.insert(raw::NO_SOURCE_LOCATION);
}
btree_map::Entry::Occupied(_entry) => {
// BUG:
// the last addr should not map to an already defined range
}
}
}
let num_files = self.files.len() as u32;
let num_functions = self.functions.len() as u32;
let num_source_locations = (self.call_locations.len() + self.ranges.len()) as u32;
let num_ranges = self.ranges.len() as u32;
let string_bytes = self.string_table.into_bytes();
let header = raw::Header {
magic: raw::SYMCACHE_MAGIC,
version: crate::SYMCACHE_VERSION,
debug_id: self.debug_id,
arch: self.arch,
num_files,
num_functions,
num_source_locations,
num_ranges,
string_bytes: string_bytes.len() as u32,
_reserved: [0; 16],
};
writer.write_all(header.as_bytes())?;
writer.align_to(8)?;
for f in self.files {
writer.write_all(f.as_bytes())?;
}
writer.align_to(8)?;
for f in self.functions {
writer.write_all(f.as_bytes())?;
}
writer.align_to(8)?;
for s in self.call_locations {
writer.write_all(s.as_bytes())?;
}
for s in self.ranges.values() {
writer.write_all(s.as_bytes())?;
}
writer.align_to(8)?;
for r in self.ranges.keys() {
writer.write_all(r.as_bytes())?;
}
writer.align_to(8)?;
writer.write_all(&string_bytes)?;
Ok(())
}
}
/// Undecorates a Windows C-decorated symbol name.
///
/// The decoration rules are explained here:
/// <https://docs.microsoft.com/en-us/cpp/build/reference/decorated-names?view=vs-2019>
///
/// - __cdecl Leading underscore (_)
/// - __stdcall Leading underscore (_) and a trailing at sign (@) followed by the number of bytes in the parameter list in decimal
/// - __fastcall Leading and trailing at signs (@) followed by a decimal number representing the number of bytes in the parameter list
/// - __vectorcall Two trailing at signs (@@) followed by a decimal number of bytes in the parameter list
/// > In a 64-bit environment, C or extern "C" functions are only decorated when using the __vectorcall calling convention."
///
/// This code is adapted from `dump_syms`:
/// See <https://github.com/mozilla/dump_syms/blob/325cf2c61b2cacc55a7f1af74081b57237c7f9de/src/symbol.rs#L169-L216>
fn undecorate_win_symbol(name: &str) -> &str {
if name.starts_with('?') || name.contains([':', '(', '<']) {
return name;
}
// Parse __vectorcall.
if let Some((name, param_size)) = name.rsplit_once("@@") {
if param_size.parse::<u32>().is_ok() {
return name;
}
}
// Parse the other three.
if !name.is_empty() {
if let ("@" | "_", rest) = name.split_at(1) {
if let Some((name, param_size)) = rest.rsplit_once('@') {
if param_size.parse::<u32>().is_ok() {
// __stdcall or __fastcall
return name;
}
}
if let Some(name) = name.strip_prefix('_') {
// __cdecl
return name;
}
}
}
name
} | } else {
function.address as u32
};
let function_idx = { | random_line_split |
CAPM_ab.py |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as ColCon
import scipy.stats as stats
import basicMathlib as bMl
from graph_lib import gl
import utilities_lib as ul
###################################################
########### Functions that have to with alpha beta #####################
########################################################
## Just another way to express the return and variance
## of all the symbols, related to one index.
def set_index(self, symbol_index = -1):
## Set the index of CAPM model
|
def get_indexReturns(self):
index = self.Sindex # The index
ind_ret = self.pf.symbols[index].TDs[self.period].get_timeSeriesReturn()
return ind_ret
def get_indexMeanReturn(self):
ind_ret = self.get_indexReturns()
ind_ret = np.mean(ind_ret)
return ind_ret
def get_symbol_ab(self, symbol):
## This function outputs the alpha beta a symbol
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# plt.scatter(ind_ret,sym_ret)
coeff = bMl.get_linearRef(ind_ret, sym_ret)
return coeff
def get_all_symbols_ab (self):
symbols = self.pf.symbols.keys()
coeffs = []
for sym in symbols:
coeffs.append(self.get_symbol_ab(sym))
return coeffs
def get_portfolio_ab(self, mode = "normal"):
### This function gets the alpha beta for the portfolio
index = self.Sindex
if (mode == "normal"):
# We calculate it in a gaussian way
returns = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
coeff = bMl.get_linearRef(ind_ret, returns)
if (mode == "gaussian"):
# We calculate by calculating the individual ones first.
# The total coefficient is the sum of all coefficients
coeffs = np.array(self.get_all_symbols_ab())
coeff = coeffs.T.dot(self.allocation)
return coeff
def get_symbol_JensenAlpha(self, symbol, mode = "normal"):
### This function gets the Jensens Alpha of the portolio.
## Which is the alpha of the portfolio, taking into account
## The risk-free rate. Which is what is everything expected to
# Grow.
index = self.Sindex
coeff = self.get_symbol_ab(symbol)
beta = coeff[1]
# print "beta = " + str(beta)
returns = self.get_SymbolReturn(symbol)
ind_ret = self.get_indexReturns()
# It is the difference between what we obtain and the index
# Sum of weighted alphas, taking into account the Riskfree Rate
JensenAlpha = (returns - self.Rf) - beta*(ind_ret - self.Rf)
return JensenAlpha
def get_portfolio_JensenAlpha(self, mode = "normal"):
### This function gets the Jensens Alpha of the portolio.
## Which is the alpha of the portfolio, taking into account
## The risk-free rate. Which is what is everything expected to
# Grow.
index = self.Sindex
coeff = self.get_portfolio_ab(mode = mode)
beta = coeff[1]
# print "beta = " + str(beta)
returns = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
# It is the difference between what we obtain and the index
# Sum of weighted alphas, taking into account the Riskfree Rate
JensenAlpha = (returns - self.Rf) - beta*(ind_ret - self.Rf)
return JensenAlpha
def test_Jensens_Alpha(self, nf = 1):
# Test the gaussianity and confidence of the alpha.
residual = self.get_portfolio_JensenAlpha()
ttest = stats.ttest_1samp(a = residual, # Sample data
popmean = 0) # Pop mean
print "TESTING PORFOLIO"
print np.mean(residual), np.std(residual)
print ttest
## Fit a gaussian and plot it
gl.histogram(residual)
def test_symbol_ab(self,symbol, nf = 1):
## This function tests that the residuals behaves properly.
## That is, that the alpha (how we behave compared to the market)
## has a nice gaussian distribution.
## Slide 7
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# Get coefficients for the symbol
coeffs = self.get_symbol_ab(symbol)
##### GET THE RESIDUAL
X = np.concatenate((np.ones((sym_ret.shape[0],1)),sym_ret),axis = 1)
pred = X.dot(np.array(coeffs)) # Pred = X * Phi
pred = pred.reshape(pred.shape[0],1)
residual = pred - ind_ret
print "Mean of residual %f" % np.mean(residual)
### Now we test the residual
print "Statistical test of residual"
ttest = stats.ttest_1samp(a = residual, # Sample data
popmean = 0) # Pop mean
print ttest
######## DOUBLE REGRESSION OF PAGE 7. Early empirical test
Xres = np.concatenate((ind_ret,np.power(residual,2)),axis = 1)
coeff = bMl.get_linearRef(Xres, sym_ret)
print "Early empirical test of CAPM is wrong"
print coeff
hist, bin_edges = np.histogram(residual, density=True)
gl.bar(bin_edges[:-1], hist,
labels = ["Distribution","Return", "Probability"],
legend = [symbol],
alpha = 0.5,
nf = nf)
## Lets get some statistics using stats
m, v, s, k = stats.t.stats(10, moments='mvsk')
n, (smin, smax), sm, sv, ss, sk = stats.describe(residual)
print "****** MORE STATISTIC ************"
print "Mean " + str(sm)
tt = (sm-m)/np.sqrt(sv/float(n)) # t-statistic for mean
pval = stats.t.sf(np.abs(tt), n-1)*2 # two-sided pvalue = Prob(abs(t)>tt)
print 't-statistic = %6.3f pvalue = %6.4f' % (tt, pval)
return coeff
def marketTiming(self,returns = [], ind_ret = [], mode = "Treynor-Mazuy"):
# Investigate if the model is good.
# We put a cuatric term of the error.
returns = ul.fnp(returns)
ind_ret = ul.fnp(ind_ret)
if (returns.size == 0):
returns = self.get_PortfolioReturn()
if (ind_ret.size == 0):
ind_ret = self.get_indexReturns()
# Instead of fitting a line, we fit a parabola, to try to see
# if we do better than the market return. If when Rm is higher, we have
# higher beta, and if when Rm is lower, we have lower beta. So higher
# and lowr return fitting a curve, cuatric,
gl.scatter(ind_ret, returns,
labels = ["Treynor-Mazuy", "Index Return", "Portfolio Return"],
legend = ["Returns"])
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, returns)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid, legend = ["Linear Regression"], nf = 0)
Xres = np.concatenate((ind_ret,np.power(ind_ret,2)),axis = 1)
coeffs = bMl.get_linearRef(Xres, returns)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid,np.power(x_grid,2).reshape(Npoints,1) ),axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
# print y_grid.shape
gl.plot(x_grid, y_grid, legend = ["Quadratic Regression"], nf = 0)
print coeffs
return 1
def get_residuals_ab(self):
# For histogram
import pylab
import scipy.stats as stats
measurements = np.random.normal(loc = 20, scale = 5, size=100)
stats.probplot(measurements, dist="norm", plot=pylab)
pylab.show()
def plot_portfoliocorrab(self, nf = 1):
# This function plots the returns of a symbol compared
# to the index, and computes the regresion and correlation parameters.
index = self.Sindex # The index
sym_ret = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
# Mean and covariance
data = np.concatenate((sym_ret,ind_ret),axis = 1)
means = np.mean(data, axis = 0)
cov = np.cov(data)
# Regression
coeffs = bMl.get_linearRef(ind_ret, sym_ret)
gl.scatter(ind_ret, sym_ret,
labels = ["Gaussianity study", "Index: " + self.Sindex,"Porfolio"],
legend = ["Returns"],
nf = nf)
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, sym_ret)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid,
legend = ["b: %.2f ,a: %.2f" % (coeffs[1], coeffs[0])],
nf = 0)
def plot_corrab(self, symbol, nf = 1):
# This function plots the returns of a symbol compared
# to the index, and computes the regresion and correlation parameters.
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# Mean and covariance
data = np.concatenate((sym_ret,ind_ret),axis = 1)
means = np.mean(data, axis = 0)
cov = np.cov(data)
# Regression
coeffs = bMl.get_linearRef(ind_ret, sym_ret)
gl.scatter(ind_ret, sym_ret,
labels = ["Gaussianity study", "Index: " + self.Sindex,symbol],
legend = ["Returns"],
nf = nf)
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, sym_ret)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid,
legend = ["b: %.2f ,a: %.2f" % (coeffs[1], coeffs[0])],
nf = 0)
| if (type(symbol_index) == type(-1)):
# If we are given nothing or a number
# We just stablish the first one
symbol_index = self.pf.symbols.keys()[0]
self.Sindex = symbol_index | identifier_body |
CAPM_ab.py | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as ColCon
import scipy.stats as stats
import basicMathlib as bMl
from graph_lib import gl
import utilities_lib as ul
###################################################
########### Functions that have to with alpha beta #####################
########################################################
## Just another way to express the return and variance
## of all the symbols, related to one index.
def set_index(self, symbol_index = -1):
## Set the index of CAPM model
if (type(symbol_index) == type(-1)):
# If we are given nothing or a number
# We just stablish the first one
symbol_index = self.pf.symbols.keys()[0]
self.Sindex = symbol_index
def get_indexReturns(self):
index = self.Sindex # The index
ind_ret = self.pf.symbols[index].TDs[self.period].get_timeSeriesReturn()
return ind_ret
def get_indexMeanReturn(self):
ind_ret = self.get_indexReturns()
ind_ret = np.mean(ind_ret)
return ind_ret
def get_symbol_ab(self, symbol):
## This function outputs the alpha beta a symbol
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# plt.scatter(ind_ret,sym_ret)
coeff = bMl.get_linearRef(ind_ret, sym_ret)
return coeff
def get_all_symbols_ab (self):
symbols = self.pf.symbols.keys()
coeffs = []
for sym in symbols:
coeffs.append(self.get_symbol_ab(sym))
return coeffs
def get_portfolio_ab(self, mode = "normal"):
### This function gets the alpha beta for the portfolio
index = self.Sindex
if (mode == "normal"):
# We calculate it in a gaussian way
returns = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
coeff = bMl.get_linearRef(ind_ret, returns)
if (mode == "gaussian"):
# We calculate by calculating the individual ones first.
# The total coefficient is the sum of all coefficients
coeffs = np.array(self.get_all_symbols_ab())
coeff = coeffs.T.dot(self.allocation)
return coeff
def get_symbol_JensenAlpha(self, symbol, mode = "normal"):
### This function gets the Jensens Alpha of the portolio.
## Which is the alpha of the portfolio, taking into account
## The risk-free rate. Which is what is everything expected to
# Grow.
index = self.Sindex
coeff = self.get_symbol_ab(symbol)
beta = coeff[1]
# print "beta = " + str(beta)
returns = self.get_SymbolReturn(symbol)
ind_ret = self.get_indexReturns()
# It is the difference between what we obtain and the index
# Sum of weighted alphas, taking into account the Riskfree Rate
JensenAlpha = (returns - self.Rf) - beta*(ind_ret - self.Rf)
return JensenAlpha
def get_portfolio_JensenAlpha(self, mode = "normal"):
### This function gets the Jensens Alpha of the portolio.
## Which is the alpha of the portfolio, taking into account
## The risk-free rate. Which is what is everything expected to
# Grow.
index = self.Sindex
coeff = self.get_portfolio_ab(mode = mode)
beta = coeff[1]
# print "beta = " + str(beta)
returns = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
# It is the difference between what we obtain and the index
# Sum of weighted alphas, taking into account the Riskfree Rate
JensenAlpha = (returns - self.Rf) - beta*(ind_ret - self.Rf)
return JensenAlpha
def test_Jensens_Alpha(self, nf = 1):
# Test the gaussianity and confidence of the alpha.
residual = self.get_portfolio_JensenAlpha()
ttest = stats.ttest_1samp(a = residual, # Sample data
popmean = 0) # Pop mean
print "TESTING PORFOLIO"
print np.mean(residual), np.std(residual)
print ttest
## Fit a gaussian and plot it
gl.histogram(residual)
def test_symbol_ab(self,symbol, nf = 1):
## This function tests that the residuals behaves properly.
## That is, that the alpha (how we behave compared to the market)
## has a nice gaussian distribution.
## Slide 7
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# Get coefficients for the symbol
coeffs = self.get_symbol_ab(symbol)
##### GET THE RESIDUAL
X = np.concatenate((np.ones((sym_ret.shape[0],1)),sym_ret),axis = 1)
pred = X.dot(np.array(coeffs)) # Pred = X * Phi
pred = pred.reshape(pred.shape[0],1)
residual = pred - ind_ret
print "Mean of residual %f" % np.mean(residual)
### Now we test the residual
print "Statistical test of residual"
ttest = stats.ttest_1samp(a = residual, # Sample data
popmean = 0) # Pop mean
print ttest
######## DOUBLE REGRESSION OF PAGE 7. Early empirical test
Xres = np.concatenate((ind_ret,np.power(residual,2)),axis = 1)
coeff = bMl.get_linearRef(Xres, sym_ret)
print "Early empirical test of CAPM is wrong"
print coeff | hist, bin_edges = np.histogram(residual, density=True)
gl.bar(bin_edges[:-1], hist,
labels = ["Distribution","Return", "Probability"],
legend = [symbol],
alpha = 0.5,
nf = nf)
## Lets get some statistics using stats
m, v, s, k = stats.t.stats(10, moments='mvsk')
n, (smin, smax), sm, sv, ss, sk = stats.describe(residual)
print "****** MORE STATISTIC ************"
print "Mean " + str(sm)
tt = (sm-m)/np.sqrt(sv/float(n)) # t-statistic for mean
pval = stats.t.sf(np.abs(tt), n-1)*2 # two-sided pvalue = Prob(abs(t)>tt)
print 't-statistic = %6.3f pvalue = %6.4f' % (tt, pval)
return coeff
def marketTiming(self,returns = [], ind_ret = [], mode = "Treynor-Mazuy"):
# Investigate if the model is good.
# We put a cuatric term of the error.
returns = ul.fnp(returns)
ind_ret = ul.fnp(ind_ret)
if (returns.size == 0):
returns = self.get_PortfolioReturn()
if (ind_ret.size == 0):
ind_ret = self.get_indexReturns()
# Instead of fitting a line, we fit a parabola, to try to see
# if we do better than the market return. If when Rm is higher, we have
# higher beta, and if when Rm is lower, we have lower beta. So higher
# and lowr return fitting a curve, cuatric,
gl.scatter(ind_ret, returns,
labels = ["Treynor-Mazuy", "Index Return", "Portfolio Return"],
legend = ["Returns"])
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, returns)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid, legend = ["Linear Regression"], nf = 0)
Xres = np.concatenate((ind_ret,np.power(ind_ret,2)),axis = 1)
coeffs = bMl.get_linearRef(Xres, returns)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid,np.power(x_grid,2).reshape(Npoints,1) ),axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
# print y_grid.shape
gl.plot(x_grid, y_grid, legend = ["Quadratic Regression"], nf = 0)
print coeffs
return 1
def get_residuals_ab(self):
# For histogram
import pylab
import scipy.stats as stats
measurements = np.random.normal(loc = 20, scale = 5, size=100)
stats.probplot(measurements, dist="norm", plot=pylab)
pylab.show()
def plot_portfoliocorrab(self, nf = 1):
# This function plots the returns of a symbol compared
# to the index, and computes the regresion and correlation parameters.
index = self.Sindex # The index
sym_ret = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
# Mean and covariance
data = np.concatenate((sym_ret,ind_ret),axis = 1)
means = np.mean(data, axis = 0)
cov = np.cov(data)
# Regression
coeffs = bMl.get_linearRef(ind_ret, sym_ret)
gl.scatter(ind_ret, sym_ret,
labels = ["Gaussianity study", "Index: " + self.Sindex,"Porfolio"],
legend = ["Returns"],
nf = nf)
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, sym_ret)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid,
legend = ["b: %.2f ,a: %.2f" % (coeffs[1], coeffs[0])],
nf = 0)
def plot_corrab(self, symbol, nf = 1):
# This function plots the returns of a symbol compared
# to the index, and computes the regresion and correlation parameters.
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# Mean and covariance
data = np.concatenate((sym_ret,ind_ret),axis = 1)
means = np.mean(data, axis = 0)
cov = np.cov(data)
# Regression
coeffs = bMl.get_linearRef(ind_ret, sym_ret)
gl.scatter(ind_ret, sym_ret,
labels = ["Gaussianity study", "Index: " + self.Sindex,symbol],
legend = ["Returns"],
nf = nf)
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, sym_ret)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid,
legend = ["b: %.2f ,a: %.2f" % (coeffs[1], coeffs[0])],
nf = 0) | random_line_split | |
CAPM_ab.py |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as ColCon
import scipy.stats as stats
import basicMathlib as bMl
from graph_lib import gl
import utilities_lib as ul
###################################################
########### Functions that have to with alpha beta #####################
########################################################
## Just another way to express the return and variance
## of all the symbols, related to one index.
def set_index(self, symbol_index = -1):
## Set the index of CAPM model
if (type(symbol_index) == type(-1)):
# If we are given nothing or a number
# We just stablish the first one
|
self.Sindex = symbol_index
def get_indexReturns(self):
index = self.Sindex # The index
ind_ret = self.pf.symbols[index].TDs[self.period].get_timeSeriesReturn()
return ind_ret
def get_indexMeanReturn(self):
ind_ret = self.get_indexReturns()
ind_ret = np.mean(ind_ret)
return ind_ret
def get_symbol_ab(self, symbol):
## This function outputs the alpha beta a symbol
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# plt.scatter(ind_ret,sym_ret)
coeff = bMl.get_linearRef(ind_ret, sym_ret)
return coeff
def get_all_symbols_ab (self):
symbols = self.pf.symbols.keys()
coeffs = []
for sym in symbols:
coeffs.append(self.get_symbol_ab(sym))
return coeffs
def get_portfolio_ab(self, mode = "normal"):
### This function gets the alpha beta for the portfolio
index = self.Sindex
if (mode == "normal"):
# We calculate it in a gaussian way
returns = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
coeff = bMl.get_linearRef(ind_ret, returns)
if (mode == "gaussian"):
# We calculate by calculating the individual ones first.
# The total coefficient is the sum of all coefficients
coeffs = np.array(self.get_all_symbols_ab())
coeff = coeffs.T.dot(self.allocation)
return coeff
def get_symbol_JensenAlpha(self, symbol, mode = "normal"):
### This function gets the Jensens Alpha of the portolio.
## Which is the alpha of the portfolio, taking into account
## The risk-free rate. Which is what is everything expected to
# Grow.
index = self.Sindex
coeff = self.get_symbol_ab(symbol)
beta = coeff[1]
# print "beta = " + str(beta)
returns = self.get_SymbolReturn(symbol)
ind_ret = self.get_indexReturns()
# It is the difference between what we obtain and the index
# Sum of weighted alphas, taking into account the Riskfree Rate
JensenAlpha = (returns - self.Rf) - beta*(ind_ret - self.Rf)
return JensenAlpha
def get_portfolio_JensenAlpha(self, mode = "normal"):
### This function gets the Jensens Alpha of the portolio.
## Which is the alpha of the portfolio, taking into account
## The risk-free rate. Which is what is everything expected to
# Grow.
index = self.Sindex
coeff = self.get_portfolio_ab(mode = mode)
beta = coeff[1]
# print "beta = " + str(beta)
returns = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
# It is the difference between what we obtain and the index
# Sum of weighted alphas, taking into account the Riskfree Rate
JensenAlpha = (returns - self.Rf) - beta*(ind_ret - self.Rf)
return JensenAlpha
def test_Jensens_Alpha(self, nf = 1):
# Test the gaussianity and confidence of the alpha.
residual = self.get_portfolio_JensenAlpha()
ttest = stats.ttest_1samp(a = residual, # Sample data
popmean = 0) # Pop mean
print "TESTING PORFOLIO"
print np.mean(residual), np.std(residual)
print ttest
## Fit a gaussian and plot it
gl.histogram(residual)
def test_symbol_ab(self,symbol, nf = 1):
## This function tests that the residuals behaves properly.
## That is, that the alpha (how we behave compared to the market)
## has a nice gaussian distribution.
## Slide 7
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# Get coefficients for the symbol
coeffs = self.get_symbol_ab(symbol)
##### GET THE RESIDUAL
X = np.concatenate((np.ones((sym_ret.shape[0],1)),sym_ret),axis = 1)
pred = X.dot(np.array(coeffs)) # Pred = X * Phi
pred = pred.reshape(pred.shape[0],1)
residual = pred - ind_ret
print "Mean of residual %f" % np.mean(residual)
### Now we test the residual
print "Statistical test of residual"
ttest = stats.ttest_1samp(a = residual, # Sample data
popmean = 0) # Pop mean
print ttest
######## DOUBLE REGRESSION OF PAGE 7. Early empirical test
Xres = np.concatenate((ind_ret,np.power(residual,2)),axis = 1)
coeff = bMl.get_linearRef(Xres, sym_ret)
print "Early empirical test of CAPM is wrong"
print coeff
hist, bin_edges = np.histogram(residual, density=True)
gl.bar(bin_edges[:-1], hist,
labels = ["Distribution","Return", "Probability"],
legend = [symbol],
alpha = 0.5,
nf = nf)
## Lets get some statistics using stats
m, v, s, k = stats.t.stats(10, moments='mvsk')
n, (smin, smax), sm, sv, ss, sk = stats.describe(residual)
print "****** MORE STATISTIC ************"
print "Mean " + str(sm)
tt = (sm-m)/np.sqrt(sv/float(n)) # t-statistic for mean
pval = stats.t.sf(np.abs(tt), n-1)*2 # two-sided pvalue = Prob(abs(t)>tt)
print 't-statistic = %6.3f pvalue = %6.4f' % (tt, pval)
return coeff
def marketTiming(self,returns = [], ind_ret = [], mode = "Treynor-Mazuy"):
# Investigate if the model is good.
# We put a cuatric term of the error.
returns = ul.fnp(returns)
ind_ret = ul.fnp(ind_ret)
if (returns.size == 0):
returns = self.get_PortfolioReturn()
if (ind_ret.size == 0):
ind_ret = self.get_indexReturns()
# Instead of fitting a line, we fit a parabola, to try to see
# if we do better than the market return. If when Rm is higher, we have
# higher beta, and if when Rm is lower, we have lower beta. So higher
# and lowr return fitting a curve, cuatric,
gl.scatter(ind_ret, returns,
labels = ["Treynor-Mazuy", "Index Return", "Portfolio Return"],
legend = ["Returns"])
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, returns)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid, legend = ["Linear Regression"], nf = 0)
Xres = np.concatenate((ind_ret,np.power(ind_ret,2)),axis = 1)
coeffs = bMl.get_linearRef(Xres, returns)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid,np.power(x_grid,2).reshape(Npoints,1) ),axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
# print y_grid.shape
gl.plot(x_grid, y_grid, legend = ["Quadratic Regression"], nf = 0)
print coeffs
return 1
def get_residuals_ab(self):
# For histogram
import pylab
import scipy.stats as stats
measurements = np.random.normal(loc = 20, scale = 5, size=100)
stats.probplot(measurements, dist="norm", plot=pylab)
pylab.show()
def plot_portfoliocorrab(self, nf = 1):
# This function plots the returns of a symbol compared
# to the index, and computes the regresion and correlation parameters.
index = self.Sindex # The index
sym_ret = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
# Mean and covariance
data = np.concatenate((sym_ret,ind_ret),axis = 1)
means = np.mean(data, axis = 0)
cov = np.cov(data)
# Regression
coeffs = bMl.get_linearRef(ind_ret, sym_ret)
gl.scatter(ind_ret, sym_ret,
labels = ["Gaussianity study", "Index: " + self.Sindex,"Porfolio"],
legend = ["Returns"],
nf = nf)
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, sym_ret)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid,
legend = ["b: %.2f ,a: %.2f" % (coeffs[1], coeffs[0])],
nf = 0)
def plot_corrab(self, symbol, nf = 1):
# This function plots the returns of a symbol compared
# to the index, and computes the regresion and correlation parameters.
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# Mean and covariance
data = np.concatenate((sym_ret,ind_ret),axis = 1)
means = np.mean(data, axis = 0)
cov = np.cov(data)
# Regression
coeffs = bMl.get_linearRef(ind_ret, sym_ret)
gl.scatter(ind_ret, sym_ret,
labels = ["Gaussianity study", "Index: " + self.Sindex,symbol],
legend = ["Returns"],
nf = nf)
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, sym_ret)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid,
legend = ["b: %.2f ,a: %.2f" % (coeffs[1], coeffs[0])],
nf = 0)
| symbol_index = self.pf.symbols.keys()[0] | conditional_block |
CAPM_ab.py |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as ColCon
import scipy.stats as stats
import basicMathlib as bMl
from graph_lib import gl
import utilities_lib as ul
###################################################
########### Functions that have to with alpha beta #####################
########################################################
## Just another way to express the return and variance
## of all the symbols, related to one index.
def set_index(self, symbol_index = -1):
## Set the index of CAPM model
if (type(symbol_index) == type(-1)):
# If we are given nothing or a number
# We just stablish the first one
symbol_index = self.pf.symbols.keys()[0]
self.Sindex = symbol_index
def get_indexReturns(self):
index = self.Sindex # The index
ind_ret = self.pf.symbols[index].TDs[self.period].get_timeSeriesReturn()
return ind_ret
def get_indexMeanReturn(self):
ind_ret = self.get_indexReturns()
ind_ret = np.mean(ind_ret)
return ind_ret
def get_symbol_ab(self, symbol):
## This function outputs the alpha beta a symbol
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# plt.scatter(ind_ret,sym_ret)
coeff = bMl.get_linearRef(ind_ret, sym_ret)
return coeff
def get_all_symbols_ab (self):
symbols = self.pf.symbols.keys()
coeffs = []
for sym in symbols:
coeffs.append(self.get_symbol_ab(sym))
return coeffs
def get_portfolio_ab(self, mode = "normal"):
### This function gets the alpha beta for the portfolio
index = self.Sindex
if (mode == "normal"):
# We calculate it in a gaussian way
returns = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
coeff = bMl.get_linearRef(ind_ret, returns)
if (mode == "gaussian"):
# We calculate by calculating the individual ones first.
# The total coefficient is the sum of all coefficients
coeffs = np.array(self.get_all_symbols_ab())
coeff = coeffs.T.dot(self.allocation)
return coeff
def get_symbol_JensenAlpha(self, symbol, mode = "normal"):
### This function gets the Jensens Alpha of the portolio.
## Which is the alpha of the portfolio, taking into account
## The risk-free rate. Which is what is everything expected to
# Grow.
index = self.Sindex
coeff = self.get_symbol_ab(symbol)
beta = coeff[1]
# print "beta = " + str(beta)
returns = self.get_SymbolReturn(symbol)
ind_ret = self.get_indexReturns()
# It is the difference between what we obtain and the index
# Sum of weighted alphas, taking into account the Riskfree Rate
JensenAlpha = (returns - self.Rf) - beta*(ind_ret - self.Rf)
return JensenAlpha
def get_portfolio_JensenAlpha(self, mode = "normal"):
### This function gets the Jensens Alpha of the portolio.
## Which is the alpha of the portfolio, taking into account
## The risk-free rate. Which is what is everything expected to
# Grow.
index = self.Sindex
coeff = self.get_portfolio_ab(mode = mode)
beta = coeff[1]
# print "beta = " + str(beta)
returns = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
# It is the difference between what we obtain and the index
# Sum of weighted alphas, taking into account the Riskfree Rate
JensenAlpha = (returns - self.Rf) - beta*(ind_ret - self.Rf)
return JensenAlpha
def test_Jensens_Alpha(self, nf = 1):
# Test the gaussianity and confidence of the alpha.
residual = self.get_portfolio_JensenAlpha()
ttest = stats.ttest_1samp(a = residual, # Sample data
popmean = 0) # Pop mean
print "TESTING PORFOLIO"
print np.mean(residual), np.std(residual)
print ttest
## Fit a gaussian and plot it
gl.histogram(residual)
def | (self,symbol, nf = 1):
## This function tests that the residuals behaves properly.
## That is, that the alpha (how we behave compared to the market)
## has a nice gaussian distribution.
## Slide 7
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# Get coefficients for the symbol
coeffs = self.get_symbol_ab(symbol)
##### GET THE RESIDUAL
X = np.concatenate((np.ones((sym_ret.shape[0],1)),sym_ret),axis = 1)
pred = X.dot(np.array(coeffs)) # Pred = X * Phi
pred = pred.reshape(pred.shape[0],1)
residual = pred - ind_ret
print "Mean of residual %f" % np.mean(residual)
### Now we test the residual
print "Statistical test of residual"
ttest = stats.ttest_1samp(a = residual, # Sample data
popmean = 0) # Pop mean
print ttest
######## DOUBLE REGRESSION OF PAGE 7. Early empirical test
Xres = np.concatenate((ind_ret,np.power(residual,2)),axis = 1)
coeff = bMl.get_linearRef(Xres, sym_ret)
print "Early empirical test of CAPM is wrong"
print coeff
hist, bin_edges = np.histogram(residual, density=True)
gl.bar(bin_edges[:-1], hist,
labels = ["Distribution","Return", "Probability"],
legend = [symbol],
alpha = 0.5,
nf = nf)
## Lets get some statistics using stats
m, v, s, k = stats.t.stats(10, moments='mvsk')
n, (smin, smax), sm, sv, ss, sk = stats.describe(residual)
print "****** MORE STATISTIC ************"
print "Mean " + str(sm)
tt = (sm-m)/np.sqrt(sv/float(n)) # t-statistic for mean
pval = stats.t.sf(np.abs(tt), n-1)*2 # two-sided pvalue = Prob(abs(t)>tt)
print 't-statistic = %6.3f pvalue = %6.4f' % (tt, pval)
return coeff
def marketTiming(self,returns = [], ind_ret = [], mode = "Treynor-Mazuy"):
# Investigate if the model is good.
# We put a cuatric term of the error.
returns = ul.fnp(returns)
ind_ret = ul.fnp(ind_ret)
if (returns.size == 0):
returns = self.get_PortfolioReturn()
if (ind_ret.size == 0):
ind_ret = self.get_indexReturns()
# Instead of fitting a line, we fit a parabola, to try to see
# if we do better than the market return. If when Rm is higher, we have
# higher beta, and if when Rm is lower, we have lower beta. So higher
# and lowr return fitting a curve, cuatric,
gl.scatter(ind_ret, returns,
labels = ["Treynor-Mazuy", "Index Return", "Portfolio Return"],
legend = ["Returns"])
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, returns)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid, legend = ["Linear Regression"], nf = 0)
Xres = np.concatenate((ind_ret,np.power(ind_ret,2)),axis = 1)
coeffs = bMl.get_linearRef(Xres, returns)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid,np.power(x_grid,2).reshape(Npoints,1) ),axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
# print y_grid.shape
gl.plot(x_grid, y_grid, legend = ["Quadratic Regression"], nf = 0)
print coeffs
return 1
def get_residuals_ab(self):
# For histogram
import pylab
import scipy.stats as stats
measurements = np.random.normal(loc = 20, scale = 5, size=100)
stats.probplot(measurements, dist="norm", plot=pylab)
pylab.show()
def plot_portfoliocorrab(self, nf = 1):
# This function plots the returns of a symbol compared
# to the index, and computes the regresion and correlation parameters.
index = self.Sindex # The index
sym_ret = self.get_PortfolioReturn()
ind_ret = self.get_indexReturns()
# Mean and covariance
data = np.concatenate((sym_ret,ind_ret),axis = 1)
means = np.mean(data, axis = 0)
cov = np.cov(data)
# Regression
coeffs = bMl.get_linearRef(ind_ret, sym_ret)
gl.scatter(ind_ret, sym_ret,
labels = ["Gaussianity study", "Index: " + self.Sindex,"Porfolio"],
legend = ["Returns"],
nf = nf)
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, sym_ret)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid,
legend = ["b: %.2f ,a: %.2f" % (coeffs[1], coeffs[0])],
nf = 0)
def plot_corrab(self, symbol, nf = 1):
# This function plots the returns of a symbol compared
# to the index, and computes the regresion and correlation parameters.
index = self.Sindex # The index
sym_ret = self.pf.symbols[symbol].TDs[self.period].get_timeSeriesReturn()
ind_ret = self.get_indexReturns()
# Mean and covariance
data = np.concatenate((sym_ret,ind_ret),axis = 1)
means = np.mean(data, axis = 0)
cov = np.cov(data)
# Regression
coeffs = bMl.get_linearRef(ind_ret, sym_ret)
gl.scatter(ind_ret, sym_ret,
labels = ["Gaussianity study", "Index: " + self.Sindex,symbol],
legend = ["Returns"],
nf = nf)
## Linear regression:
Xres = ind_ret
coeffs = bMl.get_linearRef(Xres, sym_ret)
Npoints = 10000
x_grid = np.array(range(Npoints))/float(Npoints)
x_grid = x_grid*(max(ind_ret) - min(ind_ret)) + min(ind_ret)
x_grid = x_grid.reshape(Npoints,1)
x_grid_2 = np.concatenate((np.ones((Npoints,1)),x_grid), axis = 1)
y_grid = x_grid_2.dot(np.array(coeffs))
gl.plot(x_grid, y_grid,
legend = ["b: %.2f ,a: %.2f" % (coeffs[1], coeffs[0])],
nf = 0)
| test_symbol_ab | identifier_name |
TableauViz.js | var viz, workbook, activeSheet, selectedMarks, options, placeholderDiv;
var url = '';
var tableauServer = '';
var tableauSite = '';
var tableauPath = '';
var ticket = '';
var nameOfVizToInteract = 'Scatterplot';
function initializeViz() {
url = tableauServer + '/trusted/' + ticket + "/t" + tableauSite + tableauPath;
placeholderDiv = document.getElementById("tableauViz");
options = {
width: placeholderDiv.offsetWidth,
height: placeholderDiv.offsetHeight,
hideTabs: true,
hideToolbar: true,
":refresh": "yes",
onFirstInteractive: completeLoad
};
viz = new tableau.Viz(placeholderDiv, url, options);
}
function exportPDF() {
viz.showExportPDFDialog();
$('.tab-dialog')[0].animate({ 'marginLeft': "-=50px" });
}
function exportData() {
viz.showExportDataDialog();
}
function resetViz() {
viz.revertAllAsync();
}
function showVizButtons() {
var sheets = workbook.getPublishedSheetsInfo();
var divIndividualButtons = $('#vizButtons');
// First clear any buttons that may have been added on a previous load
divIndividualButtons.html("");
// Show 'standard' controls, common to all vizzes
divIndividualButtons.append('<button type="button" onclick="resetViz()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Reset Filters</button>');
divIndividualButtons.append('<button type="button" onclick="exportPDF()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Export PDF</button>');
divIndividualButtons.append('<button type="button" onclick="exportData()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Export Data</button>');
divIndividualButtons.append('<button type="button" onclick="launch_edit()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Edit</button>');
// Only show buttons to switch vizzes if there's more than one
if (sheets.length > 1) {
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
var sheet = sheets[sheetIndex];
divIndividualButtons.append('<button type="button" onclick="switchToViz(\'' + sheet.getName() + '\')" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">See ' + sheet.getName() + '</button>')
}
}
}
function switchToViz(vizName) {
workbook.activateSheetAsync(vizName).then(function (dashboard) {
dashboard.changeSizeAsync({
behavior: tableau.SheetSizeBehavior.AUTOMATIC
});
});
}
function onMarksSelection(marksEvent) {
//filter sheets of selected marks because we dont need to hear events on all of our sheets
if (marksEvent.getWorksheet().getName() == nameOfVizToInteract) {
//get,marksAsync() is a method in the API that will retun a set of the marks selected
return marksEvent.getMarksAsync().then(handleSelectedMarks);
}
}
function handleSelectedMarks(marks) {
| function submitMarks()
{
var referrer = viz.getWorkbook().getActiveSheet();
if (referrer.getSheetType() == "dashboard") {
// The active sheets is a dashboard, which is made of several sheets
var sheets = referrer.getWorksheets();
// Iterate over the sheets until we find the correct one and clear the marks
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
if (sheets[sheetIndex].getName() == nameOfVizToInteract) {
sheets[sheetIndex].clearSelectedMarksAsync();
}
}
}
else {
// This is not a dashboard so just clear the sheet's selection
referrer.clearSelectedMarksAsync();
}
tableauWriteBack(selectedMarks);
//var plural = ((selectedMarks.length == 1) ? "" : "s");
//$('#eventPanel').html("Success! <b>" + selectedMarks.length + "</b> selection" + plural + " submitted for research.");
//$('#eventBox').hide(2000);
}
function resetAllMarks() {
var referrer = viz.getWorkbook().getActiveSheet();
if (referrer.getSheetType() == "dashboard") {
// The active sheets is a dashboard, which is made of several sheets
var sheets = referrer.getWorksheets();
// Iterate over the sheets until we find the correct one and clear the marks
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
if (sheets[sheetIndex].getName() == nameOfVizToInteract) {
sheets[sheetIndex].clearSelectedMarksAsync();
}
}
}
else {
// This is not a dashboard so just clear the sheet's selection
referrer.clearSelectedMarksAsync();
}
$('#eventBox').hide(800);
$('#eventPanel').html("");
}
function launch_edit() {
// Adjust UI: Hide Buttons & navigation menu, increase size for edit mode
$('#VizToolbar').hide();
$('body').addClass("sidebar-collapse");
$(".content-wrapper").css("height","1200px");
$("#tableauViz").hide();
// If the URL happens to have a ticket on it, clean it up before loading the edit window
var url_parts = url.split('/t/');
url = tableauServer + '/t/' + url_parts[1];
var edit_location = tableauServer + '/en/embed_wrapper.html?src=' + url + '?:embed=y';
edit_iframe = document.createElement('iframe');
edit_iframe.src = edit_location;
// This makes it not look like an iframe
edit_iframe.style.padding = '0px';
edit_iframe.style.border = 'none';
edit_iframe.style.margin = '0px';
// Also set these with the same values in the embed_wrapper.html page
edit_iframe.style.width = '100%';
edit_iframe.style.height = '100%';
$('#editViz').html(edit_iframe);
$('#editViz').show();
}
function iframe_change(new_url) {
console.log("Old URL received in iframe_change: " + url);
console.log("New URL received in iframe_change: " + new_url);
// Destroy the original edit_iframe so you can build another one later if necessary
$(edit_iframe).remove();
// Destroy the original Tableau Viz object so you can create new one with URL of the Save(d) As version
viz.dispose();
// Reset the global vizURL at this point so that it all works circularly
// But first remove any embed/authoring attributes from the URL
var url_parts = new_url.split('?');
url = url_parts[0].replace('/authoring', '/views');
// Handle site
if (url.search('/site/') !== -1) {
url_parts = url.split('#/site/');
url = url_parts[0] + "t/" + url_parts[1];
vizUrlForWebEdit = url;
console.log("URL updated in iframe_change: " + url);
}
// Adjust UI: Show buttons & navigation menu, decrease size post-edit mode
$('#VizToolbar').show();
$('body').removeClass("sidebar-collapse");
$(".content-wrapper").css("height", "");
$("#tableauViz").show();
$("#editViz").hide();
// Create a new Viz object
viz = null;
viz = new tableau.Viz(placeholderDiv, url, options);
}
function completeLoad(e) {
// Once the workbook & viz have loaded, assign them to global variables
workbook = viz.getWorkbook();
activeSheet = workbook.getActiveSheet();
// Load custom controls based on the vizzes published to the server
showVizButtons();
viz.addEventListener(tableau.TableauEventName.MARKS_SELECTION, onMarksSelection);
}
$(document).ready(initializeViz);
|
// If selection has been cleared, no need to show a message
if (marks.length == 0) {
$('#eventBox').hide(600);
return;
}
// Save selected marks in memory so they can be submitted later
selectedMarks = marks;
$('#eventPanel').html("");
$('#eventBox').show(600);
// Logic for Equities Dashboard is specialized, any other scatterplots also are enabled but in a general sense
if (workbook.getActiveSheet().getName() == 'Individual Equities Dashboard') {
//loop through all the selected marks
var noOrders = 0;
var company = "";
var fixedClose = 0;
var fixedCloseLabel = "";
var changeFromPriorClose = 0;
var changeFromPriorCloseLabel = "";
var date = "";
for (var markIndex = 0; markIndex < marks.length; markIndex++) {
//getPairs gets tuples of data for the mark. one mark has multiple tuples
var pairs = marks[markIndex].getPairs();
for (var pairIndex = 0; pairIndex < pairs.length; pairIndex++) {
switch (pairs[pairIndex].fieldName) {
case "Company":
company = pairs[pairIndex].value;
break;
case "Date":
date = pairs[pairIndex].formattedValue;
break;
case "SUM(Fixed Close)":
fixedClose += pairs[pairIndex].value;
fixedCloseLabel = pairs[pairIndex].formattedValue;
break;
case "AGG(Change from Prior Close)":
changeFromPriorClose += pairs[pairIndex].value;
changeFromPriorCloseLabel = pairs[pairIndex].formattedValue;
break;
}
}
}
// With all values in memory, let's produce the UI
if (marks.length == 1) {
// When we select a single mark, we can show the individual details
$('#eventPanel').html("Submit <b>" + company + "</b>'s " + date + " trading period for research. The fixed close price was <b>$" + fixedCloseLabel + "</b> with a variance of <b>" + changeFromPriorCloseLabel + "</b>.");
}
else {
// But if more that one mark is selected, we show a summary (average)
var avgFixedClose = Number((fixedClose / marks.length).toFixed(2));
var avgChangeFromPriorCloseLabel = Number((changeFromPriorClose * 100 / marks.length).toFixed(2));
$('#eventPanel').html("Submit <b>" + marks.length + " " + company + "</b>'s trading periods for research. The average fixed close price was <b>$" + avgFixedClose + "</b> & the average variance of <b>" + avgChangeFromPriorCloseLabel + "%</b>.");
}
}
else {
// Save selection in memory and give the user the option to submit
var plural = ((marks.length == 1) ? "it" : "them");
var pluralS = ((selectedMarks.length == 1) ? "" : "s");
$('#eventPanel').html("You've selected <b>" + marks.length + "</b> outlier" + pluralS + "." + " Would you like to submit " + plural + " them for research?");
}
}
| identifier_body |
TableauViz.js | var viz, workbook, activeSheet, selectedMarks, options, placeholderDiv;
var url = '';
var tableauServer = '';
var tableauSite = '';
var tableauPath = '';
var ticket = '';
var nameOfVizToInteract = 'Scatterplot';
function initializeViz() {
url = tableauServer + '/trusted/' + ticket + "/t" + tableauSite + tableauPath;
placeholderDiv = document.getElementById("tableauViz");
options = {
width: placeholderDiv.offsetWidth,
height: placeholderDiv.offsetHeight,
hideTabs: true,
hideToolbar: true,
":refresh": "yes",
onFirstInteractive: completeLoad
};
viz = new tableau.Viz(placeholderDiv, url, options);
}
function exportPDF() {
viz.showExportPDFDialog();
$('.tab-dialog')[0].animate({ 'marginLeft': "-=50px" });
}
function exportData() {
viz.showExportDataDialog();
}
function resetViz() {
viz.revertAllAsync();
}
function showVizButtons() {
var sheets = workbook.getPublishedSheetsInfo();
var divIndividualButtons = $('#vizButtons');
// First clear any buttons that may have been added on a previous load
divIndividualButtons.html("");
// Show 'standard' controls, common to all vizzes
divIndividualButtons.append('<button type="button" onclick="resetViz()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Reset Filters</button>');
divIndividualButtons.append('<button type="button" onclick="exportPDF()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Export PDF</button>');
divIndividualButtons.append('<button type="button" onclick="exportData()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Export Data</button>');
divIndividualButtons.append('<button type="button" onclick="launch_edit()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Edit</button>');
// Only show buttons to switch vizzes if there's more than one
if (sheets.length > 1) {
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
var sheet = sheets[sheetIndex];
divIndividualButtons.append('<button type="button" onclick="switchToViz(\'' + sheet.getName() + '\')" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">See ' + sheet.getName() + '</button>')
}
}
}
function switchToViz(vizName) {
workbook.activateSheetAsync(vizName).then(function (dashboard) {
dashboard.changeSizeAsync({
behavior: tableau.SheetSizeBehavior.AUTOMATIC
});
});
}
function onMarksSelection(marksEvent) {
//filter sheets of selected marks because we dont need to hear events on all of our sheets
if (marksEvent.getWorksheet().getName() == nameOfVizToInteract) {
//get,marksAsync() is a method in the API that will retun a set of the marks selected
return marksEvent.getMarksAsync().then(handleSelectedMarks);
}
}
function handleSelectedMarks(marks) {
// If selection has been cleared, no need to show a message
if (marks.length == 0) {
$('#eventBox').hide(600);
return;
}
// Save selected marks in memory so they can be submitted later
selectedMarks = marks;
$('#eventPanel').html("");
$('#eventBox').show(600);
// Logic for Equities Dashboard is specialized, any other scatterplots also are enabled but in a general sense
if (workbook.getActiveSheet().getName() == 'Individual Equities Dashboard') {
//loop through all the selected marks
var noOrders = 0;
var company = "";
var fixedClose = 0;
var fixedCloseLabel = "";
var changeFromPriorClose = 0;
var changeFromPriorCloseLabel = "";
var date = "";
for (var markIndex = 0; markIndex < marks.length; markIndex++) {
//getPairs gets tuples of data for the mark. one mark has multiple tuples
var pairs = marks[markIndex].getPairs();
for (var pairIndex = 0; pairIndex < pairs.length; pairIndex++) {
switch (pairs[pairIndex].fieldName) {
case "Company":
company = pairs[pairIndex].value;
break;
case "Date":
date = pairs[pairIndex].formattedValue;
break;
case "SUM(Fixed Close)":
fixedClose += pairs[pairIndex].value;
fixedCloseLabel = pairs[pairIndex].formattedValue;
break;
case "AGG(Change from Prior Close)":
changeFromPriorClose += pairs[pairIndex].value;
changeFromPriorCloseLabel = pairs[pairIndex].formattedValue;
break;
}
}
}
// With all values in memory, let's produce the UI
if (marks.length == 1) {
// When we select a single mark, we can show the individual details
$('#eventPanel').html("Submit <b>" + company + "</b>'s " + date + " trading period for research. The fixed close price was <b>$" + fixedCloseLabel + "</b> with a variance of <b>" + changeFromPriorCloseLabel + "</b>.");
}
else {
// But if more that one mark is selected, we show a summary (average)
var avgFixedClose = Number((fixedClose / marks.length).toFixed(2));
var avgChangeFromPriorCloseLabel = Number((changeFromPriorClose * 100 / marks.length).toFixed(2));
$('#eventPanel').html("Submit <b>" + marks.length + " " + company + "</b>'s trading periods for research. The average fixed close price was <b>$" + avgFixedClose + "</b> & the average variance of <b>" + avgChangeFromPriorCloseLabel + "%</b>.");
}
}
else {
// Save selection in memory and give the user the option to submit
var plural = ((marks.length == 1) ? "it" : "them");
var pluralS = ((selectedMarks.length == 1) ? "" : "s");
$('#eventPanel').html("You've selected <b>" + marks.length + "</b> outlier" + pluralS + "." + " Would you like to submit " + plural + " them for research?");
}
}
function submitMarks()
{
var referrer = viz.getWorkbook().getActiveSheet();
if (referrer.getSheetType() == "dashboard") {
// The active sheets is a dashboard, which is made of several sheets
var sheets = referrer.getWorksheets();
// Iterate over the sheets until we find the correct one and clear the marks
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
if (sheets[sheetIndex].getName() == nameOfVizToInteract) {
sheets[sheetIndex].clearSelectedMarksAsync();
}
}
}
else {
// This is not a dashboard so just clear the sheet's selection
referrer.clearSelectedMarksAsync();
}
tableauWriteBack(selectedMarks);
//var plural = ((selectedMarks.length == 1) ? "" : "s");
//$('#eventPanel').html("Success! <b>" + selectedMarks.length + "</b> selection" + plural + " submitted for research.");
//$('#eventBox').hide(2000);
}
function resetAllMarks() {
var referrer = viz.getWorkbook().getActiveSheet();
if (referrer.getSheetType() == "dashboard") {
// The active sheets is a dashboard, which is made of several sheets
var sheets = referrer.getWorksheets();
// Iterate over the sheets until we find the correct one and clear the marks
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
if (sheets[sheetIndex].getName() == nameOfVizToInteract) {
sheets[sheetIndex].clearSelectedMarksAsync();
}
}
}
else {
// This is not a dashboard so just clear the sheet's selection
referrer.clearSelectedMarksAsync();
}
$('#eventBox').hide(800);
$('#eventPanel').html("");
}
function la | {
// Adjust UI: Hide Buttons & navigation menu, increase size for edit mode
$('#VizToolbar').hide();
$('body').addClass("sidebar-collapse");
$(".content-wrapper").css("height","1200px");
$("#tableauViz").hide();
// If the URL happens to have a ticket on it, clean it up before loading the edit window
var url_parts = url.split('/t/');
url = tableauServer + '/t/' + url_parts[1];
var edit_location = tableauServer + '/en/embed_wrapper.html?src=' + url + '?:embed=y';
edit_iframe = document.createElement('iframe');
edit_iframe.src = edit_location;
// This makes it not look like an iframe
edit_iframe.style.padding = '0px';
edit_iframe.style.border = 'none';
edit_iframe.style.margin = '0px';
// Also set these with the same values in the embed_wrapper.html page
edit_iframe.style.width = '100%';
edit_iframe.style.height = '100%';
$('#editViz').html(edit_iframe);
$('#editViz').show();
}
function iframe_change(new_url) {
console.log("Old URL received in iframe_change: " + url);
console.log("New URL received in iframe_change: " + new_url);
// Destroy the original edit_iframe so you can build another one later if necessary
$(edit_iframe).remove();
// Destroy the original Tableau Viz object so you can create new one with URL of the Save(d) As version
viz.dispose();
// Reset the global vizURL at this point so that it all works circularly
// But first remove any embed/authoring attributes from the URL
var url_parts = new_url.split('?');
url = url_parts[0].replace('/authoring', '/views');
// Handle site
if (url.search('/site/') !== -1) {
url_parts = url.split('#/site/');
url = url_parts[0] + "t/" + url_parts[1];
vizUrlForWebEdit = url;
console.log("URL updated in iframe_change: " + url);
}
// Adjust UI: Show buttons & navigation menu, decrease size post-edit mode
$('#VizToolbar').show();
$('body').removeClass("sidebar-collapse");
$(".content-wrapper").css("height", "");
$("#tableauViz").show();
$("#editViz").hide();
// Create a new Viz object
viz = null;
viz = new tableau.Viz(placeholderDiv, url, options);
}
function completeLoad(e) {
// Once the workbook & viz have loaded, assign them to global variables
workbook = viz.getWorkbook();
activeSheet = workbook.getActiveSheet();
// Load custom controls based on the vizzes published to the server
showVizButtons();
viz.addEventListener(tableau.TableauEventName.MARKS_SELECTION, onMarksSelection);
}
$(document).ready(initializeViz);
| unch_edit() | identifier_name |
TableauViz.js | var viz, workbook, activeSheet, selectedMarks, options, placeholderDiv;
var url = '';
var tableauServer = '';
var tableauSite = '';
var tableauPath = '';
var ticket = '';
var nameOfVizToInteract = 'Scatterplot';
function initializeViz() {
url = tableauServer + '/trusted/' + ticket + "/t" + tableauSite + tableauPath;
placeholderDiv = document.getElementById("tableauViz");
options = {
width: placeholderDiv.offsetWidth,
height: placeholderDiv.offsetHeight,
hideTabs: true,
hideToolbar: true,
":refresh": "yes",
onFirstInteractive: completeLoad
};
viz = new tableau.Viz(placeholderDiv, url, options);
}
function exportPDF() {
viz.showExportPDFDialog();
$('.tab-dialog')[0].animate({ 'marginLeft': "-=50px" });
}
function exportData() {
viz.showExportDataDialog();
}
function resetViz() {
viz.revertAllAsync();
}
function showVizButtons() {
var sheets = workbook.getPublishedSheetsInfo();
var divIndividualButtons = $('#vizButtons');
// First clear any buttons that may have been added on a previous load
divIndividualButtons.html("");
// Show 'standard' controls, common to all vizzes
divIndividualButtons.append('<button type="button" onclick="resetViz()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Reset Filters</button>');
divIndividualButtons.append('<button type="button" onclick="exportPDF()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Export PDF</button>');
divIndividualButtons.append('<button type="button" onclick="exportData()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Export Data</button>');
divIndividualButtons.append('<button type="button" onclick="launch_edit()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Edit</button>');
// Only show buttons to switch vizzes if there's more than one
if (sheets.length > 1) {
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
var sheet = sheets[sheetIndex];
divIndividualButtons.append('<button type="button" onclick="switchToViz(\'' + sheet.getName() + '\')" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">See ' + sheet.getName() + '</button>')
}
}
}
function switchToViz(vizName) {
workbook.activateSheetAsync(vizName).then(function (dashboard) {
dashboard.changeSizeAsync({
behavior: tableau.SheetSizeBehavior.AUTOMATIC
});
});
}
function onMarksSelection(marksEvent) {
//filter sheets of selected marks because we dont need to hear events on all of our sheets
if (marksEvent.getWorksheet().getName() == nameOfVizToInteract) {
//get,marksAsync() is a method in the API that will retun a set of the marks selected
return marksEvent.getMarksAsync().then(handleSelectedMarks);
}
}
function handleSelectedMarks(marks) {
// If selection has been cleared, no need to show a message
if (marks.length == 0) {
$('#eventBox').hide(600);
return;
}
// Save selected marks in memory so they can be submitted later
selectedMarks = marks;
$('#eventPanel').html("");
$('#eventBox').show(600);
// Logic for Equities Dashboard is specialized, any other scatterplots also are enabled but in a general sense
if (workbook.getActiveSheet().getName() == 'Individual Equities Dashboard') {
//loop through all the selected marks
var noOrders = 0;
var company = "";
var fixedClose = 0;
var fixedCloseLabel = "";
var changeFromPriorClose = 0;
var changeFromPriorCloseLabel = "";
var date = "";
for (var markIndex = 0; markIndex < marks.length; markIndex++) {
//getPairs gets tuples of data for the mark. one mark has multiple tuples
var pairs = marks[markIndex].getPairs();
for (var pairIndex = 0; pairIndex < pairs.length; pairIndex++) {
switch (pairs[pairIndex].fieldName) {
case "Company":
company = pairs[pairIndex].value;
break;
case "Date":
date = pairs[pairIndex].formattedValue;
break;
case "SUM(Fixed Close)":
fixedClose += pairs[pairIndex].value;
fixedCloseLabel = pairs[pairIndex].formattedValue;
break;
case "AGG(Change from Prior Close)":
changeFromPriorClose += pairs[pairIndex].value;
changeFromPriorCloseLabel = pairs[pairIndex].formattedValue;
break;
}
}
}
// With all values in memory, let's produce the UI
if (marks.length == 1) {
// When we select a single mark, we can show the individual details
$('#eventPanel').html("Submit <b>" + company + "</b>'s " + date + " trading period for research. The fixed close price was <b>$" + fixedCloseLabel + "</b> with a variance of <b>" + changeFromPriorCloseLabel + "</b>.");
}
else {
// But if more that one mark is selected, we show a summary (average)
var avgFixedClose = Number((fixedClose / marks.length).toFixed(2));
var avgChangeFromPriorCloseLabel = Number((changeFromPriorClose * 100 / marks.length).toFixed(2));
$('#eventPanel').html("Submit <b>" + marks.length + " " + company + "</b>'s trading periods for research. The average fixed close price was <b>$" + avgFixedClose + "</b> & the average variance of <b>" + avgChangeFromPriorCloseLabel + "%</b>.");
}
}
else {
// Save selection in memory and give the user the option to submit
var plural = ((marks.length == 1) ? "it" : "them");
var pluralS = ((selectedMarks.length == 1) ? "" : "s");
$('#eventPanel').html("You've selected <b>" + marks.length + "</b> outlier" + pluralS + "." + " Would you like to submit " + plural + " them for research?");
}
}
function submitMarks()
{
var referrer = viz.getWorkbook().getActiveSheet();
if (referrer.getSheetType() == "dashboard") {
// The active sheets is a dashboard, which is made of several sheets
var sheets = referrer.getWorksheets();
// Iterate over the sheets until we find the correct one and clear the marks
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
if (sheets[sheetIndex].getName() == nameOfVizToInteract) {
sheets[sheetIndex].clearSelectedMarksAsync();
}
}
}
else {
// This is not a dashboard so just clear the sheet's selection
referrer.clearSelectedMarksAsync();
}
tableauWriteBack(selectedMarks);
//var plural = ((selectedMarks.length == 1) ? "" : "s");
//$('#eventPanel').html("Success! <b>" + selectedMarks.length + "</b> selection" + plural + " submitted for research.");
//$('#eventBox').hide(2000);
}
function resetAllMarks() {
var referrer = viz.getWorkbook().getActiveSheet();
if (referrer.getSheetType() == "dashboard") {
// The active sheets is a dashboard, which is made of several sheets
var sheets = referrer.getWorksheets();
// Iterate over the sheets until we find the correct one and clear the marks
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
if (sheets[sheetIndex].getName() == nameOfVizToInteract) {
sheets[sheetIndex].clearSelectedMarksAsync();
}
}
}
else {
// This is not a dashboard so just clear the sheet's selection
referrer.clearSelectedMarksAsync();
}
$('#eventBox').hide(800);
$('#eventPanel').html("");
}
function launch_edit() {
// Adjust UI: Hide Buttons & navigation menu, increase size for edit mode
$('#VizToolbar').hide();
$('body').addClass("sidebar-collapse");
$(".content-wrapper").css("height","1200px");
$("#tableauViz").hide();
// If the URL happens to have a ticket on it, clean it up before loading the edit window
var url_parts = url.split('/t/');
url = tableauServer + '/t/' + url_parts[1];
var edit_location = tableauServer + '/en/embed_wrapper.html?src=' + url + '?:embed=y';
edit_iframe = document.createElement('iframe');
edit_iframe.src = edit_location;
// This makes it not look like an iframe
edit_iframe.style.padding = '0px';
edit_iframe.style.border = 'none';
edit_iframe.style.margin = '0px';
// Also set these with the same values in the embed_wrapper.html page
edit_iframe.style.width = '100%';
edit_iframe.style.height = '100%';
$('#editViz').html(edit_iframe);
$('#editViz').show();
}
function iframe_change(new_url) {
console.log("Old URL received in iframe_change: " + url);
console.log("New URL received in iframe_change: " + new_url);
// Destroy the original edit_iframe so you can build another one later if necessary
$(edit_iframe).remove();
// Destroy the original Tableau Viz object so you can create new one with URL of the Save(d) As version
viz.dispose();
// Reset the global vizURL at this point so that it all works circularly
// But first remove any embed/authoring attributes from the URL
var url_parts = new_url.split('?');
url = url_parts[0].replace('/authoring', '/views');
// Handle site
if (url.search('/site/') !== -1) {
| // Adjust UI: Show buttons & navigation menu, decrease size post-edit mode
$('#VizToolbar').show();
$('body').removeClass("sidebar-collapse");
$(".content-wrapper").css("height", "");
$("#tableauViz").show();
$("#editViz").hide();
// Create a new Viz object
viz = null;
viz = new tableau.Viz(placeholderDiv, url, options);
}
function completeLoad(e) {
// Once the workbook & viz have loaded, assign them to global variables
workbook = viz.getWorkbook();
activeSheet = workbook.getActiveSheet();
// Load custom controls based on the vizzes published to the server
showVizButtons();
viz.addEventListener(tableau.TableauEventName.MARKS_SELECTION, onMarksSelection);
}
$(document).ready(initializeViz);
| url_parts = url.split('#/site/');
url = url_parts[0] + "t/" + url_parts[1];
vizUrlForWebEdit = url;
console.log("URL updated in iframe_change: " + url);
}
| conditional_block |
TableauViz.js | var viz, workbook, activeSheet, selectedMarks, options, placeholderDiv;
var url = '';
var tableauServer = '';
var tableauSite = '';
var tableauPath = '';
var ticket = '';
var nameOfVizToInteract = 'Scatterplot';
function initializeViz() {
url = tableauServer + '/trusted/' + ticket + "/t" + tableauSite + tableauPath;
placeholderDiv = document.getElementById("tableauViz");
options = {
width: placeholderDiv.offsetWidth,
height: placeholderDiv.offsetHeight,
hideTabs: true,
hideToolbar: true,
":refresh": "yes",
onFirstInteractive: completeLoad
}; | function exportPDF() {
viz.showExportPDFDialog();
$('.tab-dialog')[0].animate({ 'marginLeft': "-=50px" });
}
function exportData() {
viz.showExportDataDialog();
}
function resetViz() {
viz.revertAllAsync();
}
function showVizButtons() {
var sheets = workbook.getPublishedSheetsInfo();
var divIndividualButtons = $('#vizButtons');
// First clear any buttons that may have been added on a previous load
divIndividualButtons.html("");
// Show 'standard' controls, common to all vizzes
divIndividualButtons.append('<button type="button" onclick="resetViz()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Reset Filters</button>');
divIndividualButtons.append('<button type="button" onclick="exportPDF()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Export PDF</button>');
divIndividualButtons.append('<button type="button" onclick="exportData()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Export Data</button>');
divIndividualButtons.append('<button type="button" onclick="launch_edit()" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">Edit</button>');
// Only show buttons to switch vizzes if there's more than one
if (sheets.length > 1) {
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
var sheet = sheets[sheetIndex];
divIndividualButtons.append('<button type="button" onclick="switchToViz(\'' + sheet.getName() + '\')" class="btn btn-primary" style="min-width:135px; margin-right: 5px; margin-top: 5px;">See ' + sheet.getName() + '</button>')
}
}
}
function switchToViz(vizName) {
workbook.activateSheetAsync(vizName).then(function (dashboard) {
dashboard.changeSizeAsync({
behavior: tableau.SheetSizeBehavior.AUTOMATIC
});
});
}
function onMarksSelection(marksEvent) {
//filter sheets of selected marks because we dont need to hear events on all of our sheets
if (marksEvent.getWorksheet().getName() == nameOfVizToInteract) {
//get,marksAsync() is a method in the API that will retun a set of the marks selected
return marksEvent.getMarksAsync().then(handleSelectedMarks);
}
}
function handleSelectedMarks(marks) {
// If selection has been cleared, no need to show a message
if (marks.length == 0) {
$('#eventBox').hide(600);
return;
}
// Save selected marks in memory so they can be submitted later
selectedMarks = marks;
$('#eventPanel').html("");
$('#eventBox').show(600);
// Logic for Equities Dashboard is specialized, any other scatterplots also are enabled but in a general sense
if (workbook.getActiveSheet().getName() == 'Individual Equities Dashboard') {
//loop through all the selected marks
var noOrders = 0;
var company = "";
var fixedClose = 0;
var fixedCloseLabel = "";
var changeFromPriorClose = 0;
var changeFromPriorCloseLabel = "";
var date = "";
for (var markIndex = 0; markIndex < marks.length; markIndex++) {
//getPairs gets tuples of data for the mark. one mark has multiple tuples
var pairs = marks[markIndex].getPairs();
for (var pairIndex = 0; pairIndex < pairs.length; pairIndex++) {
switch (pairs[pairIndex].fieldName) {
case "Company":
company = pairs[pairIndex].value;
break;
case "Date":
date = pairs[pairIndex].formattedValue;
break;
case "SUM(Fixed Close)":
fixedClose += pairs[pairIndex].value;
fixedCloseLabel = pairs[pairIndex].formattedValue;
break;
case "AGG(Change from Prior Close)":
changeFromPriorClose += pairs[pairIndex].value;
changeFromPriorCloseLabel = pairs[pairIndex].formattedValue;
break;
}
}
}
// With all values in memory, let's produce the UI
if (marks.length == 1) {
// When we select a single mark, we can show the individual details
$('#eventPanel').html("Submit <b>" + company + "</b>'s " + date + " trading period for research. The fixed close price was <b>$" + fixedCloseLabel + "</b> with a variance of <b>" + changeFromPriorCloseLabel + "</b>.");
}
else {
// But if more that one mark is selected, we show a summary (average)
var avgFixedClose = Number((fixedClose / marks.length).toFixed(2));
var avgChangeFromPriorCloseLabel = Number((changeFromPriorClose * 100 / marks.length).toFixed(2));
$('#eventPanel').html("Submit <b>" + marks.length + " " + company + "</b>'s trading periods for research. The average fixed close price was <b>$" + avgFixedClose + "</b> & the average variance of <b>" + avgChangeFromPriorCloseLabel + "%</b>.");
}
}
else {
// Save selection in memory and give the user the option to submit
var plural = ((marks.length == 1) ? "it" : "them");
var pluralS = ((selectedMarks.length == 1) ? "" : "s");
$('#eventPanel').html("You've selected <b>" + marks.length + "</b> outlier" + pluralS + "." + " Would you like to submit " + plural + " them for research?");
}
}
function submitMarks()
{
var referrer = viz.getWorkbook().getActiveSheet();
if (referrer.getSheetType() == "dashboard") {
// The active sheets is a dashboard, which is made of several sheets
var sheets = referrer.getWorksheets();
// Iterate over the sheets until we find the correct one and clear the marks
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
if (sheets[sheetIndex].getName() == nameOfVizToInteract) {
sheets[sheetIndex].clearSelectedMarksAsync();
}
}
}
else {
// This is not a dashboard so just clear the sheet's selection
referrer.clearSelectedMarksAsync();
}
tableauWriteBack(selectedMarks);
//var plural = ((selectedMarks.length == 1) ? "" : "s");
//$('#eventPanel').html("Success! <b>" + selectedMarks.length + "</b> selection" + plural + " submitted for research.");
//$('#eventBox').hide(2000);
}
function resetAllMarks() {
var referrer = viz.getWorkbook().getActiveSheet();
if (referrer.getSheetType() == "dashboard") {
// The active sheets is a dashboard, which is made of several sheets
var sheets = referrer.getWorksheets();
// Iterate over the sheets until we find the correct one and clear the marks
for (var sheetIndex = 0; sheetIndex < sheets.length; sheetIndex++) {
if (sheets[sheetIndex].getName() == nameOfVizToInteract) {
sheets[sheetIndex].clearSelectedMarksAsync();
}
}
}
else {
// This is not a dashboard so just clear the sheet's selection
referrer.clearSelectedMarksAsync();
}
$('#eventBox').hide(800);
$('#eventPanel').html("");
}
function launch_edit() {
// Adjust UI: Hide Buttons & navigation menu, increase size for edit mode
$('#VizToolbar').hide();
$('body').addClass("sidebar-collapse");
$(".content-wrapper").css("height","1200px");
$("#tableauViz").hide();
// If the URL happens to have a ticket on it, clean it up before loading the edit window
var url_parts = url.split('/t/');
url = tableauServer + '/t/' + url_parts[1];
var edit_location = tableauServer + '/en/embed_wrapper.html?src=' + url + '?:embed=y';
edit_iframe = document.createElement('iframe');
edit_iframe.src = edit_location;
// This makes it not look like an iframe
edit_iframe.style.padding = '0px';
edit_iframe.style.border = 'none';
edit_iframe.style.margin = '0px';
// Also set these with the same values in the embed_wrapper.html page
edit_iframe.style.width = '100%';
edit_iframe.style.height = '100%';
$('#editViz').html(edit_iframe);
$('#editViz').show();
}
function iframe_change(new_url) {
console.log("Old URL received in iframe_change: " + url);
console.log("New URL received in iframe_change: " + new_url);
// Destroy the original edit_iframe so you can build another one later if necessary
$(edit_iframe).remove();
// Destroy the original Tableau Viz object so you can create new one with URL of the Save(d) As version
viz.dispose();
// Reset the global vizURL at this point so that it all works circularly
// But first remove any embed/authoring attributes from the URL
var url_parts = new_url.split('?');
url = url_parts[0].replace('/authoring', '/views');
// Handle site
if (url.search('/site/') !== -1) {
url_parts = url.split('#/site/');
url = url_parts[0] + "t/" + url_parts[1];
vizUrlForWebEdit = url;
console.log("URL updated in iframe_change: " + url);
}
// Adjust UI: Show buttons & navigation menu, decrease size post-edit mode
$('#VizToolbar').show();
$('body').removeClass("sidebar-collapse");
$(".content-wrapper").css("height", "");
$("#tableauViz").show();
$("#editViz").hide();
// Create a new Viz object
viz = null;
viz = new tableau.Viz(placeholderDiv, url, options);
}
function completeLoad(e) {
// Once the workbook & viz have loaded, assign them to global variables
workbook = viz.getWorkbook();
activeSheet = workbook.getActiveSheet();
// Load custom controls based on the vizzes published to the server
showVizButtons();
viz.addEventListener(tableau.TableauEventName.MARKS_SELECTION, onMarksSelection);
}
$(document).ready(initializeViz); |
viz = new tableau.Viz(placeholderDiv, url, options);
}
| random_line_split |
__init__.py | # encoding: UTF-8
"""
# Label tokens
#
# Copyright (c) 2015, Translation Exchange, Inc.
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import six
__author__ = 'xepa4ep, a@toukmanov.ru'
import re
from ..exceptions import Error, RequiredArgumentIsNotPassed, MethodDoesNotExist
from ..rules.contexts import Value, ContextNotFound
from ..rules.case import DummyCase
from tml.strings import to_string, suggest_string
def need_to_escape(options):
""" Need escape string
Args:
options (dict): translation options
Returns:
boolean
""" | return options['escape']
return True
def escape_if_needed(text, options):
""" Escape string if it needed
Agrs:
text (string): text to escape
options (dict): tranlation options (if key safe is True - do not escape)
Returns:
text
"""
if hasattr(text, '__html__'):
# Text has escape itself:
return to_string(text.__html__())
if need_to_escape(options):
return escape(to_string(text))
return to_string(text)
ESCAPE_CHARS = (('&', '&'),
('<', '<'),
('>', '>'),
('"', '"'),
("'", '''))
def escape(text):
""" Escape text
Args:
text: input text
Returns:
(string): escaped HTML
"""
for find, replace in ESCAPE_CHARS:
text = text.replace(find, replace)
return text
class AbstractToken(object):
""" Base token class """
@classmethod
def validate(self, text):
""" Check that string is valid token
Args:
str (string): token string implimentation
Returns:
AbstractToken
"""
raise NotImplementedError()
def execute(self, data, options):
""" Execute token for data with options
data (dict): data
options (dict): execution options
"""
raise NotImplementedError()
class TextToken(AbstractToken):
""" Plain text """
def __init__(self, text):
""" .ctor
Args:
text (string): token text
"""
self.text = text
def execute(self, data, options):
""" Execute token
Returns: (string)
"""
return self.text
@classmethod
def validate(cls, text, language):
""" Validate tokenized string
Args:
text(string): token text
language (Language): token language
Returns:
TextToken|None
"""
if text == '':
# Empty text
return TextToken(text)
if text[0] != '{':
return TextToken(text)
def __str__(self):
return "TextToken[%s]" % self.text
class AbstractVariableToken(AbstractToken):
IS_VARIABLE = '([\$\d\w]+)'
IS_METHOD = '([\$\d\w])'
REGEXP_TOKEN = '^\{%s\}$'
def __init__(self, name):
self.name = name
def fetch(self, data):
try:
if self.name == '$0':
return data
return data[self.name]
except KeyError:
raise RequiredArgumentIsNotPassed(self.name, data)
class VariableToken(AbstractVariableToken):
""" Token for variabel {name} """
# Regext to check objects
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % AbstractVariableToken.IS_VARIABLE)
def __init__(self, name):
"""
Args:
name (string): variable name
"""
self.name = name
def execute(self, data, options):
""" Fetch and escape variable from data
Args:
data (dict): input data
options (dict): translation options
Returns:
string
"""
return escape_if_needed(self.fetch(data), options)
def fetch(self, data):
""" Fetch variable"""
return suggest_string(super(VariableToken, self).fetch(data))
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return VariableToken(m.group(1))
def __str__(self):
return 'VariableToken[%s]' % self.name
class MethodToken(VariableToken):
# Method Token Forms
#
# {user.name}
# {user.name:gender}
HAS_METHOD = '\.(\w*\s*)'
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + HAS_METHOD))
def __init__(self, name, method_name):
VariableToken.__init__(self, name)
self.method_name = method_name
def execute(self, data, options):
""" Fetch and escape variable from data
Args:
data (dict): input data
options (dict): translation options
Returns:
string
"""
obj = self.fetch(data)
if isinstance(obj, dict) and self.method_name in obj: # if dict
return obj[self.method_name]
try:
prop = getattr(obj, self.method_name)
rv = callable(prop) and prop() or prop
return escape_if_needed(rv, options)
except AttributeError:
raise MethodDoesNotExist(self.name, self.method_name)
def fetch(self, data):
""" Fetch variable"""
return super(VariableToken, self).fetch(data)
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return MethodToken(m.group(1), m.group(2))
class RulesToken(AbstractVariableToken):
"""
Token which execute some rules on variable
{count|token, tokens}: count = 1 -> token
count = 100 -> tokens
"""
def __init__(self, name, rules, language):
""" .ctor
Args:
name (string): variable name
rules (string): rules string
language (Language): current language
"""
super(RulesToken, self).__init__(name)
self.rules = rules
self.language = language
TOKEN_TYPE_REGEX = '\|([^\|]{1}(.*))'
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + TOKEN_TYPE_REGEX,))
""" Compiler for rules """
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return cls(m.group(1), m.group(2), language)
def execute(self, data, options):
""" Execute token with var """
return self.language.contexts.execute(self.rules, self.fetch(data)).strip()
def find_context(self, token=None):
token = token or self.name
try:
return self.language.contexts.find_by_code(token)
except ContextNotFound:
return self.language.contexts.find_by_token_name(token)
def __str__(self):
return "RulesToken[%s, choices=%s]" % (self.name, self.rules)
class RulesMethodToken(RulesToken):
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return cls(m.group(1), m.group(2), m.group(3), language)
class CaseToken(RulesToken):
""" Language keys {name::nom} """
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + '\:\:(.*)',))
def __init__(self, name, case, language):
super(RulesToken, self).__init__(name)
self.case = language.cases.get(str(case), DummyCase())
def execute(self, data, options):
""" Execute with rules options """
return escape_if_needed(
suggest_string(self.case.execute(self.fetch(data))), options)
def __str__(self):
return "CaseToken[%s, case=%s]" % (self.name, self.case)
class CaseMethodToken(RulesMethodToken):
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (
AbstractVariableToken.IS_VARIABLE + MethodToken.HAS_METHOD + '\:\:(.*)'))
def __init__(self, name, method_name, case, language):
self.token = MethodToken(name, method_name)
self.case = language.cases.get(str(case), DummyCase())
def execute(self, data, options):
""" Execute with rules options """
return escape_if_needed(
self.case.execute(self.token.execute(data, {'escape': False})), options)
def __str__(self):
return "CaseMethodToken[%s, case=%s]" % (self.name, self.case)
class UnsupportedCase(Error):
def __init__(self, language, case):
self.language = language
self.case = case
def __str__(self):
return 'Language does not support case %s for locale %s' % (self.case, self.language.locale)
class PipeToken(RulesToken):
"""
Token which pipe rules and join it with variable
{count||token, tokens}: count = 1 -> 1 token
count = 100 -> 100 tokens
works like {name||rules} == {name} {name|rules}
"""
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + '\|\|(.*)',))
def __init__(self, name, rules, language):
self.token = VariableToken(name)
self.rules = RulesToken(name, rules, language)
@property
def name(self):
return self.token.name
def execute(self, data, options):
""" Execute token """
return '%s %s' % (
to_string(self.token.execute(data, options)),
to_string(self.rules.execute(data, options)))
def __str__(self):
return "PipeToken: token=%s, rules=%s" % (self.token, self.rules)
class PipeMethodToken(RulesMethodToken, PipeToken):
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (
AbstractVariableToken.IS_VARIABLE + MethodToken.HAS_METHOD + '\|\|(.*)'))
def __init__(self, name, method_name, rules, language):
self.token = MethodToken(name, method_name)
self.rules = RulesToken(name, rules, language)
def __str__(self):
return "PipeMethodToken: token=%s, rules=%s" % (self.token, self.rules)
class TokenMatcher(object):
""" Class which select first supported token for text """
def __init__(self, classes):
""" .ctor
Args:
classes (AbstractToken.__class__[]): list of supported token classes
"""
self.classes = classes
def build_token(self, text, language):
""" Build token for text - return first matched token
Args:
text (string): token text
Returns:
AbstractToken: token object
"""
for cls in self.classes:
ret = cls.validate(text, language)
if ret:
return ret
# No token find:
raise InvalidTokenSyntax(text)
data_matcher = TokenMatcher([
TextToken,
VariableToken,
MethodToken,
RulesToken,
PipeToken,
PipeMethodToken,
CaseToken,
CaseMethodToken
])
def execute_all(tokens, data, options):
""" Execute all tokens
Args:
tokens (AbstractToken[]): list of tokens
data (dict): context
options (dict): execution options
Returns:
string: executed tokens
"""
return ''.join([to_string(token.execute(data, options)) for token in tokens])
class InvalidTokenSyntax(Error):
""" Unsupported token syntax """
def __init__(self, text):
self.text = text
def __str__(self):
return six.u('Token syntax is not supported for token "%s"') % self.text
from .data import DataToken as NewDataToken
from .method import MethodToken as NewMethodToken
from .transform import TransformToken as NewTransformToken
SUPPORTED_TOKENS = (
NewDataToken,
NewMethodToken,
NewTransformToken) | if 'escape' in options: | random_line_split |
__init__.py | # encoding: UTF-8
"""
# Label tokens
#
# Copyright (c) 2015, Translation Exchange, Inc.
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import six
__author__ = 'xepa4ep, a@toukmanov.ru'
import re
from ..exceptions import Error, RequiredArgumentIsNotPassed, MethodDoesNotExist
from ..rules.contexts import Value, ContextNotFound
from ..rules.case import DummyCase
from tml.strings import to_string, suggest_string
def need_to_escape(options):
""" Need escape string
Args:
options (dict): translation options
Returns:
boolean
"""
if 'escape' in options:
return options['escape']
return True
def escape_if_needed(text, options):
""" Escape string if it needed
Agrs:
text (string): text to escape
options (dict): tranlation options (if key safe is True - do not escape)
Returns:
text
"""
if hasattr(text, '__html__'):
# Text has escape itself:
return to_string(text.__html__())
if need_to_escape(options):
return escape(to_string(text))
return to_string(text)
ESCAPE_CHARS = (('&', '&'),
('<', '<'),
('>', '>'),
('"', '"'),
("'", '''))
def escape(text):
""" Escape text
Args:
text: input text
Returns:
(string): escaped HTML
"""
for find, replace in ESCAPE_CHARS:
text = text.replace(find, replace)
return text
class AbstractToken(object):
""" Base token class """
@classmethod
def validate(self, text):
""" Check that string is valid token
Args:
str (string): token string implimentation
Returns:
AbstractToken
"""
raise NotImplementedError()
def execute(self, data, options):
""" Execute token for data with options
data (dict): data
options (dict): execution options
"""
raise NotImplementedError()
class TextToken(AbstractToken):
""" Plain text """
def __init__(self, text):
""" .ctor
Args:
text (string): token text
"""
self.text = text
def execute(self, data, options):
""" Execute token
Returns: (string)
"""
return self.text
@classmethod
def validate(cls, text, language):
""" Validate tokenized string
Args:
text(string): token text
language (Language): token language
Returns:
TextToken|None
"""
if text == '':
# Empty text
return TextToken(text)
if text[0] != '{':
return TextToken(text)
def __str__(self):
return "TextToken[%s]" % self.text
class AbstractVariableToken(AbstractToken):
IS_VARIABLE = '([\$\d\w]+)'
IS_METHOD = '([\$\d\w])'
REGEXP_TOKEN = '^\{%s\}$'
def __init__(self, name):
self.name = name
def fetch(self, data):
try:
if self.name == '$0':
return data
return data[self.name]
except KeyError:
raise RequiredArgumentIsNotPassed(self.name, data)
class VariableToken(AbstractVariableToken):
""" Token for variabel {name} """
# Regext to check objects
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % AbstractVariableToken.IS_VARIABLE)
def __init__(self, name):
"""
Args:
name (string): variable name
"""
self.name = name
def execute(self, data, options):
""" Fetch and escape variable from data
Args:
data (dict): input data
options (dict): translation options
Returns:
string
"""
return escape_if_needed(self.fetch(data), options)
def fetch(self, data):
""" Fetch variable"""
return suggest_string(super(VariableToken, self).fetch(data))
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return VariableToken(m.group(1))
def __str__(self):
return 'VariableToken[%s]' % self.name
class MethodToken(VariableToken):
# Method Token Forms
#
# {user.name}
# {user.name:gender}
HAS_METHOD = '\.(\w*\s*)'
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + HAS_METHOD))
def __init__(self, name, method_name):
VariableToken.__init__(self, name)
self.method_name = method_name
def execute(self, data, options):
""" Fetch and escape variable from data
Args:
data (dict): input data
options (dict): translation options
Returns:
string
"""
obj = self.fetch(data)
if isinstance(obj, dict) and self.method_name in obj: # if dict
return obj[self.method_name]
try:
prop = getattr(obj, self.method_name)
rv = callable(prop) and prop() or prop
return escape_if_needed(rv, options)
except AttributeError:
raise MethodDoesNotExist(self.name, self.method_name)
def fetch(self, data):
|
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return MethodToken(m.group(1), m.group(2))
class RulesToken(AbstractVariableToken):
"""
Token which execute some rules on variable
{count|token, tokens}: count = 1 -> token
count = 100 -> tokens
"""
def __init__(self, name, rules, language):
""" .ctor
Args:
name (string): variable name
rules (string): rules string
language (Language): current language
"""
super(RulesToken, self).__init__(name)
self.rules = rules
self.language = language
TOKEN_TYPE_REGEX = '\|([^\|]{1}(.*))'
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + TOKEN_TYPE_REGEX,))
""" Compiler for rules """
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return cls(m.group(1), m.group(2), language)
def execute(self, data, options):
""" Execute token with var """
return self.language.contexts.execute(self.rules, self.fetch(data)).strip()
def find_context(self, token=None):
token = token or self.name
try:
return self.language.contexts.find_by_code(token)
except ContextNotFound:
return self.language.contexts.find_by_token_name(token)
def __str__(self):
return "RulesToken[%s, choices=%s]" % (self.name, self.rules)
class RulesMethodToken(RulesToken):
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return cls(m.group(1), m.group(2), m.group(3), language)
class CaseToken(RulesToken):
""" Language keys {name::nom} """
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + '\:\:(.*)',))
def __init__(self, name, case, language):
super(RulesToken, self).__init__(name)
self.case = language.cases.get(str(case), DummyCase())
def execute(self, data, options):
""" Execute with rules options """
return escape_if_needed(
suggest_string(self.case.execute(self.fetch(data))), options)
def __str__(self):
return "CaseToken[%s, case=%s]" % (self.name, self.case)
class CaseMethodToken(RulesMethodToken):
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (
AbstractVariableToken.IS_VARIABLE + MethodToken.HAS_METHOD + '\:\:(.*)'))
def __init__(self, name, method_name, case, language):
self.token = MethodToken(name, method_name)
self.case = language.cases.get(str(case), DummyCase())
def execute(self, data, options):
""" Execute with rules options """
return escape_if_needed(
self.case.execute(self.token.execute(data, {'escape': False})), options)
def __str__(self):
return "CaseMethodToken[%s, case=%s]" % (self.name, self.case)
class UnsupportedCase(Error):
def __init__(self, language, case):
self.language = language
self.case = case
def __str__(self):
return 'Language does not support case %s for locale %s' % (self.case, self.language.locale)
class PipeToken(RulesToken):
"""
Token which pipe rules and join it with variable
{count||token, tokens}: count = 1 -> 1 token
count = 100 -> 100 tokens
works like {name||rules} == {name} {name|rules}
"""
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + '\|\|(.*)',))
def __init__(self, name, rules, language):
self.token = VariableToken(name)
self.rules = RulesToken(name, rules, language)
@property
def name(self):
return self.token.name
def execute(self, data, options):
""" Execute token """
return '%s %s' % (
to_string(self.token.execute(data, options)),
to_string(self.rules.execute(data, options)))
def __str__(self):
return "PipeToken: token=%s, rules=%s" % (self.token, self.rules)
class PipeMethodToken(RulesMethodToken, PipeToken):
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (
AbstractVariableToken.IS_VARIABLE + MethodToken.HAS_METHOD + '\|\|(.*)'))
def __init__(self, name, method_name, rules, language):
self.token = MethodToken(name, method_name)
self.rules = RulesToken(name, rules, language)
def __str__(self):
return "PipeMethodToken: token=%s, rules=%s" % (self.token, self.rules)
class TokenMatcher(object):
""" Class which select first supported token for text """
def __init__(self, classes):
""" .ctor
Args:
classes (AbstractToken.__class__[]): list of supported token classes
"""
self.classes = classes
def build_token(self, text, language):
""" Build token for text - return first matched token
Args:
text (string): token text
Returns:
AbstractToken: token object
"""
for cls in self.classes:
ret = cls.validate(text, language)
if ret:
return ret
# No token find:
raise InvalidTokenSyntax(text)
data_matcher = TokenMatcher([
TextToken,
VariableToken,
MethodToken,
RulesToken,
PipeToken,
PipeMethodToken,
CaseToken,
CaseMethodToken
])
def execute_all(tokens, data, options):
""" Execute all tokens
Args:
tokens (AbstractToken[]): list of tokens
data (dict): context
options (dict): execution options
Returns:
string: executed tokens
"""
return ''.join([to_string(token.execute(data, options)) for token in tokens])
class InvalidTokenSyntax(Error):
""" Unsupported token syntax """
def __init__(self, text):
self.text = text
def __str__(self):
return six.u('Token syntax is not supported for token "%s"') % self.text
from .data import DataToken as NewDataToken
from .method import MethodToken as NewMethodToken
from .transform import TransformToken as NewTransformToken
SUPPORTED_TOKENS = (
NewDataToken,
NewMethodToken,
NewTransformToken)
| """ Fetch variable"""
return super(VariableToken, self).fetch(data) | identifier_body |
__init__.py | # encoding: UTF-8
"""
# Label tokens
#
# Copyright (c) 2015, Translation Exchange, Inc.
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import six
__author__ = 'xepa4ep, a@toukmanov.ru'
import re
from ..exceptions import Error, RequiredArgumentIsNotPassed, MethodDoesNotExist
from ..rules.contexts import Value, ContextNotFound
from ..rules.case import DummyCase
from tml.strings import to_string, suggest_string
def need_to_escape(options):
""" Need escape string
Args:
options (dict): translation options
Returns:
boolean
"""
if 'escape' in options:
return options['escape']
return True
def escape_if_needed(text, options):
""" Escape string if it needed
Agrs:
text (string): text to escape
options (dict): tranlation options (if key safe is True - do not escape)
Returns:
text
"""
if hasattr(text, '__html__'):
# Text has escape itself:
return to_string(text.__html__())
if need_to_escape(options):
return escape(to_string(text))
return to_string(text)
ESCAPE_CHARS = (('&', '&'),
('<', '<'),
('>', '>'),
('"', '"'),
("'", '''))
def escape(text):
""" Escape text
Args:
text: input text
Returns:
(string): escaped HTML
"""
for find, replace in ESCAPE_CHARS:
text = text.replace(find, replace)
return text
class AbstractToken(object):
""" Base token class """
@classmethod
def validate(self, text):
""" Check that string is valid token
Args:
str (string): token string implimentation
Returns:
AbstractToken
"""
raise NotImplementedError()
def execute(self, data, options):
""" Execute token for data with options
data (dict): data
options (dict): execution options
"""
raise NotImplementedError()
class TextToken(AbstractToken):
""" Plain text """
def __init__(self, text):
""" .ctor
Args:
text (string): token text
"""
self.text = text
def execute(self, data, options):
""" Execute token
Returns: (string)
"""
return self.text
@classmethod
def validate(cls, text, language):
""" Validate tokenized string
Args:
text(string): token text
language (Language): token language
Returns:
TextToken|None
"""
if text == '':
# Empty text
|
if text[0] != '{':
return TextToken(text)
def __str__(self):
return "TextToken[%s]" % self.text
class AbstractVariableToken(AbstractToken):
IS_VARIABLE = '([\$\d\w]+)'
IS_METHOD = '([\$\d\w])'
REGEXP_TOKEN = '^\{%s\}$'
def __init__(self, name):
self.name = name
def fetch(self, data):
try:
if self.name == '$0':
return data
return data[self.name]
except KeyError:
raise RequiredArgumentIsNotPassed(self.name, data)
class VariableToken(AbstractVariableToken):
""" Token for variabel {name} """
# Regext to check objects
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % AbstractVariableToken.IS_VARIABLE)
def __init__(self, name):
"""
Args:
name (string): variable name
"""
self.name = name
def execute(self, data, options):
""" Fetch and escape variable from data
Args:
data (dict): input data
options (dict): translation options
Returns:
string
"""
return escape_if_needed(self.fetch(data), options)
def fetch(self, data):
""" Fetch variable"""
return suggest_string(super(VariableToken, self).fetch(data))
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return VariableToken(m.group(1))
def __str__(self):
return 'VariableToken[%s]' % self.name
class MethodToken(VariableToken):
# Method Token Forms
#
# {user.name}
# {user.name:gender}
HAS_METHOD = '\.(\w*\s*)'
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + HAS_METHOD))
def __init__(self, name, method_name):
VariableToken.__init__(self, name)
self.method_name = method_name
def execute(self, data, options):
""" Fetch and escape variable from data
Args:
data (dict): input data
options (dict): translation options
Returns:
string
"""
obj = self.fetch(data)
if isinstance(obj, dict) and self.method_name in obj: # if dict
return obj[self.method_name]
try:
prop = getattr(obj, self.method_name)
rv = callable(prop) and prop() or prop
return escape_if_needed(rv, options)
except AttributeError:
raise MethodDoesNotExist(self.name, self.method_name)
def fetch(self, data):
""" Fetch variable"""
return super(VariableToken, self).fetch(data)
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return MethodToken(m.group(1), m.group(2))
class RulesToken(AbstractVariableToken):
"""
Token which execute some rules on variable
{count|token, tokens}: count = 1 -> token
count = 100 -> tokens
"""
def __init__(self, name, rules, language):
""" .ctor
Args:
name (string): variable name
rules (string): rules string
language (Language): current language
"""
super(RulesToken, self).__init__(name)
self.rules = rules
self.language = language
TOKEN_TYPE_REGEX = '\|([^\|]{1}(.*))'
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + TOKEN_TYPE_REGEX,))
""" Compiler for rules """
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return cls(m.group(1), m.group(2), language)
def execute(self, data, options):
""" Execute token with var """
return self.language.contexts.execute(self.rules, self.fetch(data)).strip()
def find_context(self, token=None):
token = token or self.name
try:
return self.language.contexts.find_by_code(token)
except ContextNotFound:
return self.language.contexts.find_by_token_name(token)
def __str__(self):
return "RulesToken[%s, choices=%s]" % (self.name, self.rules)
class RulesMethodToken(RulesToken):
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return cls(m.group(1), m.group(2), m.group(3), language)
class CaseToken(RulesToken):
""" Language keys {name::nom} """
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + '\:\:(.*)',))
def __init__(self, name, case, language):
super(RulesToken, self).__init__(name)
self.case = language.cases.get(str(case), DummyCase())
def execute(self, data, options):
""" Execute with rules options """
return escape_if_needed(
suggest_string(self.case.execute(self.fetch(data))), options)
def __str__(self):
return "CaseToken[%s, case=%s]" % (self.name, self.case)
class CaseMethodToken(RulesMethodToken):
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (
AbstractVariableToken.IS_VARIABLE + MethodToken.HAS_METHOD + '\:\:(.*)'))
def __init__(self, name, method_name, case, language):
self.token = MethodToken(name, method_name)
self.case = language.cases.get(str(case), DummyCase())
def execute(self, data, options):
""" Execute with rules options """
return escape_if_needed(
self.case.execute(self.token.execute(data, {'escape': False})), options)
def __str__(self):
return "CaseMethodToken[%s, case=%s]" % (self.name, self.case)
class UnsupportedCase(Error):
def __init__(self, language, case):
self.language = language
self.case = case
def __str__(self):
return 'Language does not support case %s for locale %s' % (self.case, self.language.locale)
class PipeToken(RulesToken):
"""
Token which pipe rules and join it with variable
{count||token, tokens}: count = 1 -> 1 token
count = 100 -> 100 tokens
works like {name||rules} == {name} {name|rules}
"""
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + '\|\|(.*)',))
def __init__(self, name, rules, language):
self.token = VariableToken(name)
self.rules = RulesToken(name, rules, language)
@property
def name(self):
return self.token.name
def execute(self, data, options):
""" Execute token """
return '%s %s' % (
to_string(self.token.execute(data, options)),
to_string(self.rules.execute(data, options)))
def __str__(self):
return "PipeToken: token=%s, rules=%s" % (self.token, self.rules)
class PipeMethodToken(RulesMethodToken, PipeToken):
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (
AbstractVariableToken.IS_VARIABLE + MethodToken.HAS_METHOD + '\|\|(.*)'))
def __init__(self, name, method_name, rules, language):
self.token = MethodToken(name, method_name)
self.rules = RulesToken(name, rules, language)
def __str__(self):
return "PipeMethodToken: token=%s, rules=%s" % (self.token, self.rules)
class TokenMatcher(object):
""" Class which select first supported token for text """
def __init__(self, classes):
""" .ctor
Args:
classes (AbstractToken.__class__[]): list of supported token classes
"""
self.classes = classes
def build_token(self, text, language):
""" Build token for text - return first matched token
Args:
text (string): token text
Returns:
AbstractToken: token object
"""
for cls in self.classes:
ret = cls.validate(text, language)
if ret:
return ret
# No token find:
raise InvalidTokenSyntax(text)
data_matcher = TokenMatcher([
TextToken,
VariableToken,
MethodToken,
RulesToken,
PipeToken,
PipeMethodToken,
CaseToken,
CaseMethodToken
])
def execute_all(tokens, data, options):
""" Execute all tokens
Args:
tokens (AbstractToken[]): list of tokens
data (dict): context
options (dict): execution options
Returns:
string: executed tokens
"""
return ''.join([to_string(token.execute(data, options)) for token in tokens])
class InvalidTokenSyntax(Error):
""" Unsupported token syntax """
def __init__(self, text):
self.text = text
def __str__(self):
return six.u('Token syntax is not supported for token "%s"') % self.text
from .data import DataToken as NewDataToken
from .method import MethodToken as NewMethodToken
from .transform import TransformToken as NewTransformToken
SUPPORTED_TOKENS = (
NewDataToken,
NewMethodToken,
NewTransformToken)
| return TextToken(text) | conditional_block |
__init__.py | # encoding: UTF-8
"""
# Label tokens
#
# Copyright (c) 2015, Translation Exchange, Inc.
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import six
__author__ = 'xepa4ep, a@toukmanov.ru'
import re
from ..exceptions import Error, RequiredArgumentIsNotPassed, MethodDoesNotExist
from ..rules.contexts import Value, ContextNotFound
from ..rules.case import DummyCase
from tml.strings import to_string, suggest_string
def need_to_escape(options):
""" Need escape string
Args:
options (dict): translation options
Returns:
boolean
"""
if 'escape' in options:
return options['escape']
return True
def escape_if_needed(text, options):
""" Escape string if it needed
Agrs:
text (string): text to escape
options (dict): tranlation options (if key safe is True - do not escape)
Returns:
text
"""
if hasattr(text, '__html__'):
# Text has escape itself:
return to_string(text.__html__())
if need_to_escape(options):
return escape(to_string(text))
return to_string(text)
ESCAPE_CHARS = (('&', '&'),
('<', '<'),
('>', '>'),
('"', '"'),
("'", '''))
def escape(text):
""" Escape text
Args:
text: input text
Returns:
(string): escaped HTML
"""
for find, replace in ESCAPE_CHARS:
text = text.replace(find, replace)
return text
class AbstractToken(object):
""" Base token class """
@classmethod
def validate(self, text):
""" Check that string is valid token
Args:
str (string): token string implimentation
Returns:
AbstractToken
"""
raise NotImplementedError()
def execute(self, data, options):
""" Execute token for data with options
data (dict): data
options (dict): execution options
"""
raise NotImplementedError()
class TextToken(AbstractToken):
""" Plain text """
def __init__(self, text):
""" .ctor
Args:
text (string): token text
"""
self.text = text
def execute(self, data, options):
""" Execute token
Returns: (string)
"""
return self.text
@classmethod
def validate(cls, text, language):
""" Validate tokenized string
Args:
text(string): token text
language (Language): token language
Returns:
TextToken|None
"""
if text == '':
# Empty text
return TextToken(text)
if text[0] != '{':
return TextToken(text)
def __str__(self):
return "TextToken[%s]" % self.text
class AbstractVariableToken(AbstractToken):
IS_VARIABLE = '([\$\d\w]+)'
IS_METHOD = '([\$\d\w])'
REGEXP_TOKEN = '^\{%s\}$'
def __init__(self, name):
self.name = name
def fetch(self, data):
try:
if self.name == '$0':
return data
return data[self.name]
except KeyError:
raise RequiredArgumentIsNotPassed(self.name, data)
class VariableToken(AbstractVariableToken):
""" Token for variabel {name} """
# Regext to check objects
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % AbstractVariableToken.IS_VARIABLE)
def __init__(self, name):
"""
Args:
name (string): variable name
"""
self.name = name
def execute(self, data, options):
""" Fetch and escape variable from data
Args:
data (dict): input data
options (dict): translation options
Returns:
string
"""
return escape_if_needed(self.fetch(data), options)
def fetch(self, data):
""" Fetch variable"""
return suggest_string(super(VariableToken, self).fetch(data))
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return VariableToken(m.group(1))
def __str__(self):
return 'VariableToken[%s]' % self.name
class MethodToken(VariableToken):
# Method Token Forms
#
# {user.name}
# {user.name:gender}
HAS_METHOD = '\.(\w*\s*)'
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + HAS_METHOD))
def __init__(self, name, method_name):
VariableToken.__init__(self, name)
self.method_name = method_name
def execute(self, data, options):
""" Fetch and escape variable from data
Args:
data (dict): input data
options (dict): translation options
Returns:
string
"""
obj = self.fetch(data)
if isinstance(obj, dict) and self.method_name in obj: # if dict
return obj[self.method_name]
try:
prop = getattr(obj, self.method_name)
rv = callable(prop) and prop() or prop
return escape_if_needed(rv, options)
except AttributeError:
raise MethodDoesNotExist(self.name, self.method_name)
def fetch(self, data):
""" Fetch variable"""
return super(VariableToken, self).fetch(data)
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return MethodToken(m.group(1), m.group(2))
class RulesToken(AbstractVariableToken):
"""
Token which execute some rules on variable
{count|token, tokens}: count = 1 -> token
count = 100 -> tokens
"""
def __init__(self, name, rules, language):
""" .ctor
Args:
name (string): variable name
rules (string): rules string
language (Language): current language
"""
super(RulesToken, self).__init__(name)
self.rules = rules
self.language = language
TOKEN_TYPE_REGEX = '\|([^\|]{1}(.*))'
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + TOKEN_TYPE_REGEX,))
""" Compiler for rules """
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return cls(m.group(1), m.group(2), language)
def execute(self, data, options):
""" Execute token with var """
return self.language.contexts.execute(self.rules, self.fetch(data)).strip()
def find_context(self, token=None):
token = token or self.name
try:
return self.language.contexts.find_by_code(token)
except ContextNotFound:
return self.language.contexts.find_by_token_name(token)
def __str__(self):
return "RulesToken[%s, choices=%s]" % (self.name, self.rules)
class RulesMethodToken(RulesToken):
@classmethod
def validate(cls, text, language):
m = cls.IS_TOKEN.match(text)
if m:
return cls(m.group(1), m.group(2), m.group(3), language)
class CaseToken(RulesToken):
""" Language keys {name::nom} """
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + '\:\:(.*)',))
def __init__(self, name, case, language):
super(RulesToken, self).__init__(name)
self.case = language.cases.get(str(case), DummyCase())
def execute(self, data, options):
""" Execute with rules options """
return escape_if_needed(
suggest_string(self.case.execute(self.fetch(data))), options)
def __str__(self):
return "CaseToken[%s, case=%s]" % (self.name, self.case)
class | (RulesMethodToken):
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (
AbstractVariableToken.IS_VARIABLE + MethodToken.HAS_METHOD + '\:\:(.*)'))
def __init__(self, name, method_name, case, language):
self.token = MethodToken(name, method_name)
self.case = language.cases.get(str(case), DummyCase())
def execute(self, data, options):
""" Execute with rules options """
return escape_if_needed(
self.case.execute(self.token.execute(data, {'escape': False})), options)
def __str__(self):
return "CaseMethodToken[%s, case=%s]" % (self.name, self.case)
class UnsupportedCase(Error):
def __init__(self, language, case):
self.language = language
self.case = case
def __str__(self):
return 'Language does not support case %s for locale %s' % (self.case, self.language.locale)
class PipeToken(RulesToken):
"""
Token which pipe rules and join it with variable
{count||token, tokens}: count = 1 -> 1 token
count = 100 -> 100 tokens
works like {name||rules} == {name} {name|rules}
"""
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (AbstractVariableToken.IS_VARIABLE + '\|\|(.*)',))
def __init__(self, name, rules, language):
self.token = VariableToken(name)
self.rules = RulesToken(name, rules, language)
@property
def name(self):
return self.token.name
def execute(self, data, options):
""" Execute token """
return '%s %s' % (
to_string(self.token.execute(data, options)),
to_string(self.rules.execute(data, options)))
def __str__(self):
return "PipeToken: token=%s, rules=%s" % (self.token, self.rules)
class PipeMethodToken(RulesMethodToken, PipeToken):
IS_TOKEN = re.compile(AbstractVariableToken.REGEXP_TOKEN % (
AbstractVariableToken.IS_VARIABLE + MethodToken.HAS_METHOD + '\|\|(.*)'))
def __init__(self, name, method_name, rules, language):
self.token = MethodToken(name, method_name)
self.rules = RulesToken(name, rules, language)
def __str__(self):
return "PipeMethodToken: token=%s, rules=%s" % (self.token, self.rules)
class TokenMatcher(object):
""" Class which select first supported token for text """
def __init__(self, classes):
""" .ctor
Args:
classes (AbstractToken.__class__[]): list of supported token classes
"""
self.classes = classes
def build_token(self, text, language):
""" Build token for text - return first matched token
Args:
text (string): token text
Returns:
AbstractToken: token object
"""
for cls in self.classes:
ret = cls.validate(text, language)
if ret:
return ret
# No token find:
raise InvalidTokenSyntax(text)
data_matcher = TokenMatcher([
TextToken,
VariableToken,
MethodToken,
RulesToken,
PipeToken,
PipeMethodToken,
CaseToken,
CaseMethodToken
])
def execute_all(tokens, data, options):
""" Execute all tokens
Args:
tokens (AbstractToken[]): list of tokens
data (dict): context
options (dict): execution options
Returns:
string: executed tokens
"""
return ''.join([to_string(token.execute(data, options)) for token in tokens])
class InvalidTokenSyntax(Error):
""" Unsupported token syntax """
def __init__(self, text):
self.text = text
def __str__(self):
return six.u('Token syntax is not supported for token "%s"') % self.text
from .data import DataToken as NewDataToken
from .method import MethodToken as NewMethodToken
from .transform import TransformToken as NewTransformToken
SUPPORTED_TOKENS = (
NewDataToken,
NewMethodToken,
NewTransformToken)
| CaseMethodToken | identifier_name |
FindPoints.py | from sys import argv
from skimage import io
from skimage.color import hsv2rgb, rgb2hsv, rgb2grey
from skimage.measure import label
from ImageFiltering import *
from Signature import *
from CropPoints import *
import numpy as np
from optparse import OptionParser
from scipy.ndimage.measurements import center_of_mass
from scipy.ndimage import label
def loadcenters(filename, mask, potsize=500):
f = open(filename)
centers = []
for line in f:
info = line.split(" ")[2:]
center = info[2]
center = center.split(",")
center[0] = int(round(float(center[0])))
center[1] = int(round(float(center[1])))
center = center[::-1]
if center[0]<150 or center[1]<150:
print "Centre on the edge, and will therefore be removed"
continue
if center[1]>(width-150) or center[0]>(height-150):
print "Centre on the edge, and will therefore be removed"
continue
size = [int(i) for i in info[1].split("+")[0].split("x")]
if size[0]>potsize:
#centers.append(center)
print "WARNING: Cluster size too big, potential merge of pots detected"
print "Employing K-means on the merged blob to find the pots"
c = (size[0]//potsize)+1
top, bottom, left, right = control_edge(center, potsize, size[0]+100)
npoint = local_Kmeans(mask[top:bottom,left:right], c)
temp_centers = []
for point in npoint:
diff_x, diff_y = point[0]-int(potsize/2), point[1]-int((size[0]+100)/2)
point = [center[0]+diff_x, center[1]+diff_y]
if point[0]<150:
continue
if point[1]<150:
continue
if point[0]>(height-150):
continue
if point[1]>(width-150):
continue
temp_centers.append(point)
min_distance = 300
cgroup = []
## Average centers that are to close together.
for tcenter in temp_centers:
for tcenter2 in temp_centers:
if tcenter==tcenter2: continue
dist = distance(tcenter, tcenter2)
if dist<min_distance:
cgroup = [tcenter, tcenter2]
min_distance = dist
if cgroup==[]:
for tcenter in temp_centers:
centers.append(tcenter)
else:
ncenter = findCentre(cgroup)
centers.append(ncenter)
for tcenter in temp_centers:
if tcenter not in cgroup:
centers.append(tcenter)
else:
centers.append(center)
return centers
def groupCentres(centres):
groups = []
for i in range(len(centres)):
temp_group = [centres[i]]
for j in range(i, len(centres)):
if i==j:
continue
temp_group.append(centres[j])
n = len(temp_group)
if n<3: continue
if n>10: continue
temp_group = orderGroup(temp_group)
gheight, gwidth = dimension_of_group(temp_group, "dimension")
if len(temp_group)!=n: continue
if gheight>1300: continue
if gwidth>2300: continue
groups.append(orderGroup(temp_group))
return groups
def closestGroupSignature(groups, signature, radius):
number_of_matches = 0
bestgroup = []
i = 1
j = 0
for group in groups:
psignature = overlaySignature(signature, group)
match = []
for point in psignature.values():
for centre in group:
if centre[0]<(point[0]+radius) and centre[0]>(point[0]-radius) and centre[1]<(point[1]+radius) and centre[1]>(point[1]-radius):
match.append(centre)
continue
matches = len(match)
if number_of_matches<matches:
bestgroup = group
j = i
number_of_matches = matches
i += 1
return bestgroup
def getPointdiff(point1, point2):
return point1[0]-point2[0], point1[1]-point2[1]
def dimension_of_group(group, return_value="centre"):
max_y = max(group, key=lambda x: x[0])[0]
min_y = min(group, key=lambda x: x[0])[0]
max_x = max(group, key=lambda x: x[1])[1]
min_x = min(group, key=lambda x: x[1])[1]
if return_value=="centre" or return_value=="center":
return [(max_y-min_y)/2+min_y, (max_x-min_x)/2+min_x]
if return_value=="rectangle":
return [[min_y, min_x], [min_y, max_x], [max_y, min_x], [max_y, max_x]]
if return_value=="dimension":
width = distance([min_y, min_x], [min_y, max_x])
height = distance([min_y, min_x], [max_y, min_x])
return height, width
if return_value=="area":
width = distance([min_y, min_x], [min_y, max_x])
height = distance([min_y, min_x], [max_y, min_x])
return width*height
return max_y, min_y, max_x, min_x
def overlaySignature(signature, group):
centre = dimension_of_group(group, "centre")
sign_height, sign_width = dimension_of_group(signature.values(), "dimension")
sign_height = int(sign_height/2)
sign_width = int(sign_width/2)
if (centre[0]-sign_height)<0:
print "Above"
centre[0] += abs(centre[0]-sign_height)+dim/2
if (centre[0]+sign_height)>height:
print "Below"
centre[0] -= abs(height-(centre[0]+sign_height))+dim/2
if (centre[1]-sign_width)<0:
print "Left"
centre[1] += abs(centre[1]-sign_width)+dim/2
if (centre[1]+sign_width)>width:
print "Right"
centre[1] -= abs(width-(centre[1]+sign_width))+dim/2
diff_y, diff_x = getPointdiff(signature['0'], centre)
for point in signature:
if point=='0':
signature['0'] = centre
continue
signature[point] = [signature[point][0]-diff_y, signature[point][1]-diff_x]
return signature
def splitGroup(group):
max_y = max(group, key=lambda x: x[0])[0]
min_y = min(group, key=lambda x: x[0])[0]
max_x = max(group, key=lambda x: x[1])[1]
min_x = min(group, key=lambda x: x[1])[1]
abovecentres = []
belowcentres = []
group = sorted(group, key=lambda x: x[1])
for centre in group:
if abs(centre[0]-max_y)<abs(centre[0]-min_y):
belowcentres.append(centre)
if abs(centre[0]-max_y)>abs(centre[0]-min_y):
abovecentres.append(centre)
b_median = belowcentres[len(belowcentres)/2][0]
a_median = abovecentres[len(abovecentres)/2][0]
belowcentres = filter(lambda x: x[0]<(b_median+400) and x[0]>(b_median-400), belowcentres)
abovecentres = filter(lambda x: x[0]<(a_median+400) and x[0]>(a_median-400), abovecentres)
return abovecentres, belowcentres
def orderGroup(group):
abovecentres, belowcentres = splitGroup(group)
return abovecentres+belowcentres
def control_edge(point, cheight, cwidth):
left = (point[1]-(cwidth/2))
right = (point[1]+(cwidth/2))
top = (point[0]-(cheight/2))
bottom = (point[0]+(cheight/2))
if left<0:
diff = 0-left
left += diff+1
right += diff+1
if right>width:
diff = width-right
left += diff-1
right += diff-1
if top<0:
diff = 0-top
top += diff+1
bottom += diff+1
if bottom>height:
diff = height-bottom
top += diff-1
bottom += diff-1
return top, bottom, left, right
def local_Kmeans(mask, k=1):
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
if len(blackpixels)==0: return [200, 200]
centers = kmeans(blackpixels.T.astype(float), k, iter=20, thresh=1e-08)[0].astype(int)
return centers
def local_center_of_mass(mask):
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
center = np.squeeze(np.asarray(blackpixels.mean(axis=1).astype(int).flatten('C')))
return center
def claim_region(mask, top, bottom, left, right):
mask[top:bottom, left:right] = 255
def testRadius(signature, group, radius, mask, dim):
new_group = []
del signature['0']
for point in signature.values():
match = []
for centre in group:
if centre[0]<(point[0]+radius) and centre[0]>(point[0]-radius) and centre[1]<(point[1]+radius) and centre[1]>(point[1]-radius):
if match==[]:
match = centre
if distance(match, point)>distance(centre, point):
|
if match==[]:
#new_group.append(point)
top, bottom, left, right = control_edge(point, dim, dim)
try:
npoint = local_Kmeans(mask[top:bottom,left:right])[0]
npoint = local_center_of_mass(mask[top:bottom,left:right])
except:
new_group.append(point)
continue
diff_x, diff_y = npoint[0]-(dim/2), npoint[1]-(dim/2)
claim_region(mask, top, bottom, left, right)
new_group.append([point[0]+diff_x, point[1]+diff_y])
else:
top, bottom, left, right = control_edge(point, dim, dim)
claim_region(mask, top, bottom, left, right)
new_group.append(match)
return new_group
def test_content(img, point, dim):
#print img.shape
top, bottom, left, right = control_edge(point, dim, dim)
img = img[top:bottom,left:right]
labmask = LABMaskImage(img)
hsvmask = HSVMaskImage(img)
mask = combinemasks(labmask, hsvmask)
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
return blackpixels.shape[1]
if __name__=="__main__":
usage = "usage: %prog [options]"
parser = OptionParser(usage)
parser.add_option('-i', type="string", nargs=1, dest="input", help="input image")
parser.add_option('-o', type="string", nargs=1, dest="output", help="output file")
parser.add_option('-c', type="string", nargs=1, dest="centers", help="centers")
parser.add_option('-s', type="string", nargs=1, dest="signature", help="signature")
parser.add_option('-d', type="int", nargs=1, dest="dimension", help="dimension")
options, args = parser.parse_args()
if options.input==None:
raise "No input image given, please give input image: -i image"
else:
in_file = options.input
if options.output==None:
raise "No output name given, please give output name: -o filename"
else:
out_file = options.output
if options.centers==None:
raise "No centers given, unable to match center points: -c filename.log"
else:
centers_filename = options.centers
if options.signature==None:
raise "No signature given, unable to find correct center points: -s filename.sign"
else:
sign_filename = options.signature
if options.dimension==None:
print "Notice: No dimension given, using defualt size 400: -d int"
dim = 400
else:
dim = options.dimension
inimg = io.imread(in_file)
height, width, _ = inimg.shape
print height, width, dim
## Overall mask
labmask = LABMaskImage(inimg)
hsvmask = HSVMaskImage(inimg)
mask = combinemasks(labmask, hsvmask)
#centers = kmeans(blackpixels.T.astype(float), 10, iter=100, thresh=1e-08)[0].astype(int)
print "Loading Centres"
centres = loadcenters(centers_filename, mask)
print "Grouping centres"
groups = groupCentres(centres)
print "Loading signature"
signature, signature_value = readSignature(sign_filename)
#centres = orderGroup(centres)
print centres
print "Find closest group to signature"
#centres = closestGroup(groups, signature_value)
centres = closestGroupSignature(groups, signature, 250)
print(centres)
print "Overlay signature on the group"
signature = overlaySignature(signature, centres)
copy_centres = centres
print "Test and fix fit of signature centers to group"
centres = testRadius(signature, centres, 200, mask, 500)
centres = orderGroup(centres)
i = 10
c = 0
print "Cropping images"
for centre in centres:
count = test_content(inimg, centre, dim)
if count<1000:
i -= 1
continue
#crop_square(centre, dim, inimg, i, in_file, out_file)
try:
square(copy_centres[c], dim, inimg, 0, 3, color=(55, 255, 55))
square(copy_centres[c], 40, inimg, 0, 5, color=(55, 255, 55))
except:
pass
square(centre, 10, inimg, 0, 5, color=(55, 55, 255))
square(centre, dim, inimg, i, 3, color=(55, 55, 255))
square(signature[str(i)], 200, inimg, 0, 3, color=(255, 55, 55))
i -= 1
c += 1
#square(signature['0'], 50, inimg, 0, 3, color=(255, 55, 55))
io.imsave(out_file, inimg)
| match = centre | conditional_block |
FindPoints.py | from sys import argv
from skimage import io
from skimage.color import hsv2rgb, rgb2hsv, rgb2grey
from skimage.measure import label
from ImageFiltering import *
from Signature import *
from CropPoints import *
import numpy as np
from optparse import OptionParser
from scipy.ndimage.measurements import center_of_mass
from scipy.ndimage import label
def loadcenters(filename, mask, potsize=500):
f = open(filename)
centers = []
for line in f:
info = line.split(" ")[2:]
center = info[2]
center = center.split(",")
center[0] = int(round(float(center[0])))
center[1] = int(round(float(center[1])))
center = center[::-1]
if center[0]<150 or center[1]<150:
print "Centre on the edge, and will therefore be removed"
continue
if center[1]>(width-150) or center[0]>(height-150):
print "Centre on the edge, and will therefore be removed"
continue
size = [int(i) for i in info[1].split("+")[0].split("x")]
if size[0]>potsize:
#centers.append(center)
print "WARNING: Cluster size too big, potential merge of pots detected"
print "Employing K-means on the merged blob to find the pots"
c = (size[0]//potsize)+1
top, bottom, left, right = control_edge(center, potsize, size[0]+100)
npoint = local_Kmeans(mask[top:bottom,left:right], c)
temp_centers = []
for point in npoint:
diff_x, diff_y = point[0]-int(potsize/2), point[1]-int((size[0]+100)/2)
point = [center[0]+diff_x, center[1]+diff_y]
if point[0]<150:
continue
if point[1]<150:
continue
if point[0]>(height-150):
continue
if point[1]>(width-150):
continue
temp_centers.append(point)
min_distance = 300
cgroup = []
## Average centers that are to close together.
for tcenter in temp_centers:
for tcenter2 in temp_centers:
if tcenter==tcenter2: continue
dist = distance(tcenter, tcenter2)
if dist<min_distance:
cgroup = [tcenter, tcenter2]
min_distance = dist
if cgroup==[]:
for tcenter in temp_centers:
centers.append(tcenter)
else:
ncenter = findCentre(cgroup)
centers.append(ncenter)
for tcenter in temp_centers:
if tcenter not in cgroup:
centers.append(tcenter)
else:
centers.append(center)
return centers
def groupCentres(centres):
groups = []
for i in range(len(centres)):
temp_group = [centres[i]]
for j in range(i, len(centres)):
if i==j:
continue
temp_group.append(centres[j])
n = len(temp_group)
if n<3: continue
if n>10: continue
temp_group = orderGroup(temp_group)
gheight, gwidth = dimension_of_group(temp_group, "dimension")
if len(temp_group)!=n: continue
if gheight>1300: continue
if gwidth>2300: continue
groups.append(orderGroup(temp_group))
return groups
def closestGroupSignature(groups, signature, radius):
number_of_matches = 0
bestgroup = []
i = 1
j = 0
for group in groups:
psignature = overlaySignature(signature, group)
match = []
for point in psignature.values():
for centre in group:
if centre[0]<(point[0]+radius) and centre[0]>(point[0]-radius) and centre[1]<(point[1]+radius) and centre[1]>(point[1]-radius):
match.append(centre)
continue
matches = len(match)
if number_of_matches<matches:
bestgroup = group
j = i
number_of_matches = matches
i += 1
return bestgroup
def getPointdiff(point1, point2):
return point1[0]-point2[0], point1[1]-point2[1]
def dimension_of_group(group, return_value="centre"):
max_y = max(group, key=lambda x: x[0])[0]
min_y = min(group, key=lambda x: x[0])[0]
max_x = max(group, key=lambda x: x[1])[1]
min_x = min(group, key=lambda x: x[1])[1]
if return_value=="centre" or return_value=="center":
return [(max_y-min_y)/2+min_y, (max_x-min_x)/2+min_x]
if return_value=="rectangle":
return [[min_y, min_x], [min_y, max_x], [max_y, min_x], [max_y, max_x]]
if return_value=="dimension":
width = distance([min_y, min_x], [min_y, max_x])
height = distance([min_y, min_x], [max_y, min_x])
return height, width
if return_value=="area":
width = distance([min_y, min_x], [min_y, max_x])
height = distance([min_y, min_x], [max_y, min_x])
return width*height
return max_y, min_y, max_x, min_x
def overlaySignature(signature, group):
centre = dimension_of_group(group, "centre")
sign_height, sign_width = dimension_of_group(signature.values(), "dimension")
sign_height = int(sign_height/2)
sign_width = int(sign_width/2)
if (centre[0]-sign_height)<0:
print "Above"
centre[0] += abs(centre[0]-sign_height)+dim/2
if (centre[0]+sign_height)>height:
print "Below"
centre[0] -= abs(height-(centre[0]+sign_height))+dim/2
if (centre[1]-sign_width)<0:
print "Left"
centre[1] += abs(centre[1]-sign_width)+dim/2
if (centre[1]+sign_width)>width:
print "Right"
centre[1] -= abs(width-(centre[1]+sign_width))+dim/2
diff_y, diff_x = getPointdiff(signature['0'], centre)
for point in signature:
if point=='0':
signature['0'] = centre
continue
signature[point] = [signature[point][0]-diff_y, signature[point][1]-diff_x]
return signature
def splitGroup(group):
max_y = max(group, key=lambda x: x[0])[0]
min_y = min(group, key=lambda x: x[0])[0]
max_x = max(group, key=lambda x: x[1])[1]
min_x = min(group, key=lambda x: x[1])[1]
abovecentres = []
belowcentres = []
group = sorted(group, key=lambda x: x[1])
for centre in group:
if abs(centre[0]-max_y)<abs(centre[0]-min_y):
belowcentres.append(centre)
if abs(centre[0]-max_y)>abs(centre[0]-min_y):
abovecentres.append(centre)
b_median = belowcentres[len(belowcentres)/2][0]
a_median = abovecentres[len(abovecentres)/2][0]
belowcentres = filter(lambda x: x[0]<(b_median+400) and x[0]>(b_median-400), belowcentres)
abovecentres = filter(lambda x: x[0]<(a_median+400) and x[0]>(a_median-400), abovecentres)
return abovecentres, belowcentres
def orderGroup(group):
abovecentres, belowcentres = splitGroup(group)
return abovecentres+belowcentres
def control_edge(point, cheight, cwidth):
left = (point[1]-(cwidth/2))
right = (point[1]+(cwidth/2))
top = (point[0]-(cheight/2))
bottom = (point[0]+(cheight/2))
if left<0:
diff = 0-left
left += diff+1
right += diff+1
if right>width:
diff = width-right
left += diff-1
right += diff-1
if top<0:
diff = 0-top
top += diff+1
bottom += diff+1
if bottom>height:
diff = height-bottom
top += diff-1
bottom += diff-1
return top, bottom, left, right
def local_Kmeans(mask, k=1):
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
if len(blackpixels)==0: return [200, 200]
centers = kmeans(blackpixels.T.astype(float), k, iter=20, thresh=1e-08)[0].astype(int)
return centers
def local_center_of_mass(mask):
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
center = np.squeeze(np.asarray(blackpixels.mean(axis=1).astype(int).flatten('C')))
return center
def claim_region(mask, top, bottom, left, right):
mask[top:bottom, left:right] = 255
def testRadius(signature, group, radius, mask, dim):
new_group = []
del signature['0']
for point in signature.values():
match = []
for centre in group:
if centre[0]<(point[0]+radius) and centre[0]>(point[0]-radius) and centre[1]<(point[1]+radius) and centre[1]>(point[1]-radius):
if match==[]:
match = centre
if distance(match, point)>distance(centre, point):
match = centre
if match==[]:
#new_group.append(point)
top, bottom, left, right = control_edge(point, dim, dim)
try:
npoint = local_Kmeans(mask[top:bottom,left:right])[0]
npoint = local_center_of_mass(mask[top:bottom,left:right])
except:
new_group.append(point)
continue
diff_x, diff_y = npoint[0]-(dim/2), npoint[1]-(dim/2)
claim_region(mask, top, bottom, left, right)
new_group.append([point[0]+diff_x, point[1]+diff_y])
else:
top, bottom, left, right = control_edge(point, dim, dim)
claim_region(mask, top, bottom, left, right)
new_group.append(match)
return new_group
def test_content(img, point, dim):
#print img.shape
|
if __name__=="__main__":
usage = "usage: %prog [options]"
parser = OptionParser(usage)
parser.add_option('-i', type="string", nargs=1, dest="input", help="input image")
parser.add_option('-o', type="string", nargs=1, dest="output", help="output file")
parser.add_option('-c', type="string", nargs=1, dest="centers", help="centers")
parser.add_option('-s', type="string", nargs=1, dest="signature", help="signature")
parser.add_option('-d', type="int", nargs=1, dest="dimension", help="dimension")
options, args = parser.parse_args()
if options.input==None:
raise "No input image given, please give input image: -i image"
else:
in_file = options.input
if options.output==None:
raise "No output name given, please give output name: -o filename"
else:
out_file = options.output
if options.centers==None:
raise "No centers given, unable to match center points: -c filename.log"
else:
centers_filename = options.centers
if options.signature==None:
raise "No signature given, unable to find correct center points: -s filename.sign"
else:
sign_filename = options.signature
if options.dimension==None:
print "Notice: No dimension given, using defualt size 400: -d int"
dim = 400
else:
dim = options.dimension
inimg = io.imread(in_file)
height, width, _ = inimg.shape
print height, width, dim
## Overall mask
labmask = LABMaskImage(inimg)
hsvmask = HSVMaskImage(inimg)
mask = combinemasks(labmask, hsvmask)
#centers = kmeans(blackpixels.T.astype(float), 10, iter=100, thresh=1e-08)[0].astype(int)
print "Loading Centres"
centres = loadcenters(centers_filename, mask)
print "Grouping centres"
groups = groupCentres(centres)
print "Loading signature"
signature, signature_value = readSignature(sign_filename)
#centres = orderGroup(centres)
print centres
print "Find closest group to signature"
#centres = closestGroup(groups, signature_value)
centres = closestGroupSignature(groups, signature, 250)
print(centres)
print "Overlay signature on the group"
signature = overlaySignature(signature, centres)
copy_centres = centres
print "Test and fix fit of signature centers to group"
centres = testRadius(signature, centres, 200, mask, 500)
centres = orderGroup(centres)
i = 10
c = 0
print "Cropping images"
for centre in centres:
count = test_content(inimg, centre, dim)
if count<1000:
i -= 1
continue
#crop_square(centre, dim, inimg, i, in_file, out_file)
try:
square(copy_centres[c], dim, inimg, 0, 3, color=(55, 255, 55))
square(copy_centres[c], 40, inimg, 0, 5, color=(55, 255, 55))
except:
pass
square(centre, 10, inimg, 0, 5, color=(55, 55, 255))
square(centre, dim, inimg, i, 3, color=(55, 55, 255))
square(signature[str(i)], 200, inimg, 0, 3, color=(255, 55, 55))
i -= 1
c += 1
#square(signature['0'], 50, inimg, 0, 3, color=(255, 55, 55))
io.imsave(out_file, inimg)
| top, bottom, left, right = control_edge(point, dim, dim)
img = img[top:bottom,left:right]
labmask = LABMaskImage(img)
hsvmask = HSVMaskImage(img)
mask = combinemasks(labmask, hsvmask)
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
return blackpixels.shape[1] | identifier_body |
FindPoints.py | from sys import argv
from skimage import io
from skimage.color import hsv2rgb, rgb2hsv, rgb2grey
from skimage.measure import label
from ImageFiltering import *
from Signature import *
from CropPoints import *
import numpy as np
from optparse import OptionParser
from scipy.ndimage.measurements import center_of_mass
from scipy.ndimage import label
def | (filename, mask, potsize=500):
f = open(filename)
centers = []
for line in f:
info = line.split(" ")[2:]
center = info[2]
center = center.split(",")
center[0] = int(round(float(center[0])))
center[1] = int(round(float(center[1])))
center = center[::-1]
if center[0]<150 or center[1]<150:
print "Centre on the edge, and will therefore be removed"
continue
if center[1]>(width-150) or center[0]>(height-150):
print "Centre on the edge, and will therefore be removed"
continue
size = [int(i) for i in info[1].split("+")[0].split("x")]
if size[0]>potsize:
#centers.append(center)
print "WARNING: Cluster size too big, potential merge of pots detected"
print "Employing K-means on the merged blob to find the pots"
c = (size[0]//potsize)+1
top, bottom, left, right = control_edge(center, potsize, size[0]+100)
npoint = local_Kmeans(mask[top:bottom,left:right], c)
temp_centers = []
for point in npoint:
diff_x, diff_y = point[0]-int(potsize/2), point[1]-int((size[0]+100)/2)
point = [center[0]+diff_x, center[1]+diff_y]
if point[0]<150:
continue
if point[1]<150:
continue
if point[0]>(height-150):
continue
if point[1]>(width-150):
continue
temp_centers.append(point)
min_distance = 300
cgroup = []
## Average centers that are to close together.
for tcenter in temp_centers:
for tcenter2 in temp_centers:
if tcenter==tcenter2: continue
dist = distance(tcenter, tcenter2)
if dist<min_distance:
cgroup = [tcenter, tcenter2]
min_distance = dist
if cgroup==[]:
for tcenter in temp_centers:
centers.append(tcenter)
else:
ncenter = findCentre(cgroup)
centers.append(ncenter)
for tcenter in temp_centers:
if tcenter not in cgroup:
centers.append(tcenter)
else:
centers.append(center)
return centers
def groupCentres(centres):
groups = []
for i in range(len(centres)):
temp_group = [centres[i]]
for j in range(i, len(centres)):
if i==j:
continue
temp_group.append(centres[j])
n = len(temp_group)
if n<3: continue
if n>10: continue
temp_group = orderGroup(temp_group)
gheight, gwidth = dimension_of_group(temp_group, "dimension")
if len(temp_group)!=n: continue
if gheight>1300: continue
if gwidth>2300: continue
groups.append(orderGroup(temp_group))
return groups
def closestGroupSignature(groups, signature, radius):
number_of_matches = 0
bestgroup = []
i = 1
j = 0
for group in groups:
psignature = overlaySignature(signature, group)
match = []
for point in psignature.values():
for centre in group:
if centre[0]<(point[0]+radius) and centre[0]>(point[0]-radius) and centre[1]<(point[1]+radius) and centre[1]>(point[1]-radius):
match.append(centre)
continue
matches = len(match)
if number_of_matches<matches:
bestgroup = group
j = i
number_of_matches = matches
i += 1
return bestgroup
def getPointdiff(point1, point2):
return point1[0]-point2[0], point1[1]-point2[1]
def dimension_of_group(group, return_value="centre"):
max_y = max(group, key=lambda x: x[0])[0]
min_y = min(group, key=lambda x: x[0])[0]
max_x = max(group, key=lambda x: x[1])[1]
min_x = min(group, key=lambda x: x[1])[1]
if return_value=="centre" or return_value=="center":
return [(max_y-min_y)/2+min_y, (max_x-min_x)/2+min_x]
if return_value=="rectangle":
return [[min_y, min_x], [min_y, max_x], [max_y, min_x], [max_y, max_x]]
if return_value=="dimension":
width = distance([min_y, min_x], [min_y, max_x])
height = distance([min_y, min_x], [max_y, min_x])
return height, width
if return_value=="area":
width = distance([min_y, min_x], [min_y, max_x])
height = distance([min_y, min_x], [max_y, min_x])
return width*height
return max_y, min_y, max_x, min_x
def overlaySignature(signature, group):
centre = dimension_of_group(group, "centre")
sign_height, sign_width = dimension_of_group(signature.values(), "dimension")
sign_height = int(sign_height/2)
sign_width = int(sign_width/2)
if (centre[0]-sign_height)<0:
print "Above"
centre[0] += abs(centre[0]-sign_height)+dim/2
if (centre[0]+sign_height)>height:
print "Below"
centre[0] -= abs(height-(centre[0]+sign_height))+dim/2
if (centre[1]-sign_width)<0:
print "Left"
centre[1] += abs(centre[1]-sign_width)+dim/2
if (centre[1]+sign_width)>width:
print "Right"
centre[1] -= abs(width-(centre[1]+sign_width))+dim/2
diff_y, diff_x = getPointdiff(signature['0'], centre)
for point in signature:
if point=='0':
signature['0'] = centre
continue
signature[point] = [signature[point][0]-diff_y, signature[point][1]-diff_x]
return signature
def splitGroup(group):
max_y = max(group, key=lambda x: x[0])[0]
min_y = min(group, key=lambda x: x[0])[0]
max_x = max(group, key=lambda x: x[1])[1]
min_x = min(group, key=lambda x: x[1])[1]
abovecentres = []
belowcentres = []
group = sorted(group, key=lambda x: x[1])
for centre in group:
if abs(centre[0]-max_y)<abs(centre[0]-min_y):
belowcentres.append(centre)
if abs(centre[0]-max_y)>abs(centre[0]-min_y):
abovecentres.append(centre)
b_median = belowcentres[len(belowcentres)/2][0]
a_median = abovecentres[len(abovecentres)/2][0]
belowcentres = filter(lambda x: x[0]<(b_median+400) and x[0]>(b_median-400), belowcentres)
abovecentres = filter(lambda x: x[0]<(a_median+400) and x[0]>(a_median-400), abovecentres)
return abovecentres, belowcentres
def orderGroup(group):
abovecentres, belowcentres = splitGroup(group)
return abovecentres+belowcentres
def control_edge(point, cheight, cwidth):
left = (point[1]-(cwidth/2))
right = (point[1]+(cwidth/2))
top = (point[0]-(cheight/2))
bottom = (point[0]+(cheight/2))
if left<0:
diff = 0-left
left += diff+1
right += diff+1
if right>width:
diff = width-right
left += diff-1
right += diff-1
if top<0:
diff = 0-top
top += diff+1
bottom += diff+1
if bottom>height:
diff = height-bottom
top += diff-1
bottom += diff-1
return top, bottom, left, right
def local_Kmeans(mask, k=1):
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
if len(blackpixels)==0: return [200, 200]
centers = kmeans(blackpixels.T.astype(float), k, iter=20, thresh=1e-08)[0].astype(int)
return centers
def local_center_of_mass(mask):
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
center = np.squeeze(np.asarray(blackpixels.mean(axis=1).astype(int).flatten('C')))
return center
def claim_region(mask, top, bottom, left, right):
mask[top:bottom, left:right] = 255
def testRadius(signature, group, radius, mask, dim):
new_group = []
del signature['0']
for point in signature.values():
match = []
for centre in group:
if centre[0]<(point[0]+radius) and centre[0]>(point[0]-radius) and centre[1]<(point[1]+radius) and centre[1]>(point[1]-radius):
if match==[]:
match = centre
if distance(match, point)>distance(centre, point):
match = centre
if match==[]:
#new_group.append(point)
top, bottom, left, right = control_edge(point, dim, dim)
try:
npoint = local_Kmeans(mask[top:bottom,left:right])[0]
npoint = local_center_of_mass(mask[top:bottom,left:right])
except:
new_group.append(point)
continue
diff_x, diff_y = npoint[0]-(dim/2), npoint[1]-(dim/2)
claim_region(mask, top, bottom, left, right)
new_group.append([point[0]+diff_x, point[1]+diff_y])
else:
top, bottom, left, right = control_edge(point, dim, dim)
claim_region(mask, top, bottom, left, right)
new_group.append(match)
return new_group
def test_content(img, point, dim):
#print img.shape
top, bottom, left, right = control_edge(point, dim, dim)
img = img[top:bottom,left:right]
labmask = LABMaskImage(img)
hsvmask = HSVMaskImage(img)
mask = combinemasks(labmask, hsvmask)
blackpixels = np.where(mask==0)
blackpixels = np.matrix(blackpixels)
return blackpixels.shape[1]
if __name__=="__main__":
usage = "usage: %prog [options]"
parser = OptionParser(usage)
parser.add_option('-i', type="string", nargs=1, dest="input", help="input image")
parser.add_option('-o', type="string", nargs=1, dest="output", help="output file")
parser.add_option('-c', type="string", nargs=1, dest="centers", help="centers")
parser.add_option('-s', type="string", nargs=1, dest="signature", help="signature")
parser.add_option('-d', type="int", nargs=1, dest="dimension", help="dimension")
options, args = parser.parse_args()
if options.input==None:
raise "No input image given, please give input image: -i image"
else:
in_file = options.input
if options.output==None:
raise "No output name given, please give output name: -o filename"
else:
out_file = options.output
if options.centers==None:
raise "No centers given, unable to match center points: -c filename.log"
else:
centers_filename = options.centers
if options.signature==None:
raise "No signature given, unable to find correct center points: -s filename.sign"
else:
sign_filename = options.signature
if options.dimension==None:
print "Notice: No dimension given, using defualt size 400: -d int"
dim = 400
else:
dim = options.dimension
inimg = io.imread(in_file)
height, width, _ = inimg.shape
print height, width, dim
## Overall mask
labmask = LABMaskImage(inimg)
hsvmask = HSVMaskImage(inimg)
mask = combinemasks(labmask, hsvmask)
#centers = kmeans(blackpixels.T.astype(float), 10, iter=100, thresh=1e-08)[0].astype(int)
print "Loading Centres"
centres = loadcenters(centers_filename, mask)
print "Grouping centres"
groups = groupCentres(centres)
print "Loading signature"
signature, signature_value = readSignature(sign_filename)
#centres = orderGroup(centres)
print centres
print "Find closest group to signature"
#centres = closestGroup(groups, signature_value)
centres = closestGroupSignature(groups, signature, 250)
print(centres)
print "Overlay signature on the group"
signature = overlaySignature(signature, centres)
copy_centres = centres
print "Test and fix fit of signature centers to group"
centres = testRadius(signature, centres, 200, mask, 500)
centres = orderGroup(centres)
i = 10
c = 0
print "Cropping images"
for centre in centres:
count = test_content(inimg, centre, dim)
if count<1000:
i -= 1
continue
#crop_square(centre, dim, inimg, i, in_file, out_file)
try:
square(copy_centres[c], dim, inimg, 0, 3, color=(55, 255, 55))
square(copy_centres[c], 40, inimg, 0, 5, color=(55, 255, 55))
except:
pass
square(centre, 10, inimg, 0, 5, color=(55, 55, 255))
square(centre, dim, inimg, i, 3, color=(55, 55, 255))
square(signature[str(i)], 200, inimg, 0, 3, color=(255, 55, 55))
i -= 1
c += 1
#square(signature['0'], 50, inimg, 0, 3, color=(255, 55, 55))
io.imsave(out_file, inimg)
| loadcenters | identifier_name |
FindPoints.py | from sys import argv
from skimage import io
from skimage.color import hsv2rgb, rgb2hsv, rgb2grey
from skimage.measure import label
from ImageFiltering import *
from Signature import *
from CropPoints import *
import numpy as np
from optparse import OptionParser
from scipy.ndimage.measurements import center_of_mass
from scipy.ndimage import label
def loadcenters(filename, mask, potsize=500):
f = open(filename) | center = info[2]
center = center.split(",")
center[0] = int(round(float(center[0])))
center[1] = int(round(float(center[1])))
center = center[::-1]
if center[0]<150 or center[1]<150:
print "Centre on the edge, and will therefore be removed"
continue
if center[1]>(width-150) or center[0]>(height-150):
print "Centre on the edge, and will therefore be removed"
continue
size = [int(i) for i in info[1].split("+")[0].split("x")]
if size[0]>potsize:
#centers.append(center)
print "WARNING: Cluster size too big, potential merge of pots detected"
print "Employing K-means on the merged blob to find the pots"
c = (size[0]//potsize)+1
top, bottom, left, right = control_edge(center, potsize, size[0]+100)
npoint = local_Kmeans(mask[top:bottom,left:right], c)
temp_centers = []
for point in npoint:
diff_x, diff_y = point[0]-int(potsize/2), point[1]-int((size[0]+100)/2)
point = [center[0]+diff_x, center[1]+diff_y]
if point[0]<150:
continue
if point[1]<150:
continue
if point[0]>(height-150):
continue
if point[1]>(width-150):
continue
temp_centers.append(point)
min_distance = 300
cgroup = []
## Average centers that are to close together.
for tcenter in temp_centers:
for tcenter2 in temp_centers:
if tcenter==tcenter2: continue
dist = distance(tcenter, tcenter2)
if dist<min_distance:
cgroup = [tcenter, tcenter2]
min_distance = dist
if cgroup==[]:
for tcenter in temp_centers:
centers.append(tcenter)
else:
ncenter = findCentre(cgroup)
centers.append(ncenter)
for tcenter in temp_centers:
if tcenter not in cgroup:
centers.append(tcenter)
else:
centers.append(center)
return centers
def groupCentres(centres):
groups = []
for i in range(len(centres)):
temp_group = [centres[i]]
for j in range(i, len(centres)):
if i==j:
continue
temp_group.append(centres[j])
n = len(temp_group)
if n<3: continue
if n>10: continue
temp_group = orderGroup(temp_group)
gheight, gwidth = dimension_of_group(temp_group, "dimension")
if len(temp_group)!=n: continue
if gheight>1300: continue
if gwidth>2300: continue
groups.append(orderGroup(temp_group))
return groups
def closestGroupSignature(groups, signature, radius):
number_of_matches = 0
bestgroup = []
i = 1
j = 0
for group in groups:
psignature = overlaySignature(signature, group)
match = []
for point in psignature.values():
for centre in group:
if centre[0]<(point[0]+radius) and centre[0]>(point[0]-radius) and centre[1]<(point[1]+radius) and centre[1]>(point[1]-radius):
match.append(centre)
continue
matches = len(match)
if number_of_matches<matches:
bestgroup = group
j = i
number_of_matches = matches
i += 1
return bestgroup
def getPointdiff(point1, point2):
return point1[0]-point2[0], point1[1]-point2[1]
def dimension_of_group(group, return_value="centre"):
max_y = max(group, key=lambda x: x[0])[0]
min_y = min(group, key=lambda x: x[0])[0]
max_x = max(group, key=lambda x: x[1])[1]
min_x = min(group, key=lambda x: x[1])[1]
if return_value=="centre" or return_value=="center":
return [(max_y-min_y)/2+min_y, (max_x-min_x)/2+min_x]
if return_value=="rectangle":
return [[min_y, min_x], [min_y, max_x], [max_y, min_x], [max_y, max_x]]
if return_value=="dimension":
width = distance([min_y, min_x], [min_y, max_x])
height = distance([min_y, min_x], [max_y, min_x])
return height, width
if return_value=="area":
width = distance([min_y, min_x], [min_y, max_x])
height = distance([min_y, min_x], [max_y, min_x])
return width*height
return max_y, min_y, max_x, min_x
def overlaySignature(signature, group):
    """Shift the signature points so the signature is centred on the group.

    `signature` is a dict mapping string indices to [y, x] points; key '0'
    is the reference/centre point.  Mutates and returns `signature`.
    Relies on the module globals `height`, `width` (input image size, set
    in __main__) and `dim` (crop size) to keep the overlay inside the image.
    """
    centre = dimension_of_group(group, "centre")
    sign_height, sign_width = dimension_of_group(signature.values(), "dimension")
    sign_height = int(sign_height/2)
    sign_width = int(sign_width/2)
    # Nudge the target centre back inside the image when the signature's
    # half-extent would poke over an edge.
    if (centre[0]-sign_height)<0:
        print "Above"
        centre[0] += abs(centre[0]-sign_height)+dim/2
    if (centre[0]+sign_height)>height:
        print "Below"
        centre[0] -= abs(height-(centre[0]+sign_height))+dim/2
    if (centre[1]-sign_width)<0:
        print "Left"
        centre[1] += abs(centre[1]-sign_width)+dim/2
    if (centre[1]+sign_width)>width:
        print "Right"
        centre[1] -= abs(width-(centre[1]+sign_width))+dim/2
    # Translate every point by the offset between the old reference point
    # ('0') and the new centre, then move '0' itself onto the centre.
    diff_y, diff_x = getPointdiff(signature['0'], centre)
    for point in signature:
        if point=='0':
            signature['0'] = centre
            continue
        signature[point] = [signature[point][0]-diff_y, signature[point][1]-diff_x]
    return signature
def splitGroup(group):
    """Split a group of [y, x] points into its top and bottom rows.

    Points closer to the group's minimum y go to the "above" row, points
    closer to the maximum y to the "below" row (points exactly equidistant
    are dropped, as in the original).  Each row is sorted left-to-right and
    filtered to points within +/-400 px of the row's middle element's y.

    Rewritten with `//` and list comprehensions: behaviour is identical
    under Python 2, but the function no longer breaks under Python 3,
    where filter() is lazy and `len(...)/2` is a float (invalid index).

    Returns (abovecentres, belowcentres).
    """
    max_y = max(group, key=lambda x: x[0])[0]
    min_y = min(group, key=lambda x: x[0])[0]
    abovecentres = []
    belowcentres = []
    group = sorted(group, key=lambda x: x[1])
    for centre in group:
        if abs(centre[0] - max_y) < abs(centre[0] - min_y):
            belowcentres.append(centre)
        if abs(centre[0] - max_y) > abs(centre[0] - min_y):
            abovecentres.append(centre)
    # NOTE(review): these index the x-sorted rows, so each "median" is the
    # middle element's y rather than a true median of y; raises IndexError
    # on an empty row -- confirm whether an all-equidistant (flat) group
    # can reach this point.
    b_median = belowcentres[len(belowcentres) // 2][0]
    a_median = abovecentres[len(abovecentres) // 2][0]
    belowcentres = [c for c in belowcentres if b_median - 400 < c[0] < b_median + 400]
    abovecentres = [c for c in abovecentres if a_median - 400 < c[0] < a_median + 400]
    return abovecentres, belowcentres
def orderGroup(group):
    """Order a group with its top row first, then its bottom row."""
    above, below = splitGroup(group)
    return above + below
def control_edge(point, cheight, cwidth):
    """Clamp a cheight x cwidth window centred on `point` to the image.

    Uses the module globals `width` and `height` (input image size).
    A window that pokes over an edge is shifted back so it sits one pixel
    inside the image.  Returns (top, bottom, left, right).
    """
    half_h = cheight / 2
    half_w = cwidth / 2
    top, bottom = point[0] - half_h, point[0] + half_h
    left, right = point[1] - half_w, point[1] + half_w
    if left < 0:
        shift = (0 - left) + 1
        left += shift
        right += shift
    if right > width:
        shift = (width - right) - 1
        left += shift
        right += shift
    if top < 0:
        shift = (0 - top) + 1
        top += shift
        bottom += shift
    if bottom > height:
        shift = (height - bottom) - 1
        top += shift
        bottom += shift
    return top, bottom, left, right
def local_Kmeans(mask, k=1):
    """Cluster the black (0) pixels of `mask` into k centres via k-means.

    Returns a k x 2 int array of (y, x) centres, or the fallback point
    [200, 200] when the mask contains no black pixels at all.
    """
    # Coordinates of all black pixels as a 2 x N array (row 0: y, row 1: x).
    # np.asarray replaces the deprecated np.matrix; same 2 x N layout.
    blackpixels = np.asarray(np.where(mask == 0))
    # The original guard was `len(blackpixels) == 0`, but the coordinate
    # array is always 2 x N (len == 2), so the check could never fire and
    # kmeans crashed on an all-white mask.  Test the point count instead.
    if blackpixels.shape[1] == 0:
        return [200, 200]
    centers = kmeans(blackpixels.T.astype(float), k, iter=20, thresh=1e-08)[0].astype(int)
    return centers
def local_center_of_mass(mask):
    """Mean (y, x) position of the black (0) pixels in `mask`, as ints."""
    coords = np.matrix(np.where(mask == 0))
    mean_pos = coords.mean(axis=1).astype(int).flatten('C')
    return np.squeeze(np.asarray(mean_pos))
def claim_region(mask, top, bottom, left, right):
    # Whiten the matched window in place so later searches cannot claim
    # the same pixels again; mutates `mask`.
    mask[top:bottom, left:right] = 255
def testRadius(signature, group, radius, mask, dim):
    """Snap each signature point to a nearby group centre, or re-detect it.

    For every signature point (except the reference point '0', which is
    removed from `signature` as a side effect), pick the closest group
    centre within a +/-radius box.  If none is found, re-estimate the point
    from the black pixels of `mask` in a dim x dim window around it.
    Matched or re-detected windows are blanked out of `mask` via
    claim_region() so each region is claimed at most once.

    Returns the list of resolved [y, x] points.
    """
    new_group = []
    # Side effect: the caller's signature dict loses its reference point.
    del signature['0']
    for point in signature.values():
        match = []
        for centre in group:
            if centre[0]<(point[0]+radius) and centre[0]>(point[0]-radius) and centre[1]<(point[1]+radius) and centre[1]>(point[1]-radius):
                if match==[]:
                    match = centre
                # Keep whichever in-radius centre is closest to the point.
                if distance(match, point)>distance(centre, point):
                    match = centre
        if match==[]:
            #new_group.append(point)
            top, bottom, left, right = control_edge(point, dim, dim)
            try:
                # NOTE(review): the kmeans estimate is computed and then
                # immediately overwritten by the centre-of-mass estimate —
                # confirm which of the two is intended.
                npoint = local_Kmeans(mask[top:bottom,left:right])[0]
                npoint = local_center_of_mass(mask[top:bottom,left:right])
            except:
                # Best-effort fallback: keep the raw signature point.
                new_group.append(point)
                continue
            # Offset of the detected point from the window centre.  (The
            # names are misleading: diff_x holds the y offset and diff_y
            # the x offset, but they are applied consistently below.)
            diff_x, diff_y = npoint[0]-(dim/2), npoint[1]-(dim/2)
            claim_region(mask, top, bottom, left, right)
            new_group.append([point[0]+diff_x, point[1]+diff_y])
        else:
            top, bottom, left, right = control_edge(point, dim, dim)
            claim_region(mask, top, bottom, left, right)
            new_group.append(match)
    return new_group
def test_content(img, point, dim):
    """Count black mask pixels inside a dim x dim window centred on point."""
    top, bottom, left, right = control_edge(point, dim, dim)
    window = img[top:bottom, left:right]
    mask = combinemasks(LABMaskImage(window), HSVMaskImage(window))
    black = np.matrix(np.where(mask == 0))
    return black.shape[1]
if __name__=="__main__":
usage = "usage: %prog [options]"
parser = OptionParser(usage)
parser.add_option('-i', type="string", nargs=1, dest="input", help="input image")
parser.add_option('-o', type="string", nargs=1, dest="output", help="output file")
parser.add_option('-c', type="string", nargs=1, dest="centers", help="centers")
parser.add_option('-s', type="string", nargs=1, dest="signature", help="signature")
parser.add_option('-d', type="int", nargs=1, dest="dimension", help="dimension")
options, args = parser.parse_args()
if options.input==None:
raise "No input image given, please give input image: -i image"
else:
in_file = options.input
if options.output==None:
raise "No output name given, please give output name: -o filename"
else:
out_file = options.output
if options.centers==None:
raise "No centers given, unable to match center points: -c filename.log"
else:
centers_filename = options.centers
if options.signature==None:
raise "No signature given, unable to find correct center points: -s filename.sign"
else:
sign_filename = options.signature
if options.dimension==None:
print "Notice: No dimension given, using defualt size 400: -d int"
dim = 400
else:
dim = options.dimension
inimg = io.imread(in_file)
height, width, _ = inimg.shape
print height, width, dim
## Overall mask
labmask = LABMaskImage(inimg)
hsvmask = HSVMaskImage(inimg)
mask = combinemasks(labmask, hsvmask)
#centers = kmeans(blackpixels.T.astype(float), 10, iter=100, thresh=1e-08)[0].astype(int)
print "Loading Centres"
centres = loadcenters(centers_filename, mask)
print "Grouping centres"
groups = groupCentres(centres)
print "Loading signature"
signature, signature_value = readSignature(sign_filename)
#centres = orderGroup(centres)
print centres
print "Find closest group to signature"
#centres = closestGroup(groups, signature_value)
centres = closestGroupSignature(groups, signature, 250)
print(centres)
print "Overlay signature on the group"
signature = overlaySignature(signature, centres)
copy_centres = centres
print "Test and fix fit of signature centers to group"
centres = testRadius(signature, centres, 200, mask, 500)
centres = orderGroup(centres)
i = 10
c = 0
print "Cropping images"
for centre in centres:
count = test_content(inimg, centre, dim)
if count<1000:
i -= 1
continue
#crop_square(centre, dim, inimg, i, in_file, out_file)
try:
square(copy_centres[c], dim, inimg, 0, 3, color=(55, 255, 55))
square(copy_centres[c], 40, inimg, 0, 5, color=(55, 255, 55))
except:
pass
square(centre, 10, inimg, 0, 5, color=(55, 55, 255))
square(centre, dim, inimg, i, 3, color=(55, 55, 255))
square(signature[str(i)], 200, inimg, 0, 3, color=(255, 55, 55))
i -= 1
c += 1
#square(signature['0'], 50, inimg, 0, 3, color=(255, 55, 55))
io.imsave(out_file, inimg) | centers = []
for line in f:
info = line.split(" ")[2:] | random_line_split |
provider_validation.go | package configs
import (
"fmt"
"sort"
"strings"
"github.com/camptocamp/terraboard/internal/terraform/addrs"
"github.com/hashicorp/hcl/v2"
)
// validateProviderConfigs walks the full configuration tree from the root
// module outward, static validation rules to the various combinations of
// provider configuration, required_providers values, and module call providers
// mappings.
//
// To retain compatibility with previous terraform versions, empty "proxy
// provider blocks" are still allowed within modules, though they will
// generate warnings when the configuration is loaded. The new validation
// however will generate an error if a suitable provider configuration is not
// passed in through the module call.
//
// The call argument is the ModuleCall for the provided Config cfg. The
// noProviderConfigRange argument is passed down the call stack, indicating
// that the module call, or a parent module call, has used a feature (at the
// specified source location) that precludes providers from being configured at
// all within the module.
func validateProviderConfigs(parentCall *ModuleCall, cfg *Config, noProviderConfigRange *hcl.Range) (diags hcl.Diagnostics) {
mod := cfg.Module
for name, child := range cfg.Children {
mc := mod.ModuleCalls[name]
childNoProviderConfigRange := noProviderConfigRange
// if the module call has any of count, for_each or depends_on,
// providers are prohibited from being configured in this module, or
// any module beneath this module.
switch {
case mc.Count != nil:
childNoProviderConfigRange = mc.Count.Range().Ptr()
case mc.ForEach != nil:
childNoProviderConfigRange = mc.ForEach.Range().Ptr()
case mc.DependsOn != nil:
if len(mc.DependsOn) > 0 {
childNoProviderConfigRange = mc.DependsOn[0].SourceRange().Ptr()
} else {
// Weird! We'll just use the call itself, then.
childNoProviderConfigRange = mc.DeclRange.Ptr()
}
}
diags = append(diags, validateProviderConfigs(mc, child, childNoProviderConfigRange)...)
}
// the set of provider configuration names passed into the module, with the
// source range of the provider assignment in the module call.
passedIn := map[string]PassedProviderConfig{}
// the set of empty configurations that could be proxy configurations, with
// the source range of the empty configuration block.
emptyConfigs := map[string]hcl.Range{}
// the set of provider with a defined configuration, with the source range
// of the configuration block declaration.
configured := map[string]hcl.Range{}
// the set of configuration_aliases defined in the required_providers
// block, with the fully qualified provider type.
configAliases := map[string]addrs.AbsProviderConfig{}
// the set of provider names defined in the required_providers block, and
// their provider types.
localNames := map[string]addrs.Provider{}
for _, pc := range mod.ProviderConfigs {
name := providerName(pc.Name, pc.Alias)
// Validate the config against an empty schema to see if it's empty.
_, pcConfigDiags := pc.Config.Content(&hcl.BodySchema{})
if pcConfigDiags.HasErrors() || pc.Version.Required != nil {
configured[name] = pc.DeclRange
} else {
emptyConfigs[name] = pc.DeclRange
}
}
if mod.ProviderRequirements != nil {
for _, req := range mod.ProviderRequirements.RequiredProviders {
localNames[req.Name] = req.Type
for _, alias := range req.Aliases {
addr := addrs.AbsProviderConfig{
Module: cfg.Path,
Provider: req.Type,
Alias: alias.Alias,
}
configAliases[providerName(alias.LocalName, alias.Alias)] = addr
}
}
}
// collect providers passed from the parent
if parentCall != nil {
for _, passed := range parentCall.Providers {
name := providerName(passed.InChild.Name, passed.InChild.Alias)
passedIn[name] = passed
}
}
parentModuleText := "the root module"
moduleText := "the root module"
if !cfg.Path.IsRoot() {
moduleText = cfg.Path.String()
if parent := cfg.Path.Parent(); !parent.IsRoot() {
// module address are prefixed with `module.`
parentModuleText = parent.String()
}
}
// Verify that any module calls only refer to named providers, and that
// those providers will have a configuration at runtime. This way we can
// direct users where to add the missing configuration, because the runtime
// error is only "missing provider X".
for _, modCall := range mod.ModuleCalls {
for _, passed := range modCall.Providers {
// aliased providers are handled more strictly, and are never
// inherited, so they are validated within modules further down.
// Skip these checks to prevent redundant diagnostics.
if passed.InParent.Alias != "" {
continue
}
name := passed.InParent.String()
_, confOK := configured[name]
_, localOK := localNames[name]
_, passedOK := passedIn[name]
// This name was not declared somewhere within in the
// configuration. We ignore empty configs, because they will
// already produce a warning.
if !(confOK || localOK) {
defAddr := addrs.NewDefaultProvider(name)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"There is no explicit declaration for local provider name %q in %s, so Terraform is assuming you mean to pass a configuration for provider %q.\n\nTo clarify your intent and silence this warning, add to %s a required_providers entry named %q with source = %q, or a different source address if appropriate.",
name, moduleText, defAddr.ForDisplay(),
parentModuleText, name, defAddr.ForDisplay(),
),
Subject: &passed.InParent.NameRange,
})
continue
}
// Now we may have named this provider within the module, but
// there won't be a configuration available at runtime if the
// parent module did not pass one in.
if !cfg.Path.IsRoot() && !(confOK || passedOK) {
defAddr := addrs.NewDefaultProvider(name)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Missing required provider configuration",
Detail: fmt.Sprintf(
"The configuration for %s expects to inherit a configuration for provider %s with local name %q, but %s doesn't pass a configuration under that name.\n\nTo satisfy this requirement, add an entry for %q to the \"providers\" argument in the module %q block.",
moduleText, defAddr.ForDisplay(), name, parentModuleText,
name, parentCall.Name,
),
Subject: parentCall.DeclRange.Ptr(),
})
}
}
}
if cfg.Path.IsRoot() {
// nothing else to do in the root module
return diags
}
// there cannot be any configurations if no provider config is allowed
if len(configured) > 0 && noProviderConfigRange != nil {
// We report this from the perspective of the use of count, for_each,
// or depends_on rather than from inside the module, because the
// recipient of this message is more likely to be the author of the
// calling module (trying to use an older module that hasn't been
// updated yet) than of the called module.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Module is incompatible with count, for_each, and depends_on",
Detail: fmt.Sprintf(
"The module at %s is a legacy module which contains its own local provider configurations, and so calls to it may not use the count, for_each, or depends_on arguments.\n\nIf you also control the module %q, consider updating this module to instead expect provider configurations to be passed by its caller.",
cfg.Path, cfg.SourceAddr,
),
Subject: noProviderConfigRange,
})
}
// now check that the user is not attempting to override a config
for name := range configured {
if passed, ok := passedIn[name]; ok {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot override provider configuration",
Detail: fmt.Sprintf(
"The configuration of %s has its own local configuration for %s, and so it cannot accept an overridden configuration provided by %s.",
moduleText, name, parentModuleText,
),
Subject: &passed.InChild.NameRange,
})
}
}
// A declared alias requires either a matching configuration within the
// module, or one must be passed in.
for name, providerAddr := range configAliases {
_, confOk := configured[name]
_, passedOk := passedIn[name]
if confOk || passedOk {
continue
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing required provider configuration",
Detail: fmt.Sprintf(
"The child module requires an additional configuration for provider %s, with the local name %q.\n\nRefer to the module's documentation to understand the intended purpose of this additional provider configuration, and then add an entry for %s in the \"providers\" meta-argument in the module block to choose which provider configuration the module should use for that purpose.",
providerAddr.Provider.ForDisplay(), name,
name,
),
Subject: &parentCall.DeclRange,
})
}
// You cannot pass in a provider that cannot be used
for name, passed := range passedIn {
childTy := passed.InChild.providerType
// get a default type if there was none set
if childTy.IsZero() {
// This means the child module is only using an inferred
// provider type. We allow this but will generate a warning to
// declare provider_requirements below.
childTy = addrs.NewDefaultProvider(passed.InChild.Name)
}
providerAddr := addrs.AbsProviderConfig{
Module: cfg.Path,
Provider: childTy,
Alias: passed.InChild.Alias,
}
localAddr, localName := localNames[name]
if localName {
providerAddr.Provider = localAddr
}
aliasAddr, configAlias := configAliases[name]
if configAlias {
providerAddr = aliasAddr
}
| if !(localName || configAlias || emptyConfig) {
// we still allow default configs, so switch to a warning if the incoming provider is a default
if providerAddr.Provider.IsDefault() {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"There is no explicit declaration for local provider name %q in %s, so Terraform is assuming you mean to pass a configuration for %q.\n\nIf you also control the child module, add a required_providers entry named %q with the source address %q.",
name, moduleText, providerAddr.Provider.ForDisplay(),
name, providerAddr.Provider.ForDisplay(),
),
Subject: &passed.InChild.NameRange,
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"The child module does not declare any provider requirement with the local name %q.\n\nIf you also control the child module, you can add a required_providers entry named %q with the source address %q to accept this provider configuration.",
name, name, providerAddr.Provider.ForDisplay(),
),
Subject: &passed.InChild.NameRange,
})
}
}
// The provider being passed in must also be of the correct type.
pTy := passed.InParent.providerType
if pTy.IsZero() {
// While we would like to ensure required_providers exists here,
// implied default configuration is still allowed.
pTy = addrs.NewDefaultProvider(passed.InParent.Name)
}
// use the full address for a nice diagnostic output
parentAddr := addrs.AbsProviderConfig{
Module: cfg.Parent.Path,
Provider: pTy,
Alias: passed.InParent.Alias,
}
if cfg.Parent.Module.ProviderRequirements != nil {
req, defined := cfg.Parent.Module.ProviderRequirements.RequiredProviders[name]
if defined {
parentAddr.Provider = req.Type
}
}
if !providerAddr.Provider.Equals(parentAddr.Provider) {
// If this module declares the same source address for a different
// local name then we'll prefer to suggest changing to match
// the child module's chosen name, assuming that it was the local
// name that was wrong rather than the source address.
var otherLocalName string
for localName, sourceAddr := range localNames {
if sourceAddr.Equals(parentAddr.Provider) {
otherLocalName = localName
break
}
}
const errSummary = "Provider type mismatch"
if otherLocalName != "" {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: errSummary,
Detail: fmt.Sprintf(
"The assigned configuration is for provider %q, but local name %q in %s represents %q.\n\nTo pass this configuration to the child module, use the local name %q instead.",
parentAddr.Provider.ForDisplay(), passed.InChild.Name,
parentModuleText, providerAddr.Provider.ForDisplay(),
otherLocalName,
),
Subject: &passed.InChild.NameRange,
})
} else {
// If there is no declared requirement for the provider the
// caller is trying to pass under any name then we'll instead
// report it as an unsuitable configuration to pass into the
// child module's provider configuration slot.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: errSummary,
Detail: fmt.Sprintf(
"The local name %q in %s represents provider %q, but %q in %s represents %q.\n\nEach provider has its own distinct configuration schema and provider types, so this module's %q can be assigned only a configuration for %s, which is not required by %s.",
passed.InParent, parentModuleText, parentAddr.Provider.ForDisplay(),
passed.InChild, moduleText, providerAddr.Provider.ForDisplay(),
passed.InChild, providerAddr.Provider.ForDisplay(),
moduleText,
),
Subject: passed.InParent.NameRange.Ptr(),
})
}
}
}
// Empty configurations are no longer needed. Since the replacement for
// this calls for one entry per provider rather than one entry per
// provider _configuration_, we'll first gather them up by provider
// and then report a single warning for each, whereby we can show a direct
// example of what the replacement should look like.
type ProviderReqSuggestion struct {
SourceAddr addrs.Provider
SourceRanges []hcl.Range
RequiredConfigs []string
AliasCount int
}
providerReqSuggestions := make(map[string]*ProviderReqSuggestion)
for name, src := range emptyConfigs {
providerLocalName := name
if idx := strings.IndexByte(providerLocalName, '.'); idx >= 0 {
providerLocalName = providerLocalName[:idx]
}
sourceAddr, ok := localNames[name]
if !ok {
sourceAddr = addrs.NewDefaultProvider(providerLocalName)
}
suggestion := providerReqSuggestions[providerLocalName]
if suggestion == nil {
providerReqSuggestions[providerLocalName] = &ProviderReqSuggestion{
SourceAddr: sourceAddr,
}
suggestion = providerReqSuggestions[providerLocalName]
}
if providerLocalName != name {
// It's an aliased provider config, then.
suggestion.AliasCount++
}
suggestion.RequiredConfigs = append(suggestion.RequiredConfigs, name)
suggestion.SourceRanges = append(suggestion.SourceRanges, src)
}
for name, suggestion := range providerReqSuggestions {
var buf strings.Builder
fmt.Fprintf(
&buf,
"Earlier versions of Terraform used empty provider blocks (\"proxy provider configurations\") for child modules to declare their need to be passed a provider configuration by their callers. That approach was ambiguous and is now deprecated.\n\nIf you control this module, you can migrate to the new declaration syntax by removing all of the empty provider %q blocks and then adding or updating an entry like the following to the required_providers block of %s:\n",
name, moduleText,
)
fmt.Fprintf(&buf, " %s = {\n", name)
fmt.Fprintf(&buf, " source = %q\n", suggestion.SourceAddr.ForDisplay())
if suggestion.AliasCount > 0 {
// A lexical sort is fine because all of these strings are
// guaranteed to start with the same provider local name, and
// so we're only really sorting by the alias part.
sort.Strings(suggestion.RequiredConfigs)
fmt.Fprintln(&buf, " configuration_aliases = [")
for _, addrStr := range suggestion.RequiredConfigs {
fmt.Fprintf(&buf, " %s,\n", addrStr)
}
fmt.Fprintln(&buf, " ]")
}
fmt.Fprint(&buf, " }")
// We're arbitrarily going to just take the one source range that
// sorts earliest here. Multiple should be rare, so this is only to
// ensure that we produce a deterministic result in the edge case.
sort.Slice(suggestion.SourceRanges, func(i, j int) bool {
return suggestion.SourceRanges[i].String() < suggestion.SourceRanges[j].String()
})
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Redundant empty provider block",
Detail: buf.String(),
Subject: suggestion.SourceRanges[0].Ptr(),
})
}
return diags
}
// providerName returns the map key used for a provider configuration:
// the bare local name, or "name.alias" when an alias is set.
func providerName(name, alias string) string {
	if alias != "" {
		name = name + "." + alias
	}
	return name
} | _, emptyConfig := emptyConfigs[name]
| random_line_split |
provider_validation.go | package configs
import (
"fmt"
"sort"
"strings"
"github.com/camptocamp/terraboard/internal/terraform/addrs"
"github.com/hashicorp/hcl/v2"
)
// validateProviderConfigs walks the full configuration tree from the root
// module outward, static validation rules to the various combinations of
// provider configuration, required_providers values, and module call providers
// mappings.
//
// To retain compatibility with previous terraform versions, empty "proxy
// provider blocks" are still allowed within modules, though they will
// generate warnings when the configuration is loaded. The new validation
// however will generate an error if a suitable provider configuration is not
// passed in through the module call.
//
// The call argument is the ModuleCall for the provided Config cfg. The
// noProviderConfigRange argument is passed down the call stack, indicating
// that the module call, or a parent module call, has used a feature (at the
// specified source location) that precludes providers from being configured at
// all within the module.
func validateProviderConfigs(parentCall *ModuleCall, cfg *Config, noProviderConfigRange *hcl.Range) (diags hcl.Diagnostics) {
mod := cfg.Module
for name, child := range cfg.Children {
mc := mod.ModuleCalls[name]
childNoProviderConfigRange := noProviderConfigRange
// if the module call has any of count, for_each or depends_on,
// providers are prohibited from being configured in this module, or
// any module beneath this module.
switch {
case mc.Count != nil:
childNoProviderConfigRange = mc.Count.Range().Ptr()
case mc.ForEach != nil:
childNoProviderConfigRange = mc.ForEach.Range().Ptr()
case mc.DependsOn != nil:
if len(mc.DependsOn) > 0 {
childNoProviderConfigRange = mc.DependsOn[0].SourceRange().Ptr()
} else {
// Weird! We'll just use the call itself, then.
childNoProviderConfigRange = mc.DeclRange.Ptr()
}
}
diags = append(diags, validateProviderConfigs(mc, child, childNoProviderConfigRange)...)
}
// the set of provider configuration names passed into the module, with the
// source range of the provider assignment in the module call.
passedIn := map[string]PassedProviderConfig{}
// the set of empty configurations that could be proxy configurations, with
// the source range of the empty configuration block.
emptyConfigs := map[string]hcl.Range{}
// the set of provider with a defined configuration, with the source range
// of the configuration block declaration.
configured := map[string]hcl.Range{}
// the set of configuration_aliases defined in the required_providers
// block, with the fully qualified provider type.
configAliases := map[string]addrs.AbsProviderConfig{}
// the set of provider names defined in the required_providers block, and
// their provider types.
localNames := map[string]addrs.Provider{}
for _, pc := range mod.ProviderConfigs {
name := providerName(pc.Name, pc.Alias)
// Validate the config against an empty schema to see if it's empty.
_, pcConfigDiags := pc.Config.Content(&hcl.BodySchema{})
if pcConfigDiags.HasErrors() || pc.Version.Required != nil {
configured[name] = pc.DeclRange
} else {
emptyConfigs[name] = pc.DeclRange
}
}
if mod.ProviderRequirements != nil {
for _, req := range mod.ProviderRequirements.RequiredProviders {
localNames[req.Name] = req.Type
for _, alias := range req.Aliases {
addr := addrs.AbsProviderConfig{
Module: cfg.Path,
Provider: req.Type,
Alias: alias.Alias,
}
configAliases[providerName(alias.LocalName, alias.Alias)] = addr
}
}
}
// collect providers passed from the parent
if parentCall != nil {
for _, passed := range parentCall.Providers {
name := providerName(passed.InChild.Name, passed.InChild.Alias)
passedIn[name] = passed
}
}
parentModuleText := "the root module"
moduleText := "the root module"
if !cfg.Path.IsRoot() {
moduleText = cfg.Path.String()
if parent := cfg.Path.Parent(); !parent.IsRoot() {
// module address are prefixed with `module.`
parentModuleText = parent.String()
}
}
// Verify that any module calls only refer to named providers, and that
// those providers will have a configuration at runtime. This way we can
// direct users where to add the missing configuration, because the runtime
// error is only "missing provider X".
for _, modCall := range mod.ModuleCalls {
for _, passed := range modCall.Providers {
// aliased providers are handled more strictly, and are never
// inherited, so they are validated within modules further down.
// Skip these checks to prevent redundant diagnostics.
if passed.InParent.Alias != "" {
continue
}
name := passed.InParent.String()
_, confOK := configured[name]
_, localOK := localNames[name]
_, passedOK := passedIn[name]
// This name was not declared somewhere within in the
// configuration. We ignore empty configs, because they will
// already produce a warning.
if !(confOK || localOK) {
defAddr := addrs.NewDefaultProvider(name)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"There is no explicit declaration for local provider name %q in %s, so Terraform is assuming you mean to pass a configuration for provider %q.\n\nTo clarify your intent and silence this warning, add to %s a required_providers entry named %q with source = %q, or a different source address if appropriate.",
name, moduleText, defAddr.ForDisplay(),
parentModuleText, name, defAddr.ForDisplay(),
),
Subject: &passed.InParent.NameRange,
})
continue
}
// Now we may have named this provider within the module, but
// there won't be a configuration available at runtime if the
// parent module did not pass one in.
if !cfg.Path.IsRoot() && !(confOK || passedOK) {
defAddr := addrs.NewDefaultProvider(name)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Missing required provider configuration",
Detail: fmt.Sprintf(
"The configuration for %s expects to inherit a configuration for provider %s with local name %q, but %s doesn't pass a configuration under that name.\n\nTo satisfy this requirement, add an entry for %q to the \"providers\" argument in the module %q block.",
moduleText, defAddr.ForDisplay(), name, parentModuleText,
name, parentCall.Name,
),
Subject: parentCall.DeclRange.Ptr(),
})
}
}
}
if cfg.Path.IsRoot() {
// nothing else to do in the root module
return diags
}
// there cannot be any configurations if no provider config is allowed
if len(configured) > 0 && noProviderConfigRange != nil {
// We report this from the perspective of the use of count, for_each,
// or depends_on rather than from inside the module, because the
// recipient of this message is more likely to be the author of the
// calling module (trying to use an older module that hasn't been
// updated yet) than of the called module.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Module is incompatible with count, for_each, and depends_on",
Detail: fmt.Sprintf(
"The module at %s is a legacy module which contains its own local provider configurations, and so calls to it may not use the count, for_each, or depends_on arguments.\n\nIf you also control the module %q, consider updating this module to instead expect provider configurations to be passed by its caller.",
cfg.Path, cfg.SourceAddr,
),
Subject: noProviderConfigRange,
})
}
// now check that the user is not attempting to override a config
for name := range configured {
if passed, ok := passedIn[name]; ok {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot override provider configuration",
Detail: fmt.Sprintf(
"The configuration of %s has its own local configuration for %s, and so it cannot accept an overridden configuration provided by %s.",
moduleText, name, parentModuleText,
),
Subject: &passed.InChild.NameRange,
})
}
}
// A declared alias requires either a matching configuration within the
// module, or one must be passed in.
for name, providerAddr := range configAliases {
_, confOk := configured[name]
_, passedOk := passedIn[name]
if confOk || passedOk {
continue
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing required provider configuration",
Detail: fmt.Sprintf(
"The child module requires an additional configuration for provider %s, with the local name %q.\n\nRefer to the module's documentation to understand the intended purpose of this additional provider configuration, and then add an entry for %s in the \"providers\" meta-argument in the module block to choose which provider configuration the module should use for that purpose.",
providerAddr.Provider.ForDisplay(), name,
name,
),
Subject: &parentCall.DeclRange,
})
}
// You cannot pass in a provider that cannot be used
for name, passed := range passedIn {
childTy := passed.InChild.providerType
// get a default type if there was none set
if childTy.IsZero() {
// This means the child module is only using an inferred
// provider type. We allow this but will generate a warning to
// declare provider_requirements below.
childTy = addrs.NewDefaultProvider(passed.InChild.Name)
}
providerAddr := addrs.AbsProviderConfig{
Module: cfg.Path,
Provider: childTy,
Alias: passed.InChild.Alias,
}
localAddr, localName := localNames[name]
if localName {
providerAddr.Provider = localAddr
}
aliasAddr, configAlias := configAliases[name]
if configAlias {
providerAddr = aliasAddr
}
_, emptyConfig := emptyConfigs[name]
if !(localName || configAlias || emptyConfig) {
// we still allow default configs, so switch to a warning if the incoming provider is a default
if providerAddr.Provider.IsDefault() {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"There is no explicit declaration for local provider name %q in %s, so Terraform is assuming you mean to pass a configuration for %q.\n\nIf you also control the child module, add a required_providers entry named %q with the source address %q.",
name, moduleText, providerAddr.Provider.ForDisplay(),
name, providerAddr.Provider.ForDisplay(),
),
Subject: &passed.InChild.NameRange,
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"The child module does not declare any provider requirement with the local name %q.\n\nIf you also control the child module, you can add a required_providers entry named %q with the source address %q to accept this provider configuration.",
name, name, providerAddr.Provider.ForDisplay(),
),
Subject: &passed.InChild.NameRange,
})
}
}
// The provider being passed in must also be of the correct type.
pTy := passed.InParent.providerType
if pTy.IsZero() {
// While we would like to ensure required_providers exists here,
// implied default configuration is still allowed.
pTy = addrs.NewDefaultProvider(passed.InParent.Name)
}
// use the full address for a nice diagnostic output
parentAddr := addrs.AbsProviderConfig{
Module: cfg.Parent.Path,
Provider: pTy,
Alias: passed.InParent.Alias,
}
if cfg.Parent.Module.ProviderRequirements != nil {
req, defined := cfg.Parent.Module.ProviderRequirements.RequiredProviders[name]
if defined {
parentAddr.Provider = req.Type
}
}
if !providerAddr.Provider.Equals(parentAddr.Provider) {
// If this module declares the same source address for a different
// local name then we'll prefer to suggest changing to match
// the child module's chosen name, assuming that it was the local
// name that was wrong rather than the source address.
var otherLocalName string
for localName, sourceAddr := range localNames |
const errSummary = "Provider type mismatch"
if otherLocalName != "" {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: errSummary,
Detail: fmt.Sprintf(
"The assigned configuration is for provider %q, but local name %q in %s represents %q.\n\nTo pass this configuration to the child module, use the local name %q instead.",
parentAddr.Provider.ForDisplay(), passed.InChild.Name,
parentModuleText, providerAddr.Provider.ForDisplay(),
otherLocalName,
),
Subject: &passed.InChild.NameRange,
})
} else {
// If there is no declared requirement for the provider the
// caller is trying to pass under any name then we'll instead
// report it as an unsuitable configuration to pass into the
// child module's provider configuration slot.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: errSummary,
Detail: fmt.Sprintf(
"The local name %q in %s represents provider %q, but %q in %s represents %q.\n\nEach provider has its own distinct configuration schema and provider types, so this module's %q can be assigned only a configuration for %s, which is not required by %s.",
passed.InParent, parentModuleText, parentAddr.Provider.ForDisplay(),
passed.InChild, moduleText, providerAddr.Provider.ForDisplay(),
passed.InChild, providerAddr.Provider.ForDisplay(),
moduleText,
),
Subject: passed.InParent.NameRange.Ptr(),
})
}
}
}
// Empty configurations are no longer needed. Since the replacement for
// this calls for one entry per provider rather than one entry per
// provider _configuration_, we'll first gather them up by provider
// and then report a single warning for each, whereby we can show a direct
// example of what the replacement should look like.
type ProviderReqSuggestion struct {
SourceAddr addrs.Provider
SourceRanges []hcl.Range
RequiredConfigs []string
AliasCount int
}
providerReqSuggestions := make(map[string]*ProviderReqSuggestion)
for name, src := range emptyConfigs {
providerLocalName := name
if idx := strings.IndexByte(providerLocalName, '.'); idx >= 0 {
providerLocalName = providerLocalName[:idx]
}
sourceAddr, ok := localNames[name]
if !ok {
sourceAddr = addrs.NewDefaultProvider(providerLocalName)
}
suggestion := providerReqSuggestions[providerLocalName]
if suggestion == nil {
providerReqSuggestions[providerLocalName] = &ProviderReqSuggestion{
SourceAddr: sourceAddr,
}
suggestion = providerReqSuggestions[providerLocalName]
}
if providerLocalName != name {
// It's an aliased provider config, then.
suggestion.AliasCount++
}
suggestion.RequiredConfigs = append(suggestion.RequiredConfigs, name)
suggestion.SourceRanges = append(suggestion.SourceRanges, src)
}
for name, suggestion := range providerReqSuggestions {
var buf strings.Builder
fmt.Fprintf(
&buf,
"Earlier versions of Terraform used empty provider blocks (\"proxy provider configurations\") for child modules to declare their need to be passed a provider configuration by their callers. That approach was ambiguous and is now deprecated.\n\nIf you control this module, you can migrate to the new declaration syntax by removing all of the empty provider %q blocks and then adding or updating an entry like the following to the required_providers block of %s:\n",
name, moduleText,
)
fmt.Fprintf(&buf, " %s = {\n", name)
fmt.Fprintf(&buf, " source = %q\n", suggestion.SourceAddr.ForDisplay())
if suggestion.AliasCount > 0 {
// A lexical sort is fine because all of these strings are
// guaranteed to start with the same provider local name, and
// so we're only really sorting by the alias part.
sort.Strings(suggestion.RequiredConfigs)
fmt.Fprintln(&buf, " configuration_aliases = [")
for _, addrStr := range suggestion.RequiredConfigs {
fmt.Fprintf(&buf, " %s,\n", addrStr)
}
fmt.Fprintln(&buf, " ]")
}
fmt.Fprint(&buf, " }")
// We're arbitrarily going to just take the one source range that
// sorts earliest here. Multiple should be rare, so this is only to
// ensure that we produce a deterministic result in the edge case.
sort.Slice(suggestion.SourceRanges, func(i, j int) bool {
return suggestion.SourceRanges[i].String() < suggestion.SourceRanges[j].String()
})
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Redundant empty provider block",
Detail: buf.String(),
Subject: suggestion.SourceRanges[0].Ptr(),
})
}
return diags
}
func providerName(name, alias string) string {
if alias != "" {
name = name + "." + alias
}
return name
}
| {
if sourceAddr.Equals(parentAddr.Provider) {
otherLocalName = localName
break
}
} | conditional_block |
provider_validation.go | package configs
import (
"fmt"
"sort"
"strings"
"github.com/camptocamp/terraboard/internal/terraform/addrs"
"github.com/hashicorp/hcl/v2"
)
// validateProviderConfigs walks the full configuration tree from the root
// module outward, static validation rules to the various combinations of
// provider configuration, required_providers values, and module call providers
// mappings.
//
// To retain compatibility with previous terraform versions, empty "proxy
// provider blocks" are still allowed within modules, though they will
// generate warnings when the configuration is loaded. The new validation
// however will generate an error if a suitable provider configuration is not
// passed in through the module call.
//
// The call argument is the ModuleCall for the provided Config cfg. The
// noProviderConfigRange argument is passed down the call stack, indicating
// that the module call, or a parent module call, has used a feature (at the
// specified source location) that precludes providers from being configured at
// all within the module.
func validateProviderConfigs(parentCall *ModuleCall, cfg *Config, noProviderConfigRange *hcl.Range) (diags hcl.Diagnostics) {
mod := cfg.Module
for name, child := range cfg.Children {
mc := mod.ModuleCalls[name]
childNoProviderConfigRange := noProviderConfigRange
// if the module call has any of count, for_each or depends_on,
// providers are prohibited from being configured in this module, or
// any module beneath this module.
switch {
case mc.Count != nil:
childNoProviderConfigRange = mc.Count.Range().Ptr()
case mc.ForEach != nil:
childNoProviderConfigRange = mc.ForEach.Range().Ptr()
case mc.DependsOn != nil:
if len(mc.DependsOn) > 0 {
childNoProviderConfigRange = mc.DependsOn[0].SourceRange().Ptr()
} else {
// Weird! We'll just use the call itself, then.
childNoProviderConfigRange = mc.DeclRange.Ptr()
}
}
diags = append(diags, validateProviderConfigs(mc, child, childNoProviderConfigRange)...)
}
// the set of provider configuration names passed into the module, with the
// source range of the provider assignment in the module call.
passedIn := map[string]PassedProviderConfig{}
// the set of empty configurations that could be proxy configurations, with
// the source range of the empty configuration block.
emptyConfigs := map[string]hcl.Range{}
// the set of provider with a defined configuration, with the source range
// of the configuration block declaration.
configured := map[string]hcl.Range{}
// the set of configuration_aliases defined in the required_providers
// block, with the fully qualified provider type.
configAliases := map[string]addrs.AbsProviderConfig{}
// the set of provider names defined in the required_providers block, and
// their provider types.
localNames := map[string]addrs.Provider{}
for _, pc := range mod.ProviderConfigs {
name := providerName(pc.Name, pc.Alias)
// Validate the config against an empty schema to see if it's empty.
_, pcConfigDiags := pc.Config.Content(&hcl.BodySchema{})
if pcConfigDiags.HasErrors() || pc.Version.Required != nil {
configured[name] = pc.DeclRange
} else {
emptyConfigs[name] = pc.DeclRange
}
}
if mod.ProviderRequirements != nil {
for _, req := range mod.ProviderRequirements.RequiredProviders {
localNames[req.Name] = req.Type
for _, alias := range req.Aliases {
addr := addrs.AbsProviderConfig{
Module: cfg.Path,
Provider: req.Type,
Alias: alias.Alias,
}
configAliases[providerName(alias.LocalName, alias.Alias)] = addr
}
}
}
// collect providers passed from the parent
if parentCall != nil {
for _, passed := range parentCall.Providers {
name := providerName(passed.InChild.Name, passed.InChild.Alias)
passedIn[name] = passed
}
}
parentModuleText := "the root module"
moduleText := "the root module"
if !cfg.Path.IsRoot() {
moduleText = cfg.Path.String()
if parent := cfg.Path.Parent(); !parent.IsRoot() {
// module address are prefixed with `module.`
parentModuleText = parent.String()
}
}
// Verify that any module calls only refer to named providers, and that
// those providers will have a configuration at runtime. This way we can
// direct users where to add the missing configuration, because the runtime
// error is only "missing provider X".
for _, modCall := range mod.ModuleCalls {
for _, passed := range modCall.Providers {
// aliased providers are handled more strictly, and are never
// inherited, so they are validated within modules further down.
// Skip these checks to prevent redundant diagnostics.
if passed.InParent.Alias != "" {
continue
}
name := passed.InParent.String()
_, confOK := configured[name]
_, localOK := localNames[name]
_, passedOK := passedIn[name]
// This name was not declared somewhere within in the
// configuration. We ignore empty configs, because they will
// already produce a warning.
if !(confOK || localOK) {
defAddr := addrs.NewDefaultProvider(name)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"There is no explicit declaration for local provider name %q in %s, so Terraform is assuming you mean to pass a configuration for provider %q.\n\nTo clarify your intent and silence this warning, add to %s a required_providers entry named %q with source = %q, or a different source address if appropriate.",
name, moduleText, defAddr.ForDisplay(),
parentModuleText, name, defAddr.ForDisplay(),
),
Subject: &passed.InParent.NameRange,
})
continue
}
// Now we may have named this provider within the module, but
// there won't be a configuration available at runtime if the
// parent module did not pass one in.
if !cfg.Path.IsRoot() && !(confOK || passedOK) {
defAddr := addrs.NewDefaultProvider(name)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Missing required provider configuration",
Detail: fmt.Sprintf(
"The configuration for %s expects to inherit a configuration for provider %s with local name %q, but %s doesn't pass a configuration under that name.\n\nTo satisfy this requirement, add an entry for %q to the \"providers\" argument in the module %q block.",
moduleText, defAddr.ForDisplay(), name, parentModuleText,
name, parentCall.Name,
),
Subject: parentCall.DeclRange.Ptr(),
})
}
}
}
if cfg.Path.IsRoot() {
// nothing else to do in the root module
return diags
}
// there cannot be any configurations if no provider config is allowed
if len(configured) > 0 && noProviderConfigRange != nil {
// We report this from the perspective of the use of count, for_each,
// or depends_on rather than from inside the module, because the
// recipient of this message is more likely to be the author of the
// calling module (trying to use an older module that hasn't been
// updated yet) than of the called module.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Module is incompatible with count, for_each, and depends_on",
Detail: fmt.Sprintf(
"The module at %s is a legacy module which contains its own local provider configurations, and so calls to it may not use the count, for_each, or depends_on arguments.\n\nIf you also control the module %q, consider updating this module to instead expect provider configurations to be passed by its caller.",
cfg.Path, cfg.SourceAddr,
),
Subject: noProviderConfigRange,
})
}
// now check that the user is not attempting to override a config
for name := range configured {
if passed, ok := passedIn[name]; ok {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot override provider configuration",
Detail: fmt.Sprintf(
"The configuration of %s has its own local configuration for %s, and so it cannot accept an overridden configuration provided by %s.",
moduleText, name, parentModuleText,
),
Subject: &passed.InChild.NameRange,
})
}
}
// A declared alias requires either a matching configuration within the
// module, or one must be passed in.
for name, providerAddr := range configAliases {
_, confOk := configured[name]
_, passedOk := passedIn[name]
if confOk || passedOk {
continue
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing required provider configuration",
Detail: fmt.Sprintf(
"The child module requires an additional configuration for provider %s, with the local name %q.\n\nRefer to the module's documentation to understand the intended purpose of this additional provider configuration, and then add an entry for %s in the \"providers\" meta-argument in the module block to choose which provider configuration the module should use for that purpose.",
providerAddr.Provider.ForDisplay(), name,
name,
),
Subject: &parentCall.DeclRange,
})
}
// You cannot pass in a provider that cannot be used
for name, passed := range passedIn {
childTy := passed.InChild.providerType
// get a default type if there was none set
if childTy.IsZero() {
// This means the child module is only using an inferred
// provider type. We allow this but will generate a warning to
// declare provider_requirements below.
childTy = addrs.NewDefaultProvider(passed.InChild.Name)
}
providerAddr := addrs.AbsProviderConfig{
Module: cfg.Path,
Provider: childTy,
Alias: passed.InChild.Alias,
}
localAddr, localName := localNames[name]
if localName {
providerAddr.Provider = localAddr
}
aliasAddr, configAlias := configAliases[name]
if configAlias {
providerAddr = aliasAddr
}
_, emptyConfig := emptyConfigs[name]
if !(localName || configAlias || emptyConfig) {
// we still allow default configs, so switch to a warning if the incoming provider is a default
if providerAddr.Provider.IsDefault() {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"There is no explicit declaration for local provider name %q in %s, so Terraform is assuming you mean to pass a configuration for %q.\n\nIf you also control the child module, add a required_providers entry named %q with the source address %q.",
name, moduleText, providerAddr.Provider.ForDisplay(),
name, providerAddr.Provider.ForDisplay(),
),
Subject: &passed.InChild.NameRange,
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"The child module does not declare any provider requirement with the local name %q.\n\nIf you also control the child module, you can add a required_providers entry named %q with the source address %q to accept this provider configuration.",
name, name, providerAddr.Provider.ForDisplay(),
),
Subject: &passed.InChild.NameRange,
})
}
}
// The provider being passed in must also be of the correct type.
pTy := passed.InParent.providerType
if pTy.IsZero() {
// While we would like to ensure required_providers exists here,
// implied default configuration is still allowed.
pTy = addrs.NewDefaultProvider(passed.InParent.Name)
}
// use the full address for a nice diagnostic output
parentAddr := addrs.AbsProviderConfig{
Module: cfg.Parent.Path,
Provider: pTy,
Alias: passed.InParent.Alias,
}
if cfg.Parent.Module.ProviderRequirements != nil {
req, defined := cfg.Parent.Module.ProviderRequirements.RequiredProviders[name]
if defined {
parentAddr.Provider = req.Type
}
}
if !providerAddr.Provider.Equals(parentAddr.Provider) {
// If this module declares the same source address for a different
// local name then we'll prefer to suggest changing to match
// the child module's chosen name, assuming that it was the local
// name that was wrong rather than the source address.
var otherLocalName string
for localName, sourceAddr := range localNames {
if sourceAddr.Equals(parentAddr.Provider) {
otherLocalName = localName
break
}
}
const errSummary = "Provider type mismatch"
if otherLocalName != "" {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: errSummary,
Detail: fmt.Sprintf(
"The assigned configuration is for provider %q, but local name %q in %s represents %q.\n\nTo pass this configuration to the child module, use the local name %q instead.",
parentAddr.Provider.ForDisplay(), passed.InChild.Name,
parentModuleText, providerAddr.Provider.ForDisplay(),
otherLocalName,
),
Subject: &passed.InChild.NameRange,
})
} else {
// If there is no declared requirement for the provider the
// caller is trying to pass under any name then we'll instead
// report it as an unsuitable configuration to pass into the
// child module's provider configuration slot.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: errSummary,
Detail: fmt.Sprintf(
"The local name %q in %s represents provider %q, but %q in %s represents %q.\n\nEach provider has its own distinct configuration schema and provider types, so this module's %q can be assigned only a configuration for %s, which is not required by %s.",
passed.InParent, parentModuleText, parentAddr.Provider.ForDisplay(),
passed.InChild, moduleText, providerAddr.Provider.ForDisplay(),
passed.InChild, providerAddr.Provider.ForDisplay(),
moduleText,
),
Subject: passed.InParent.NameRange.Ptr(),
})
}
}
}
// Empty configurations are no longer needed. Since the replacement for
// this calls for one entry per provider rather than one entry per
// provider _configuration_, we'll first gather them up by provider
// and then report a single warning for each, whereby we can show a direct
// example of what the replacement should look like.
type ProviderReqSuggestion struct {
SourceAddr addrs.Provider
SourceRanges []hcl.Range
RequiredConfigs []string
AliasCount int
}
providerReqSuggestions := make(map[string]*ProviderReqSuggestion)
for name, src := range emptyConfigs {
providerLocalName := name
if idx := strings.IndexByte(providerLocalName, '.'); idx >= 0 {
providerLocalName = providerLocalName[:idx]
}
sourceAddr, ok := localNames[name]
if !ok {
sourceAddr = addrs.NewDefaultProvider(providerLocalName)
}
suggestion := providerReqSuggestions[providerLocalName]
if suggestion == nil {
providerReqSuggestions[providerLocalName] = &ProviderReqSuggestion{
SourceAddr: sourceAddr,
}
suggestion = providerReqSuggestions[providerLocalName]
}
if providerLocalName != name {
// It's an aliased provider config, then.
suggestion.AliasCount++
}
suggestion.RequiredConfigs = append(suggestion.RequiredConfigs, name)
suggestion.SourceRanges = append(suggestion.SourceRanges, src)
}
for name, suggestion := range providerReqSuggestions {
var buf strings.Builder
fmt.Fprintf(
&buf,
"Earlier versions of Terraform used empty provider blocks (\"proxy provider configurations\") for child modules to declare their need to be passed a provider configuration by their callers. That approach was ambiguous and is now deprecated.\n\nIf you control this module, you can migrate to the new declaration syntax by removing all of the empty provider %q blocks and then adding or updating an entry like the following to the required_providers block of %s:\n",
name, moduleText,
)
fmt.Fprintf(&buf, " %s = {\n", name)
fmt.Fprintf(&buf, " source = %q\n", suggestion.SourceAddr.ForDisplay())
if suggestion.AliasCount > 0 {
// A lexical sort is fine because all of these strings are
// guaranteed to start with the same provider local name, and
// so we're only really sorting by the alias part.
sort.Strings(suggestion.RequiredConfigs)
fmt.Fprintln(&buf, " configuration_aliases = [")
for _, addrStr := range suggestion.RequiredConfigs {
fmt.Fprintf(&buf, " %s,\n", addrStr)
}
fmt.Fprintln(&buf, " ]")
}
fmt.Fprint(&buf, " }")
// We're arbitrarily going to just take the one source range that
// sorts earliest here. Multiple should be rare, so this is only to
// ensure that we produce a deterministic result in the edge case.
sort.Slice(suggestion.SourceRanges, func(i, j int) bool {
return suggestion.SourceRanges[i].String() < suggestion.SourceRanges[j].String()
})
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Redundant empty provider block",
Detail: buf.String(),
Subject: suggestion.SourceRanges[0].Ptr(),
})
}
return diags
}
func providerName(name, alias string) string | {
if alias != "" {
name = name + "." + alias
}
return name
} | identifier_body | |
provider_validation.go | package configs
import (
"fmt"
"sort"
"strings"
"github.com/camptocamp/terraboard/internal/terraform/addrs"
"github.com/hashicorp/hcl/v2"
)
// validateProviderConfigs walks the full configuration tree from the root
// module outward, static validation rules to the various combinations of
// provider configuration, required_providers values, and module call providers
// mappings.
//
// To retain compatibility with previous terraform versions, empty "proxy
// provider blocks" are still allowed within modules, though they will
// generate warnings when the configuration is loaded. The new validation
// however will generate an error if a suitable provider configuration is not
// passed in through the module call.
//
// The call argument is the ModuleCall for the provided Config cfg. The
// noProviderConfigRange argument is passed down the call stack, indicating
// that the module call, or a parent module call, has used a feature (at the
// specified source location) that precludes providers from being configured at
// all within the module.
func | (parentCall *ModuleCall, cfg *Config, noProviderConfigRange *hcl.Range) (diags hcl.Diagnostics) {
mod := cfg.Module
for name, child := range cfg.Children {
mc := mod.ModuleCalls[name]
childNoProviderConfigRange := noProviderConfigRange
// if the module call has any of count, for_each or depends_on,
// providers are prohibited from being configured in this module, or
// any module beneath this module.
switch {
case mc.Count != nil:
childNoProviderConfigRange = mc.Count.Range().Ptr()
case mc.ForEach != nil:
childNoProviderConfigRange = mc.ForEach.Range().Ptr()
case mc.DependsOn != nil:
if len(mc.DependsOn) > 0 {
childNoProviderConfigRange = mc.DependsOn[0].SourceRange().Ptr()
} else {
// Weird! We'll just use the call itself, then.
childNoProviderConfigRange = mc.DeclRange.Ptr()
}
}
diags = append(diags, validateProviderConfigs(mc, child, childNoProviderConfigRange)...)
}
// the set of provider configuration names passed into the module, with the
// source range of the provider assignment in the module call.
passedIn := map[string]PassedProviderConfig{}
// the set of empty configurations that could be proxy configurations, with
// the source range of the empty configuration block.
emptyConfigs := map[string]hcl.Range{}
// the set of provider with a defined configuration, with the source range
// of the configuration block declaration.
configured := map[string]hcl.Range{}
// the set of configuration_aliases defined in the required_providers
// block, with the fully qualified provider type.
configAliases := map[string]addrs.AbsProviderConfig{}
// the set of provider names defined in the required_providers block, and
// their provider types.
localNames := map[string]addrs.Provider{}
for _, pc := range mod.ProviderConfigs {
name := providerName(pc.Name, pc.Alias)
// Validate the config against an empty schema to see if it's empty.
_, pcConfigDiags := pc.Config.Content(&hcl.BodySchema{})
if pcConfigDiags.HasErrors() || pc.Version.Required != nil {
configured[name] = pc.DeclRange
} else {
emptyConfigs[name] = pc.DeclRange
}
}
if mod.ProviderRequirements != nil {
for _, req := range mod.ProviderRequirements.RequiredProviders {
localNames[req.Name] = req.Type
for _, alias := range req.Aliases {
addr := addrs.AbsProviderConfig{
Module: cfg.Path,
Provider: req.Type,
Alias: alias.Alias,
}
configAliases[providerName(alias.LocalName, alias.Alias)] = addr
}
}
}
// collect providers passed from the parent
if parentCall != nil {
for _, passed := range parentCall.Providers {
name := providerName(passed.InChild.Name, passed.InChild.Alias)
passedIn[name] = passed
}
}
parentModuleText := "the root module"
moduleText := "the root module"
if !cfg.Path.IsRoot() {
moduleText = cfg.Path.String()
if parent := cfg.Path.Parent(); !parent.IsRoot() {
// module address are prefixed with `module.`
parentModuleText = parent.String()
}
}
// Verify that any module calls only refer to named providers, and that
// those providers will have a configuration at runtime. This way we can
// direct users where to add the missing configuration, because the runtime
// error is only "missing provider X".
for _, modCall := range mod.ModuleCalls {
for _, passed := range modCall.Providers {
// aliased providers are handled more strictly, and are never
// inherited, so they are validated within modules further down.
// Skip these checks to prevent redundant diagnostics.
if passed.InParent.Alias != "" {
continue
}
name := passed.InParent.String()
_, confOK := configured[name]
_, localOK := localNames[name]
_, passedOK := passedIn[name]
// This name was not declared somewhere within in the
// configuration. We ignore empty configs, because they will
// already produce a warning.
if !(confOK || localOK) {
defAddr := addrs.NewDefaultProvider(name)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"There is no explicit declaration for local provider name %q in %s, so Terraform is assuming you mean to pass a configuration for provider %q.\n\nTo clarify your intent and silence this warning, add to %s a required_providers entry named %q with source = %q, or a different source address if appropriate.",
name, moduleText, defAddr.ForDisplay(),
parentModuleText, name, defAddr.ForDisplay(),
),
Subject: &passed.InParent.NameRange,
})
continue
}
// Now we may have named this provider within the module, but
// there won't be a configuration available at runtime if the
// parent module did not pass one in.
if !cfg.Path.IsRoot() && !(confOK || passedOK) {
defAddr := addrs.NewDefaultProvider(name)
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Missing required provider configuration",
Detail: fmt.Sprintf(
"The configuration for %s expects to inherit a configuration for provider %s with local name %q, but %s doesn't pass a configuration under that name.\n\nTo satisfy this requirement, add an entry for %q to the \"providers\" argument in the module %q block.",
moduleText, defAddr.ForDisplay(), name, parentModuleText,
name, parentCall.Name,
),
Subject: parentCall.DeclRange.Ptr(),
})
}
}
}
if cfg.Path.IsRoot() {
// nothing else to do in the root module
return diags
}
// there cannot be any configurations if no provider config is allowed
if len(configured) > 0 && noProviderConfigRange != nil {
// We report this from the perspective of the use of count, for_each,
// or depends_on rather than from inside the module, because the
// recipient of this message is more likely to be the author of the
// calling module (trying to use an older module that hasn't been
// updated yet) than of the called module.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Module is incompatible with count, for_each, and depends_on",
Detail: fmt.Sprintf(
"The module at %s is a legacy module which contains its own local provider configurations, and so calls to it may not use the count, for_each, or depends_on arguments.\n\nIf you also control the module %q, consider updating this module to instead expect provider configurations to be passed by its caller.",
cfg.Path, cfg.SourceAddr,
),
Subject: noProviderConfigRange,
})
}
// now check that the user is not attempting to override a config
for name := range configured {
if passed, ok := passedIn[name]; ok {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Cannot override provider configuration",
Detail: fmt.Sprintf(
"The configuration of %s has its own local configuration for %s, and so it cannot accept an overridden configuration provided by %s.",
moduleText, name, parentModuleText,
),
Subject: &passed.InChild.NameRange,
})
}
}
// A declared alias requires either a matching configuration within the
// module, or one must be passed in.
for name, providerAddr := range configAliases {
_, confOk := configured[name]
_, passedOk := passedIn[name]
if confOk || passedOk {
continue
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing required provider configuration",
Detail: fmt.Sprintf(
"The child module requires an additional configuration for provider %s, with the local name %q.\n\nRefer to the module's documentation to understand the intended purpose of this additional provider configuration, and then add an entry for %s in the \"providers\" meta-argument in the module block to choose which provider configuration the module should use for that purpose.",
providerAddr.Provider.ForDisplay(), name,
name,
),
Subject: &parentCall.DeclRange,
})
}
// You cannot pass in a provider that cannot be used
for name, passed := range passedIn {
childTy := passed.InChild.providerType
// get a default type if there was none set
if childTy.IsZero() {
// This means the child module is only using an inferred
// provider type. We allow this but will generate a warning to
// declare provider_requirements below.
childTy = addrs.NewDefaultProvider(passed.InChild.Name)
}
providerAddr := addrs.AbsProviderConfig{
Module: cfg.Path,
Provider: childTy,
Alias: passed.InChild.Alias,
}
localAddr, localName := localNames[name]
if localName {
providerAddr.Provider = localAddr
}
aliasAddr, configAlias := configAliases[name]
if configAlias {
providerAddr = aliasAddr
}
_, emptyConfig := emptyConfigs[name]
if !(localName || configAlias || emptyConfig) {
// we still allow default configs, so switch to a warning if the incoming provider is a default
if providerAddr.Provider.IsDefault() {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"There is no explicit declaration for local provider name %q in %s, so Terraform is assuming you mean to pass a configuration for %q.\n\nIf you also control the child module, add a required_providers entry named %q with the source address %q.",
name, moduleText, providerAddr.Provider.ForDisplay(),
name, providerAddr.Provider.ForDisplay(),
),
Subject: &passed.InChild.NameRange,
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Reference to undefined provider",
Detail: fmt.Sprintf(
"The child module does not declare any provider requirement with the local name %q.\n\nIf you also control the child module, you can add a required_providers entry named %q with the source address %q to accept this provider configuration.",
name, name, providerAddr.Provider.ForDisplay(),
),
Subject: &passed.InChild.NameRange,
})
}
}
// The provider being passed in must also be of the correct type.
pTy := passed.InParent.providerType
if pTy.IsZero() {
// While we would like to ensure required_providers exists here,
// implied default configuration is still allowed.
pTy = addrs.NewDefaultProvider(passed.InParent.Name)
}
// use the full address for a nice diagnostic output
parentAddr := addrs.AbsProviderConfig{
Module: cfg.Parent.Path,
Provider: pTy,
Alias: passed.InParent.Alias,
}
if cfg.Parent.Module.ProviderRequirements != nil {
req, defined := cfg.Parent.Module.ProviderRequirements.RequiredProviders[name]
if defined {
parentAddr.Provider = req.Type
}
}
if !providerAddr.Provider.Equals(parentAddr.Provider) {
// If this module declares the same source address for a different
// local name then we'll prefer to suggest changing to match
// the child module's chosen name, assuming that it was the local
// name that was wrong rather than the source address.
var otherLocalName string
for localName, sourceAddr := range localNames {
if sourceAddr.Equals(parentAddr.Provider) {
otherLocalName = localName
break
}
}
const errSummary = "Provider type mismatch"
if otherLocalName != "" {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: errSummary,
Detail: fmt.Sprintf(
"The assigned configuration is for provider %q, but local name %q in %s represents %q.\n\nTo pass this configuration to the child module, use the local name %q instead.",
parentAddr.Provider.ForDisplay(), passed.InChild.Name,
parentModuleText, providerAddr.Provider.ForDisplay(),
otherLocalName,
),
Subject: &passed.InChild.NameRange,
})
} else {
// If there is no declared requirement for the provider the
// caller is trying to pass under any name then we'll instead
// report it as an unsuitable configuration to pass into the
// child module's provider configuration slot.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: errSummary,
Detail: fmt.Sprintf(
"The local name %q in %s represents provider %q, but %q in %s represents %q.\n\nEach provider has its own distinct configuration schema and provider types, so this module's %q can be assigned only a configuration for %s, which is not required by %s.",
passed.InParent, parentModuleText, parentAddr.Provider.ForDisplay(),
passed.InChild, moduleText, providerAddr.Provider.ForDisplay(),
passed.InChild, providerAddr.Provider.ForDisplay(),
moduleText,
),
Subject: passed.InParent.NameRange.Ptr(),
})
}
}
}
// Empty configurations are no longer needed. Since the replacement for
// this calls for one entry per provider rather than one entry per
// provider _configuration_, we'll first gather them up by provider
// and then report a single warning for each, whereby we can show a direct
// example of what the replacement should look like.
type ProviderReqSuggestion struct {
SourceAddr addrs.Provider
SourceRanges []hcl.Range
RequiredConfigs []string
AliasCount int
}
providerReqSuggestions := make(map[string]*ProviderReqSuggestion)
for name, src := range emptyConfigs {
providerLocalName := name
if idx := strings.IndexByte(providerLocalName, '.'); idx >= 0 {
providerLocalName = providerLocalName[:idx]
}
sourceAddr, ok := localNames[name]
if !ok {
sourceAddr = addrs.NewDefaultProvider(providerLocalName)
}
suggestion := providerReqSuggestions[providerLocalName]
if suggestion == nil {
providerReqSuggestions[providerLocalName] = &ProviderReqSuggestion{
SourceAddr: sourceAddr,
}
suggestion = providerReqSuggestions[providerLocalName]
}
if providerLocalName != name {
// It's an aliased provider config, then.
suggestion.AliasCount++
}
suggestion.RequiredConfigs = append(suggestion.RequiredConfigs, name)
suggestion.SourceRanges = append(suggestion.SourceRanges, src)
}
for name, suggestion := range providerReqSuggestions {
var buf strings.Builder
fmt.Fprintf(
&buf,
"Earlier versions of Terraform used empty provider blocks (\"proxy provider configurations\") for child modules to declare their need to be passed a provider configuration by their callers. That approach was ambiguous and is now deprecated.\n\nIf you control this module, you can migrate to the new declaration syntax by removing all of the empty provider %q blocks and then adding or updating an entry like the following to the required_providers block of %s:\n",
name, moduleText,
)
fmt.Fprintf(&buf, " %s = {\n", name)
fmt.Fprintf(&buf, " source = %q\n", suggestion.SourceAddr.ForDisplay())
if suggestion.AliasCount > 0 {
// A lexical sort is fine because all of these strings are
// guaranteed to start with the same provider local name, and
// so we're only really sorting by the alias part.
sort.Strings(suggestion.RequiredConfigs)
fmt.Fprintln(&buf, " configuration_aliases = [")
for _, addrStr := range suggestion.RequiredConfigs {
fmt.Fprintf(&buf, " %s,\n", addrStr)
}
fmt.Fprintln(&buf, " ]")
}
fmt.Fprint(&buf, " }")
// We're arbitrarily going to just take the one source range that
// sorts earliest here. Multiple should be rare, so this is only to
// ensure that we produce a deterministic result in the edge case.
sort.Slice(suggestion.SourceRanges, func(i, j int) bool {
return suggestion.SourceRanges[i].String() < suggestion.SourceRanges[j].String()
})
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Redundant empty provider block",
Detail: buf.String(),
Subject: suggestion.SourceRanges[0].Ptr(),
})
}
return diags
}
func providerName(name, alias string) string {
if alias != "" {
name = name + "." + alias
}
return name
}
| validateProviderConfigs | identifier_name |
sup.rs | use super::util::{CacheKeyPath,
RemoteSup};
use crate::VERSION;
use configopt::{ConfigOptDefaults,
ConfigOptToString,
Partial};
use habitat_common::{cli::{RING_ENVVAR,
RING_KEY_ENVVAR},
types::{AutomateAuthToken,
EventStreamConnectMethod,
EventStreamMetadata,
EventStreamServerCertificate,
GossipListenAddr,
HttpListenAddr,
ListenCtlAddr}};
use habitat_core::{env::Config,
os::process::ShutdownTimeout,
package::PackageIdent,
service::HealthCheckInterval,
util::serde_string};
use rants::{error::Error as RantsError,
Address as NatsAddress};
use std::{fmt,
net::{Ipv4Addr,
SocketAddr},
path::PathBuf,
str::FromStr};
use structopt::{clap::AppSettings,
StructOpt};
use url::Url;
#[derive(StructOpt)]
#[structopt(name = "hab",
version = VERSION,
about = "The Habitat Supervisor",
author = "\nThe Habitat Maintainers <humans@habitat.sh>\n",
usage = "hab sup <SUBCOMMAND>",
global_settings = &[AppSettings::VersionlessSubcommands],
)]
#[allow(clippy::large_enum_variant)]
pub enum Sup {
/// Start an interactive Bash-like shell
#[structopt(usage = "hab sup bash", no_version)]
Bash,
/// Depart a Supervisor from the gossip ring; kicking and banning the target from joining again
/// with the same member-id
#[structopt(no_version)]
Depart {
/// The member-id of the Supervisor to depart
#[structopt(name = "MEMBER_ID")]
member_id: String,
#[structopt(flatten)]
remote_sup: RemoteSup,
},
/// Run the Habitat Supervisor
#[structopt(no_version)]
Run(SupRun),
#[structopt(no_version)]
Secret(Secret),
/// Start an interactive Bourne-like shell
#[structopt(usage = "hab sup sh", no_version)]
Sh,
/// Query the status of Habitat services
#[structopt(no_version)]
Status {
/// A package identifier (ex: core/redis, core/busybox-static/1.42.2)
#[structopt(name = "PKG_IDENT")]
pkg_ident: Option<PackageIdent>,
#[structopt(flatten)]
remote_sup: RemoteSup,
},
/// Gracefully terminate the Habitat Supervisor and all of its running services
#[structopt(usage = "hab sup term [OPTIONS]", no_version)]
Term,
}
// TODO (DM): This is unnecessarily difficult due to the orphan rule and the lack of specialization.
// The `configopt` library could be improved to make this easier.
#[derive(Deserialize, Serialize, Debug)]
struct | (#[serde(with = "serde_string")] NatsAddress);
impl fmt::Display for EventStreamAddress {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) }
}
impl FromStr for EventStreamAddress {
type Err = RantsError;
fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(EventStreamAddress(s.parse()?)) }
}
impl ConfigOptToString for EventStreamAddress {}
#[derive(ConfigOptDefaults, Partial, StructOpt, Deserialize)]
#[configopt_defaults(type = "PartialSupRun")]
#[partial(derive(Debug, Default, Deserialize), attrs(serde))]
#[serde(deny_unknown_fields)]
#[structopt(name = "run",
no_version,
about = "Run the Habitat Supervisor",
// set custom usage string, otherwise the binary
// is displayed confusingly as `hab-sup`
// see: https://github.com/kbknapp/clap-rs/blob/2724ec5399c500b12a1a24d356f4090f4816f5e2/src/app/mod.rs#L373-L394
usage = "hab sup run [FLAGS] [OPTIONS] [--] [PKG_IDENT_OR_ARTIFACT]"
)]
#[allow(dead_code)]
pub struct SupRun {
/// The listen address for the Gossip System Gateway
#[structopt(name = "LISTEN_GOSSIP",
long = "listen-gossip",
env = GossipListenAddr::ENVVAR,
default_value = GossipListenAddr::default_as_str())]
listen_gossip: SocketAddr,
/// Start the supervisor in local mode
#[structopt(name = "LOCAL_GOSSIP_MODE",
long = "local-gossip-mode",
conflicts_with_all = &["LISTEN_GOSSIP", "PEER", "PEER_WATCH_FILE"])]
local_gossip_mode: bool,
/// The listen address for the HTTP Gateway
#[structopt(name = "LISTEN_HTTP",
long = "listen-http",
env = HttpListenAddr::ENVVAR,
default_value = HttpListenAddr::default_as_str())]
listen_http: SocketAddr,
/// Disable the HTTP Gateway completely
#[structopt(name = "HTTP_DISABLE", long = "http-disable", short = "D")]
http_disable: bool,
/// The listen address for the Control Gateway. If not specified, the value will be taken from
/// the HAB_LISTEN_CTL environment variable if defined
#[structopt(name = "LISTEN_CTL",
long = "listen-ctl",
env = ListenCtlAddr::ENVVAR,
default_value = ListenCtlAddr::default_as_str())]
listen_ctl: SocketAddr,
/// The organization that the Supervisor and its subsequent services are part of
#[structopt(name = "ORGANIZATION", long = "org")]
organization: Option<String>,
/// The listen address of one or more initial peers (IP[:PORT])
#[structopt(name = "PEER", long = "peer")]
// TODO (DM): This could probably be a different type for better validation (Vec<SockAddr>?)
peer: Vec<String>,
/// If this Supervisor is a permanent peer
#[structopt(name = "PERMANENT_PEER", long = "permanent-peer", short = "I")]
permanent_peer: bool,
/// Watch this file for connecting to the ring
#[structopt(name = "PEER_WATCH_FILE",
long = "peer-watch-file",
conflicts_with = "PEER")]
peer_watch_file: PathBuf,
#[structopt(flatten)]
cache_key_path: CacheKeyPath,
/// The name of the ring used by the Supervisor when running with wire encryption. (ex: hab sup
/// run --ring myring)
#[structopt(name = "RING",
long = "ring",
short = "r",
env = RING_ENVVAR,
conflicts_with = "RING_KEY")]
ring: String,
/// The contents of the ring key when running with wire encryption. (Note: This option is
/// explicitly undocumented and for testing purposes only. Do not use it in a production
/// system. Use the corresponding environment variable instead.) (ex: hab sup run --ring-key
/// 'SYM-SEC-1 foo-20181113185935GCrBOW6CCN75LMl0j2V5QqQ6nNzWm6and9hkKBSUFPI=')
#[structopt(name = "RING_KEY",
long = "ring-key",
env = RING_KEY_ENVVAR,
hidden = true,
conflicts_with = "RING")]
ring_key: Option<String>,
/// Receive Supervisor updates from the specified release channel
#[structopt(name = "CHANNEL", long = "channel", default_value = "stable")]
channel: String,
/// Specify an alternate Builder endpoint. If not specified, the value will be taken from the
/// HAB_BLDR_URL environment variable if defined (default: https://bldr.habitat.sh)
#[structopt(name = "BLDR_URL",
long = "url",
short = "u",
// TODO (DM): These fields are not actual set in the clap macro but I think they should
// env = BLDR_URL_ENVVAR,
// default_value = DEFAULT_BLDR_URL
)]
bldr_url: Url,
/// Use package config from this path, rather than the package itself
#[structopt(name = "CONFIG_DIR", long = "config-from")]
config_dir: Option<PathBuf>,
/// Enable automatic updates for the Supervisor itself
#[structopt(name = "AUTO_UPDATE", long = "auto-update", short = "A")]
auto_update: bool,
/// Used for enabling TLS for the HTTP gateway. Read private key from KEY_FILE. This should be
/// a RSA private key or PKCS8-encoded private key, in PEM format
#[structopt(name = "KEY_FILE", long = "key", requires = "CERT_FILE")]
key_file: Option<PathBuf>,
/// Used for enabling TLS for the HTTP gateway. Read server certificates from CERT_FILE. This
/// should contain PEM-format certificates in the right order (the first certificate should
/// certify KEY_FILE, the last should be a root CA)
#[structopt(name = "CERT_FILE", long = "certs", requires = "KEY_FILE")]
cert_file: Option<PathBuf>,
/// Used for enabling client-authentication with TLS for the HTTP gateway. Read CA certificate
/// from CA_CERT_FILE. This should contain PEM-format certificate that can be used to validate
/// client requests
#[structopt(name = "CA_CERT_FILE",
long = "ca-certs",
requires_all = &["CERT_FILE", "KEY_FILE"])]
ca_cert_file: Option<PathBuf>,
/// Load the given Habitat package as part of the Supervisor startup specified by a package
/// identifier (ex: core/redis) or filepath to a Habitat Artifact (ex:
/// /home/core-redis-3.0.7-21120102031201-x86_64-linux.hart)
// TODO (DM): We could probably do better validation here
#[structopt(name = "PKG_IDENT_OR_ARTIFACT")]
pkg_ident_or_artifact: Option<String>,
// TODO (DM): This flag can eventually be removed.
// See https://github.com/habitat-sh/habitat/issues/7339
#[structopt(name = "APPLICATION", long = "application", hidden = true)]
application: Vec<String>,
// TODO (DM): This flag can eventually be removed.
// See https://github.com/habitat-sh/habitat/issues/7339
#[structopt(name = "ENVIRONMENT", long = "environment", hidden = true)]
environment: Vec<String>,
/// The service group; shared config and topology [default: default]
// TODO (DM): This should set a default value
#[structopt(name = "GROUP", long = "group")]
group: String,
/// Service topology; [default: none]
// TODO (DM): I dont think saying the default is none makes sense here
#[structopt(name = "TOPOLOGY",
long = "topology",
short = "t",
possible_values = &["standalone", "leader"])]
topology: Option<habitat_sup_protocol::types::Topology>,
/// The update strategy; [default: none] [values: none, at-once, rolling]
// TODO (DM): this should set a default_value and use possible_values = &["none", "at-once",
// "rolling"]
#[structopt(name = "STRATEGY", long = "strategy", short = "s")]
strategy: Option<habitat_sup_protocol::types::UpdateStrategy>,
/// One or more service groups to bind to a configuration
#[structopt(name = "BIND", long = "bind")]
bind: Vec<String>,
/// Governs how the presence or absence of binds affects service startup. `strict` blocks
/// startup until all binds are present. [default: strict] [values: relaxed, strict]
// TODO (DM): This should set default_value and use possible_values
#[structopt(name = "BINDING_MODE", long = "binding-mode")]
binding_mode: Option<habitat_sup_protocol::types::BindingMode>,
/// Verbose output; shows file and line/column numbers
#[structopt(name = "VERBOSE", short = "v")]
verbose: bool,
/// Turn ANSI color off
#[structopt(name = "NO_COLOR", long = "no-color")]
no_color: bool,
/// Use structured JSON logging for the Supervisor. Implies NO_COLOR
#[structopt(name = "JSON", long = "json-logging")]
json_logging: bool,
/// The interval (seconds) on which to run health checks [default: 30]
// TODO (DM): Should use default_value = "30"
#[structopt(name = "HEALTH_CHECK_INTERVAL",
long = "health-check-interval",
short = "i")]
health_check_interval: HealthCheckInterval,
/// The IPv4 address to use as the `sys.ip` template variable. If this argument is not set, the
/// supervisor tries to dynamically determine an IP address. If that fails, the supervisor
/// defaults to using `127.0.0.1`
#[structopt(name = "SYS_IP_ADDRESS", long = "sys-ip-address")]
sys_ip_address: Option<Ipv4Addr>,
/// The name of the application for event stream purposes. This will be attached to all events
/// generated by this Supervisor
#[structopt(name = "EVENT_STREAM_APPLICATION", long = "event-stream-application")]
event_stream_application: String,
/// The name of the environment for event stream purposes. This will be attached to all events
/// generated by this Supervisor
#[structopt(name = "EVENT_STREAM_ENVIRONMENT", long = "event-stream-environment")]
event_stream_environment: Option<String>,
/// How long in seconds to wait for an event stream connection before exiting the Supervisor.
/// Set to '0' to immediately start the Supervisor and continue running regardless of the
/// initial connection status
#[structopt(name = "EVENT_STREAM_CONNECT_TIMEOUT",
long = "event-stream-connect-timeout",
default_value = "0",
env = EventStreamConnectMethod::ENVVAR)]
event_stream_connect_timeout: u64,
/// The event stream connection string (host:port) used by this Supervisor to send events to
/// Chef Automate. This enables the event stream and requires --event-stream-application,
/// --event-stream-environment, and --event-stream-token also be set
#[structopt(name = "EVENT_STREAM_URL",
long = "event-stream-url",
requires_all = &["EVENT_STREAM_APPLICATION",
"EVENT_STREAM_ENVIRONMENT",
AutomateAuthToken::ARG_NAME])]
event_stream_url: Option<EventStreamAddress>,
/// The name of the site where this Supervisor is running for event stream purposes
#[structopt(name = "EVENT_STREAM_SITE", long = "event-stream-site")]
event_stream_site: Option<String>,
/// The authentication token for connecting the event stream to Chef Automate
#[structopt(name = "EVENT_STREAM_TOKEN",
long = "event-stream-token",
env = AutomateAuthToken::ENVVAR,
validator = AutomateAuthToken::validate)]
automate_auth_token: Option<String>,
/// An arbitrary key-value pair to add to each event generated by this Supervisor
#[structopt(name = "EVENT_STREAM_METADATA",
long = "event-meta",
validator = EventStreamMetadata::validate)]
event_meta: Vec<String>,
/// The path to Chef Automate's event stream certificate in PEM format used to establish a TLS
/// connection
#[structopt(name = "EVENT_STREAM_SERVER_CERTIFICATE",
long = "event-stream-server-certificate",
validator = EventStreamServerCertificate::validate)]
event_stream_server_certificate: Option<String>,
/// The number of seconds after sending a shutdown signal to wait before killing a service
/// process (default: set in plan)
#[structopt(name = "SHUTDOWN_TIMEOUT", long = "shutdown-timeout")]
shutdown_timeout: ShutdownTimeout,
}
#[derive(StructOpt)]
#[structopt(no_version)]
/// Commands relating to a Habitat Supervisor's Control Gateway secret
pub enum Secret {
/// Generate a secret key to use as a Supervisor's Control Gateway secret
Generate,
}
| EventStreamAddress | identifier_name |
sup.rs | use super::util::{CacheKeyPath,
RemoteSup};
use crate::VERSION;
use configopt::{ConfigOptDefaults,
ConfigOptToString,
Partial};
use habitat_common::{cli::{RING_ENVVAR,
RING_KEY_ENVVAR},
types::{AutomateAuthToken,
EventStreamConnectMethod,
EventStreamMetadata,
EventStreamServerCertificate,
GossipListenAddr,
HttpListenAddr,
ListenCtlAddr}};
use habitat_core::{env::Config,
os::process::ShutdownTimeout,
package::PackageIdent,
service::HealthCheckInterval,
util::serde_string};
use rants::{error::Error as RantsError,
Address as NatsAddress};
use std::{fmt,
net::{Ipv4Addr,
SocketAddr},
path::PathBuf,
str::FromStr};
use structopt::{clap::AppSettings,
StructOpt};
use url::Url;
#[derive(StructOpt)]
#[structopt(name = "hab",
version = VERSION,
about = "The Habitat Supervisor",
author = "\nThe Habitat Maintainers <humans@habitat.sh>\n",
usage = "hab sup <SUBCOMMAND>",
global_settings = &[AppSettings::VersionlessSubcommands],
)]
#[allow(clippy::large_enum_variant)]
pub enum Sup {
/// Start an interactive Bash-like shell
#[structopt(usage = "hab sup bash", no_version)]
Bash,
/// Depart a Supervisor from the gossip ring; kicking and banning the target from joining again
/// with the same member-id
#[structopt(no_version)]
Depart {
/// The member-id of the Supervisor to depart
#[structopt(name = "MEMBER_ID")]
member_id: String,
#[structopt(flatten)]
remote_sup: RemoteSup,
},
/// Run the Habitat Supervisor
#[structopt(no_version)]
Run(SupRun),
#[structopt(no_version)]
Secret(Secret),
/// Start an interactive Bourne-like shell
#[structopt(usage = "hab sup sh", no_version)]
Sh,
/// Query the status of Habitat services
#[structopt(no_version)]
Status {
/// A package identifier (ex: core/redis, core/busybox-static/1.42.2)
#[structopt(name = "PKG_IDENT")]
pkg_ident: Option<PackageIdent>,
#[structopt(flatten)]
remote_sup: RemoteSup,
},
/// Gracefully terminate the Habitat Supervisor and all of its running services
#[structopt(usage = "hab sup term [OPTIONS]", no_version)]
Term,
}
// TODO (DM): This is unnecessarily difficult due to the orphan rule and the lack of specialization.
// The `configopt` library could be improved to make this easier.
#[derive(Deserialize, Serialize, Debug)]
struct EventStreamAddress(#[serde(with = "serde_string")] NatsAddress);
impl fmt::Display for EventStreamAddress {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) }
}
impl FromStr for EventStreamAddress {
type Err = RantsError;
fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(EventStreamAddress(s.parse()?)) }
}
impl ConfigOptToString for EventStreamAddress {}
#[derive(ConfigOptDefaults, Partial, StructOpt, Deserialize)]
#[configopt_defaults(type = "PartialSupRun")]
#[partial(derive(Debug, Default, Deserialize), attrs(serde))]
#[serde(deny_unknown_fields)]
#[structopt(name = "run",
no_version,
about = "Run the Habitat Supervisor",
// set custom usage string, otherwise the binary
// is displayed confusingly as `hab-sup`
// see: https://github.com/kbknapp/clap-rs/blob/2724ec5399c500b12a1a24d356f4090f4816f5e2/src/app/mod.rs#L373-L394
usage = "hab sup run [FLAGS] [OPTIONS] [--] [PKG_IDENT_OR_ARTIFACT]"
)]
#[allow(dead_code)]
pub struct SupRun {
/// The listen address for the Gossip System Gateway
#[structopt(name = "LISTEN_GOSSIP",
long = "listen-gossip",
env = GossipListenAddr::ENVVAR,
default_value = GossipListenAddr::default_as_str())]
listen_gossip: SocketAddr,
/// Start the supervisor in local mode
#[structopt(name = "LOCAL_GOSSIP_MODE",
long = "local-gossip-mode",
conflicts_with_all = &["LISTEN_GOSSIP", "PEER", "PEER_WATCH_FILE"])]
local_gossip_mode: bool,
/// The listen address for the HTTP Gateway
#[structopt(name = "LISTEN_HTTP",
long = "listen-http",
env = HttpListenAddr::ENVVAR,
default_value = HttpListenAddr::default_as_str())]
listen_http: SocketAddr,
/// Disable the HTTP Gateway completely
#[structopt(name = "HTTP_DISABLE", long = "http-disable", short = "D")]
http_disable: bool,
/// The listen address for the Control Gateway. If not specified, the value will be taken from
/// the HAB_LISTEN_CTL environment variable if defined
#[structopt(name = "LISTEN_CTL",
long = "listen-ctl",
env = ListenCtlAddr::ENVVAR,
default_value = ListenCtlAddr::default_as_str())]
listen_ctl: SocketAddr,
/// The organization that the Supervisor and its subsequent services are part of
#[structopt(name = "ORGANIZATION", long = "org")]
organization: Option<String>,
/// The listen address of one or more initial peers (IP[:PORT])
#[structopt(name = "PEER", long = "peer")]
// TODO (DM): This could probably be a different type for better validation (Vec<SockAddr>?)
peer: Vec<String>,
/// If this Supervisor is a permanent peer
#[structopt(name = "PERMANENT_PEER", long = "permanent-peer", short = "I")]
permanent_peer: bool,
/// Watch this file for connecting to the ring
#[structopt(name = "PEER_WATCH_FILE",
long = "peer-watch-file",
conflicts_with = "PEER")]
peer_watch_file: PathBuf, | /// run --ring myring)
#[structopt(name = "RING",
long = "ring",
short = "r",
env = RING_ENVVAR,
conflicts_with = "RING_KEY")]
ring: String,
/// The contents of the ring key when running with wire encryption. (Note: This option is
/// explicitly undocumented and for testing purposes only. Do not use it in a production
/// system. Use the corresponding environment variable instead.) (ex: hab sup run --ring-key
/// 'SYM-SEC-1 foo-20181113185935GCrBOW6CCN75LMl0j2V5QqQ6nNzWm6and9hkKBSUFPI=')
#[structopt(name = "RING_KEY",
long = "ring-key",
env = RING_KEY_ENVVAR,
hidden = true,
conflicts_with = "RING")]
ring_key: Option<String>,
/// Receive Supervisor updates from the specified release channel
#[structopt(name = "CHANNEL", long = "channel", default_value = "stable")]
channel: String,
/// Specify an alternate Builder endpoint. If not specified, the value will be taken from the
/// HAB_BLDR_URL environment variable if defined (default: https://bldr.habitat.sh)
#[structopt(name = "BLDR_URL",
long = "url",
short = "u",
// TODO (DM): These fields are not actual set in the clap macro but I think they should
// env = BLDR_URL_ENVVAR,
// default_value = DEFAULT_BLDR_URL
)]
bldr_url: Url,
/// Use package config from this path, rather than the package itself
#[structopt(name = "CONFIG_DIR", long = "config-from")]
config_dir: Option<PathBuf>,
/// Enable automatic updates for the Supervisor itself
#[structopt(name = "AUTO_UPDATE", long = "auto-update", short = "A")]
auto_update: bool,
/// Used for enabling TLS for the HTTP gateway. Read private key from KEY_FILE. This should be
/// a RSA private key or PKCS8-encoded private key, in PEM format
#[structopt(name = "KEY_FILE", long = "key", requires = "CERT_FILE")]
key_file: Option<PathBuf>,
/// Used for enabling TLS for the HTTP gateway. Read server certificates from CERT_FILE. This
/// should contain PEM-format certificates in the right order (the first certificate should
/// certify KEY_FILE, the last should be a root CA)
#[structopt(name = "CERT_FILE", long = "certs", requires = "KEY_FILE")]
cert_file: Option<PathBuf>,
/// Used for enabling client-authentication with TLS for the HTTP gateway. Read CA certificate
/// from CA_CERT_FILE. This should contain PEM-format certificate that can be used to validate
/// client requests
#[structopt(name = "CA_CERT_FILE",
long = "ca-certs",
requires_all = &["CERT_FILE", "KEY_FILE"])]
ca_cert_file: Option<PathBuf>,
/// Load the given Habitat package as part of the Supervisor startup specified by a package
/// identifier (ex: core/redis) or filepath to a Habitat Artifact (ex:
/// /home/core-redis-3.0.7-21120102031201-x86_64-linux.hart)
// TODO (DM): We could probably do better validation here
#[structopt(name = "PKG_IDENT_OR_ARTIFACT")]
pkg_ident_or_artifact: Option<String>,
// TODO (DM): This flag can eventually be removed.
// See https://github.com/habitat-sh/habitat/issues/7339
#[structopt(name = "APPLICATION", long = "application", hidden = true)]
application: Vec<String>,
// TODO (DM): This flag can eventually be removed.
// See https://github.com/habitat-sh/habitat/issues/7339
#[structopt(name = "ENVIRONMENT", long = "environment", hidden = true)]
environment: Vec<String>,
/// The service group; shared config and topology [default: default]
// TODO (DM): This should set a default value
#[structopt(name = "GROUP", long = "group")]
group: String,
/// Service topology; [default: none]
// TODO (DM): I dont think saying the default is none makes sense here
#[structopt(name = "TOPOLOGY",
long = "topology",
short = "t",
possible_values = &["standalone", "leader"])]
topology: Option<habitat_sup_protocol::types::Topology>,
/// The update strategy; [default: none] [values: none, at-once, rolling]
// TODO (DM): this should set a default_value and use possible_values = &["none", "at-once",
// "rolling"]
#[structopt(name = "STRATEGY", long = "strategy", short = "s")]
strategy: Option<habitat_sup_protocol::types::UpdateStrategy>,
/// One or more service groups to bind to a configuration
#[structopt(name = "BIND", long = "bind")]
bind: Vec<String>,
/// Governs how the presence or absence of binds affects service startup. `strict` blocks
/// startup until all binds are present. [default: strict] [values: relaxed, strict]
// TODO (DM): This should set default_value and use possible_values
#[structopt(name = "BINDING_MODE", long = "binding-mode")]
binding_mode: Option<habitat_sup_protocol::types::BindingMode>,
/// Verbose output; shows file and line/column numbers
#[structopt(name = "VERBOSE", short = "v")]
verbose: bool,
/// Turn ANSI color off
#[structopt(name = "NO_COLOR", long = "no-color")]
no_color: bool,
/// Use structured JSON logging for the Supervisor. Implies NO_COLOR
#[structopt(name = "JSON", long = "json-logging")]
json_logging: bool,
/// The interval (seconds) on which to run health checks [default: 30]
// TODO (DM): Should use default_value = "30"
#[structopt(name = "HEALTH_CHECK_INTERVAL",
long = "health-check-interval",
short = "i")]
health_check_interval: HealthCheckInterval,
/// The IPv4 address to use as the `sys.ip` template variable. If this argument is not set, the
/// supervisor tries to dynamically determine an IP address. If that fails, the supervisor
/// defaults to using `127.0.0.1`
#[structopt(name = "SYS_IP_ADDRESS", long = "sys-ip-address")]
sys_ip_address: Option<Ipv4Addr>,
/// The name of the application for event stream purposes. This will be attached to all events
/// generated by this Supervisor
#[structopt(name = "EVENT_STREAM_APPLICATION", long = "event-stream-application")]
event_stream_application: String,
/// The name of the environment for event stream purposes. This will be attached to all events
/// generated by this Supervisor
#[structopt(name = "EVENT_STREAM_ENVIRONMENT", long = "event-stream-environment")]
event_stream_environment: Option<String>,
/// How long in seconds to wait for an event stream connection before exiting the Supervisor.
/// Set to '0' to immediately start the Supervisor and continue running regardless of the
/// initial connection status
#[structopt(name = "EVENT_STREAM_CONNECT_TIMEOUT",
long = "event-stream-connect-timeout",
default_value = "0",
env = EventStreamConnectMethod::ENVVAR)]
event_stream_connect_timeout: u64,
/// The event stream connection string (host:port) used by this Supervisor to send events to
/// Chef Automate. This enables the event stream and requires --event-stream-application,
/// --event-stream-environment, and --event-stream-token also be set
#[structopt(name = "EVENT_STREAM_URL",
long = "event-stream-url",
requires_all = &["EVENT_STREAM_APPLICATION",
"EVENT_STREAM_ENVIRONMENT",
AutomateAuthToken::ARG_NAME])]
event_stream_url: Option<EventStreamAddress>,
/// The name of the site where this Supervisor is running for event stream purposes
#[structopt(name = "EVENT_STREAM_SITE", long = "event-stream-site")]
event_stream_site: Option<String>,
/// The authentication token for connecting the event stream to Chef Automate
#[structopt(name = "EVENT_STREAM_TOKEN",
long = "event-stream-token",
env = AutomateAuthToken::ENVVAR,
validator = AutomateAuthToken::validate)]
automate_auth_token: Option<String>,
/// An arbitrary key-value pair to add to each event generated by this Supervisor
#[structopt(name = "EVENT_STREAM_METADATA",
long = "event-meta",
validator = EventStreamMetadata::validate)]
event_meta: Vec<String>,
/// The path to Chef Automate's event stream certificate in PEM format used to establish a TLS
/// connection
#[structopt(name = "EVENT_STREAM_SERVER_CERTIFICATE",
long = "event-stream-server-certificate",
validator = EventStreamServerCertificate::validate)]
event_stream_server_certificate: Option<String>,
/// The number of seconds after sending a shutdown signal to wait before killing a service
/// process (default: set in plan)
#[structopt(name = "SHUTDOWN_TIMEOUT", long = "shutdown-timeout")]
shutdown_timeout: ShutdownTimeout,
}
#[derive(StructOpt)]
#[structopt(no_version)]
/// Commands relating to a Habitat Supervisor's Control Gateway secret
pub enum Secret {
/// Generate a secret key to use as a Supervisor's Control Gateway secret
Generate,
} | #[structopt(flatten)]
cache_key_path: CacheKeyPath,
/// The name of the ring used by the Supervisor when running with wire encryption. (ex: hab sup | random_line_split |
texture.rs | use enum_dispatch::enum_dispatch;
use log::error;
use crate::textures::dots::DotsTexture;
use crate::textures::constant::ConstantTexture;
use crate::textures::scaled::ScaleTexture;
use crate::core::interaction::SurfaceInteraction;
use crate::textures::imagemap::{ ImageTextureFloat, ImageTextureRGB};
use crate::textures::mix::MixTexture;
use crate::textures::biler::BilerTexture;
use crate::textures::uv::UVTexture;
use crate::textures::marble::MarbleTexture;
use crate::textures::wrinkled::WrinkledTexture;
use crate::textures::fbm::FBmTexture;
use crate::textures::windy::WindyTexture;
use crate::textures::checkerboard::{Checkerboard3DTexture, Checkerboard2DTexture};
use crate::core::geometry::vector::{Vector2f, Vector3f};
use crate::core::geometry::point::{Point2f, Point3f};
use crate::core::pbrt::{Float, INV_PI, INV2_PI, PI, lerp, clamp, log2};
use crate::core::transform::Transform;
use crate::core::geometry::geometry::{spherical_theta, spherical_phi};
use crate::core::spectrum::{Spectrum, RGBSpectrum, SampledSpectrum};
use std::ops::{Mul, Add, AddAssign, Div};
use crate::core::mipmap::Clampable;
use crate::core::paramset::TextureParams;
// Size of the Perlin noise permutation table. Must be a power of two so that
// lattice coordinates can be wrapped with a bitmask in noise().
const NOISE_PERM_SIZE: usize = 256;
// Perlin's standard permutation of 0..255, stored twice back-to-back so that
// the nested lookups in grad() (NOISE_PERM[NOISE_PERM[NOISE_PERM[x] + y] + z])
// never index out of bounds without extra masking.
const NOISE_PERM: [usize; 2 * NOISE_PERM_SIZE] = [
    151, 160, 137, 91, 90, 15, 131, 13, 201, 95, 96, 53, 194, 233, 7, 225, 140,
    36, 103, 30, 69, 142,
    // Remainder of the noise permutation table
    8, 99, 37, 240, 21, 10, 23, 190, 6, 148, 247, 120, 234, 75, 0, 26, 197, 62,
    94, 252, 219, 203, 117, 35, 11, 32, 57, 177, 33, 88, 237, 149, 56, 87, 174,
    20, 125, 136, 171, 168, 68, 175, 74, 165, 71, 134, 139, 48, 27, 166, 77,
    146, 158, 231, 83, 111, 229, 122, 60, 211, 133, 230, 220, 105, 92, 41, 55,
    46, 245, 40, 244, 102, 143, 54, 65, 25, 63, 161, 1, 216, 80, 73, 209, 76,
    132, 187, 208, 89, 18, 169, 200, 196, 135, 130, 116, 188, 159, 86, 164, 100,
    109, 198, 173, 186, 3, 64, 52, 217, 226, 250, 124, 123, 5, 202, 38, 147,
    118, 126, 255, 82, 85, 212, 207, 206, 59, 227, 47, 16, 58, 17, 182, 189, 28,
    42, 223, 183, 170, 213, 119, 248, 152, 2, 44, 154, 163, 70, 221, 153, 101,
    155, 167, 43, 172, 9, 129, 22, 39, 253, 19, 98, 108, 110, 79, 113, 224, 232,
    178, 185, 112, 104, 218, 246, 97, 228, 251, 34, 242, 193, 238, 210, 144, 12,
    191, 179, 162, 241, 81, 51, 145, 235, 249, 14, 239, 107, 49, 192, 214, 31,
    181, 199, 106, 157, 184, 84, 204, 176, 115, 121, 50, 45, 127, 4, 150, 254,
    138, 236, 205, 93, 222, 114, 67, 29, 24, 72, 243, 141, 128, 195, 78, 66,
    215, 61, 156, 180, 151, 160, 137, 91, 90, 15, 131, 13, 201, 95, 96, 53, 194,
    233, 7, 225, 140, 36, 103, 30, 69, 142, 8, 99, 37, 240, 21, 10, 23, 190, 6,
    148, 247, 120, 234, 75, 0, 26, 197, 62, 94, 252, 219, 203, 117, 35, 11, 32,
    57, 177, 33, 88, 237, 149, 56, 87, 174, 20, 125, 136, 171, 168, 68, 175, 74,
    165, 71, 134, 139, 48, 27, 166, 77, 146, 158, 231, 83, 111, 229, 122, 60,
    211, 133, 230, 220, 105, 92, 41, 55, 46, 245, 40, 244, 102, 143, 54, 65, 25,
    63, 161, 1, 216, 80, 73, 209, 76, 132, 187, 208, 89, 18, 169, 200, 196, 135,
    130, 116, 188, 159, 86, 164, 100, 109, 198, 173, 186, 3, 64, 52, 217, 226,
    250, 124, 123, 5, 202, 38, 147, 118, 126, 255, 82, 85, 212, 207, 206, 59,
    227, 47, 16, 58, 17, 182, 189, 28, 42, 223, 183, 170, 213, 119, 248, 152, 2,
    44, 154, 163, 70, 221, 153, 101, 155, 167, 43, 172, 9, 129, 22, 39, 253, 19,
    98, 108, 110, 79, 113, 224, 232, 178, 185, 112, 104, 218, 246, 97, 228, 251,
    34, 242, 193, 238, 210, 144, 12, 191, 179, 162, 241, 81, 51, 145, 235, 249,
    14, 239, 107, 49, 192, 214, 31, 181, 199, 106, 157, 184, 84, 204, 176, 115,
    121, 50, 45, 127, 4, 150, 254, 138, 236, 205, 93, 222, 114, 67, 29, 24, 72,
    243, 141, 128, 195, 78, 66, 215, 61, 156, 180
];
/// Texture enum specialised to produce scalar `Float` values.
pub type TextureFloat = Textures<Float, Float>;
/// Texture enum specialised to produce spectral (`Spectrum`) values.
pub type TextureSpec = Textures<Spectrum, Spectrum>;
/// Interface implemented by every texture: evaluate the texture at the
/// surface point described by `s`, producing a value of type `T2`.
/// Dispatched statically over the `Textures` enum via `enum_dispatch`.
#[enum_dispatch]
pub trait Texture<T2> {
    /// Returns this texture's value at the given surface interaction.
    fn evaluate(&self, s: &SurfaceInteraction) -> T2;
}
// All Texture generic types must implement these traits
/// Bound on texture value types: cheap to copy, thread-safe, clampable, and
/// closed under the arithmetic the texture implementations perform (scaling,
/// mixing, accumulation, conversion from Float and the two spectrum types).
pub trait SpectrumT<T>:
    Copy +
    Send +
    Sync +
    num::Zero +
    Clampable +
    AddAssign +
    From<Float> +
    From<SampledSpectrum> +
    From<RGBSpectrum> +
    Mul<T, Output = T> +
    Mul<Float, Output = T> +
    Div<Float, Output = T> +
    Add<T, Output = T>{}
// Implementations for valid Texture generic types
impl SpectrumT<Float> for Float{}
impl SpectrumT<RGBSpectrum> for RGBSpectrum{}
impl SpectrumT<SampledSpectrum> for SampledSpectrum{}
/// Closed set of every concrete texture implementation, dispatched through
/// the `Texture<T2>` trait via `enum_dispatch` (avoids `Box<dyn Texture>`).
/// `T1` is the internal computation type, `T2` the evaluated output type.
#[enum_dispatch(Texture<T2>)]
pub enum Textures<T1, T2>
    where T1: SpectrumT<T1> + Mul<T2, Output = T2>,
          T2: SpectrumT<T2> + From<T1>
{
    MarbleTexture,
    UVTexture,
    FBmTexture,
    WrinkledTexture,
    WindyTexture,
    MixTexture(MixTexture<T2>),
    BilerTexture(BilerTexture<T2>),
    ScaleTexture(ScaleTexture<T1, T2>),
    DotsTexture(DotsTexture<T2>),
    ImageTextureFloat(ImageTextureFloat),
    ImageTextureRGB(ImageTextureRGB),
    ConstantTexture(ConstantTexture<T2>),
    Checkerboard2DTexture(Checkerboard2DTexture<T2>),
    Checkerboard3DTexture(Checkerboard3DTexture<T2>)
}
/// Interface for 2D texture mappings: compute the (s, t) texture coordinates
/// for a surface interaction and write the screen-space partial derivatives
/// of those coordinates into `dstdx`/`dstdy` (used for filtering).
#[enum_dispatch]
pub trait TextureMapping2D {
    fn map(&self, si: &SurfaceInteraction,
           dstdx: &mut Vector2f, dstdy: &mut Vector2f) -> Point2f;
}
/// Closed enum of 2D mappings dispatched through `TextureMapping2D`.
#[enum_dispatch(TextureMapping2D)]
pub enum TextureMapping2Ds {
    UVMapping2D,
    PlannarMapping2D,
    SphericalMapping2D,
    CylindricalMapping2D
}
/// Direct (u, v) surface-parameterization mapping with a per-axis scale
/// (`su`, `sv`) and offset (`du`, `dv`): s = su*u + du, t = sv*v + dv.
pub struct UVMapping2D {
    su: Float,
    sv: Float,
    du: Float,
    dv: Float,
}
impl UVMapping2D {
    /// Creates a mapping with the given scales and offsets.
    pub fn new(su: Float, sv: Float, du: Float, dv: Float) -> Self {
        Self { su, sv, du, dv }
    }
}
impl TextureMapping2D for UVMapping2D {
    /// Scales and offsets the interaction's (u, v) into texture space; the
    /// differentials come directly from the stored du/dx, dv/dx, du/dy, dv/dy.
    fn map(&self, si: &SurfaceInteraction,
           dstdx: &mut Vector2f, dstdy: &mut Vector2f) -> Point2f {
        let (dudx, dvdx) = (si.dudx.get(), si.dvdx.get());
        let (dudy, dvdy) = (si.dudy.get(), si.dvdy.get());
        *dstdx = Vector2f::new(self.su * dudx, self.sv * dvdx);
        *dstdy = Vector2f::new(self.su * dudy, self.sv * dvdy);
        Point2f::new(self.su * si.uv[0] + self.du, self.sv * si.uv[1] + self.dv)
    }
}
impl Default for UVMapping2D {
fn default() -> Self {
Self {
su: 1.0,
sv: 1.0,
du: 0.0,
dv: 0.0
}
}
}
/// Maps points onto a unit sphere centered at the texture-space origin,
/// producing normalized spherical coordinates (theta/pi, phi/(2*pi)).
pub struct SphericalMapping2D {
    world_to_texture: Transform
}
impl SphericalMapping2D {
    /// `wtt` transforms world-space points into the sphere's texture space.
    pub fn new(wtt: &Transform) -> Self {
        Self { world_to_texture: *wtt }
    }
    /// Projects `p` onto the unit sphere around the texture-space origin and
    /// converts the resulting direction to normalized (theta, phi).
    fn sphere(&self, p: &Point3f) -> Point2f {
        let vec = (
            self.world_to_texture.transform_point(p) -
            Point3f::new(0.0, 0.0, 0.0))
            .normalize();
        let theta = spherical_theta(&vec);
        let phi = spherical_phi(&vec);
        Point2f::new(theta * INV_PI, phi * INV2_PI)
    }
}
impl TextureMapping2D for SphericalMapping2D {
    /// Maps the hit point to spherical (s, t) and estimates the texture
    /// differentials with forward differences along dp/dx and dp/dy.
    fn map(&self, si: &SurfaceInteraction, dstdx: &mut Vector2f,
           dstdy: &mut Vector2f) -> Point2f {
        let st = self.sphere(&si.p);
        // Compute texture coordinate differentials for sphere (u, v) mapping
        // via forward differencing with a fixed step along each derivative.
        let delta = 0.1;
        let st_deltax = self.sphere(&(si.p + si.dpdx.get() * delta));
        *dstdx = (st_deltax - st) / delta;
        let st_deltay = self.sphere(&(si.p + si.dpdy.get() * delta));
        *dstdy = (st_deltay - st) / delta;
        // Handle sphere mapping discontinuity for coordinate differentials:
        // phi wraps at the seam, so clamp the t differential back into
        // [-0.5, 0.5] instead of letting the wrap produce a huge derivative.
        if dstdx[1] > 0.5 { dstdx[1] = 1.0 - dstdx[1]; }
        else if (*dstdx)[1] < -0.5 { (*dstdx)[1] = -((*dstdx)[1] + 1.0); }
        if dstdy[1] > 0.5 { dstdy[1] = 1.0 - dstdy[1]; }
        else if dstdy[1] < -0.5 { dstdy[1] = -(dstdy[1] + 1.0); }
        st
    }
}
/// Maps points onto a cylinder around the texture-space z axis: s is the
/// normalized angle around the axis, t is the z coordinate.
pub struct CylindricalMapping2D {
    world_to_texture: Transform
}
impl CylindricalMapping2D {
    /// `wtt` transforms world-space points into the cylinder's texture space.
    pub fn new(wtt: &Transform) -> Self {
        Self { world_to_texture: *wtt }
    }
    /// Converts `p` (as a direction from the texture-space origin) to
    /// cylindrical coordinates (angle, height).
    fn cylinder(&self, p: &Point3f) -> Point2f {
        let vec = (
            self.world_to_texture.transform_point(p) -
            Point3f::new(0.0, 0.0, 0.0))
            .normalize();
        Point2f::new(PI + vec.y.atan2(vec.x) * INV2_PI, vec.z)
    }
}
impl TextureMapping2D for CylindricalMapping2D {
    /// Maps the hit point to cylindrical (s, t) and estimates the texture
    /// differentials with forward differences along dp/dx and dp/dy.
    fn map(&self, si: &SurfaceInteraction,
           dstdx: &mut Vector2f, dstdy: &mut Vector2f) -> Point2f {
        let st = self.cylinder(&si.p);
        // Compute texture coordinate differentials for cylinder (u, v) mapping
        let delta = 0.1;
        let st_deltax = self.cylinder(&(si.p + si.dpdx.get() * delta));
        *dstdx = (st_deltax - st) / delta;
        let st_deltay = self.cylinder(&(si.p + si.dpdy.get() * delta));
        *dstdy = (st_deltay - st) / delta;
        // Handle the angular seam (same discontinuity fix as the sphere
        // mapping): keep the t differential within [-0.5, 0.5] across the wrap.
        if dstdx[1] > 0.5 { dstdx[1] = 1.0 - dstdx[1]; }
        else if (*dstdx)[1] < -0.5 { (*dstdx)[1] = -((*dstdx)[1] + 1.0); }
        if dstdy[1] > 0.5 { dstdy[1] = 1.0 - dstdy[1]; }
        else if dstdy[1] < -0.5 { dstdy[1] = -(dstdy[1] + 1.0); }
        st
    }
}
/// Planar mapping: texture coordinates are the projections of the hit point
/// onto two axes `vs`/`vt` spanning a plane, plus offsets `ds`/`dt`.
/// (The "Plannar" spelling is kept — renaming would break callers.)
pub struct PlannarMapping2D {
    vs: Vector3f,
    vt: Vector3f,
    ds: Float,
    dt: Float
}
impl PlannarMapping2D {
    /// `vs`/`vt` span the plane; `ds`/`dt` offset the resulting (s, t).
    pub fn new(vs: &Vector3f, vt: &Vector3f,
               ds: Float, dt: Float) -> Self {
        Self {
            ds,
            dt,
            vs: *vs,
            vt: *vt
        }
    }
}
impl TextureMapping2D for PlannarMapping2D {
    /// Projects the hit point onto the plane's s/t axes; the differentials
    /// are the projections of dp/dx and dp/dy onto the same axes.
    fn map(&self, si: &SurfaceInteraction, dstdx: &mut Vector2f,
           dstdy: &mut Vector2f) -> Point2f {
        let dpdx = si.dpdx.get();
        let dpdy = si.dpdy.get();
        *dstdx = Vector2f::new(dpdx.dot(&self.vs), dpdx.dot(&self.vt));
        *dstdy = Vector2f::new(dpdy.dot(&self.vs), dpdy.dot(&self.vt));
        let v = Vector3f::from(si.p);
        Point2f::new(self.ds + v.dot(&self.vs), self.dt + v.dot(&self.vt))
    }
}
/// Interface for 3D texture mappings: produce a texture-space point for the
/// interaction plus its screen-space partial derivatives.
#[enum_dispatch]
pub trait TextureMapping3D {
    fn map(&self, si: &SurfaceInteraction, dpdx: &mut Vector3f,
           dpdy: &mut Vector3f) -> Point3f;
}
/// Closed enum of 3D mappings dispatched through `TextureMapping3D`.
#[enum_dispatch(TextureMapping3D)]
pub enum TextureMapping3Ds {
    IdentityMapping3D
}
/// 3D mapping that simply carries points (and their derivatives) through a
/// world-to-texture transform.
pub struct IdentityMapping3D {
    world_to_texture: Transform
}
impl IdentityMapping3D {
    /// `w2t` transforms world-space points and vectors into texture space.
    pub fn new(w2t: &Transform) -> Self {
        Self { world_to_texture: *w2t }
    }
}
impl TextureMapping3D for IdentityMapping3D {
    /// Returns the hit point in texture space; the output differentials are
    /// the world-space dp/dx and dp/dy pushed through the same transform.
    fn map(&self, si: &SurfaceInteraction, dpdx: &mut Vector3f,
           dpdy: &mut Vector3f) -> Point3f {
        *dpdx = self.world_to_texture.transform_vector(&si.dpdx.get());
        *dpdy = self.world_to_texture.transform_vector(&si.dpdy.get());
        self.world_to_texture.transform_point(&si.p)
    }
}
/// Evaluates the Lanczos windowed-sinc filter at `x` with window parameter
/// `tau`. Symmetric in `x`: returns 1 near zero and 0 outside |x| > 1.
pub fn lanczos(x: Float, tau: Float) -> Float {
    let mut x = x.abs();
    if x < 1.0e-5 {
        return 1.0;
    }
    if x > 1.0 {
        return 0.0;
    }
    x *= PI;
    let window = (x * tau).sin() / (x * tau);
    let sinc = x.sin() / x;
    window * sinc
}
/// Evaluates Perlin-style gradient noise at (x, y, z).
///
/// Trilinearly interpolates gradient contributions from the eight corners of
/// the lattice cell containing the point, blended with the quintic
/// `noise_weight` polynomial so the result is smooth across cell boundaries.
pub fn noise(x: Float, y: Float, z: Float) -> Float {
    // Compute noise cell coordinates and offsets. Floor first, then cast
    // through i64: the original `x.floor() as usize` saturates to 0 for
    // negative floats in Rust, which corrupted both the lattice index and the
    // fractional offsets for any negative coordinate. Non-negative inputs
    // behave exactly as before.
    let (fx, fy, fz) = (x.floor(), y.floor(), z.floor());
    let dx = x - fx;
    let dy = y - fy;
    let dz = z - fz;
    // Wrap lattice coordinates into the permutation table. NOISE_PERM_SIZE is
    // a power of two, and a two's-complement mask on i64 acts as a true
    // modulo even for negative cells.
    let mask = NOISE_PERM_SIZE as i64 - 1;
    let ix = (fx as i64 & mask) as usize;
    let iy = (fy as i64 & mask) as usize;
    let iz = (fz as i64 & mask) as usize;
    // Compute gradient weights at the 8 cell corners
    let w000 = grad(ix, iy, iz, dx, dy, dz);
    let w100 = grad(ix + 1, iy, iz, dx - 1.0, dy, dz);
    let w010 = grad(ix, iy + 1, iz, dx, dy - 1.0, dz);
    let w110 = grad(ix + 1, iy + 1, iz, dx - 1.0, dy - 1.0, dz);
    let w001 = grad(ix, iy, iz + 1, dx, dy, dz - 1.0);
    let w101 = grad(ix + 1, iy, iz + 1, dx - 1.0, dy, dz - 1.0);
    let w011 = grad(ix, iy + 1, iz + 1, dx, dy - 1.0, dz - 1.0);
    let w111 = grad(ix + 1, iy + 1, iz + 1, dx - 1.0, dy - 1.0, dz - 1.0);
    // Compute trilinear interpolation of weights
    let wx = noise_weight(dx);
    let wy = noise_weight(dy);
    let wz = noise_weight(dz);
    let x00 = lerp(wx, w000, w100);
    let x10 = lerp(wx, w010, w110);
    let x01 = lerp(wx, w001, w101);
    let x11 = lerp(wx, w011, w111);
    let y0 = lerp(wy, x00, x10);
    let y1 = lerp(wy, x01, x11);
    lerp(wz, y0, y1)
}
/// Convenience wrapper: evaluates `noise` at the components of a point.
pub fn noisep(p: Point3f) -> Float {
    noise(p.x, p.y, p.z)
}
/// Gradient contribution for one lattice corner: hashes (x, y, z) through the
/// permutation table to select one of Perlin's 16 gradient directions and
/// dots it with the offset vector (dx, dy, dz).
fn grad(x: usize, y: usize, z: usize, dx: Float, dy: Float, dz: Float) -> Float {
    let mut h = NOISE_PERM[NOISE_PERM[NOISE_PERM[x] + y] + z];
    h &= 15;
    // Pick two of the three offsets according to the hash value.
    let u = if h < 8 || h == 12 || h == 13 { dx } else { dy };
    let v = if h < 4 || h == 12 || h == 13 { dy } else { dz };
    // Flip each term's sign from the low two hash bits and sum. The original
    // line was syntactically broken — the `{ -v }` arm of the second
    // conditional was missing (restored per the standard Perlin Grad).
    (if (h & 1) != 0 { -u } else { u }) + (if (h & 2) != 0 { -v } else { v })
}
/// Quintic smoothing polynomial 6t^5 - 15t^4 + 10t^3 (zero first and second
/// derivatives at t = 0 and t = 1), used to blend noise cell corners.
fn noise_weight(t: Float) -> Float {
    let t_cubed = t * t * t;
    let t_fourth = t_cubed * t;
    6.0 * t_fourth * t - 15.0 * t_fourth + 10.0 * t_cubed
}
/// Evaluates a fractional Brownian motion (fBm) sum of noise octaves at `p`.
/// `dpdx`/`dpdy` are the screen-space partials of `p`, used to clamp the
/// octave count for antialiasing; `omega` is the per-octave gain and
/// `max_octaves` the octave budget.
pub fn fbm(
    p: &Point3f, dpdx: &Vector3f, dpdy: &Vector3f,
    omega: Float, max_octaves: usize) -> Float {
    // Compute number of octaves for antialiased FBm
    let len2 = dpdx.length_squared().max(dpdy.length_squared());
    let n = clamp(-1.0 - 0.5 * log2(len2), 0.0, max_octaves as Float);
    let nint = n.floor() as usize;
    // Compute sum of octaves of noise for fbm
    let (mut sum, mut lambda, mut o) = (0.0, 1.0, 1.0);
    for _i in 0..nint {
        sum += o * noisep(*p * lambda);
        lambda *= 1.99; // slightly irrational ratio avoids aligned lattice artifacts
        o *= omega;
    }
    // Fade in the fractional last octave to avoid a pop at octave boundaries
    let npartial = n - nint as Float;
    sum += o * smooth_step(0.3, 0.7, npartial) * noisep(*p * lambda);
    sum
}
/// Evaluates turbulence — a sum of scaled absolute values of noise octaves —
/// at `p`. `dpdx`/`dpdy` are the screen-space partials of `p`, used to clamp
/// the octave count for antialiasing; `omega` is the per-octave gain and
/// `max_octaves` the octave budget.
pub fn turbulence(
    p: &Point3f, dpdx: &Vector3f, dpdy: &Vector3f,
    omega: Float, max_octaves: usize) -> Float {
    // Compute number of octaves for antialiased turbulence
    let len2 = dpdx.length_squared().max(dpdy.length_squared());
    let n = clamp(-1.0 - 0.5 * len2.log2(), 0.0, max_octaves as Float);
    let nint = n.floor() as usize;
    // Compute sum of octaves of noise for turbulence. Each octave contributes
    // its weight times |noise|; the previous `sum += o + ...` ADDED the weight
    // instead of scaling by it, so `omega` never attenuated the noise terms
    // (compare fbm above, which correctly uses `o *`).
    let (mut sum, mut lambda, mut o) = (0.0, 1.0, 1.0);
    for _i in 0..nint {
        sum += o * noisep(*p * lambda).abs();
        lambda *= 1.99;
        o *= omega;
    }
    // Account for contributions of clamped octaves in turbulence: fade the
    // partial octave toward the average |noise| magnitude (~0.2).
    let npartial = n - nint as Float;
    sum += o * lerp(
        smooth_step(0.3, 0.7, npartial),
        0.2,
        noisep(*p * lambda).abs());
    for _i in nint..max_octaves {
        sum += o * 0.2;
        o *= omega;
    }
    sum
}
/// Cubic Hermite step: 0 at/below `min`, 1 at/above `max`, smooth in between.
fn smooth_step(min: Float, max: Float, value: Float) -> Float {
    let t = clamp((value - min) / (max - min), 0.0, 1.0);
    t * t * (3.0 - 2.0 * t)
}
/// Builds the 2D texture mapping requested by the parameter set's "mapping"
/// string ("uv", "planar", "spherical" or "cylindrical"). Unknown names log
/// an error and fall back to an identity UV mapping.
pub fn get_mapping2d(t2w: &Transform, tp: &mut TextureParams) -> TextureMapping2Ds {
    let ty = tp.find_string("mapping", "uv");
    match ty.as_str() {
        "uv" => {
            // Per-axis scale and offset applied to the surface (u, v)
            let su = tp.find_float("uscale", 1.0);
            let sv = tp.find_float("vscale", 1.0);
            let du = tp.find_float("udelta", 0.0);
            let dv = tp.find_float("vdelta", 0.0);
            UVMapping2D::new(su, sv, du, dv).into()
        },
        "planar" => {
            // Axes spanning the projection plane plus (s, t) offsets
            let vs = tp.find_vector3f("v1", Vector3f::new(1.0, 0.0, 0.0));
            let vt = tp.find_vector3f("v2", Vector3f::new(0.0, 1.0, 0.0));
            let ds = tp.find_float("udelta", 0.0);
            let dt = tp.find_float("vdelta", 0.0);
            PlannarMapping2D::new(&vs, &vt, ds, dt).into()
        }
        // These mappings need the inverse (world-to-texture) transform
        "spherical" => SphericalMapping2D::new(&Transform::inverse(t2w)).into(),
        "cylindrical" => CylindricalMapping2D::new(&Transform::inverse(t2w)).into(),
        _ => {
            error!("2D texture mapping \"{}\" unknown", ty);
            UVMapping2D::new(1.0, 1.0, 0.0, 0.0).into()
        }
    }
} | { -v } | conditional_block
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.