repo_id
stringclasses 927
values | file_path
stringlengths 99
214
| content
stringlengths 2
4.15M
|
|---|---|---|
m1p
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/src/doublenest/namemismatch/m1p/a.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package m1p
import (
"sort"
"github.com/golang/dep/gps"
)
var (
_ = sort.Strings
_ = gps.Solve
)
|
m1p
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/src/doublenest/namemismatch/m1p/b.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package m1p
import (
"os"
"sort"
)
var (
_ = sort.Strings
_ = os.PathSeparator
)
|
simpleallt
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/src/simpleallt/a.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package simple
import (
"sort"
"github.com/golang/dep/gps"
)
var (
_ = sort.Strings
_ = gps.Solve
)
|
simpleallt
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/src/simpleallt/a_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package simple_test
import (
"sort"
"strconv"
)
var (
_ = sort.Strings
_ = strconv.Unquote
)
|
simpleallt
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/src/simpleallt/t_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package simple
import (
"math/rand"
"strconv"
)
var (
_ = rand.Int()
_ = strconv.Unquote
)
|
match
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/digest/launchpad.net/match/match.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package match
|
mismatch
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/digest/github.com/alice/mismatch/mismatch.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package mismatch
|
match
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/digest/github.com/alice/match/match.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package match
|
notInLock
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/digest/github.com/alice/notInLock/notInLock.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package notInLock
|
emptyDigest
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/digest/github.com/bob/emptyDigest/emptyDigest.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package emptyDigest
|
match
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/digest/github.com/bob/match/match.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package match
|
stdout_stderr
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/cmd/stdout_stderr/stdout_stderr.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"os"
)
// main emits the literal string "stdout" on standard output and "stderr"
// on standard error; test harnesses run this binary to verify that the
// two process streams are captured separately.
func main() {
	// Write errors are deliberately ignored: this is a throwaway fixture binary.
	_, _ = os.Stdout.WriteString("stdout")
	_, _ = os.Stderr.WriteString("stderr")
}
|
echosleep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/_testdata/cmd/echosleep/echosleep.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"flag"
"fmt"
"time"
)
// main prints "foo" once per requested iteration, pausing an extra 250ms
// before each successive round (0ms, 250ms, 500ms, ...). The iteration
// count is taken from the -n flag and defaults to 1.
func main() {
	iterations := flag.Int("n", 1, "number of iterations before stopping")
	flag.Parse()
	for round := 0; round < *iterations; round++ {
		fmt.Println("foo")
		time.Sleep(time.Duration(round) * 250 * time.Millisecond)
	}
}
|
paths
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/paths/paths_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package paths
import (
"testing"
_ "github.com/golang/dep/internal/test" // DO NOT REMOVE, allows go test ./... -update to work
)
// TestIsStandardImportPath checks IsStandardImportPath against a table of
// import paths with known stdlib / non-stdlib classifications.
func TestIsStandardImportPath(t *testing.T) {
	cases := []struct {
		path   string
		stdlib bool
	}{
		{"appengine", true},
		{"net/http", true},
		{"github.com/anything", false},
		{"github.com", false},
		{"foo", true},
		{".", false},
	}
	for _, tc := range cases {
		got := IsStandardImportPath(tc.path)
		if got == tc.stdlib {
			continue
		}
		if got {
			t.Errorf("%s was marked stdlib but should not have been", tc.path)
		} else {
			t.Errorf("%s was not marked stdlib but should have been", tc.path)
		}
	}
}
|
paths
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/paths/paths.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package paths
import "strings"
// IsStandardImportPath reports whether $GOROOT/src/path should be considered
// part of the standard distribution. For historical reasons we allow people to add
// their own code to $GOROOT instead of using $GOPATH, but we assume that
// code will start with a domain name (dot in the first element).
// This was lovingly taken from src/cmd/go/pkg.go in Go's code (isStandardImportPath).
func IsStandardImportPath(path string) bool {
	// The first path element is everything before the first slash; when
	// there is no slash, the whole path is the first element.
	firstElem := path
	if slash := strings.IndexByte(path, '/'); slash >= 0 {
		firstElem = path[:slash]
	}
	// Domain names contain a dot; standard-library style paths do not.
	return !strings.Contains(firstElem, ".")
}
|
pb
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/internal/pb/source_cache.pb.go
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: source_cache.proto
/*
Package pb is a generated protocol buffer package.
It is generated from these files:
source_cache.proto
It has these top-level messages:
Constraint
ProjectProperties
LockedProject
*/
package pb
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
type Constraint_Type int32
const (
Constraint_Revision Constraint_Type = 0
Constraint_Branch Constraint_Type = 1
Constraint_DefaultBranch Constraint_Type = 2
Constraint_Version Constraint_Type = 3
Constraint_Semver Constraint_Type = 4
)
var Constraint_Type_name = map[int32]string{
0: "Revision",
1: "Branch",
2: "DefaultBranch",
3: "Version",
4: "Semver",
}
var Constraint_Type_value = map[string]int32{
"Revision": 0,
"Branch": 1,
"DefaultBranch": 2,
"Version": 3,
"Semver": 4,
}
func (x Constraint_Type) String() string {
return proto.EnumName(Constraint_Type_name, int32(x))
}
func (Constraint_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} }
// Constraint is a serializable representation of a gps.Constraint or gps.UnpairedVersion.
type Constraint struct {
Type Constraint_Type `protobuf:"varint,1,opt,name=type,enum=pb.Constraint_Type" json:"type,omitempty"`
Value string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"`
}
func (m *Constraint) Reset() { *m = Constraint{} }
func (m *Constraint) String() string { return proto.CompactTextString(m) }
func (*Constraint) ProtoMessage() {}
func (*Constraint) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
func (m *Constraint) GetType() Constraint_Type {
if m != nil {
return m.Type
}
return Constraint_Revision
}
func (m *Constraint) GetValue() string {
if m != nil {
return m.Value
}
return ""
}
// ProjectProperties is a serializable representation of gps.ProjectRoot and gps.ProjectProperties.
type ProjectProperties struct {
Root string `protobuf:"bytes,1,opt,name=root" json:"root,omitempty"`
Source string `protobuf:"bytes,2,opt,name=source" json:"source,omitempty"`
Constraint *Constraint `protobuf:"bytes,3,opt,name=constraint" json:"constraint,omitempty"`
}
func (m *ProjectProperties) Reset() { *m = ProjectProperties{} }
func (m *ProjectProperties) String() string { return proto.CompactTextString(m) }
func (*ProjectProperties) ProtoMessage() {}
func (*ProjectProperties) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
func (m *ProjectProperties) GetRoot() string {
if m != nil {
return m.Root
}
return ""
}
func (m *ProjectProperties) GetSource() string {
if m != nil {
return m.Source
}
return ""
}
func (m *ProjectProperties) GetConstraint() *Constraint {
if m != nil {
return m.Constraint
}
return nil
}
// LockedProject is a serializable representation of gps.LockedProject.
type LockedProject struct {
Root string `protobuf:"bytes,1,opt,name=root" json:"root,omitempty"`
Source string `protobuf:"bytes,2,opt,name=source" json:"source,omitempty"`
UnpairedVersion *Constraint `protobuf:"bytes,3,opt,name=unpairedVersion" json:"unpairedVersion,omitempty"`
Revision string `protobuf:"bytes,4,opt,name=revision" json:"revision,omitempty"`
Packages []string `protobuf:"bytes,5,rep,name=packages" json:"packages,omitempty"`
}
func (m *LockedProject) Reset() { *m = LockedProject{} }
func (m *LockedProject) String() string { return proto.CompactTextString(m) }
func (*LockedProject) ProtoMessage() {}
func (*LockedProject) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
func (m *LockedProject) GetRoot() string {
if m != nil {
return m.Root
}
return ""
}
func (m *LockedProject) GetSource() string {
if m != nil {
return m.Source
}
return ""
}
func (m *LockedProject) GetUnpairedVersion() *Constraint {
if m != nil {
return m.UnpairedVersion
}
return nil
}
func (m *LockedProject) GetRevision() string {
if m != nil {
return m.Revision
}
return ""
}
func (m *LockedProject) GetPackages() []string {
if m != nil {
return m.Packages
}
return nil
}
func init() {
proto.RegisterType((*Constraint)(nil), "pb.Constraint")
proto.RegisterType((*ProjectProperties)(nil), "pb.ProjectProperties")
proto.RegisterType((*LockedProject)(nil), "pb.LockedProject")
proto.RegisterEnum("pb.Constraint_Type", Constraint_Type_name, Constraint_Type_value)
}
func init() { proto.RegisterFile("source_cache.proto", fileDescriptor0) }
var fileDescriptor0 = []byte{
// 294 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0x4f, 0x4f, 0xc2, 0x40,
0x14, 0xc4, 0x5d, 0x28, 0x08, 0x0f, 0x41, 0x78, 0x1a, 0xd3, 0x78, 0x6a, 0x7a, 0x91, 0x53, 0x0f,
0x78, 0xf1, 0xac, 0x1e, 0x39, 0x90, 0x6a, 0xbc, 0x9a, 0xed, 0xf2, 0x94, 0x0a, 0x76, 0x37, 0xaf,
0xdb, 0x26, 0x7c, 0x14, 0x3f, 0x84, 0xdf, 0xd1, 0x74, 0x59, 0xf1, 0x4f, 0xe2, 0xc1, 0x5b, 0xa7,
0xf3, 0xcb, 0xce, 0xcc, 0x2e, 0x60, 0xa9, 0x2b, 0x56, 0xf4, 0xa8, 0xa4, 0x5a, 0x51, 0x62, 0x58,
0x5b, 0x8d, 0x2d, 0x93, 0xc5, 0x6f, 0x02, 0xe0, 0x46, 0x17, 0xa5, 0x65, 0x99, 0x17, 0x16, 0x2f,
0x20, 0xb0, 0x5b, 0x43, 0xa1, 0x88, 0xc4, 0x74, 0x34, 0x3b, 0x49, 0x4c, 0x96, 0x7c, 0xb9, 0xc9,
0xfd, 0xd6, 0x50, 0xea, 0x00, 0x3c, 0x85, 0x4e, 0x2d, 0x37, 0x15, 0x85, 0xad, 0x48, 0x4c, 0xfb,
0xe9, 0x4e, 0xc4, 0x73, 0x08, 0x1a, 0x06, 0x8f, 0xa0, 0x97, 0x52, 0x9d, 0x97, 0xb9, 0x2e, 0xc6,
0x07, 0x08, 0xd0, 0xbd, 0x66, 0x59, 0xa8, 0xd5, 0x58, 0xe0, 0x04, 0x86, 0xb7, 0xf4, 0x24, 0xab,
0x8d, 0xf5, 0xbf, 0x5a, 0x38, 0x80, 0xc3, 0x07, 0x62, 0xc7, 0xb6, 0x1b, 0xf6, 0x8e, 0x5e, 0x6b,
0xe2, 0x71, 0x10, 0x6b, 0x98, 0x2c, 0x58, 0xbf, 0x90, 0xb2, 0x0b, 0xd6, 0x86, 0xd8, 0xe6, 0x54,
0x22, 0x42, 0xc0, 0x5a, 0x5b, 0xd7, 0xb0, 0x9f, 0xba, 0x6f, 0x3c, 0x83, 0xee, 0x6e, 0x9e, 0x6f,
0xe3, 0x15, 0x26, 0x00, 0x6a, 0xdf, 0x3e, 0x6c, 0x47, 0x62, 0x3a, 0x98, 0x8d, 0x7e, 0x6e, 0x4a,
0xbf, 0x11, 0xf1, 0xbb, 0x80, 0xe1, 0x5c, 0xab, 0x35, 0x2d, 0x7d, 0xee, 0xbf, 0xd2, 0xae, 0xe0,
0xb8, 0x2a, 0x8c, 0xcc, 0x99, 0x96, 0x7e, 0xcf, 0x1f, 0x91, 0xbf, 0x31, 0x3c, 0x87, 0x1e, 0xfb,
0xeb, 0x0a, 0x03, 0x77, 0xe6, 0x5e, 0x37, 0x9e, 0x91, 0x6a, 0x2d, 0x9f, 0xa9, 0x0c, 0x3b, 0x51,
0xbb, 0xf1, 0x3e, 0x75, 0xd6, 0x75, 0xef, 0x78, 0xf9, 0x11, 0x00, 0x00, 0xff, 0xff, 0xbd, 0x52,
0x77, 0xb3, 0xdd, 0x01, 0x00, 0x00,
}
|
pb
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/internal/pb/pb.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package pb provides generated Protocol Buffers for cache serialization.
package pb
//go:generate protoc --go_out=. source_cache.proto
|
pb
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/internal/pb/source_cache.proto
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
syntax = "proto3";
package pb;
// Constraint is a serializable representation of a gps.Constraint or gps.UnpairedVersion.
message Constraint {
enum Type {
Revision = 0;
Branch = 1;
DefaultBranch = 2;
Version = 3;
Semver = 4;
}
Type type = 1;
string value = 2;
//TODO strongly typed Semver field
}
// ProjectProperties is a serializable representation of gps.ProjectRoot and gps.ProjectProperties.
message ProjectProperties {
string root = 1;
string source = 2;
Constraint constraint = 3;
}
// LockedProject is a serializable representation of gps.LockedProject.
message LockedProject {
string root = 1;
string source = 2;
Constraint unpairedVersion = 3;
string revision = 4;
repeated string packages = 5;
}
|
verify
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/verify/digest.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package verify
import (
"bytes"
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"fmt"
"hash"
"io"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/pkg/errors"
)
// HashVersion is an arbitrary number that identifies the hash algorithm used by
// the directory hasher.
//
// 1: SHA256, as implemented in crypto/sha256
const HashVersion = 1
const osPathSeparator = string(filepath.Separator)
// lineEndingReader is a `io.Reader` that converts CRLF sequences to LF.
//
// When cloning or checking out repositories, some Version Control Systems,
// VCSs, on some supported Go Operating System architectures, GOOS, will
// automatically convert line endings that end in a single line feed byte, LF,
// to line endings that end in a two byte sequence of carriage return, CR,
// followed by LF. This LF to CRLF conversion would cause otherwise identical
// versioned files to have different on disk contents simply based on which VCS
// and GOOS are involved. Different file contents for the same file would cause
// the resultant hashes to differ. In order to ensure file contents normalize
// and produce the same hash, this structure wraps an io.Reader that modifies
// the file's contents when it is read, translating all CRLF sequences to LF.
//
// A lineEndingReader carries per-stream state (prevReadEndedCR) that Read
// mutates without synchronization, so a single instance must not be shared
// by concurrent readers.
type lineEndingReader struct {
	src             io.Reader // source io.Reader from which this reads
	prevReadEndedCR bool      // used to track whether final byte of previous Read was CR
}
// newLineEndingReader returns a new lineEndingReader that reads from the
// specified source io.Reader. The reader starts with no withheld CR state.
func newLineEndingReader(src io.Reader) *lineEndingReader {
	r := new(lineEndingReader)
	r.src = src
	return r
}
var crlf = []byte("\r\n")
// Read consumes bytes from the structure's source io.Reader to fill the
// specified slice of bytes. It converts all CRLF byte sequences to LF, and
// handles cases where CR and LF straddle across two Read operations.
//
// It reports the number of normalized bytes placed in buf along with any
// error from the underlying source. A trailing CR is withheld until the
// next Read proves whether or not it begins a CRLF pair.
func (f *lineEndingReader) Read(buf []byte) (int, error) {
	buflen := len(buf)
	if f.prevReadEndedCR {
		// Read one fewer bytes so we have room if the first byte of the
		// upcoming Read is not a LF, in which case we will need to insert
		// trailing CR from previous read.
		buflen--
	}
	nr, er := f.src.Read(buf[:buflen])
	if nr > 0 {
		if f.prevReadEndedCR && buf[0] != '\n' {
			// The withheld CR was NOT the start of a CRLF pair, so it must
			// be emitted after all. Having a CRLF split across two Read
			// operations is rare, so the performance impact of copying the
			// entire buffer to the right by one byte, while suboptimal, will
			// at least not happen very often. This negative performance
			// impact is mitigated somewhat on many Go compilation
			// architectures, GOARCH, because the `copy` builtin uses a
			// machine opcode for performing the memory copy on possibly
			// overlapping regions of memory.
			copy(buf[1:nr+1], buf[:nr]) // shift data to right one byte
			buf[0] = '\r'               // insert the previous skipped CR byte at start of buf
			nr++                        // pretend we read one more byte
		}
		// Remove any CRLF sequences in the buffer using `bytes.Index` because,
		// like the `copy` builtin on many GOARCHs, it also takes advantage of a
		// machine opcode to search for byte patterns.
		var searchOffset int // index within buffer from whence the search will commence for each loop; set to the index of the end of the previous loop.
		var shiftCount int   // each subsequent shift operation needs to shift bytes to the left by one more position than the shift that preceded it.
		previousIndex := -1  // index of previously found CRLF; -1 means no previous index
		for {
			index := bytes.Index(buf[searchOffset:nr], crlf)
			if index == -1 {
				break
			}
			index += searchOffset // convert relative index to absolute
			if previousIndex != -1 {
				// Drop the CR at previousIndex by shifting the bytes between
				// the previous CRLF and this CRLF left over it.
				copy(buf[previousIndex-shiftCount:], buf[previousIndex+1:index])
				shiftCount++ // next shift needs to be 1 byte to the left
			}
			previousIndex = index
			searchOffset = index + 2 // start next search after len(crlf)
		}
		if previousIndex != -1 {
			// Drop the CR of the final CRLF found above by shifting the
			// remaining tail of the buffer left over it.
			copy(buf[previousIndex-shiftCount:], buf[previousIndex+1:nr])
			shiftCount++
		}
		nr -= shiftCount // shorten byte read count by number of shifts executed
		// When final byte from a read operation is CR, do not emit it until
		// ensure first byte on next read is not LF.
		if f.prevReadEndedCR = buf[nr-1] == '\r'; f.prevReadEndedCR {
			nr-- // pretend byte was never read from source
		}
	} else if f.prevReadEndedCR {
		// Reading from source returned nothing, but this struct is sitting on a
		// trailing CR from previous Read, so let's give it to client now.
		buf[0] = '\r'
		nr = 1
		er = nil
		f.prevReadEndedCR = false // prevent infinite loop
	}
	return nr, er
}
// writeBytesWithNull appends the specified data to the specified hash, followed by
// the NULL byte, in order to make accidental hash collisions less likely.
func writeBytesWithNull(h hash.Hash, data []byte) {
// Ignore return values from writing to the hash, because hash write always
// returns nil error.
_, _ = h.Write(append(data, 0))
}
// dirWalkClosure is used to reduce number of allocation involved in closing
// over these variables. A single instance is created per DigestFromDirectory
// call and reused by every invocation of its filepath.Walk callback.
type dirWalkClosure struct {
	// NOTE(review): "someCopyBufer" has a spelling typo (Bufer); renaming it
	// would require touching every use site, so the name is kept as-is here.
	someCopyBufer []byte    // allocate once and reuse for each file copy
	someModeBytes []byte    // allocate once and reuse for each node; scratch for the uint32 file-mode encoding
	someDirLen    int       // length of the walk root dirname plus one path separator
	someHash      hash.Hash // single hash fed with every node's name, mode type, and contents
}
// DigestFromDirectory returns a hash of the specified directory contents, which
// will match the hash computed for any directory on any supported Go platform
// whose contents exactly match the specified directory.
//
// This function ignores any file system node named `vendor`, `.bzr`, `.git`,
// `.hg`, and `.svn`, as these are typically used as Version Control System
// (VCS) directories.
//
// Other than the `vendor` and VCS directories mentioned above, the calculated
// hash includes the pathname to every discovered file system node, whether it
// is an empty directory, a non-empty directory, an empty file, or a non-empty file.
//
// Symbolic links are excluded, as they are not considered valid elements in the
// definition of a Go module.
//
// The returned VersionedDigest carries the package-level HashVersion constant
// alongside the SHA-256 sum.
func DigestFromDirectory(osDirname string) (VersionedDigest, error) {
	osDirname = filepath.Clean(osDirname)
	// Create a single hash instance for the entire operation, rather than a new
	// hash for each node we encounter.
	closure := dirWalkClosure{
		someCopyBufer: make([]byte, 4*1024), // only allocate a single page
		someModeBytes: make([]byte, 4),      // scratch place to store encoded os.FileMode (uint32)
		someDirLen:    len(osDirname) + len(osPathSeparator),
		someHash:      sha256.New(),
	}
	err := filepath.Walk(osDirname, func(osPathname string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		// Completely ignore symlinks.
		if info.Mode()&os.ModeSymlink != 0 {
			return nil
		}
		// Derive the pathname relative to the walk root; the root node itself
		// yields the empty string.
		var osRelative string
		if len(osPathname) > closure.someDirLen {
			osRelative = osPathname[closure.someDirLen:]
		}
		switch filepath.Base(osRelative) {
		case "vendor", ".bzr", ".git", ".hg", ".svn":
			return filepath.SkipDir
		}
		// We could make our own enum-like data type for encoding the file type,
		// but Go's runtime already gives us architecture independent file
		// modes, as discussed in `os/types.go`:
		//
		//    Go's runtime FileMode type has same definition on all systems, so
		//    that information about files can be moved from one system to
		//    another portably.
		var mt os.FileMode
		// We only care about the bits that identify the type of a file system
		// node, and can ignore append, exclusive, temporary, setuid, setgid,
		// permission bits, and sticky bits, which are coincident to bits which
		// declare type of the file system node.
		modeType := info.Mode() & os.ModeType
		var shouldSkip bool // skip content hashing for non-regular-file node types
		switch {
		case modeType&os.ModeDir > 0:
			mt = os.ModeDir
			// This func does not need to enumerate children, because
			// filepath.Walk will do that for us.
			shouldSkip = true
		case modeType&os.ModeNamedPipe > 0:
			mt = os.ModeNamedPipe
			shouldSkip = true
		case modeType&os.ModeSocket > 0:
			mt = os.ModeSocket
			shouldSkip = true
		case modeType&os.ModeDevice > 0:
			mt = os.ModeDevice
			shouldSkip = true
		}
		// Write the relative pathname to hash because the hash is a function of
		// the node names, node types, and node contents. An added benefit is
		// that empty directories, named pipes, sockets, and devices still
		// contribute to the digest even though they provide no content bytes.
		// Use `filepath.ToSlash` to ensure relative pathname is os-agnostic.
		writeBytesWithNull(closure.someHash, []byte(filepath.ToSlash(osRelative)))
		binary.LittleEndian.PutUint32(closure.someModeBytes, uint32(mt)) // encode the type of mode
		writeBytesWithNull(closure.someHash, closure.someModeBytes)      // and write to hash
		if shouldSkip {
			return nil // nothing more to do for some of the node types
		}
		// If we get here, node is a regular file.
		fh, err := os.Open(osPathname)
		if err != nil {
			return errors.Wrap(err, "cannot Open")
		}
		var bytesWritten int64
		bytesWritten, err = io.CopyBuffer(closure.someHash, newLineEndingReader(fh), closure.someCopyBufer) // fast copy of file contents to hash
		err = errors.Wrap(err, "cannot Copy")                                                               // errors.Wrap only wraps non-nil, so skip extra check
		writeBytesWithNull(closure.someHash, []byte(strconv.FormatInt(bytesWritten, 10)))                   // 10: format file size as base 10 integer
		// Close the file handle to the open file without masking
		// possible previous error value.
		if er := fh.Close(); err == nil {
			err = errors.Wrap(er, "cannot Close")
		}
		return err
	})
	if err != nil {
		return VersionedDigest{}, err
	}
	return VersionedDigest{
		HashVersion: HashVersion,
		Digest:      closure.someHash.Sum(nil),
	}, nil
}
// VendorStatus represents one of a handful of possible status conditions for a
// particular file system node in the vendor directory tree.
//
// The zero value is NotInLock.
type VendorStatus uint8

const (
	// NotInLock is used when a file system node exists for which there is no
	// corresponding dependency in the lock file.
	NotInLock VendorStatus = iota
	// NotInTree is used when a lock file dependency exists for which there is
	// no corresponding file system node.
	NotInTree
	// NoMismatch is used when the digest for a dependency listed in the
	// lockfile matches what is calculated from the file system.
	NoMismatch
	// EmptyDigestInLock is used when the digest for a dependency listed in the
	// lock file is the empty string. While this is a special case of
	// DigestMismatchInLock, separating the cases is a desired feature.
	EmptyDigestInLock
	// DigestMismatchInLock is used when the digest for a dependency listed in
	// the lock file does not match what is calculated from the file system.
	DigestMismatchInLock
	// HashVersionMismatch indicates that the hashing algorithm used to generate
	// the digest being compared against is not the same as the one used by the
	// current program.
	HashVersionMismatch
)
// String returns the human-readable description of the status condition,
// or "unknown" for any value outside the defined set.
func (ls VendorStatus) String() string {
	descriptions := map[VendorStatus]string{
		NotInLock:            "not in lock",
		NotInTree:            "not in tree",
		NoMismatch:           "match",
		EmptyDigestInLock:    "empty digest in lock",
		DigestMismatchInLock: "mismatch",
		HashVersionMismatch:  "hasher changed",
	}
	if description, ok := descriptions[ls]; ok {
		return description
	}
	return "unknown"
}
// fsnode is used to track which file system nodes are required by the lock
// file. When a directory is found whose name matches one of the declared
// projects in the lock file, e.g., "github.com/alice/alice1", an fsnode is
// created for that directory, but not for any of its children. All other file
// system nodes encountered will result in a fsnode created to represent it.
type fsnode struct {
	osRelative           string // os-specific relative path of a resource under vendor root
	isRequiredAncestor   bool   // true iff this node or one of its descendants is in the lock file
	myIndex, parentIndex int    // index of this node and its parent in the tree's slice; parentIndex is -1 for the root node
}
// VersionedDigest comprises both a hash digest, and a simple integer indicating
// the version of the hash algorithm that produced the digest.
type VersionedDigest struct {
	HashVersion int    // version of the hashing algorithm; compare against the package HashVersion constant
	Digest      []byte // raw digest bytes (not hex-encoded; String handles encoding)
}
// String renders the digest as "<version>:<lowercase hex digest>", the same
// form accepted by ParseVersionedDigest.
func (vd VersionedDigest) String() string {
	// %d matches strconv.Itoa and %x on a []byte emits lowercase hex,
	// identical to hex.EncodeToString.
	return fmt.Sprintf("%d:%x", vd.HashVersion, vd.Digest)
}
// IsEmpty indicates if the VersionedDigest is the zero value.
func (vd VersionedDigest) IsEmpty() bool {
	if vd.HashVersion != 0 {
		return false
	}
	return len(vd.Digest) == 0
}
// ParseVersionedDigest decodes the string representation of versioned digest
// information - a colon-separated string with a version number in the first
// part and the hex-encoded hash digest in the second - as a VersionedDigest.
func ParseVersionedDigest(input string) (VersionedDigest, error) {
	parts := strings.Split(input, ":")
	if len(parts) != 2 {
		return VersionedDigest{}, errors.Errorf("expected two colon-separated components in the versioned hash digest, got %q", input)
	}
	// Validate the digest half first, then the version, mirroring the
	// order in which malformed inputs are rejected.
	digest, err := hex.DecodeString(parts[1])
	if err != nil {
		return VersionedDigest{}, err
	}
	version, err := strconv.Atoi(parts[0])
	if err != nil {
		return VersionedDigest{}, err
	}
	return VersionedDigest{HashVersion: version, Digest: digest}, nil
}
// CheckDepTree verifies a dependency tree according to expected digest sums,
// and returns an associative array of file system nodes and their respective
// vendor status conditions.
//
// The keys to the expected digest sums associative array represent the
// project's dependencies, and each is required to be expressed using the
// solidus character, `/`, as its path separator. For example, even on a GOOS
// platform where the file system path separator is a character other than
// solidus, one particular dependency would be represented as
// "github.com/alice/alice1".
func CheckDepTree(osDirname string, wantDigests map[string]VersionedDigest) (map[string]VendorStatus, error) {
osDirname = filepath.Clean(osDirname)
// Create associative array to store the results of calling this function.
slashStatus := make(map[string]VendorStatus)
// Ensure top level pathname is a directory
fi, err := os.Stat(osDirname)
if err != nil {
// If the dir doesn't exist at all, that's OK - just consider all the
// wanted paths absent.
if os.IsNotExist(err) {
for path := range wantDigests {
slashStatus[path] = NotInTree
}
return slashStatus, nil
}
return nil, errors.Wrap(err, "cannot Stat")
}
if !fi.IsDir() {
return nil, errors.Errorf("cannot verify non directory: %q", osDirname)
}
// Initialize work queue with a node representing the specified directory
// name by declaring its relative pathname under the directory name as the
// empty string.
currentNode := &fsnode{osRelative: "", parentIndex: -1, isRequiredAncestor: true}
queue := []*fsnode{currentNode} // queue of directories that must be inspected
// In order to identify all file system nodes that are not in the lock file,
// represented by the specified expected sums parameter, and in order to
// only report the top level of a subdirectory of file system nodes, rather
// than every node internal to them, we will create a tree of nodes stored
// in a slice. We do this because we cannot predict the depth at which
// project roots occur. Some projects are fewer than and some projects more
// than the typical three layer subdirectory under the vendor root
// directory.
//
// For a following few examples, assume the below vendor root directory:
//
// github.com/alice/alice1/a1.go
// github.com/alice/alice2/a2.go
// github.com/bob/bob1/b1.go
// github.com/bob/bob2/b2.go
// launchpad.net/nifty/n1.go
//
// 1) If only the `alice1` and `alice2` projects were in the lock file, we'd
// prefer the output to state that `github.com/bob` is `NotInLock`, and
// `launchpad.net/nifty` is `NotInLock`.
//
// 2) If `alice1`, `alice2`, and `bob1` were in the lock file, we'd want to
// report `github.com/bob/bob2` as `NotInLock`, and `launchpad.net/nifty` is
// `NotInLock`.
//
// 3) If none of `alice1`, `alice2`, `bob1`, or `bob2` were in the lock
// file, the entire `github.com` directory would be reported as `NotInLock`,
// along with `launchpad.net/nifty` is `NotInLock`.
//
// Each node in our tree has the slice index of its parent node, so once we
// can categorically state a particular directory is required because it is
// in the lock file, we can mark all of its ancestors as also being
// required. Then, when we finish walking the directory hierarchy, any nodes
// which are not required but have a required parent will be marked as
// `NotInLock`.
nodes := []*fsnode{currentNode}
// Mark directories of expected projects as required. When each respective
// project is later found while traversing the vendor root hierarchy, its
// status will be updated to reflect whether its digest is empty, or,
// whether or not it matches the expected digest.
for slashPathname := range wantDigests {
slashStatus[slashPathname] = NotInTree
}
for len(queue) > 0 {
// Pop node from the top of queue (depth first traversal, reverse
// lexicographical order inside a directory), clearing the value stored
// in the slice's backing array as we proceed.
lq1 := len(queue) - 1
currentNode, queue[lq1], queue = queue[lq1], nil, queue[:lq1]
slashPathname := filepath.ToSlash(currentNode.osRelative)
osPathname := filepath.Join(osDirname, currentNode.osRelative)
if expectedSum, ok := wantDigests[slashPathname]; ok {
ls := EmptyDigestInLock
if expectedSum.HashVersion != HashVersion {
if !expectedSum.IsEmpty() {
ls = HashVersionMismatch
}
} else if len(expectedSum.Digest) > 0 {
projectSum, err := DigestFromDirectory(osPathname)
if err != nil {
return nil, errors.Wrap(err, "cannot compute dependency hash")
}
if bytes.Equal(projectSum.Digest, expectedSum.Digest) {
ls = NoMismatch
} else {
ls = DigestMismatchInLock
}
}
slashStatus[slashPathname] = ls
// Mark current nodes and all its parents as required.
for i := currentNode.myIndex; i != -1; i = nodes[i].parentIndex {
nodes[i].isRequiredAncestor = true
}
// Do not need to process this directory's contents because we
// already accounted for its contents while calculating its digest.
continue
}
osChildrenNames, err := sortedChildrenFromDirname(osPathname)
if err != nil {
return nil, errors.Wrap(err, "cannot get sorted list of directory children")
}
for _, osChildName := range osChildrenNames {
switch osChildName {
case ".", "..", "vendor", ".bzr", ".git", ".hg", ".svn":
// skip
default:
osChildRelative := filepath.Join(currentNode.osRelative, osChildName)
osChildPathname := filepath.Join(osDirname, osChildRelative)
// Create a new fsnode for this file system node, with a parent
// index set to the index of the current node.
otherNode := &fsnode{osRelative: osChildRelative, myIndex: len(nodes), parentIndex: currentNode.myIndex}
fi, err := os.Stat(osChildPathname)
if err != nil {
return nil, errors.Wrap(err, "cannot Stat")
}
nodes = append(nodes, otherNode) // Track all file system nodes...
if fi.IsDir() {
queue = append(queue, otherNode) // but only need to add directories to the work queue.
}
}
}
}
// Ignoring first node in the list, walk nodes from last to first. Whenever
// the current node is not required, but its parent is required, then the
// current node ought to be marked as `NotInLock`.
for len(nodes) > 1 {
// Pop node from top of queue, clearing the value stored in the slice's
// backing array as we proceed.
ln1 := len(nodes) - 1
currentNode, nodes[ln1], nodes = nodes[ln1], nil, nodes[:ln1]
if !currentNode.isRequiredAncestor && nodes[currentNode.parentIndex].isRequiredAncestor {
slashStatus[filepath.ToSlash(currentNode.osRelative)] = NotInLock
}
}
currentNode, nodes = nil, nil
return slashStatus, nil
}
// sortedChildrenFromDirname returns a lexicographically sorted list of child
// nodes for the specified directory.
//
// The returned names are as reported by Readdirnames: bare names, not joined
// with osDirname.
func sortedChildrenFromDirname(osDirname string) ([]string, error) {
	fh, err := os.Open(osDirname)
	if err != nil {
		return nil, errors.Wrap(err, "cannot Open")
	}
	osChildrenNames, err := fh.Readdirnames(0) // 0: read names of all children
	if err != nil {
		// Close the handle on the error path too; previously it was leaked
		// here. The Readdirnames error takes precedence over any Close error.
		_ = fh.Close()
		return nil, errors.Wrap(err, "cannot Readdirnames")
	}
	sort.Strings(osChildrenNames)
	// Close the file handle to the open directory without masking possible
	// previous error value.
	if er := fh.Close(); er != nil {
		return osChildrenNames, errors.Wrap(er, "cannot Close")
	}
	return osChildrenNames, nil
}
|
verify
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/verify/helper_types_test.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package verify
import (
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/pkgtree"
)
// mkPI creates a ProjectIdentifier with the ProjectRoot as the provided
// string, and the Source unset.
//
// Call normalize() on the returned value if you need the Source to be equal
// to the ProjectRoot.
func mkPI(root string) gps.ProjectIdentifier {
	pi := gps.ProjectIdentifier{}
	pi.ProjectRoot = gps.ProjectRoot(root)
	return pi
}
// safeLock is a simple test implementation of a lock (it provides the
// Projects and InputImports methods that gps.Lock consumers call), backed by
// plain slices so fixtures can be freely duplicated and transformed.
type safeLock struct {
	p []gps.LockedProject // locked projects
	i []string            // input imports recorded in the lock
}

// InputImports returns the lock's recorded input import paths.
func (sl safeLock) InputImports() []string {
	return sl.i
}

// Projects returns the lock's locked projects.
func (sl safeLock) Projects() []gps.LockedProject {
	return sl.p
}
// dup returns a deep copy of sl. Every locked project must be a
// VerifiableProject; the type assertion panics otherwise.
func (sl safeLock) dup() safeLock {
	imports := make([]string, len(sl.i))
	copy(imports, sl.i)

	projects := make([]gps.LockedProject, 0, len(sl.p))
	for _, lp := range sl.p {
		// Only for use with VerifiableProjects.
		projects = append(projects, lp.(VerifiableProject).dup())
	}

	return safeLock{i: imports, p: projects}
}
// dup returns a deep copy of vp, cloning the package list and the digest
// bytes so the copy shares no mutable state with the original.
func (vp VerifiableProject) dup() VerifiableProject {
	pkgs := make([]string, len(vp.Packages()))
	copy(pkgs, vp.Packages())

	digest := make([]byte, len(vp.Digest.Digest))
	copy(digest, vp.Digest.Digest)

	out := VerifiableProject{
		LockedProject: gps.NewLockedProject(vp.Ident(), vp.Version(), pkgs),
		PruneOpts:     vp.PruneOpts,
	}
	out.Digest = VersionedDigest{
		HashVersion: vp.Digest.HashVersion,
		Digest:      digest,
	}
	return out
}
// simpleRootManifest exists so that we have a safe value to swap into solver
// params when a nil Manifest is provided.
type simpleRootManifest struct {
	c, ovr gps.ProjectConstraints  // normal dependency constraints, and overrides
	ig     *pkgtree.IgnoredRuleset // ignored-package rules
	req    map[string]bool         // required package paths
}

// DependencyConstraints returns the manifest's normal (non-override)
// constraint rules.
func (m simpleRootManifest) DependencyConstraints() gps.ProjectConstraints {
	return m.c
}

// Overrides returns the manifest's override constraint rules.
func (m simpleRootManifest) Overrides() gps.ProjectConstraints {
	return m.ovr
}

// IgnoredPackages returns the manifest's ignored-package ruleset.
func (m simpleRootManifest) IgnoredPackages() *pkgtree.IgnoredRuleset {
	return m.ig
}

// RequiredPackages returns the manifest's required package set.
func (m simpleRootManifest) RequiredPackages() map[string]bool {
	return m.req
}
// dup returns a deep copy of m: maps are re-made and refilled, and the
// ignored ruleset is rebuilt from its rule slice.
func (m simpleRootManifest) dup() simpleRootManifest {
	out := simpleRootManifest{
		c:   make(gps.ProjectConstraints, len(m.c)),
		ovr: make(gps.ProjectConstraints, len(m.ovr)),
		ig:  pkgtree.NewIgnoredRuleset(m.ig.ToSlice()),
		req: make(map[string]bool, len(m.req)),
	}
	for pr, pp := range m.c {
		out.c[pr] = pp
	}
	for pr, pp := range m.ovr {
		out.ovr[pr] = pp
	}
	for path := range m.req {
		out.req[path] = true
	}
	return out
}
// newVerifiableProject builds a VerifiableProject fixture for tests, using
// the current HashVersion and a fixed placeholder digest.
func newVerifiableProject(id gps.ProjectIdentifier, v gps.Version, pkgs []string) VerifiableProject {
	vp := VerifiableProject{
		LockedProject: gps.NewLockedProject(id, v, pkgs),
	}
	vp.Digest = VersionedDigest{
		HashVersion: HashVersion,
		Digest:      []byte("something"),
	}
	return vp
}
|
verify
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/verify/locksat_test.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package verify
import (
"strings"
"testing"
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/pkgtree"
)
// lockUnsatisfactionDimension is a bitset of the orthogonal ways in which a
// lock can fail to satisfy the inputs; used by tests to compare expectations.
type lockUnsatisfactionDimension uint8

const (
	noLock lockUnsatisfactionDimension = 1 << iota
	missingImports
	excessImports
	unmatchedOverrides
	unmatchedConstraints
)

// String renders the set bits as a comma-separated list of labels, in
// ascending bit order.
func (lsd lockUnsatisfactionDimension) String() string {
	var parts []string
	for i := uint(0); i < 5; i++ {
		bit := lockUnsatisfactionDimension(1 << i)
		if lsd&bit == 0 {
			continue
		}
		// Switch on the isolated bit, not the whole set; switching on lsd
		// itself only matched when exactly one dimension was set, so combined
		// bitsets rendered as an empty string.
		switch bit {
		case noLock:
			parts = append(parts, "no lock")
		case missingImports:
			parts = append(parts, "missing imports")
		case excessImports:
			parts = append(parts, "excess imports")
		case unmatchedOverrides:
			parts = append(parts, "unmatched overrides")
		case unmatchedConstraints:
			parts = append(parts, "unmatched constraints")
		}
	}
	return strings.Join(parts, ", ")
}
// TestLockSatisfaction exercises LockSatisfiesInputs: a base lock/manifest
// fixture is transformed per case, and the resulting LockSatisfaction is
// compared against the expected set of unsatisfied dimensions.
func TestLockSatisfaction(t *testing.T) {
	fooversion := gps.NewVersion("v1.0.0").Pair("foorev1")
	bazversion := gps.NewVersion("v2.0.0").Pair("bazrev1")
	transver := gps.NewVersion("v0.5.0").Pair("transrev1")
	// Base lock: two direct input imports plus one transitive dependency.
	l := safeLock{
		i: []string{"foo.com/bar", "baz.com/qux"},
		p: []gps.LockedProject{
			newVerifiableProject(mkPI("foo.com/bar"), fooversion, []string{".", "subpkg"}),
			newVerifiableProject(mkPI("baz.com/qux"), bazversion, []string{".", "other"}),
			newVerifiableProject(mkPI("transitive.com/dependency"), transver, []string{"."}),
		},
	}
	// A one-package tree whose only import is foo.com/bar; baz.com/qux is
	// reached only via the manifest's required list below.
	ptree := pkgtree.PackageTree{
		ImportRoot: "current",
		Packages: map[string]pkgtree.PackageOrErr{
			"current": {
				P: pkgtree.Package{
					Name:       "current",
					ImportPath: "current",
					Imports:    []string{"foo.com/bar"},
				},
			},
		},
	}
	rm := simpleRootManifest{
		req: map[string]bool{
			"baz.com/qux": true,
		},
	}
	// dup ignores its argument and always duplicates the base fixture rm.
	var dup rootManifestTransformer = func(simpleRootManifest) simpleRootManifest {
		return rm.dup()
	}
	tt := map[string]struct {
		rmt     rootManifestTransformer
		sat     lockUnsatisfactionDimension
		checkfn func(*testing.T, LockSatisfaction)
	}{
		"ident": {
			rmt: dup,
		},
		"added import": {
			rmt: dup.addReq("fiz.com/wow"),
			sat: missingImports,
		},
		"removed import": {
			rmt: dup.rmReq("baz.com/qux"),
			sat: excessImports,
		},
		"added and removed import": {
			rmt: dup.rmReq("baz.com/qux").addReq("fiz.com/wow"),
			sat: excessImports | missingImports,
			checkfn: func(t *testing.T, lsat LockSatisfaction) {
				if lsat.MissingImports[0] != "fiz.com/wow" {
					t.Errorf("expected 'fiz.com/wow' as sole missing import, got %s", lsat.MissingImports)
				}
				if lsat.ExcessImports[0] != "baz.com/qux" {
					t.Errorf("expected 'baz.com/qux' as sole excess import, got %s", lsat.ExcessImports)
				}
			},
		},
		"acceptable constraint": {
			rmt: dup.setConstraint("baz.com/qux", bazversion.Unpair(), ""),
		},
		"unacceptable constraint": {
			rmt: dup.setConstraint("baz.com/qux", fooversion.Unpair(), ""),
			sat: unmatchedConstraints,
			checkfn: func(t *testing.T, lsat LockSatisfaction) {
				pr := gps.ProjectRoot("baz.com/qux")
				unmet, has := lsat.UnmetConstraints[pr]
				if !has {
					t.Errorf("did not have constraint on expected project %q; map contents: %s", pr, lsat.UnmetConstraints)
				}
				if unmet.C != fooversion.Unpair() {
					t.Errorf("wanted %s for unmet constraint, got %s", fooversion.Unpair(), unmet.C)
				}
				if unmet.V != bazversion {
					t.Errorf("wanted %s for version that did not meet constraint, got %s", bazversion, unmet.V)
				}
			},
		},
		"acceptable override": {
			rmt: dup.setOverride("baz.com/qux", bazversion.Unpair(), ""),
		},
		"unacceptable override": {
			rmt: dup.setOverride("baz.com/qux", fooversion.Unpair(), ""),
			sat: unmatchedOverrides,
		},
		// Constraints on projects no package imports are ineffectual...
		"ineffectual constraint": {
			rmt: dup.setConstraint("transitive.com/dependency", bazversion.Unpair(), ""),
		},
		// ...but overrides apply even to transitive dependencies.
		"transitive override": {
			rmt: dup.setOverride("transitive.com/dependency", bazversion.Unpair(), ""),
			sat: unmatchedOverrides,
		},
		"ignores respected": {
			rmt: dup.addIgnore("foo.com/bar"),
			sat: excessImports,
		},
	}
	for name, fix := range tt {
		fix := fix
		t.Run(name, func(t *testing.T) {
			fixrm := fix.rmt(rm)
			lsat := LockSatisfiesInputs(l, fixrm, ptree)
			gotsat := lsat.unsatTypes()
			// Compare expected vs. observed unsat dimensions in both
			// directions so the error names the exact mismatched bits.
			if fix.sat & ^gotsat != 0 {
				t.Errorf("wanted unsat in some dimensions that were satisfied: %s", fix.sat & ^gotsat)
			}
			if gotsat & ^fix.sat != 0 {
				t.Errorf("wanted sat in some dimensions that were unsatisfied: %s", gotsat & ^fix.sat)
			}
			if lsat.Satisfied() && fix.sat != 0 {
				t.Errorf("Satisfied() incorrectly reporting true when expecting some dimensions to be unsatisfied: %s", fix.sat)
			} else if !lsat.Satisfied() && fix.sat == 0 {
				t.Error("Satisfied() incorrectly reporting false when expecting all dimensions to be satisfied")
			}
			if fix.checkfn != nil {
				fix.checkfn(t, lsat)
			}
		})
	}
	// The zero value must fail closed (no lock existed).
	var lsat LockSatisfaction
	if lsat.Satisfied() {
		t.Error("zero value of LockSatisfaction should fail")
	}
	if LockSatisfiesInputs(nil, nil, ptree).Satisfied() {
		t.Error("nil lock to LockSatisfiesInputs should produce failing result")
	}
}
// unsatTypes folds the individual failure fields of ls into a
// lockUnsatisfactionDimension bitset, for compact comparison in tests.
func (ls LockSatisfaction) unsatTypes() lockUnsatisfactionDimension {
	var dims lockUnsatisfactionDimension
	mark := func(cond bool, d lockUnsatisfactionDimension) {
		if cond {
			dims |= d
		}
	}
	mark(!ls.LockExisted, noLock)
	mark(len(ls.MissingImports) != 0, missingImports)
	mark(len(ls.ExcessImports) != 0, excessImports)
	mark(len(ls.UnmetOverrides) != 0, unmatchedOverrides)
	mark(len(ls.UnmetConstraints) != 0, unmatchedConstraints)
	return dims
}
// rootManifestTransformer maps one simpleRootManifest to another; tests chain
// these to derive manifest fixtures from a base manifest.
type rootManifestTransformer func(simpleRootManifest) simpleRootManifest

// compose returns a transformer that applies rmt first, then next. A nil
// receiver composes to just next.
func (rmt rootManifestTransformer) compose(next rootManifestTransformer) rootManifestTransformer {
	if rmt == nil {
		return next
	}
	return func(m simpleRootManifest) simpleRootManifest {
		return next(rmt(m))
	}
}
// addReq appends a step that marks path as a required package.
func (rmt rootManifestTransformer) addReq(path string) rootManifestTransformer {
	return rmt.compose(func(m simpleRootManifest) simpleRootManifest {
		m.req[path] = true
		return m
	})
}
// rmReq appends a step that removes path from the required packages.
func (rmt rootManifestTransformer) rmReq(path string) rootManifestTransformer {
	return rmt.compose(func(m simpleRootManifest) simpleRootManifest {
		delete(m.req, path)
		return m
	})
}
// setConstraint appends a step that installs a normal constraint rule for pr.
func (rmt rootManifestTransformer) setConstraint(pr string, c gps.Constraint, source string) rootManifestTransformer {
	return rmt.compose(func(m simpleRootManifest) simpleRootManifest {
		pp := gps.ProjectProperties{
			Constraint: c,
			Source:     source,
		}
		m.c[gps.ProjectRoot(pr)] = pp
		return m
	})
}
// setOverride appends a step that installs an override rule for pr.
func (rmt rootManifestTransformer) setOverride(pr string, c gps.Constraint, source string) rootManifestTransformer {
	return rmt.compose(func(m simpleRootManifest) simpleRootManifest {
		pp := gps.ProjectProperties{
			Constraint: c,
			Source:     source,
		}
		m.ovr[gps.ProjectRoot(pr)] = pp
		return m
	})
}
// addIgnore appends a step that adds path to the ignored-package ruleset.
func (rmt rootManifestTransformer) addIgnore(path string) rootManifestTransformer {
	return rmt.compose(func(m simpleRootManifest) simpleRootManifest {
		rules := append(m.ig.ToSlice(), path)
		m.ig = pkgtree.NewIgnoredRuleset(rules)
		return m
	})
}
|
verify
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/verify/locksat.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package verify
import (
radix "github.com/armon/go-radix"
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/paths"
"github.com/golang/dep/gps/pkgtree"
)
// LockSatisfaction holds the compound result of LockSatisfiesInputs, allowing
// the caller to inspect each of several orthogonal possible types of failure.
//
// The zero value assumes that there was no input lock, which necessarily means
// the inputs were not satisfied. This zero value means we err on the side of
// failure.
type LockSatisfaction struct {
	// If LockExisted is false, it indicates that a nil gps.Lock was passed to
	// LockSatisfiesInputs().
	LockExisted bool
	// MissingImports is the set of import paths that were present in the
	// inputs but missing in the Lock.
	MissingImports []string
	// ExcessImports is the set of import paths that were present in the Lock
	// but absent from the inputs.
	ExcessImports []string
	// UnmetConstraints reports any normal, non-override constraint rules that
	// were not satisfied by the corresponding LockedProject in the Lock.
	UnmetConstraints map[gps.ProjectRoot]ConstraintMismatch
	// UnmetOverrides reports any override rules that were not satisfied by the
	// corresponding LockedProject in the Lock.
	UnmetOverrides map[gps.ProjectRoot]ConstraintMismatch
}
// ConstraintMismatch is a two-tuple of a gps.Version, and a gps.Constraint that
// does not allow that version.
type ConstraintMismatch struct {
	C gps.Constraint // the rule that was not met
	V gps.Version    // the locked version that C does not allow
}
// LockSatisfiesInputs determines whether the provided Lock satisfies all the
// requirements indicated by the inputs (RootManifest and PackageTree).
//
// The second parameter is expected to be the list of imports that were used to
// generate the input Lock. Without this explicit list, it is not possible to
// compute package imports that may have been removed. Figuring out that
// negative space would require exploring the entire graph to ensure there are
// no in-edges for particular imports.
//
// A nil Lock always produces a failing (zero value) result. A nil RootManifest
// is treated as a manifest with no requirements, ignores, constraints, or
// overrides.
func LockSatisfiesInputs(l gps.Lock, m gps.RootManifest, ptree pkgtree.PackageTree) LockSatisfaction {
	if l == nil {
		return LockSatisfaction{}
	}

	lsat := LockSatisfaction{
		LockExisted:      true,
		UnmetOverrides:   make(map[gps.ProjectRoot]ConstraintMismatch),
		UnmetConstraints: make(map[gps.ProjectRoot]ConstraintMismatch),
	}

	var ig *pkgtree.IgnoredRuleset
	var req map[string]bool
	if m != nil {
		ig = m.IgnoredPackages()
		req = m.RequiredPackages()
	}

	rm, _ := ptree.ToReachMap(true, true, false, ig)
	reach := rm.FlattenFn(paths.IsStandardImportPath)

	inlock := make(map[string]bool, len(l.InputImports()))
	ininputs := make(map[string]bool, len(reach)+len(req))

	type lockUnsatisfy uint8
	const (
		missingFromLock lockUnsatisfy = iota
		inAdditionToLock
	)

	pkgDiff := make(map[string]lockUnsatisfy)

	for _, imp := range reach {
		ininputs[imp] = true
	}
	for imp := range req {
		ininputs[imp] = true
	}
	for _, imp := range l.InputImports() {
		inlock[imp] = true
	}

	for ip := range ininputs {
		if !inlock[ip] {
			pkgDiff[ip] = missingFromLock
		} else {
			// So we don't have to revisit it below
			delete(inlock, ip)
		}
	}

	// Something in the missing list might already be in the packages list,
	// because another package in the depgraph imports it. We could make a
	// special case for that, but it would break the simplicity of the model and
	// complicate the notion of LockSatisfaction.Satisfied(), so let's see if we
	// can get away without it.

	for ip := range inlock {
		if !ininputs[ip] {
			pkgDiff[ip] = inAdditionToLock
		}
	}

	for ip, typ := range pkgDiff {
		if typ == missingFromLock {
			lsat.MissingImports = append(lsat.MissingImports, ip)
		} else {
			lsat.ExcessImports = append(lsat.ExcessImports, ip)
		}
	}

	// Without a manifest there are no constraint or override rules to check.
	// This guard also mirrors the nil check above; previously a nil m would
	// panic on the method calls below.
	if m == nil {
		return lsat
	}

	eff := findEffectualConstraints(m, ininputs)
	ovr, constraints := m.Overrides(), m.DependencyConstraints()

	for _, lp := range l.Projects() {
		pr := lp.Ident().ProjectRoot

		if pp, has := ovr[pr]; has {
			if !pp.Constraint.Matches(lp.Version()) {
				lsat.UnmetOverrides[pr] = ConstraintMismatch{
					C: pp.Constraint,
					V: lp.Version(),
				}
			}
			// The constraint isn't considered if we have an override,
			// independent of whether the override is satisfied.
			continue
		}

		// Normal constraints only apply when some input package actually
		// imports into the constrained project (an "effectual" constraint).
		if pp, has := constraints[pr]; has && eff[string(pr)] && !pp.Constraint.Matches(lp.Version()) {
			lsat.UnmetConstraints[pr] = ConstraintMismatch{
				C: pp.Constraint,
				V: lp.Version(),
			}
		}
	}

	return lsat
}
// Satisfied is a shortcut method that indicates whether there were any ways in
// which the Lock did not satisfy the inputs. It will return true only if the
// Lock was satisfactory in all respects vis-a-vis the inputs.
func (ls LockSatisfaction) Satisfied() bool {
	return ls.LockExisted &&
		len(ls.MissingImports) == 0 &&
		len(ls.ExcessImports) == 0 &&
		len(ls.UnmetOverrides) == 0 &&
		len(ls.UnmetConstraints) == 0
}
// findEffectualConstraints returns the set of constrained project roots that
// are actually reached by at least one of the given import paths, using a
// radix tree for longest-prefix matching of imports against roots.
func findEffectualConstraints(m gps.Manifest, imports map[string]bool) map[string]bool {
	xt := radix.New()
	// FIXME(sdboyer) this has the trailing slash ambiguity problem; adapt
	// code from the solver
	for pr := range m.DependencyConstraints() {
		xt.Insert(string(pr), nil)
	}

	eff := make(map[string]bool)
	for imp := range imports {
		root, _, has := xt.LongestPrefix(imp)
		if has {
			eff[root] = true
		}
	}
	return eff
}
|
verify
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/verify/lock.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package verify
import (
"github.com/golang/dep/gps"
)
// VerifiableProject composes a LockedProject to indicate what the hash digest
// of a file tree for that LockedProject should be, given the PruneOptions and
// the list of packages.
type VerifiableProject struct {
	gps.LockedProject
	// PruneOpts is the pruning configuration associated with the project.
	PruneOpts gps.PruneOptions
	// Digest is the expected versioned hash of the project's file tree.
	Digest VersionedDigest
}
|
verify
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/verify/lockdiff_test.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package verify
import (
"fmt"
"math/bits"
"strings"
"testing"
"github.com/golang/dep/gps"
)
// contains reports whether needle appears in haystack.
func contains(haystack []string, needle string) bool {
	for i := range haystack {
		if haystack[i] == needle {
			return true
		}
	}
	return false
}
// String renders the set bits of dd as a comma-separated list of labels, in
// ascending bit order. Unrecognized bits are skipped.
func (dd DeltaDimension) String() string {
	var parts []string
	for rem := dd; rem != 0; {
		// Peel off the lowest set bit and translate it to a label.
		bit := DeltaDimension(1) << uint(bits.TrailingZeros32(uint32(rem)))
		rem &^= bit
		switch bit {
		case InputImportsChanged:
			parts = append(parts, "input imports")
		case ProjectAdded:
			parts = append(parts, "project added")
		case ProjectRemoved:
			parts = append(parts, "project removed")
		case SourceChanged:
			parts = append(parts, "source changed")
		case VersionChanged:
			parts = append(parts, "version changed")
		case RevisionChanged:
			parts = append(parts, "revision changed")
		case PackagesChanged:
			parts = append(parts, "packages changed")
		case PruneOptsChanged:
			parts = append(parts, "pruneopts changed")
		case HashVersionChanged:
			parts = append(parts, "hash version changed")
		case HashChanged:
			parts = append(parts, "hash digest changed")
		}
	}
	return strings.Join(parts, ", ")
}
// TestLockDelta exercises DiffLocks across lock-level dimensions: input
// imports added/removed and projects added/removed, singly and combined.
func TestLockDelta(t *testing.T) {
	fooversion := gps.NewVersion("v1.0.0").Pair("foorev1")
	bazversion := gps.NewVersion("v2.0.0").Pair("bazrev1")
	transver := gps.NewVersion("v0.5.0").Pair("transrev1")
	// Base lock fixture; each case transforms a duplicate of it.
	l := safeLock{
		i: []string{"foo.com/bar", "baz.com/qux"},
		p: []gps.LockedProject{
			newVerifiableProject(mkPI("foo.com/bar"), fooversion, []string{".", "subpkg"}),
			newVerifiableProject(mkPI("baz.com/qux"), bazversion, []string{".", "other"}),
			newVerifiableProject(mkPI("transitive.com/dependency"), transver, []string{"."}),
		},
	}
	var dup lockTransformer = func(l safeLock) safeLock {
		return l.dup()
	}
	tt := map[string]struct {
		lt      lockTransformer
		delta   DeltaDimension
		checkfn func(*testing.T, LockDelta)
	}{
		"ident": {
			lt: dup,
		},
		"added import": {
			lt:    dup.addII("other.org"),
			delta: InputImportsChanged,
		},
		"added import 2x": {
			lt:    dup.addII("other.org").addII("andsomethingelse.com/wowie"),
			delta: InputImportsChanged,
			checkfn: func(t *testing.T, ld LockDelta) {
				if !contains(ld.AddedImportInputs, "other.org") {
					t.Error("first added input import missing")
				}
				if !contains(ld.AddedImportInputs, "andsomethingelse.com/wowie") {
					// Message previously duplicated "first"; this checks the
					// second added import.
					t.Error("second added input import missing")
				}
			},
		},
		"removed import": {
			lt:    dup.rmII("baz.com/qux"),
			delta: InputImportsChanged,
			checkfn: func(t *testing.T, ld LockDelta) {
				if !contains(ld.RemovedImportInputs, "baz.com/qux") {
					t.Error("removed input import missing")
				}
			},
		},
		"add project": {
			lt:    dup.addDumbProject("madeup.org"),
			delta: ProjectAdded,
		},
		"remove project": {
			lt:    dup.rmProject("foo.com/bar"),
			delta: ProjectRemoved,
		},
		"remove last project": {
			lt:    dup.rmProject("transitive.com/dependency"),
			delta: ProjectRemoved,
		},
		"all": {
			lt:    dup.addII("other.org").rmII("baz.com/qux").addDumbProject("zebrafun.org").rmProject("foo.com/bar"),
			delta: InputImportsChanged | ProjectRemoved | ProjectAdded,
		},
		"remove all projects and imports": {
			lt:    dup.rmII("baz.com/qux").rmII("foo.com/bar").rmProject("baz.com/qux").rmProject("foo.com/bar").rmProject("transitive.com/dependency"),
			delta: InputImportsChanged | ProjectRemoved,
		},
	}
	for name, fix := range tt {
		fix := fix
		t.Run(name, func(t *testing.T) {
			fixl := fix.lt(l)
			ld := DiffLocks(l, fixl)
			if !ld.Changed(AnyChanged) && fix.delta != 0 {
				t.Errorf("Changed() reported false when expecting some dimensions to be changed: %s", fix.delta)
			} else if ld.Changed(AnyChanged) && fix.delta == 0 {
				t.Error("Changed() reported true when expecting no changes")
			}
			if ld.Changed(AnyChanged & ^fix.delta) {
				t.Errorf("Changed() reported true when checking along not-expected dimensions: %s", ld.Changes() & ^fix.delta)
			}
			// Compare expected vs. observed delta bits in both directions.
			gotdelta := ld.Changes()
			if fix.delta & ^gotdelta != 0 {
				t.Errorf("wanted change in some dimensions that were unchanged: %s", fix.delta & ^gotdelta)
			}
			if gotdelta & ^fix.delta != 0 {
				t.Errorf("did not want change in some dimensions that were changed: %s", gotdelta & ^fix.delta)
			}
			if fix.checkfn != nil {
				fix.checkfn(t, ld)
			}
		})
	}
}
// TestLockedProjectPropertiesDelta exercises DiffLockedProjectProperties
// across every property-level dimension: package list, source, version,
// revision, prune options, and digest (version and contents), plus the
// VerifiableProject-vs-plain-LockedProject boundary.
func TestLockedProjectPropertiesDelta(t *testing.T) {
	fooversion, foorev := gps.NewVersion("v1.0.0"), gps.Revision("foorev1")
	foopair := fooversion.Pair(foorev)
	foovp := VerifiableProject{
		LockedProject: gps.NewLockedProject(mkPI("foo.com/project"), foopair, []string{".", "subpkg"}),
		PruneOpts:     gps.PruneNestedVendorDirs,
		Digest: VersionedDigest{
			HashVersion: HashVersion,
			Digest:      []byte("foobytes"),
		},
	}
	var dup lockedProjectTransformer = func(lp gps.LockedProject) gps.LockedProject {
		return lp.(VerifiableProject).dup()
	}
	tt := map[string]struct {
		lt1, lt2 lockedProjectTransformer
		delta    DeltaDimension
		checkfn  func(*testing.T, LockedProjectPropertiesDelta)
	}{
		"ident": {
			lt1: dup,
		},
		"add pkg": {
			lt1:   dup.addPkg("whatev"),
			delta: PackagesChanged,
		},
		"rm pkg": {
			lt1:   dup.rmPkg("subpkg"),
			delta: PackagesChanged,
		},
		"add and rm pkg": {
			lt1:   dup.rmPkg("subpkg").addPkg("whatev"),
			delta: PackagesChanged,
			checkfn: func(t *testing.T, ld LockedProjectPropertiesDelta) {
				if !contains(ld.PackagesAdded, "whatev") {
					t.Error("added pkg missing from list")
				}
				if !contains(ld.PackagesRemoved, "subpkg") {
					t.Error("removed pkg missing from list")
				}
			},
		},
		"add source": {
			lt1:   dup.setSource("somethingelse"),
			delta: SourceChanged,
		},
		"remove source": {
			lt1:   dup.setSource("somethingelse"),
			lt2:   dup,
			delta: SourceChanged,
		},
		"to rev only": {
			lt1:   dup.setVersion(foorev),
			delta: VersionChanged,
		},
		"from rev only": {
			lt1:   dup.setVersion(foorev),
			lt2:   dup,
			delta: VersionChanged,
		},
		"to new rev only": {
			lt1:   dup.setVersion(gps.Revision("newrev")),
			delta: VersionChanged | RevisionChanged,
		},
		"from new rev only": {
			lt1:   dup.setVersion(gps.Revision("newrev")),
			lt2:   dup,
			delta: VersionChanged | RevisionChanged,
		},
		"version change": {
			lt1:   dup.setVersion(gps.NewVersion("v0.5.0").Pair(foorev)),
			delta: VersionChanged,
		},
		"version change to norev": {
			lt1:   dup.setVersion(gps.NewVersion("v0.5.0")),
			delta: VersionChanged | RevisionChanged,
		},
		"version change from norev": {
			lt1:   dup.setVersion(gps.NewVersion("v0.5.0")),
			lt2:   dup.setVersion(gps.NewVersion("v0.5.0").Pair(foorev)),
			delta: RevisionChanged,
		},
		"to branch": {
			lt1:   dup.setVersion(gps.NewBranch("master").Pair(foorev)),
			delta: VersionChanged,
		},
		"to branch new rev": {
			lt1:   dup.setVersion(gps.NewBranch("master").Pair(gps.Revision("newrev"))),
			delta: VersionChanged | RevisionChanged,
		},
		"to empty prune opts": {
			lt1:   dup.setPruneOpts(0),
			delta: PruneOptsChanged,
		},
		"from empty prune opts": {
			lt1:   dup.setPruneOpts(0),
			lt2:   dup,
			delta: PruneOptsChanged,
		},
		"prune opts change": {
			lt1:   dup.setPruneOpts(gps.PruneNestedVendorDirs | gps.PruneNonGoFiles),
			delta: PruneOptsChanged,
		},
		"empty digest": {
			lt1:   dup.setDigest(VersionedDigest{}),
			delta: HashVersionChanged | HashChanged,
		},
		"to empty digest": {
			lt1:   dup.setDigest(VersionedDigest{}),
			lt2:   dup,
			delta: HashVersionChanged | HashChanged,
		},
		"hash version changed": {
			lt1:   dup.setDigest(VersionedDigest{HashVersion: HashVersion + 1, Digest: []byte("foobytes")}),
			delta: HashVersionChanged,
		},
		"hash contents changed": {
			lt1:   dup.setDigest(VersionedDigest{HashVersion: HashVersion, Digest: []byte("barbytes")}),
			delta: HashChanged,
		},
		"to plain locked project": {
			lt1:   dup.toPlainLP(),
			delta: PruneOptsChanged | HashChanged | HashVersionChanged,
		},
		"from plain locked project": {
			lt1:   dup.toPlainLP(),
			lt2:   dup,
			delta: PruneOptsChanged | HashChanged | HashVersionChanged,
		},
		"all": {
			lt1:   dup.setDigest(VersionedDigest{}).setVersion(gps.NewBranch("master").Pair(gps.Revision("newrev"))).setPruneOpts(gps.PruneNestedVendorDirs | gps.PruneNonGoFiles).setSource("whatever"),
			delta: SourceChanged | VersionChanged | RevisionChanged | PruneOptsChanged | HashChanged | HashVersionChanged,
		},
	}
	for name, fix := range tt {
		fix := fix
		t.Run(name, func(t *testing.T) {
			// Use two patterns for constructing locks to compare: if only lt1
			// is set, use foovp as the first lp and compare with the lt1
			// transforms applied. If lt2 is set, transform foovp with lt1 for
			// the first lp, then transform foovp with lt2 for the second lp.
			var lp1, lp2 gps.LockedProject
			if fix.lt2 == nil {
				lp1 = foovp
				lp2 = fix.lt1(foovp)
			} else {
				lp1 = fix.lt1(foovp)
				lp2 = fix.lt2(foovp)
			}
			lppd := DiffLockedProjectProperties(lp1, lp2)
			if !lppd.Changed(AnyChanged) && fix.delta != 0 {
				t.Errorf("Changed() reporting false when expecting some dimensions to be changed: %s", fix.delta)
			} else if lppd.Changed(AnyChanged) && fix.delta == 0 {
				t.Error("Changed() reporting true when expecting no changes")
			}
			if lppd.Changed(AnyChanged & ^fix.delta) {
				t.Errorf("Changed() reported true when checking along not-expected dimensions: %s", lppd.Changes() & ^fix.delta)
			}
			// Compare expected vs. observed delta bits in both directions.
			gotdelta := lppd.Changes()
			if fix.delta & ^gotdelta != 0 {
				t.Errorf("wanted change in some dimensions that were unchanged: %s", fix.delta & ^gotdelta)
			}
			if gotdelta & ^fix.delta != 0 {
				t.Errorf("did not want change in some dimensions that were changed: %s", gotdelta & ^fix.delta)
			}
			if fix.checkfn != nil {
				fix.checkfn(t, lppd)
			}
		})
	}
}
// lockTransformer maps one safeLock to another; tests chain these to derive
// lock fixtures from a base lock.
type lockTransformer func(safeLock) safeLock

// compose returns a transformer that applies lt first, then next. A nil
// receiver composes to just next.
func (lt lockTransformer) compose(next lockTransformer) lockTransformer {
	if lt == nil {
		return next
	}
	return func(l safeLock) safeLock {
		return next(lt(l))
	}
}
// addDumbProject appends a step that adds a throwaway project rooted at root;
// panics if the root is already present in the lock.
func (lt lockTransformer) addDumbProject(root string) lockTransformer {
	vp := newVerifiableProject(mkPI(root), gps.NewVersion("whatever").Pair("addedrev"), []string{"."})
	return lt.compose(func(l safeLock) safeLock {
		target := vp.Ident().ProjectRoot
		for _, existing := range l.p {
			if existing.Ident().ProjectRoot == target {
				panic(fmt.Sprintf("%q already in lock", target))
			}
		}
		l.p = append(l.p, vp)
		return l
	})
}
// rmProject appends a step that removes the project rooted at pr; panics if
// no such project exists in the lock.
func (lt lockTransformer) rmProject(pr string) lockTransformer {
	root := gps.ProjectRoot(pr)
	return lt.compose(func(l safeLock) safeLock {
		for k := range l.p {
			if l.p[k].Ident().ProjectRoot == root {
				l.p = append(l.p[:k], l.p[k+1:]...)
				return l
			}
		}
		panic(fmt.Sprintf("%q not in lock", pr))
	})
}
// addII appends a step that adds path to the lock's input imports; panics if
// it is already present.
func (lt lockTransformer) addII(path string) lockTransformer {
	return lt.compose(func(l safeLock) safeLock {
		if contains(l.i, path) {
			panic(fmt.Sprintf("%q already in input imports", path))
		}
		l.i = append(l.i, path)
		return l
	})
}
// rmII appends a step that removes path from the lock's input imports; panics
// if it is not present.
func (lt lockTransformer) rmII(path string) lockTransformer {
	return lt.compose(func(l safeLock) safeLock {
		for k, impath := range l.i {
			if impath == path {
				l.i = append(l.i[:k], l.i[k+1:]...)
				return l
			}
		}
		panic(fmt.Sprintf("%q not in input imports", path))
	})
}
// lockedProjectTransformer maps one LockedProject to another; tests chain
// these to derive project fixtures from a base project.
type lockedProjectTransformer func(gps.LockedProject) gps.LockedProject

// compose returns a transformer that applies lpt first, then next. A nil
// receiver composes to just next.
func (lpt lockedProjectTransformer) compose(next lockedProjectTransformer) lockedProjectTransformer {
	if lpt == nil {
		return next
	}
	return func(lp gps.LockedProject) gps.LockedProject {
		return next(lpt(lp))
	}
}
// addPkg appends a step that adds path to the project's package list; panics
// if it is already present. VerifiableProject wrappers are preserved.
func (lpt lockedProjectTransformer) addPkg(path string) lockedProjectTransformer {
	return lpt.compose(func(lp gps.LockedProject) gps.LockedProject {
		if contains(lp.Packages(), path) {
			panic(fmt.Sprintf("%q already in pkg list", path))
		}
		nlp := gps.NewLockedProject(lp.Ident(), lp.Version(), append(lp.Packages(), path))
		vp, ok := lp.(VerifiableProject)
		if !ok {
			return nlp
		}
		vp.LockedProject = nlp
		return vp
	})
}
// rmPkg appends a step that removes path from the project's package list;
// panics if it is not present. VerifiableProject wrappers are preserved.
func (lpt lockedProjectTransformer) rmPkg(path string) lockedProjectTransformer {
	return lpt.compose(func(lp gps.LockedProject) gps.LockedProject {
		pkglist := lp.Packages()
		for k, pkg := range pkglist {
			if pkg != path {
				continue
			}
			nlp := gps.NewLockedProject(lp.Ident(), lp.Version(), append(pkglist[:k], pkglist[k+1:]...))
			if vp, ok := lp.(VerifiableProject); ok {
				vp.LockedProject = nlp
				return vp
			}
			return nlp
		}
		panic(fmt.Sprintf("%q not in pkg list", path))
	})
}
// setSource appends a step that rewrites the project identifier's Source.
// VerifiableProject wrappers are preserved.
func (lpt lockedProjectTransformer) setSource(source string) lockedProjectTransformer {
	return lpt.compose(func(lp gps.LockedProject) gps.LockedProject {
		id := lp.Ident()
		id.Source = source
		nlp := gps.NewLockedProject(id, lp.Version(), lp.Packages())
		vp, ok := lp.(VerifiableProject)
		if !ok {
			return nlp
		}
		vp.LockedProject = nlp
		return vp
	})
}
// setVersion appends a step that replaces the project's version.
// VerifiableProject wrappers are preserved.
func (lpt lockedProjectTransformer) setVersion(v gps.Version) lockedProjectTransformer {
	return lpt.compose(func(lp gps.LockedProject) gps.LockedProject {
		nlp := gps.NewLockedProject(lp.Ident(), v, lp.Packages())
		vp, ok := lp.(VerifiableProject)
		if !ok {
			return nlp
		}
		vp.LockedProject = nlp
		return vp
	})
}
// setPruneOpts appends a step that replaces the project's prune options.
// Only usable with VerifiableProjects; the type assertion panics otherwise.
func (lpt lockedProjectTransformer) setPruneOpts(po gps.PruneOptions) lockedProjectTransformer {
	set := func(lp gps.LockedProject) gps.LockedProject {
		vp := lp.(VerifiableProject)
		vp.PruneOpts = po
		return vp
	}
	return lpt.compose(set)
}
// setDigest appends a step that replaces the project's digest. Only usable
// with VerifiableProjects; the type assertion panics otherwise.
func (lpt lockedProjectTransformer) setDigest(vd VersionedDigest) lockedProjectTransformer {
	set := func(lp gps.LockedProject) gps.LockedProject {
		vp := lp.(VerifiableProject)
		vp.Digest = vd
		return vp
	}
	return lpt.compose(set)
}
// toPlainLP returns a derived transformer that strips the VerifiableProject
// wrapper, yielding the plain inner LockedProject. Projects that are not
// wrapped pass through unchanged.
func (lpt lockedProjectTransformer) toPlainLP() lockedProjectTransformer {
	return lpt.compose(func(lp gps.LockedProject) gps.LockedProject {
		vp, ok := lp.(VerifiableProject)
		if !ok {
			return lp
		}
		return vp.LockedProject
	})
}
|
verify
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/verify/lockdiff.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package verify
import (
"bytes"
"sort"
"strings"
"github.com/golang/dep/gps"
)
// DeltaDimension defines a bitset enumerating all of the different dimensions
// along which a Lock, and its constituent components, can change.
type DeltaDimension uint32

// Each flag represents an orthogonal dimension along which Locks can vary with
// respect to each other.
const (
	InputImportsChanged DeltaDimension = 1 << iota
	ProjectAdded
	ProjectRemoved
	SourceChanged
	VersionChanged
	RevisionChanged
	PackagesChanged
	PruneOptsChanged
	HashVersionChanged
	HashChanged
	// AnyChanged has every flag above set. (1 << iota) - 1 works because at
	// this point iota is one past the last flag's shift amount, so the
	// subtraction yields a mask of all lower bits.
	AnyChanged = (1 << iota) - 1
)
// LockDelta represents all possible differences between two Locks.
type LockDelta struct {
	// AddedImportInputs holds import inputs present only in the second Lock;
	// RemovedImportInputs holds those present only in the first.
	AddedImportInputs   []string
	RemovedImportInputs []string
	// ProjectDeltas maps each project root seen in either Lock to its delta.
	ProjectDeltas map[gps.ProjectRoot]LockedProjectDelta
}

// LockedProjectDelta represents all possible state changes of a LockedProject
// within a Lock. It encapsulates the property-level differences represented by
// a LockedProjectPropertiesDelta, but can also represent existence deltas - a
// given name came to exist, or cease to exist, across two Locks.
type LockedProjectDelta struct {
	Name gps.ProjectRoot
	// At most one of these should be set; both false means the project
	// existed in both Locks and only the embedded properties delta applies.
	ProjectRemoved, ProjectAdded bool
	LockedProjectPropertiesDelta
}

// LockedProjectPropertiesDelta represents all possible differences between the
// properties of two LockedProjects. It can represent deltas for
// VerifiableProject properties, as well.
type LockedProjectPropertiesDelta struct {
	PackagesAdded, PackagesRemoved []string
	// Before/After pairs hold the corresponding property from the first and
	// second lock, respectively.
	VersionBefore, VersionAfter         gps.UnpairedVersion
	RevisionBefore, RevisionAfter       gps.Revision
	SourceBefore, SourceAfter           string
	PruneOptsBefore, PruneOptsAfter     gps.PruneOptions
	HashVersionBefore, HashVersionAfter int
	HashChanged                         bool
}
// DiffLocks compares two locks and computes a semantically rich delta between
// them.
func DiffLocks(l1, l2 gps.Lock) LockDelta {
	// Default nil locks to empty locks, so that we can still generate a diff.
	if l1 == nil {
		if l2 == nil {
			// But both locks being nil results in an empty delta.
			return LockDelta{}
		}
		l1 = gps.SimpleLock{}
	}
	if l2 == nil {
		l2 = gps.SimpleLock{}
	}

	p1, p2 := l1.Projects(), l2.Projects()
	p1 = sortLockedProjects(p1)
	p2 = sortLockedProjects(p2)

	diff := LockDelta{
		ProjectDeltas: make(map[gps.ProjectRoot]LockedProjectDelta),
	}

	// Both lists are sorted by project root, so walk them as a merge.
	// i2next records how far into p2 has been consumed, letting each pass of
	// the inner loop resume where the previous one left off.
	var i2next int
	for i1 := 0; i1 < len(p1); i1++ {
		lp1 := p1[i1]
		pr1 := lp1.Ident().ProjectRoot

		lpd := LockedProjectDelta{
			Name: pr1,
			// Default to assuming a project was removed, as it will handle both
			// the obvious removal case (where there's a visible hole in p2),
			// and the non-obvious case, where p2 is shorter than p1.
			ProjectRemoved: true,
		}

		for i2 := i2next; i2 < len(p2); i2++ {
			lp2 := p2[i2]
			pr2 := lp2.Ident().ProjectRoot

			switch strings.Compare(string(pr1), string(pr2)) {
			case 0: // Found a matching project
				lpd = LockedProjectDelta{
					Name:                         pr1,
					LockedProjectPropertiesDelta: DiffLockedProjectProperties(lp1, lp2),
				}
				i2next = i2 + 1 // Don't visit this project again
			case +1: // Found a new project
				// pr2 sorts before pr1, so it cannot exist in p1: record it
				// as an add and keep scanning p2 for pr1.
				diff.ProjectDeltas[pr2] = LockedProjectDelta{
					Name:         pr2,
					ProjectAdded: true,
				}
				i2next = i2 + 1 // Don't visit this project again
				continue        // Keep looking for a matching project
			}
			// case -1 (pr1 sorts before pr2) falls through: pr1 is absent
			// from p2, so the ProjectRemoved default above stands.

			break // Done evaluating this project, move onto the next
		}

		diff.ProjectDeltas[pr1] = lpd
	}

	// Anything that still hasn't been evaluated are adds
	for i2 := i2next; i2 < len(p2); i2++ {
		lp2 := p2[i2]
		pr2 := lp2.Ident().ProjectRoot
		diff.ProjectDeltas[pr2] = LockedProjectDelta{
			Name:         pr2,
			ProjectAdded: true,
		}
	}

	diff.AddedImportInputs, diff.RemovedImportInputs = findAddedAndRemoved(l1.InputImports(), l2.InputImports())

	return diff
}
// findAddedAndRemoved computes the set difference between two string lists:
// add holds elements only in l2, remove holds elements only in l1. Order of
// the returned slices is unspecified (map iteration order).
func findAddedAndRemoved(l1, l2 []string) (add, remove []string) {
	// Computing package add/removes might be optimizable to O(n) (?), but it's
	// not critical path for any known case, so not worth the effort right now.
	set1 := make(map[string]bool, len(l1))
	set2 := make(map[string]bool, len(l2))
	for _, pkg := range l1 {
		set1[pkg] = true
	}
	for _, pkg := range l2 {
		set2[pkg] = true
	}

	// Anything only in l1 was removed; anything only in l2 was added.
	for pkg := range set1 {
		if !set2[pkg] {
			remove = append(remove, pkg)
		}
	}
	for pkg := range set2 {
		if !set1[pkg] {
			add = append(add, pkg)
		}
	}

	return add, remove
}
// DiffLockedProjectProperties takes two gps.LockedProject and computes a delta
// for each of their component properties.
//
// This function is focused exclusively on the properties of a LockedProject. As
// such, it does not compare the ProjectRoot part of the LockedProject's
// ProjectIdentifier, as those are names, and the concern here is a difference
// in properties, not intrinsic identity.
func DiffLockedProjectProperties(lp1, lp2 gps.LockedProject) LockedProjectPropertiesDelta {
	ld := LockedProjectPropertiesDelta{
		SourceBefore: lp1.Ident().Source,
		SourceAfter:  lp2.Ident().Source,
	}

	ld.PackagesAdded, ld.PackagesRemoved = findAddedAndRemoved(lp1.Packages(), lp2.Packages())

	// Split each project's version into its unpaired-version and revision
	// components. PairedVersion must be checked first: it satisfies both of
	// the other interfaces, so case order is load-bearing here.
	switch v := lp1.Version().(type) {
	case gps.PairedVersion:
		ld.VersionBefore, ld.RevisionBefore = v.Unpair(), v.Revision()
	case gps.Revision:
		ld.RevisionBefore = v
	case gps.UnpairedVersion:
		// This should ideally never happen
		ld.VersionBefore = v
	}

	switch v := lp2.Version().(type) {
	case gps.PairedVersion:
		ld.VersionAfter, ld.RevisionAfter = v.Unpair(), v.Revision()
	case gps.Revision:
		ld.RevisionAfter = v
	case gps.UnpairedVersion:
		// This should ideally never happen
		ld.VersionAfter = v
	}

	// Verification-related properties only exist on VerifiableProject. If
	// exactly one side is verifiable, the hash is considered changed, as
	// there is nothing comparable on the other side.
	vp1, ok1 := lp1.(VerifiableProject)
	vp2, ok2 := lp2.(VerifiableProject)

	if ok1 && ok2 {
		ld.PruneOptsBefore, ld.PruneOptsAfter = vp1.PruneOpts, vp2.PruneOpts
		ld.HashVersionBefore, ld.HashVersionAfter = vp1.Digest.HashVersion, vp2.Digest.HashVersion

		if !bytes.Equal(vp1.Digest.Digest, vp2.Digest.Digest) {
			ld.HashChanged = true
		}
	} else if ok1 {
		ld.PruneOptsBefore = vp1.PruneOpts
		ld.HashVersionBefore = vp1.Digest.HashVersion
		ld.HashChanged = true
	} else if ok2 {
		ld.PruneOptsAfter = vp2.PruneOpts
		ld.HashVersionAfter = vp2.Digest.HashVersion
		ld.HashChanged = true
	}

	return ld
}
// Changed indicates whether the delta contains a change along the dimensions
// with their corresponding bits set.
//
// This implementation checks the topmost-level Lock properties
func (ld LockDelta) Changed(dims DeltaDimension) bool {
	if dims&InputImportsChanged != 0 {
		if len(ld.AddedImportInputs) != 0 || len(ld.RemovedImportInputs) != 0 {
			return true
		}
	}

	// Import-input changes are a Lock-level property; mask that bit out
	// before recursing into the per-project deltas.
	projDims := dims & ^InputImportsChanged
	for _, lpd := range ld.ProjectDeltas {
		if lpd.Changed(projDims) {
			return true
		}
	}

	return false
}
// Changes returns a bitset indicating the dimensions along which deltas exist across
// all contents of the LockDelta.
//
// This recurses down into the individual LockedProjectDeltas contained within
// the LockDelta. A single delta along a particular dimension from a single
// project is sufficient to flip the bit on for that dimension.
func (ld LockDelta) Changes() DeltaDimension {
	var dd DeltaDimension
	if len(ld.AddedImportInputs) != 0 || len(ld.RemovedImportInputs) != 0 {
		dd |= InputImportsChanged
	}

	for _, lpd := range ld.ProjectDeltas {
		dd |= lpd.Changes()
	}

	return dd
}
// Changed indicates whether the delta contains a change along the dimensions
// with their corresponding bits set.
//
// For example, if only the Revision changed, and this method is called with
// SourceChanged | VersionChanged, it will return false; if it is called with
// VersionChanged | RevisionChanged, it will return true.
func (ld LockedProjectDelta) Changed(dims DeltaDimension) bool {
	if ld.WasAdded() && dims&ProjectAdded != 0 {
		return true
	}
	if ld.WasRemoved() && dims&ProjectRemoved != 0 {
		return true
	}

	// Existence bits are handled above; strip them before delegating to the
	// property-level delta.
	propDims := dims & ^ProjectAdded & ^ProjectRemoved
	return ld.LockedProjectPropertiesDelta.Changed(propDims)
}
// Changes returns a bitset indicating the dimensions along which there were
// changes between the compared LockedProjects. This includes both
// existence-level deltas (add/remove) and property-level deltas.
func (ld LockedProjectDelta) Changes() DeltaDimension {
	dd := ld.LockedProjectPropertiesDelta.Changes()
	if ld.WasAdded() {
		dd |= ProjectAdded
	}
	if ld.WasRemoved() {
		dd |= ProjectRemoved
	}
	return dd
}
// WasRemoved returns true if the named project existed in the first lock, but
// did not exist in the second lock.
func (ld LockedProjectDelta) WasRemoved() bool {
	return ld.ProjectRemoved
}

// WasAdded returns true if the named project did not exist in the first lock,
// but did exist in the second lock.
func (ld LockedProjectDelta) WasAdded() bool {
	return ld.ProjectAdded
}
// Changed indicates whether the delta contains a change along the dimensions
// with their corresponding bits set.
//
// For example, if only the Revision changed, and this method is called with
// SourceChanged | VersionChanged, it will return false; if it is called with
// VersionChanged | RevisionChanged, it will return true.
func (ld LockedProjectPropertiesDelta) Changed(dims DeltaDimension) bool {
	// Each case tests one dimension: the caller must have requested it AND
	// the corresponding property must actually differ.
	switch {
	case dims&SourceChanged != 0 && ld.SourceChanged():
		return true
	case dims&RevisionChanged != 0 && ld.RevisionChanged():
		return true
	case dims&PruneOptsChanged != 0 && ld.PruneOptsChanged():
		return true
	case dims&HashChanged != 0 && ld.HashChanged:
		return true
	case dims&HashVersionChanged != 0 && ld.HashVersionChanged():
		return true
	case dims&VersionChanged != 0 && ld.VersionChanged():
		return true
	case dims&PackagesChanged != 0 && ld.PackagesChanged():
		return true
	}
	return false
}
// Changes returns a bitset indicating the dimensions along which there were
// changes between the compared LockedProjects.
func (ld LockedProjectPropertiesDelta) Changes() DeltaDimension {
	var dd DeltaDimension
	// record sets dim in the accumulator when its predicate held.
	record := func(changed bool, dim DeltaDimension) {
		if changed {
			dd |= dim
		}
	}

	record(ld.SourceChanged(), SourceChanged)
	record(ld.RevisionChanged(), RevisionChanged)
	record(ld.PruneOptsChanged(), PruneOptsChanged)
	record(ld.HashChanged, HashChanged)
	record(ld.HashVersionChanged(), HashVersionChanged)
	record(ld.VersionChanged(), VersionChanged)
	record(ld.PackagesChanged(), PackagesChanged)

	return dd
}
// SourceChanged returns true if the source field differed between the first and
// second locks.
func (ld LockedProjectPropertiesDelta) SourceChanged() bool {
	return ld.SourceBefore != ld.SourceAfter
}
// VersionChanged returns true if the version property differed between the
// first and second locks. In addition to simple changes (e.g. 1.0.1 -> 1.0.2),
// this also includes all possible version type changes: either going from a
// paired version to a plain revision, or the reverse direction, or the type of
// unpaired version changing (e.g. branch -> semver).
func (ld LockedProjectPropertiesDelta) VersionChanged() bool {
	before, after := ld.VersionBefore, ld.VersionAfter
	switch {
	case before == nil && after == nil:
		// Neither side has an unpaired version; nothing to compare.
		return false
	case before == nil || after == nil:
		// A version appeared or disappeared.
		return true
	case before.Type() != after.Type():
		// e.g. branch -> semver.
		return true
	default:
		return !before.Matches(after)
	}
}
// RevisionChanged returns true if the revision property differed between the
// first and second locks.
func (ld LockedProjectPropertiesDelta) RevisionChanged() bool {
	return ld.RevisionBefore != ld.RevisionAfter
}

// PackagesChanged returns true if the package set gained or lost members (or
// both) between the first and second locks.
func (ld LockedProjectPropertiesDelta) PackagesChanged() bool {
	return len(ld.PackagesAdded) > 0 || len(ld.PackagesRemoved) > 0
}

// PruneOptsChanged returns true if the pruning flags for the project changed
// between the first and second locks.
func (ld LockedProjectPropertiesDelta) PruneOptsChanged() bool {
	return ld.PruneOptsBefore != ld.PruneOptsAfter
}

// HashVersionChanged returns true if the version of the hashing algorithm
// changed between the first and second locks.
func (ld LockedProjectPropertiesDelta) HashVersionChanged() bool {
	return ld.HashVersionBefore != ld.HashVersionAfter
}

// HashVersionWasZero returns true if the first lock had a zero hash version,
// which can only mean it was uninitialized.
func (ld LockedProjectPropertiesDelta) HashVersionWasZero() bool {
	return ld.HashVersionBefore == 0
}
// sortLockedProjects returns a copy of lps sorted by project identifier, or
// lps itself when it is already sorted (or too short to need sorting). The
// input slice is never mutated.
func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject {
	inOrder := func(i, j int) bool {
		return lps[i].Ident().Less(lps[j].Ident())
	}
	if len(lps) < 2 || sort.SliceIsSorted(lps, inOrder) {
		return lps
	}

	// Sort a copy so callers' slices are left untouched.
	sorted := append([]gps.LockedProject(nil), lps...)
	sort.Slice(sorted, func(i, j int) bool {
		return sorted[i].Ident().Less(sorted[j].Ident())
	})
	return sorted
}
|
verify
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/gps/verify/digest_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package verify
import (
	"bytes"
	"encoding/hex"
	"io"
	"os"
	"path/filepath"
	"testing"
)
// crossBuffer is a test io.Reader that emits a few canned responses.
type crossBuffer struct {
	readCount  int      // index of the next canned response to emit
	iterations []string // the canned responses, one per Read call
}

// Read copies the next canned response into buf, returning io.EOF once all
// responses have been consumed.
func (cb *crossBuffer) Read(buf []byte) (int, error) {
	if cb.readCount >= len(cb.iterations) {
		return 0, io.EOF
	}
	chunk := cb.iterations[cb.readCount]
	cb.readCount++
	return copy(buf, chunk), nil
}
// streamThruLineEndingReader feeds the canned iterations through
// newLineEndingReader via io.Copy and returns the resulting bytes, failing
// the test if the copy errors or reports a length that disagrees with the
// destination buffer.
func streamThruLineEndingReader(t *testing.T, iterations []string) []byte {
	var dst bytes.Buffer
	src := newLineEndingReader(&crossBuffer{iterations: iterations})

	n, err := io.Copy(&dst, src)
	if got, want := err, error(nil); got != want {
		t.Errorf("(GOT): %v; (WNT): %v", got, want)
	}
	if got, want := n, int64(dst.Len()); got != want {
		t.Errorf("(GOT): %v; (WNT): %v", got, want)
	}
	return dst.Bytes()
}
// TestLineEndingReader verifies that newLineEndingReader translates CRLF to
// bare LF — including CRLF pairs split across separate reads — while leaving
// lone CR and lone LF bytes untouched.
func TestLineEndingReader(t *testing.T) {
	testCases := []struct {
		input  []string // each element is returned by one Read of crossBuffer
		output string   // expected fully-translated stream
	}{
		{[]string{"\r"}, "\r"},
		{[]string{"\r\n"}, "\n"},
		{[]string{"now is the time\r\n"}, "now is the time\n"},
		{[]string{"now is the time\r\n(trailing data)"}, "now is the time\n(trailing data)"},
		{[]string{"now is the time\n"}, "now is the time\n"},
		{[]string{"now is the time\r"}, "now is the time\r"}, // trailing CR ought to convey
		{[]string{"\rnow is the time"}, "\rnow is the time"}, // CR not followed by LF ought to convey
		{[]string{"\rnow is the time\r"}, "\rnow is the time\r"}, // CR not followed by LF ought to convey

		// no line splits
		{[]string{"first", "second", "third"}, "firstsecondthird"},

		// 1->2 and 2->3 both break across a CRLF
		{[]string{"first\r", "\nsecond\r", "\nthird"}, "first\nsecond\nthird"},

		// 1->2 breaks across CRLF and 2->3 does not
		{[]string{"first\r", "\nsecond", "third"}, "first\nsecondthird"},

		// 1->2 breaks across CRLF and 2 ends in CR but 3 does not begin LF
		{[]string{"first\r", "\nsecond\r", "third"}, "first\nsecond\rthird"},

		// 1 ends in CR but 2 does not begin LF, and 2->3 breaks across CRLF
		{[]string{"first\r", "second\r", "\nthird"}, "first\rsecond\nthird"},

		// NOTE(review): this case is byte-identical to the previous one,
		// but its original comment claimed 2->3 does "not break across
		// CRLF" — the input does break across a CRLF. Either the comment
		// or the fixture is wrong; confirm the intended case.
		{[]string{"first\r", "second\r", "\nthird"}, "first\rsecond\nthird"},

		// 1->2 and 2->3 both break across a CRLF, but 3->4 does not
		{[]string{"first\r", "\nsecond\r", "\nthird\r", "fourth"}, "first\nsecond\nthird\rfourth"},
		{[]string{"first\r", "\nsecond\r", "\nthird\n", "fourth"}, "first\nsecond\nthird\nfourth"},

		{[]string{"this is the result\r\nfrom the first read\r", "\nthis is the result\r\nfrom the second read\r"},
			"this is the result\nfrom the first read\nthis is the result\nfrom the second read\r"},
		{[]string{"now is the time\r\nfor all good engineers\r\nto improve their test coverage!\r\n"},
			"now is the time\nfor all good engineers\nto improve their test coverage!\n"},
		{[]string{"now is the time\r\nfor all good engineers\r", "\nto improve their test coverage!\r\n"},
			"now is the time\nfor all good engineers\nto improve their test coverage!\n"},
	}

	for _, testCase := range testCases {
		got := streamThruLineEndingReader(t, testCase.input)
		if want := []byte(testCase.output); !bytes.Equal(got, want) {
			t.Errorf("Input: %#v; (GOT): %#q; (WNT): %#q", testCase.input, got, want)
		}
	}
}
////////////////////////////////////////

// getTestdataVerifyRoot returns the absolute path of the digest testdata
// directory, which sits beside (not beneath) the current working directory.
func getTestdataVerifyRoot(t *testing.T) string {
	wd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	return filepath.Join(filepath.Dir(wd), "_testdata/digest")
}
// TestDigestFromDirectoryBailsUnlessDirectory invokes DigestFromDirectory on
// a testdata path and requires it to succeed.
//
// NOTE(review): the name suggests an error is expected for a non-directory,
// but the assertion requires err == nil on an existing directory — confirm
// whether the test body or the name reflects the intent.
func TestDigestFromDirectoryBailsUnlessDirectory(t *testing.T) {
	prefix := getTestdataVerifyRoot(t)
	relativePathname := "launchpad.net/match"
	_, err := DigestFromDirectory(filepath.Join(prefix, relativePathname))
	if got, want := err, error(nil); got != want {
		t.Errorf("\n(GOT): %v; (WNT): %v", got, want)
	}
}
// TestDigestFromDirectory checks that hashing a tree yields the same known
// digest whether the directory is addressed with an absolute or a relative
// path prefix (the prefix must not influence the hash).
func TestDigestFromDirectory(t *testing.T) {
	relativePathname := "launchpad.net/match"
	// Known-good digest of the launchpad.net/match testdata tree.
	want := []byte{0x7e, 0x10, 0x6, 0x2f, 0x8, 0x3, 0x3c, 0x76, 0xae, 0xbc, 0xa4, 0xc9, 0xec, 0x73, 0x67, 0x15, 0x70, 0x2b, 0x0, 0x89, 0x27, 0xbb, 0x61, 0x9d, 0xc7, 0xc3, 0x39, 0x46, 0x3, 0x91, 0xb7, 0x3b}

	// NOTE: Create the hash using both an absolute and a relative pathname to
	// ensure hash ignores prefix.

	t.Run("AbsolutePrefix", func(t *testing.T) {
		t.Parallel()
		prefix := getTestdataVerifyRoot(t)
		got, err := DigestFromDirectory(filepath.Join(prefix, relativePathname))
		if err != nil {
			t.Fatal(err)
		}
		if !bytes.Equal(got.Digest, want) {
			t.Errorf("\n(GOT):\n\t%#v\n(WNT):\n\t%#v", got, want)
		}
	})

	t.Run("RelativePrefix", func(t *testing.T) {
		t.Parallel()
		prefix := "../_testdata/digest"
		got, err := DigestFromDirectory(filepath.Join(prefix, relativePathname))
		if err != nil {
			t.Fatal(err)
		}
		if !bytes.Equal(got.Digest, want) {
			t.Errorf("\n(GOT):\n\t%#v\n(WNT):\n\t%#v", got, want)
		}
	})
}
// TestVerifyDepTree exercises CheckDepTree's classification of vendored
// projects: digest matches, mismatches, empty lock digests, projects present
// on disk but absent from the lock, lock entries absent from disk, hash
// version mismatches, and a wholly non-existent vendor root.
func TestVerifyDepTree(t *testing.T) {
	vendorRoot := getTestdataVerifyRoot(t)

	// Expected raw digests keyed by project path; nil means "no digest in
	// the lock" for that entry.
	wantSums := map[string][]byte{
		"github.com/alice/match":       {0x7e, 0x10, 0x6, 0x2f, 0x8, 0x3, 0x3c, 0x76, 0xae, 0xbc, 0xa4, 0xc9, 0xec, 0x73, 0x67, 0x15, 0x70, 0x2b, 0x0, 0x89, 0x27, 0xbb, 0x61, 0x9d, 0xc7, 0xc3, 0x39, 0x46, 0x3, 0x91, 0xb7, 0x3b},
		"github.com/alice/mismatch":    []byte("some non-matching digest"),
		"github.com/bob/emptyDigest":   nil, // empty hash result
		"github.com/bob/match":         {0x7e, 0x10, 0x6, 0x2f, 0x8, 0x3, 0x3c, 0x76, 0xae, 0xbc, 0xa4, 0xc9, 0xec, 0x73, 0x67, 0x15, 0x70, 0x2b, 0x0, 0x89, 0x27, 0xbb, 0x61, 0x9d, 0xc7, 0xc3, 0x39, 0x46, 0x3, 0x91, 0xb7, 0x3b},
		"github.com/charlie/notInTree": nil, // not in tree result ought to supersede empty digest result
		// matching result at seldom found directory level
		"launchpad.net/match": {0x7e, 0x10, 0x6, 0x2f, 0x8, 0x3, 0x3c, 0x76, 0xae, 0xbc, 0xa4, 0xc9, 0xec, 0x73, 0x67, 0x15, 0x70, 0x2b, 0x0, 0x89, 0x27, 0xbb, 0x61, 0x9d, 0xc7, 0xc3, 0x39, 0x46, 0x3, 0x91, 0xb7, 0x3b},
	}

	// checkStatus asserts that status holds want for key.
	checkStatus := func(t *testing.T, status map[string]VendorStatus, key string, want VendorStatus) {
		t.Helper()
		got, ok := status[key]
		if !ok {
			t.Errorf("Want key: %q", key)
			return
		}
		if got != want {
			t.Errorf("Key: %q; (GOT): %v; (WNT): %v", key, got, want)
		}
	}

	t.Run("normal", func(t *testing.T) {
		t.Parallel()
		wantDigests := make(map[string]VersionedDigest)
		for k, v := range wantSums {
			wantDigests[k] = VersionedDigest{
				HashVersion: HashVersion,
				Digest:      v,
			}
		}
		status, err := CheckDepTree(vendorRoot, wantDigests)
		if err != nil {
			t.Fatal(err)
		}
		if got, want := len(status), 7; got != want {
			t.Errorf("Unexpected result count from VerifyDepTree:\n\t(GOT): %v\n\t(WNT): %v", got, want)
		}
		checkStatus(t, status, "github.com/alice/match", NoMismatch)
		checkStatus(t, status, "github.com/alice/mismatch", DigestMismatchInLock)
		checkStatus(t, status, "github.com/alice/notInLock", NotInLock)
		checkStatus(t, status, "github.com/bob/match", NoMismatch)
		checkStatus(t, status, "github.com/bob/emptyDigest", EmptyDigestInLock)
		checkStatus(t, status, "github.com/charlie/notInTree", NotInTree)
		checkStatus(t, status, "launchpad.net/match", NoMismatch)

		// On failure, re-hash each tree to surface which digest diverged.
		if t.Failed() {
			for k, want := range wantSums {
				got, err := DigestFromDirectory(filepath.Join(vendorRoot, k))
				if err != nil {
					t.Error(err)
				}
				if !bytes.Equal(got.Digest, want) {
					t.Errorf("Digest mismatch for %q\n(GOT):\n\t%#v\n(WNT):\n\t%#v", k, got, want)
				}
			}
		}
	})

	t.Run("hashv-mismatch", func(t *testing.T) {
		t.Parallel()
		// Offset the hash version so every lock entry is flagged.
		wantDigests := make(map[string]VersionedDigest)
		for k, v := range wantSums {
			wantDigests[k] = VersionedDigest{
				HashVersion: HashVersion + 1,
				Digest:      v,
			}
		}
		status, err := CheckDepTree(vendorRoot, wantDigests)
		if err != nil {
			t.Fatal(err)
		}
		if got, want := len(status), 7; got != want {
			t.Errorf("Unexpected result count from VerifyDepTree:\n\t(GOT): %v\n\t(WNT): %v", got, want)
		}
		checkStatus(t, status, "github.com/alice/match", HashVersionMismatch)
		checkStatus(t, status, "github.com/alice/mismatch", HashVersionMismatch)
		checkStatus(t, status, "github.com/alice/notInLock", NotInLock)
		checkStatus(t, status, "github.com/bob/match", HashVersionMismatch)
		checkStatus(t, status, "github.com/bob/emptyDigest", HashVersionMismatch)
		checkStatus(t, status, "github.com/charlie/notInTree", NotInTree)
		checkStatus(t, status, "launchpad.net/match", HashVersionMismatch)
	})

	t.Run("Non-existent directory", func(t *testing.T) {
		t.Parallel()
		wantDigests := make(map[string]VersionedDigest)
		for k, v := range wantSums {
			wantDigests[k] = VersionedDigest{
				HashVersion: HashVersion + 1,
				Digest:      v,
			}
		}
		// With no vendor tree at all, every lock entry reports NotInTree.
		status, err := CheckDepTree("fooVendorRoot", wantDigests)
		if err != nil {
			t.Fatal(err)
		}
		if got, want := len(status), 6; got != want {
			t.Errorf("Unexpected result count from VerifyDepTree:\n\t(GOT): %v\n\t(WNT): %v", got, want)
		}
		checkStatus(t, status, "github.com/alice/match", NotInTree)
		checkStatus(t, status, "github.com/alice/mismatch", NotInTree)
		checkStatus(t, status, "github.com/bob/match", NotInTree)
		checkStatus(t, status, "github.com/bob/emptyDigest", NotInTree)
		checkStatus(t, status, "github.com/charlie/notInTree", NotInTree)
		checkStatus(t, status, "launchpad.net/match", NotInTree)
	})
}
// TestParseVersionedDigest checks parsing of "hashVersion:hexDigest" strings,
// covering a valid input plus rejection of a malformed string, invalid hex,
// and a non-numeric hash version.
func TestParseVersionedDigest(t *testing.T) {
	t.Run("Parse valid VersionedDigest", func(t *testing.T) {
		t.Parallel()
		input := "1:60861e762bdbe39c4c7bf292c291329b731c9925388fd41125888f5c1c595feb"
		vd, err := ParseVersionedDigest(input)
		if err != nil {
			// Include the error so a failure here is diagnosable.
			t.Fatal(err)
		}

		// The parsed digest holds the decoded bytes, so compare against the
		// hex-decoded expectation. (The previous assertion was inverted —
		// it fired on equality — and compared against the raw hex ASCII,
		// which could never match, so it silently asserted nothing.)
		expectedHash := "60861e762bdbe39c4c7bf292c291329b731c9925388fd41125888f5c1c595feb"
		want, err := hex.DecodeString(expectedHash)
		if err != nil {
			t.Fatal(err)
		}
		if got := vd.Digest; !bytes.Equal(got, want) {
			t.Errorf("Unexpected result from ParseVersionedDigest:\n\t(GOT): %s\n\t(WNT): %s", got, want)
		}

		// String() must round-trip back to the original input form.
		if got, want := vd.String(), input; got != want {
			t.Errorf("Unexpected result from ParseVersionedDigest String:\n\t(GOT): %s\n\t(WNT): %s", got, want)
		}
	})

	t.Run("Parse VersionedDigest with invalid format", func(t *testing.T) {
		t.Parallel()
		input := "1abc" // missing the ':' separator
		_, err := ParseVersionedDigest(input)
		if err == nil {
			t.Error("expected error for invalid VersionedDigest format")
		}
	})

	t.Run("Parse VersionedDigest with invalid hex string", func(t *testing.T) {
		t.Parallel()
		input := "1:60861g762bdbe39c4c7bf292c291329b731c9925388fd41125888f5c1c595feb" // 'g' is not hex
		_, err := ParseVersionedDigest(input)
		if err == nil {
			t.Error("expected error VersionedDigest with invalid hex string")
		}
	})

	t.Run("Parse VersionedDigest with invalid hash version", func(t *testing.T) {
		t.Parallel()
		input := "a:60861e762bdbe39c4c7bf292c291329b731c9925388fd41125888f5c1c595feb" // version must be an integer
		_, err := ParseVersionedDigest(input)
		if err == nil {
			t.Error("expected error VersionedDigest with invalid hash version")
		}
	})
}
// BenchmarkDigestFromDirectory measures hashing the user's entire GOPATH/src
// tree. Permanently skipped: it depends on the local machine's source tree.
func BenchmarkDigestFromDirectory(b *testing.B) {
	b.Skip("Eliding benchmark of user's Go source directory")
	prefix := filepath.Join(os.Getenv("GOPATH"), "src")
	for i := 0; i < b.N; i++ {
		_, err := DigestFromDirectory(prefix)
		if err != nil {
			b.Fatal(err)
		}
	}
}

// BenchmarkVerifyDepTree measures CheckDepTree over the user's entire
// GOPATH/src tree. Permanently skipped for the same reason as above.
func BenchmarkVerifyDepTree(b *testing.B) {
	b.Skip("Eliding benchmark of user's Go source directory")
	prefix := filepath.Join(os.Getenv("GOPATH"), "src")
	for i := 0; i < b.N; i++ {
		_, err := CheckDepTree(prefix, nil)
		if err != nil {
			b.Fatal(err)
		}
	}
}
|
feedback
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/feedback/feedback_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package feedback
import (
"bytes"
log2 "log"
"strings"
"testing"
"github.com/golang/dep/gps"
_ "github.com/golang/dep/internal/test" // DO NOT REMOVE, allows go test ./... -update to work
)
// TestFeedback_Constraint verifies the log lines emitted for constraint
// feedback across the direct/imported dependency types and the
// constraint/hint (semver vs. revision) distinction.
func TestFeedback_Constraint(t *testing.T) {
	ver, _ := gps.NewSemverConstraint("^1.0.0")
	rev := gps.Revision("1b8edb3")
	pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/foo/bar")}

	cases := []struct {
		feedback *ConstraintFeedback
		want     string
	}{
		{
			feedback: NewConstraintFeedback(gps.ProjectConstraint{Constraint: ver, Ident: pi}, DepTypeDirect),
			want:     "Using ^1.0.0 as constraint for direct dep github.com/foo/bar",
		},
		{
			feedback: NewConstraintFeedback(gps.ProjectConstraint{Constraint: ver, Ident: pi}, DepTypeImported),
			want:     "Using ^1.0.0 as initial constraint for imported dep github.com/foo/bar",
		},
		{
			// An Any constraint should render as "*".
			feedback: NewConstraintFeedback(gps.ProjectConstraint{Constraint: gps.Any(), Ident: pi}, DepTypeImported),
			want:     "Using * as initial constraint for imported dep github.com/foo/bar",
		},
		{
			// A bare revision is reported as a "hint" rather than a constraint.
			feedback: NewConstraintFeedback(gps.ProjectConstraint{Constraint: rev, Ident: pi}, DepTypeDirect),
			want:     "Using 1b8edb3 as hint for direct dep github.com/foo/bar",
		},
		{
			feedback: NewConstraintFeedback(gps.ProjectConstraint{Constraint: rev, Ident: pi}, DepTypeImported),
			want:     "Using 1b8edb3 as initial hint for imported dep github.com/foo/bar",
		},
	}

	for _, c := range cases {
		// Capture log output in a buffer with no prefix/flags so the line
		// can be compared verbatim.
		buf := &bytes.Buffer{}
		log := log2.New(buf, "", 0)
		c.feedback.LogFeedback(log)
		got := strings.TrimSpace(buf.String())
		if c.want != got {
			t.Errorf("Feedbacks are not expected: \n\t(GOT) '%s'\n\t(WNT) '%s'", got, c.want)
		}
	}
}
// TestFeedback_LockedProject verifies the log lines emitted for locked
// project feedback across the direct/imported/transitive dependency types.
func TestFeedback_LockedProject(t *testing.T) {
	v := gps.NewVersion("v1.1.4").Pair("bc29b4f")
	b := gps.NewBranch("master").Pair("436f39d")
	pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/foo/bar")}

	cases := []struct {
		feedback *ConstraintFeedback
		want     string
	}{
		{
			feedback: NewLockedProjectFeedback(gps.NewLockedProject(pi, v, nil), DepTypeDirect),
			want:     "Locking in v1.1.4 (bc29b4f) for direct dep github.com/foo/bar",
		},
		{
			feedback: NewLockedProjectFeedback(gps.NewLockedProject(pi, v, nil), DepTypeImported),
			want:     "Trying v1.1.4 (bc29b4f) as initial lock for imported dep github.com/foo/bar",
		},
		{
			// An empty version string should render as "*".
			feedback: NewLockedProjectFeedback(gps.NewLockedProject(pi, gps.NewVersion("").Pair("bc29b4f"), nil), DepTypeImported),
			want:     "Trying * (bc29b4f) as initial lock for imported dep github.com/foo/bar",
		},
		{
			feedback: NewLockedProjectFeedback(gps.NewLockedProject(pi, b, nil), DepTypeTransitive),
			want:     "Locking in master (436f39d) for transitive dep github.com/foo/bar",
		},
	}

	for _, c := range cases {
		// Capture log output in a buffer with no prefix/flags so the line
		// can be compared verbatim.
		buf := &bytes.Buffer{}
		log := log2.New(buf, "", 0)
		c.feedback.LogFeedback(log)
		got := strings.TrimSpace(buf.String())
		if c.want != got {
			t.Errorf("Feedbacks are not expected: \n\t(GOT) '%s'\n\t(WNT) '%s'", got, c.want)
		}
	}
}
// TestFeedback_BrokenImport verifies the warning lines emitted when an
// imported lock entry cannot be preserved: version replacement, project
// removal, and source changes.
func TestFeedback_BrokenImport(t *testing.T) {
	pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/foo/bar")}

	cases := []struct {
		oldVersion     gps.Version
		currentVersion gps.Version
		pID            gps.ProjectIdentifier
		altPID         gps.ProjectIdentifier
		want           string
		name           string
	}{
		{
			oldVersion:     gps.NewVersion("v1.1.4").Pair("bc29b4f"),
			currentVersion: gps.NewVersion("v1.2.0").Pair("ia3da28"),
			pID:            pi,
			altPID:         pi,
			want:           "Warning: Unable to preserve imported lock v1.1.4 (bc29b4f) for github.com/foo/bar. Locking in v1.2.0 (ia3da28)",
			name:           "Basic broken import",
		},
		{
			oldVersion:     gps.NewBranch("master").Pair("bc29b4f"),
			currentVersion: gps.NewBranch("dev").Pair("ia3da28"),
			pID:            pi,
			altPID:         pi,
			want:           "Warning: Unable to preserve imported lock master (bc29b4f) for github.com/foo/bar. Locking in dev (ia3da28)",
			name:           "Branches",
		},
		{
			// The new lock names a different project root, so the old entry
			// reads as removed rather than replaced.
			// NOTE(review): duplicate subtest name "Branches" (shared with
			// the previous case); go test reports this one as Branches#01.
			oldVersion:     gps.NewBranch("master").Pair("bc29b4f"),
			currentVersion: gps.NewBranch("dev").Pair("ia3da28"),
			pID:            pi,
			altPID:         gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/foo/boo")},
			want:           "Warning: Unable to preserve imported lock master (bc29b4f) for github.com/foo/bar. The project was removed from the lock because it is not used.",
			name:           "Branches",
		},
		{
			// Same root, different Source: both sides render the source in
			// parentheses after the root.
			oldVersion:     gps.NewBranch("master").Pair("bc29b4f"),
			currentVersion: gps.NewBranch("dev").Pair("ia3da28"),
			pID:            gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/foo/boo"), Source: "github.com/das/foo"},
			altPID:         gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/foo/boo"), Source: "github.com/das/bar"},
			want:           "Warning: Unable to preserve imported lock master (bc29b4f) for github.com/foo/boo(github.com/das/foo). Locking in dev (ia3da28) for github.com/foo/boo(github.com/das/bar)",
			name:           "With a source",
		},
	}

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			buf := &bytes.Buffer{}
			// Build single-project old and new locks, then feed their diff
			// through the broken-import feedback path.
			ol := gps.SimpleLock{
				gps.NewLockedProject(c.pID, c.oldVersion, nil),
			}
			l := gps.SimpleLock{
				gps.NewLockedProject(c.altPID, c.currentVersion, nil),
			}
			log := log2.New(buf, "", 0)
			feedback := NewBrokenImportFeedback(DiffLocks(&ol, &l))
			feedback.LogFeedback(log)
			got := strings.TrimSpace(buf.String())
			if c.want != got {
				t.Errorf("Feedbacks are not expected: \n\t(GOT) '%s'\n\t(WNT) '%s'", got, c.want)
			}
		})
	}
}
|
feedback
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/feedback/lockdiff_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package feedback
import (
"bytes"
"testing"
"github.com/golang/dep/gps"
)
// mkPI builds a gps.ProjectIdentifier whose ProjectRoot is root and whose
// Source is left unset.
//
// Call normalize() on the returned value if you need the Source to be equal
// to the ProjectRoot.
func mkPI(root string) gps.ProjectIdentifier {
	return gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(root)}
}
// TestStringDiff_NoChange: when Previous and Current are equal, String()
// renders the bare value with no +/-/arrow decoration.
func TestStringDiff_NoChange(t *testing.T) {
	diff := StringDiff{Previous: "foo", Current: "foo"}
	want := "foo"
	got := diff.String()
	if got != want {
		t.Fatalf("Expected '%s', got '%s'", want, got)
	}
}
// TestStringDiff_Add: a diff with only Current set renders as an addition
// ("+ value"). Restructured with a want variable for consistency with the
// sibling StringDiff tests, so the expected value appears exactly once.
func TestStringDiff_Add(t *testing.T) {
	diff := StringDiff{Current: "foo"}
	want := "+ foo"
	got := diff.String()
	if got != want {
		t.Fatalf("Expected '%s', got '%s'", want, got)
	}
}
// TestStringDiff_Remove: a diff with only Previous set renders as a removal
// ("- value").
func TestStringDiff_Remove(t *testing.T) {
	diff := StringDiff{Previous: "foo"}
	want := "- foo"
	got := diff.String()
	if got != want {
		t.Fatalf("Expected '%s', got '%s'", want, got)
	}
}
// TestStringDiff_Modify: when Previous and Current differ, String() renders
// the transition as "old -> new".
func TestStringDiff_Modify(t *testing.T) {
	diff := StringDiff{Previous: "foo", Current: "bar"}
	want := "foo -> bar"
	got := diff.String()
	if got != want {
		t.Fatalf("Expected '%s', got '%s'", want, got)
	}
}
// TestDiffProjects_NoChange: two identical locked projects must produce a nil
// diff.
func TestDiffProjects_NoChange(t *testing.T) {
	p1 := gps.NewLockedProject(mkPI("github.com/golang/dep/gps"), gps.NewVersion("v0.10.0"), []string{"gps"})
	p2 := gps.NewLockedProject(mkPI("github.com/golang/dep/gps"), gps.NewVersion("v0.10.0"), []string{"gps"})

	diff := DiffProjects(p1, p2)
	if diff != nil {
		t.Fatal("Expected the diff to be nil")
	}
}
// TestDiffProjects_Modify exercises a diff in which the source, version,
// revision, branch, and package list all changed between p1 and p2.
func TestDiffProjects_Modify(t *testing.T) {
	p1 := gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewBranch("master").Pair("abc123"), []string{"baz", "qux"})
	p2 := gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"},
		gps.NewVersion("v1.0.0").Pair("def456"), []string{"baz", "derp"})
	diff := DiffProjects(p1, p2)
	if diff == nil {
		t.Fatal("Expected the diff to be populated")
	}
	// p1 had no source set, so the new source renders as an addition.
	wantSource := "+ https://github.com/mcfork/gps.git"
	gotSource := diff.Source.String()
	if gotSource != wantSource {
		t.Fatalf("Expected diff.Source to be '%s', got '%s'", wantSource, diff.Source)
	}
	// p1 was locked to a branch (no version), so the version is an addition.
	wantVersion := "+ v1.0.0"
	gotVersion := diff.Version.String()
	if gotVersion != wantVersion {
		t.Fatalf("Expected diff.Version to be '%s', got '%s'", wantVersion, gotVersion)
	}
	wantRevision := "abc123 -> def456"
	gotRevision := diff.Revision.String()
	if gotRevision != wantRevision {
		t.Fatalf("Expected diff.Revision to be '%s', got '%s'", wantRevision, gotRevision)
	}
	// Moving from a branch to a version drops the branch, so it renders
	// as a removal.
	wantBranch := "- master"
	gotBranch := diff.Branch.String()
	if gotBranch != wantBranch {
		t.Fatalf("Expected diff.Branch to be '%s', got '%s'", wantBranch, gotBranch)
	}
	// fmtPkgs flattens the package diffs into one string for a single comparison.
	fmtPkgs := func(pkgs []StringDiff) string {
		b := bytes.NewBufferString("[")
		for _, pkg := range pkgs {
			b.WriteString(pkg.String())
			b.WriteString(",")
		}
		b.WriteString("]")
		return b.String()
	}
	wantPackages := "[+ derp,- qux,]"
	gotPackages := fmtPkgs(diff.Packages)
	if gotPackages != wantPackages {
		t.Fatalf("Expected diff.Packages to be '%s', got '%s'", wantPackages, gotPackages)
	}
}
// TestDiffProjects_AddPackages verifies that packages present only in p2 are
// reported as additions, emitted in sorted package order.
func TestDiffProjects_AddPackages(t *testing.T) {
	p1 := gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewBranch("master").Pair("abc123"), []string{"foobar"})
	p2 := gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"},
		gps.NewVersion("v1.0.0").Pair("def456"), []string{"bazqux", "foobar", "zugzug"})
	diff := DiffProjects(p1, p2)
	if diff == nil {
		t.Fatal("Expected the diff to be populated")
	}
	// "foobar" is common to both sides, so only the two new packages appear.
	if len(diff.Packages) != 2 {
		t.Fatalf("Expected diff.Packages to have 2 packages, got %d", len(diff.Packages))
	}
	want0 := "+ bazqux"
	got0 := diff.Packages[0].String()
	if got0 != want0 {
		t.Fatalf("Expected diff.Packages[0] to contain %s, got %s", want0, got0)
	}
	want1 := "+ zugzug"
	got1 := diff.Packages[1].String()
	if got1 != want1 {
		t.Fatalf("Expected diff.Packages[1] to contain %s, got %s", want1, got1)
	}
}
// TestDiffProjects_RemovePackages verifies that packages present only in p1
// are reported as removals, interleaved in sorted order with any additions.
func TestDiffProjects_RemovePackages(t *testing.T) {
	p1 := gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewBranch("master").Pair("abc123"), []string{"athing", "foobar"})
	p2 := gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/foo/bar", Source: "https://github.com/mcfork/gps.git"},
		gps.NewVersion("v1.0.0").Pair("def456"), []string{"bazqux"})
	diff := DiffProjects(p1, p2)
	if diff == nil {
		t.Fatal("Expected the diff to be populated")
	}
	// The expected diff is exactly: "- athing", "+ bazqux", "- foobar".
	// The original check used `> 3`, which would silently pass if entries
	// went missing; require the exact count instead.
	if len(diff.Packages) != 3 {
		t.Fatalf("Expected diff.Packages to have 3 packages, got %d", len(diff.Packages))
	}
	want0 := "- athing"
	got0 := diff.Packages[0].String()
	if got0 != want0 {
		t.Fatalf("Expected diff.Packages[0] to contain %s, got %s", want0, got0)
	}
	// diff.Packages[1] is '+ bazqux'
	want2 := "- foobar"
	got2 := diff.Packages[2].String()
	if got2 != want2 {
		t.Fatalf("Expected diff.Packages[2] to contain %s, got %s", want2, got2)
	}
}
// TestDiffLocks_NoChange ensures that diffing two equivalent locks yields
// no diff.
func TestDiffLocks_NoChange(t *testing.T) {
	mkLock := func() gps.SimpleLock {
		return gps.SimpleLock{
			gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil),
		}
	}
	if d := DiffLocks(mkLock(), mkLock()); d != nil {
		t.Fatal("Expected the diff to be nil")
	}
}
// TestDiffLocks_AddProjects verifies that projects present only in the second
// lock are reported as adds, in sorted order, with all fields carried over.
func TestDiffLocks_AddProjects(t *testing.T) {
	l1 := gps.SimpleLock{
		gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil),
	}
	l2 := gps.SimpleLock{
		gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/baz/qux", Source: "https://github.com/mcfork/bazqux.git"},
			gps.NewVersion("v0.5.0").Pair("def456"), []string{"p1", "p2"}),
		gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil),
		gps.NewLockedProject(mkPI("github.com/zug/zug"), gps.NewVersion("v1.0.0"), nil),
	}
	diff := DiffLocks(l1, l2)
	if diff == nil {
		t.Fatal("Expected the diff to be populated")
	}
	if len(diff.Add) != 2 {
		t.Fatalf("Expected diff.Add to have 2 projects, got %d", len(diff.Add))
	}
	want0 := "github.com/baz/qux"
	got0 := string(diff.Add[0].Name)
	if got0 != want0 {
		t.Fatalf("Expected diff.Add[0] to contain %s, got %s", want0, got0)
	}
	want1 := "github.com/zug/zug"
	got1 := string(diff.Add[1].Name)
	if got1 != want1 {
		t.Fatalf("Expected diff.Add[1] to contain %s, got %s", want1, got1)
	}
	// For an added project the diff carries identical previous/current
	// values, so each field renders as its bare value.
	add0 := diff.Add[0]
	wantSource := "https://github.com/mcfork/bazqux.git"
	gotSource := add0.Source.String()
	if gotSource != wantSource {
		t.Fatalf("Expected diff.Add[0].Source to be '%s', got '%s'", wantSource, add0.Source)
	}
	wantVersion := "v0.5.0"
	gotVersion := add0.Version.String()
	if gotVersion != wantVersion {
		t.Fatalf("Expected diff.Add[0].Version to be '%s', got '%s'", wantVersion, gotVersion)
	}
	wantRevision := "def456"
	gotRevision := add0.Revision.String()
	if gotRevision != wantRevision {
		t.Fatalf("Expected diff.Add[0].Revision to be '%s', got '%s'", wantRevision, gotRevision)
	}
	// The project is pinned to a version, so no branch is expected.
	wantBranch := ""
	gotBranch := add0.Branch.String()
	if gotBranch != wantBranch {
		t.Fatalf("Expected diff.Add[0].Branch to be '%s', got '%s'", wantBranch, gotBranch)
	}
	// fmtPkgs flattens the package diffs into one string for a single comparison.
	fmtPkgs := func(pkgs []StringDiff) string {
		b := bytes.NewBufferString("[")
		for _, pkg := range pkgs {
			b.WriteString(pkg.String())
			b.WriteString(",")
		}
		b.WriteString("]")
		return b.String()
	}
	wantPackages := "[p1,p2,]"
	gotPackages := fmtPkgs(add0.Packages)
	if gotPackages != wantPackages {
		t.Fatalf("Expected diff.Add[0].Packages to be '%s', got '%s'", wantPackages, gotPackages)
	}
}
// TestDiffLocks_RemoveProjects verifies that projects present only in the
// first lock are reported as removes, in sorted order, with all fields
// carried over.
func TestDiffLocks_RemoveProjects(t *testing.T) {
	l1 := gps.SimpleLock{
		gps.NewLockedProject(gps.ProjectIdentifier{ProjectRoot: "github.com/a/thing", Source: "https://github.com/mcfork/athing.git"},
			gps.NewBranch("master").Pair("def456"), []string{"p1", "p2"}),
		gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil),
	}
	l2 := gps.SimpleLock{
		gps.NewLockedProject(mkPI("github.com/baz/qux"), gps.NewVersion("v1.0.0"), nil),
	}
	diff := DiffLocks(l1, l2)
	if diff == nil {
		t.Fatal("Expected the diff to be populated")
	}
	if len(diff.Remove) != 2 {
		t.Fatalf("Expected diff.Remove to have 2 projects, got %d", len(diff.Remove))
	}
	want0 := "github.com/a/thing"
	got0 := string(diff.Remove[0].Name)
	if got0 != want0 {
		t.Fatalf("Expected diff.Remove[0] to contain %s, got %s", want0, got0)
	}
	want1 := "github.com/foo/bar"
	got1 := string(diff.Remove[1].Name)
	if got1 != want1 {
		t.Fatalf("Expected diff.Remove[1] to contain %s, got %s", want1, got1)
	}
	// For a removed project the diff carries identical previous/current
	// values, so each field renders as its bare value.
	remove0 := diff.Remove[0]
	wantSource := "https://github.com/mcfork/athing.git"
	gotSource := remove0.Source.String()
	if gotSource != wantSource {
		t.Fatalf("Expected diff.Remove[0].Source to be '%s', got '%s'", wantSource, remove0.Source)
	}
	// The project was locked to a branch, so no version is expected.
	wantVersion := ""
	gotVersion := remove0.Version.String()
	if gotVersion != wantVersion {
		t.Fatalf("Expected diff.Remove[0].Version to be '%s', got '%s'", wantVersion, gotVersion)
	}
	wantRevision := "def456"
	gotRevision := remove0.Revision.String()
	if gotRevision != wantRevision {
		t.Fatalf("Expected diff.Remove[0].Revision to be '%s', got '%s'", wantRevision, gotRevision)
	}
	wantBranch := "master"
	gotBranch := remove0.Branch.String()
	if gotBranch != wantBranch {
		t.Fatalf("Expected diff.Remove[0].Branch to be '%s', got '%s'", wantBranch, gotBranch)
	}
	// fmtPkgs flattens the package diffs into one string for a single comparison.
	fmtPkgs := func(pkgs []StringDiff) string {
		b := bytes.NewBufferString("[")
		for _, pkg := range pkgs {
			b.WriteString(pkg.String())
			b.WriteString(",")
		}
		b.WriteString("]")
		return b.String()
	}
	wantPackages := "[p1,p2,]"
	gotPackages := fmtPkgs(remove0.Packages)
	if gotPackages != wantPackages {
		t.Fatalf("Expected diff.Remove[0].Packages to be '%s', got '%s'", wantPackages, gotPackages)
	}
}
// TestDiffLocks_ModifyProjects verifies that projects present in both locks
// with differing versions are reported in Modify, in sorted order.
func TestDiffLocks_ModifyProjects(t *testing.T) {
	l1 := gps.SimpleLock{
		gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil),
		gps.NewLockedProject(mkPI("github.com/foo/bu"), gps.NewVersion("v1.0.0"), nil),
		gps.NewLockedProject(mkPI("github.com/zig/zag"), gps.NewVersion("v1.0.0"), nil),
	}
	l2 := gps.SimpleLock{
		gps.NewLockedProject(mkPI("github.com/baz/qux"), gps.NewVersion("v1.0.0"), nil),
		gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v2.0.0"), nil),
		gps.NewLockedProject(mkPI("github.com/zig/zag"), gps.NewVersion("v2.0.0"), nil),
		gps.NewLockedProject(mkPI("github.com/zug/zug"), gps.NewVersion("v1.0.0"), nil),
	}
	diff := DiffLocks(l1, l2)
	if diff == nil {
		t.Fatal("Expected the diff to be populated")
	}
	// Fixed: the failure message previously reported diff.Remove even
	// though the condition checks diff.Modify.
	if len(diff.Modify) != 2 {
		t.Fatalf("Expected diff.Modify to have 2 projects, got %d", len(diff.Modify))
	}
	want0 := "github.com/foo/bar"
	got0 := string(diff.Modify[0].Name)
	if got0 != want0 {
		t.Fatalf("Expected diff.Modify[0] to contain %s, got %s", want0, got0)
	}
	want1 := "github.com/zig/zag"
	got1 := string(diff.Modify[1].Name)
	if got1 != want1 {
		t.Fatalf("Expected diff.Modify[1] to contain %s, got %s", want1, got1)
	}
}
// TestDiffLocks_EmptyInitialLock verifies that diffing from a nil lock
// reports every project in the final lock as an add.
func TestDiffLocks_EmptyInitialLock(t *testing.T) {
	final := gps.SimpleLock{
		gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil),
	}
	diff := DiffLocks(nil, final)
	if got := len(diff.Add); got != 1 {
		t.Fatalf("Expected diff.Add to contain 1 project, got %d", got)
	}
}

// TestDiffLocks_EmptyFinalLock verifies that diffing to a nil lock reports
// every project in the initial lock as a remove.
func TestDiffLocks_EmptyFinalLock(t *testing.T) {
	initial := gps.SimpleLock{
		gps.NewLockedProject(mkPI("github.com/foo/bar"), gps.NewVersion("v1.0.0"), nil),
	}
	diff := DiffLocks(initial, nil)
	if got := len(diff.Remove); got != 1 {
		t.Fatalf("Expected diff.Remove to contain 1 project, got %d", got)
	}
}

// TestDiffLocks_EmptyLocks verifies that diffing two nil locks yields no diff.
func TestDiffLocks_EmptyLocks(t *testing.T) {
	if diff := DiffLocks(nil, nil); diff != nil {
		t.Fatal("Expected the diff to be empty")
	}
}
|
feedback
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/feedback/feedback.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package feedback
import (
"encoding/hex"
"fmt"
"log"
"github.com/golang/dep/gps"
)
const (
	// ConsTypeConstraint represents a constraint
	ConsTypeConstraint = "constraint"
	// ConsTypeHint represents a constraint type hint
	ConsTypeHint = "hint"
	// DepTypeDirect represents a direct dependency
	DepTypeDirect = "direct dep"
	// DepTypeTransitive represents a transitive dependency,
	// or a dependency of a dependency
	DepTypeTransitive = "transitive dep"
	// DepTypeImported represents a dependency imported by an external tool
	DepTypeImported = "imported dep"
)
// ConstraintFeedback holds project constraint feedback data.
// ConstraintType is one of the ConsType* constants and DependencyType one of
// the DepType* constants above; ProjectPath is the project's import root.
type ConstraintFeedback struct {
	Constraint, LockedVersion, Revision, ConstraintType, DependencyType, ProjectPath string
}
// NewConstraintFeedback builds a feedback entry for a constraint in the manifest.
func NewConstraintFeedback(pc gps.ProjectConstraint, depType string) *ConstraintFeedback {
	// A bare revision is reported as a hint rather than a constraint.
	consType := ConsTypeConstraint
	if _, isRev := pc.Constraint.(gps.Revision); isRev {
		consType = ConsTypeHint
	}
	return &ConstraintFeedback{
		Constraint:     pc.Constraint.String(),
		ProjectPath:    string(pc.Ident.ProjectRoot),
		DependencyType: depType,
		ConstraintType: consType,
	}
}
// NewLockedProjectFeedback builds a feedback entry for a project in the lock.
func NewLockedProjectFeedback(lp gps.LockedProject, depType string) *ConstraintFeedback {
	cf := &ConstraintFeedback{
		ProjectPath:    string(lp.Ident().ProjectRoot),
		DependencyType: depType,
	}
	// Populate the version/revision fields according to how the project is
	// locked: a paired version carries both, an unpaired version only the
	// version, and a bare revision only the revision.
	switch vt := lp.Version().(type) {
	case gps.PairedVersion:
		cf.LockedVersion = vt.String()
		cf.Revision = vt.Revision().String()
	case gps.UnpairedVersion: // Logically this should never occur, but handle for completeness sake
		cf.LockedVersion = vt.String()
	case gps.Revision:
		cf.Revision = vt.String()
	}
	return cf
}
// LogFeedback logs feedback on changes made to the manifest or lock.
// Only fields that are actually populated produce output lines.
func (cf ConstraintFeedback) LogFeedback(logger *log.Logger) {
	hasConstraint := cf.Constraint != ""
	hasRevision := cf.Revision != ""
	if hasConstraint {
		logger.Printf("  %v", GetUsingFeedback(cf.Constraint, cf.ConstraintType, cf.DependencyType, cf.ProjectPath))
	}
	if hasRevision {
		logger.Printf("  %v", GetLockingFeedback(cf.LockedVersion, cf.Revision, cf.DependencyType, cf.ProjectPath))
	}
}
// brokenImport renders a single human-readable warning about a lock entry
// that could not be preserved as imported.
type brokenImport interface {
	String() string
}
// modifiedImport records the before/after field diffs for a lock entry whose
// source, branch, revision or version changed during solving. A nil field
// means that field did not change.
type modifiedImport struct {
	source, branch, revision, version *StringDiff
	projectPath                       string
}
// String renders the warning body for a modified lock entry; the caller
// prefixes it with "Warning: Unable to preserve imported lock ".
func (mi modifiedImport) String() string {
	var pv string // previous version or branch
	var pr string // previous revision, parenthesized
	pp := mi.projectPath
	var cr string // current revision, parenthesized
	var cv string // current version or branch
	cp := ""
	if mi.revision != nil {
		pr = fmt.Sprintf("(%s)", trimSHA(mi.revision.Previous))
		cr = fmt.Sprintf("(%s)", trimSHA(mi.revision.Current))
	}
	// Prefer the version diff; fall back to the branch diff when only the
	// branch changed.
	if mi.version != nil {
		pv = mi.version.Previous
		cv = mi.version.Current
	} else if mi.branch != nil {
		pv = mi.branch.Previous
		cv = mi.branch.Current
	}
	// When the source changed, annotate the project path on both sides.
	if mi.source != nil {
		pp = fmt.Sprintf("%s(%s)", mi.projectPath, mi.source.Previous)
		cp = fmt.Sprintf(" for %s(%s)", mi.projectPath, mi.source.Current)
	}
	// Warning: Unable to preserve imported lock VERSION/BRANCH (REV) for PROJECT(SOURCE). Locking in VERSION/BRANCH (REV) for PROJECT(SOURCE)
	return fmt.Sprintf("%v %s for %s. Locking in %v %s%s", pv, pr, pp, cv, cr, cp)
}
// removedImport records the state of an imported lock entry that was dropped
// during solving because no package uses it anymore. A nil field means that
// component was absent in the imported lock.
type removedImport struct {
	source, branch, revision, version *StringDiff
	projectPath                       string
}
// String renders the warning body for a removed lock entry; the caller
// prefixes it with "Warning: Unable to preserve imported lock ".
func (ri removedImport) String() string {
	var pr string // previous revision, parenthesized
	var pv string // previous version or branch
	pp := ri.projectPath
	if ri.revision != nil {
		pr = fmt.Sprintf("(%s)", trimSHA(ri.revision.Previous))
	}
	// Prefer the version; fall back to the branch when no version exists.
	if ri.version != nil {
		pv = ri.version.Previous
	} else if ri.branch != nil {
		pv = ri.branch.Previous
	}
	if ri.source != nil {
		pp = fmt.Sprintf("%s(%s)", ri.projectPath, ri.source.Previous)
	}
	// Warning: Unable to preserve imported lock VERSION/BRANCH (REV) for PROJECT(SOURCE). Locking in VERSION/BRANCH (REV) for PROJECT(SOURCE)
	return fmt.Sprintf("%v %s for %s. The project was removed from the lock because it is not used.", pv, pr, pp)
}
// BrokenImportFeedback holds information on changes to locks pre- and post- solving.
type BrokenImportFeedback struct {
	// brokenImports collects one warning entry per modified or removed project.
	brokenImports []brokenImport
}
// NewBrokenImportFeedback builds a feedback entry that compares an initially
// imported, unsolved lock to the same lock after it has been solved.
func NewBrokenImportFeedback(ld *LockDiff) *BrokenImportFeedback {
	feedback := &BrokenImportFeedback{}
	if ld == nil {
		return feedback
	}
	for _, d := range ld.Modify {
		// Entries whose only change is the package list are not worth a
		// warning; only version/branch/revision/source changes matter.
		unchanged := d.Branch == nil && d.Revision == nil && d.Source == nil && d.Version == nil
		if unchanged {
			continue
		}
		feedback.brokenImports = append(feedback.brokenImports, modifiedImport{
			projectPath: string(d.Name),
			source:      d.Source,
			branch:      d.Branch,
			revision:    d.Revision,
			version:     d.Version,
		})
	}
	for _, d := range ld.Remove {
		feedback.brokenImports = append(feedback.brokenImports, removedImport{
			projectPath: string(d.Name),
			source:      d.Source,
			branch:      d.Branch,
			revision:    d.Revision,
			version:     d.Version,
		})
	}
	return feedback
}
// LogFeedback logs a warning for all changes between the initially imported
// and post-solve locks.
func (b BrokenImportFeedback) LogFeedback(logger *log.Logger) {
	for _, warning := range b.brokenImports {
		logger.Printf("Warning: Unable to preserve imported lock %v\n", warning)
	}
}
// GetUsingFeedback returns a dependency "using" feedback message. For example:
//
//	Using ^1.0.0 as constraint for direct dep github.com/foo/bar
//	Using 1b8edb3 as hint for direct dep github.com/bar/baz
func GetUsingFeedback(version, consType, depType, projectPath string) string {
	// Imported dependencies are qualified as "initial" because the solver
	// may still change them.
	qualifier := ""
	if depType == DepTypeImported {
		qualifier = "initial "
	}
	return fmt.Sprintf("Using %s as %s%s for %s %s", version, qualifier, consType, depType, projectPath)
}
// GetLockingFeedback returns a dependency "locking" feedback message. For
// example:
//
//	Locking in v1.1.4 (bc29b4f) for direct dep github.com/foo/bar
//	Locking in master (436f39d) for transitive dep github.com/baz/qux
func GetLockingFeedback(version, revision, depType, projectPath string) string {
	shortRev := trimSHA(revision)
	if depType != DepTypeImported {
		return fmt.Sprintf("Locking in %s (%s) for %s %s", version, shortRev, depType, projectPath)
	}
	// An imported lock may carry only a bare revision; show a wildcard
	// in place of the missing version.
	if version == "" {
		version = "*"
	}
	return fmt.Sprintf("Trying %s (%s) as initial lock for %s %s", version, shortRev, depType, projectPath)
}
// trimSHA checks if revision is a valid SHA1 digest and trims to 7 characters.
func trimSHA(revision string) string {
	const sha1HexLen = 40
	if len(revision) != sha1HexLen {
		return revision
	}
	if _, err := hex.DecodeString(revision); err != nil {
		// Not pure hex, so not a SHA1 digest; leave it untouched.
		return revision
	}
	return revision[:7]
}
|
feedback
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/feedback/lockdiff.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package feedback
import (
"fmt"
"sort"
"strings"
"github.com/golang/dep/gps"
)
// StringDiff represents a modified string value.
// * Added: Previous = nil, Current != nil
// * Deleted: Previous != nil, Current = nil
// * Modified: Previous != nil, Current != nil
// * No Change: Previous = Current, or a nil pointer
type StringDiff struct {
	Previous string
	Current  string
}

// String renders the diff: "+ v" for an addition, "- v" for a removal,
// "old -> new" for a modification, and the bare value when unchanged.
// A nil receiver renders as the empty string.
func (diff *StringDiff) String() string {
	if diff == nil {
		return ""
	}
	switch {
	case diff.Previous == "" && diff.Current != "":
		return fmt.Sprintf("+ %s", diff.Current)
	case diff.Previous != "" && diff.Current == "":
		return fmt.Sprintf("- %s", diff.Previous)
	case diff.Previous != diff.Current:
		return fmt.Sprintf("%s -> %s", diff.Previous, diff.Current)
	default:
		return diff.Current
	}
}
// LockDiff is the set of differences between an existing lock file and an updated lock file.
// Fields are only populated when there is a difference, otherwise they are empty.
type LockDiff struct {
	Add    []LockedProjectDiff // projects present only in the new lock
	Remove []LockedProjectDiff // projects present only in the old lock
	Modify []LockedProjectDiff // projects present in both, with differing fields
}
// LockedProjectDiff contains the before and after snapshot of a project reference.
// Fields are only populated when there is a difference, otherwise they are empty.
type LockedProjectDiff struct {
	Name     gps.ProjectRoot
	Source   *StringDiff
	Version  *StringDiff
	Branch   *StringDiff
	Revision *StringDiff
	Packages []StringDiff
}
// DiffLocks compares two locks and identifies the differences between them.
// Returns nil if there are no differences.
func DiffLocks(l1, l2 gps.Lock) *LockDiff {
	// Default nil locks to empty locks, so that we can still generate a diff
	if l1 == nil {
		l1 = gps.SimpleLock{}
	}
	if l2 == nil {
		l2 = gps.SimpleLock{}
	}
	p1, p2 := l1.Projects(), l2.Projects()
	// Both lists must be sorted by project root for the single-pass merge below.
	p1 = sortLockedProjects(p1)
	p2 = sortLockedProjects(p2)
	diff := LockDiff{}
	// Merge-walk the two sorted lists: equal roots are compared, roots that
	// appear only in p2 are adds, and roots that appear only in p1 are removes.
	var i2next int
	for i1 := 0; i1 < len(p1); i1++ {
		lp1 := p1[i1]
		pr1 := lp1.Ident().ProjectRoot
		var matched bool
		for i2 := i2next; i2 < len(p2); i2++ {
			lp2 := p2[i2]
			pr2 := lp2.Ident().ProjectRoot
			switch strings.Compare(string(pr1), string(pr2)) {
			case 0: // Found a matching project
				matched = true
				pdiff := DiffProjects(lp1, lp2)
				if pdiff != nil {
					diff.Modify = append(diff.Modify, *pdiff)
				}
				i2next = i2 + 1 // Don't evaluate to this again
			case +1: // Found a new project
				add := buildLockedProjectDiff(lp2)
				diff.Add = append(diff.Add, add)
				i2next = i2 + 1 // Don't evaluate to this again
				continue // Keep looking for a matching project
			case -1: // Project has been removed, handled below
				continue
			}
			break // Done evaluating this project, move onto the next
		}
		if !matched {
			remove := buildLockedProjectDiff(lp1)
			diff.Remove = append(diff.Remove, remove)
		}
	}
	// Anything that still hasn't been evaluated are adds
	for i2 := i2next; i2 < len(p2); i2++ {
		lp2 := p2[i2]
		add := buildLockedProjectDiff(lp2)
		diff.Add = append(diff.Add, add)
	}
	if len(diff.Add) == 0 && len(diff.Remove) == 0 && len(diff.Modify) == 0 {
		return nil // The locks are the equivalent
	}
	return &diff
}
// buildLockedProjectDiff converts a locked project into a LockedProjectDiff
// whose Previous and Current values are identical, for use in the Add and
// Remove lists of a LockDiff. Empty components are left nil.
func buildLockedProjectDiff(lp gps.LockedProject) LockedProjectDiff {
	// same wraps a non-empty value in an unchanged StringDiff.
	same := func(s string) *StringDiff {
		if s == "" {
			return nil
		}
		return &StringDiff{Previous: s, Current: s}
	}
	rev, branch, version := gps.VersionComponentStrings(lp.Version())
	diff := LockedProjectDiff{
		Name:     lp.Ident().ProjectRoot,
		Source:   same(lp.Ident().Source),
		Revision: same(rev),
		Version:  same(version),
		Branch:   same(branch),
		Packages: make([]StringDiff, len(lp.Packages())),
	}
	for i, pkg := range lp.Packages() {
		diff.Packages[i] = StringDiff{Previous: pkg, Current: pkg}
	}
	return diff
}
// DiffProjects compares two projects and identifies the differences between them.
// Returns nil if there are no differences.
func DiffProjects(lp1, lp2 gps.LockedProject) *LockedProjectDiff {
	diff := LockedProjectDiff{Name: lp1.Ident().ProjectRoot}
	s1 := lp1.Ident().Source
	s2 := lp2.Ident().Source
	if s1 != s2 {
		diff.Source = &StringDiff{Previous: s1, Current: s2}
	}
	r1, b1, v1 := gps.VersionComponentStrings(lp1.Version())
	r2, b2, v2 := gps.VersionComponentStrings(lp2.Version())
	if r1 != r2 {
		diff.Revision = &StringDiff{Previous: r1, Current: r2}
	}
	if b1 != b2 {
		diff.Branch = &StringDiff{Previous: b1, Current: b2}
	}
	if v1 != v2 {
		diff.Version = &StringDiff{Previous: v1, Current: v2}
	}
	// The merge below requires sorted package lists; sort copies so the
	// callers' slices are not mutated.
	p1 := lp1.Packages()
	p2 := lp2.Packages()
	if !sort.StringsAreSorted(p1) {
		p1 = make([]string, len(p1))
		copy(p1, lp1.Packages())
		sort.Strings(p1)
	}
	if !sort.StringsAreSorted(p2) {
		p2 = make([]string, len(p2))
		copy(p2, lp2.Packages())
		sort.Strings(p2)
	}
	// Merge-walk both sorted package lists: names only in p2 are adds,
	// names only in p1 are removes.
	var i2next int
	for i1 := 0; i1 < len(p1); i1++ {
		pkg1 := p1[i1]
		var matched bool
		for i2 := i2next; i2 < len(p2); i2++ {
			pkg2 := p2[i2]
			switch strings.Compare(pkg1, pkg2) {
			case 0: // Found matching package
				matched = true
				i2next = i2 + 1 // Don't evaluate to this again
			case +1: // Found a new package
				add := StringDiff{Current: pkg2}
				diff.Packages = append(diff.Packages, add)
				i2next = i2 + 1 // Don't evaluate to this again
				continue // Keep looking for a match
			case -1: // Package has been removed (handled below)
				continue
			}
			break // Done evaluating this package, move onto the next
		}
		if !matched {
			diff.Packages = append(diff.Packages, StringDiff{Previous: pkg1})
		}
	}
	// Anything that still hasn't been evaluated are adds
	for i2 := i2next; i2 < len(p2); i2++ {
		pkg2 := p2[i2]
		add := StringDiff{Current: pkg2}
		diff.Packages = append(diff.Packages, add)
	}
	// Bug fix: include diff.Branch in the equivalence check. Previously a
	// branch-only change (e.g. unpaired "master" -> "dev", where revision
	// and version are both empty) populated diff.Branch but still returned
	// nil, silently hiding the difference.
	if diff.Source == nil && diff.Version == nil && diff.Revision == nil && diff.Branch == nil && len(diff.Packages) == 0 {
		return nil // The projects are equivalent
	}
	return &diff
}
// sortLockedProjects returns a copy of lps sorted by project identifier, or
// lps itself when it is already sorted (or has at most one element).
func sortLockedProjects(lps []gps.LockedProject) []gps.LockedProject {
	less := func(i, j int) bool {
		return lps[i].Ident().Less(lps[j].Ident())
	}
	if len(lps) <= 1 || sort.SliceIsSorted(lps, less) {
		return lps
	}
	// Sort a copy so the caller's slice is left untouched.
	sorted := make([]gps.LockedProject, len(lps))
	copy(sorted, lps)
	sort.Slice(sorted, func(i, j int) bool {
		return sorted[i].Ident().Less(sorted[j].Ident())
	})
	return sorted
}
|
fs
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/fs/rename.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !windows
package fs
import (
"os"
"syscall"
"github.com/pkg/errors"
)
// renameFallback attempts to determine the appropriate fallback to failed rename
// operation depending on the resulting error.
func renameFallback(err error, src, dst string) error {
	// Only a cross-device link error (EXDEV) can be worked around by
	// copying; every other error is surfaced to the caller.
	linkErr, ok := err.(*os.LinkError)
	if !ok {
		return err
	}
	if linkErr.Err != syscall.EXDEV {
		return errors.Wrapf(linkErr, "link error: cannot rename %s to %s", src, dst)
	}
	return renameByCopy(src, dst)
}
|
fs
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/fs/fs.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package fs
import (
"io"
"io/ioutil"
"os"
"path/filepath"
"runtime"
"strings"
"syscall"
"unicode"
"github.com/pkg/errors"
)
// HasFilepathPrefix will determine if "path" starts with "prefix" from
// the point of view of a filesystem.
//
// Unlike filepath.HasPrefix, this function is path-aware, meaning that
// it knows that two directories /foo and /foobar are not the same
// thing, and therefore HasFilepathPrefix("/foobar", "/foo") will return
// false.
//
// This function also handles the case where the involved filesystems
// are case-insensitive, meaning /foo/bar and /Foo/Bar correspond to the
// same file. In that situation HasFilepathPrefix("/Foo/Bar", "/foo")
// will return true. The implementation is *not* OS-specific, so a FAT32
// filesystem mounted on Linux will be handled correctly.
func HasFilepathPrefix(path, prefix string) (bool, error) {
	// this function is more convoluted then ideal due to need for special
	// handling of volume name/drive letter on Windows. vnPath and vnPrefix
	// are first compared, and then used to initialize initial values of p and
	// d which will be appended to for incremental checks using
	// IsCaseSensitiveFilesystem and then equality.
	// no need to check IsCaseSensitiveFilesystem because VolumeName return
	// empty string on all non-Windows machines
	vnPath := strings.ToLower(filepath.VolumeName(path))
	vnPrefix := strings.ToLower(filepath.VolumeName(prefix))
	if vnPath != vnPrefix {
		return false, nil
	}
	// Because filepath.Join("c:","dir") returns "c:dir", we have to manually
	// add path separator to drive letters. Also, we need to set the path root
	// on *nix systems, since filepath.Join("", "dir") returns a relative path.
	vnPath += string(os.PathSeparator)
	vnPrefix += string(os.PathSeparator)
	// Compare directories only: if path is a file, use its parent directory.
	var dn string
	if isDir, err := IsDir(path); err != nil {
		return false, errors.Wrap(err, "failed to check filepath prefix")
	} else if isDir {
		dn = path
	} else {
		dn = filepath.Dir(path)
	}
	dn = filepath.Clean(dn)
	prefix = filepath.Clean(prefix)
	// [1:] in the lines below eliminates empty string on *nix and volume name on Windows
	dirs := strings.Split(dn, string(os.PathSeparator))[1:]
	prefixes := strings.Split(prefix, string(os.PathSeparator))[1:]
	// A longer prefix can never be a prefix of a shorter path.
	if len(prefixes) > len(dirs) {
		return false, nil
	}
	// d,p are initialized with "/" on *nix and volume name on Windows
	d := vnPath
	p := vnPrefix
	for i := range prefixes {
		// need to test each component of the path for
		// case-sensitiveness because on Unix we could have
		// something like ext4 filesystem mounted on FAT
		// mountpoint, mounted on ext4 filesystem, i.e. the
		// problematic filesystem is not the last one.
		caseSensitive, err := IsCaseSensitiveFilesystem(filepath.Join(d, dirs[i]))
		if err != nil {
			return false, errors.Wrap(err, "failed to check filepath prefix")
		}
		// On a case-insensitive filesystem, compare the components
		// in a normalized (lowercased) form.
		if caseSensitive {
			d = filepath.Join(d, dirs[i])
			p = filepath.Join(p, prefixes[i])
		} else {
			d = filepath.Join(d, strings.ToLower(dirs[i]))
			p = filepath.Join(p, strings.ToLower(prefixes[i]))
		}
		if p != d {
			return false, nil
		}
	}
	return true, nil
}
// EquivalentPaths compares the paths passed to check if they are equivalent.
// It respects the case-sensitivity of the underlying filesystems.
func EquivalentPaths(p1, p2 string) (bool, error) {
	p1 = filepath.Clean(p1)
	p2 = filepath.Clean(p2)
	fi1, err := os.Stat(p1)
	if err != nil {
		return false, errors.Wrapf(err, "could not check for path equivalence")
	}
	fi2, err := os.Stat(p2)
	if err != nil {
		return false, errors.Wrapf(err, "could not check for path equivalence")
	}
	// When a path refers to a file, split off the filename so only the
	// directory components are compared with the prefix checks below.
	p1Filename, p2Filename := "", ""
	if !fi1.IsDir() {
		p1, p1Filename = filepath.Split(p1)
	}
	if !fi2.IsDir() {
		p2, p2Filename = filepath.Split(p2)
	}
	// The directories are equivalent iff each is a prefix of the other.
	if isPrefix1, err := HasFilepathPrefix(p1, p2); err != nil {
		return false, errors.Wrap(err, "failed to check for path equivalence")
	} else if isPrefix2, err := HasFilepathPrefix(p2, p1); err != nil {
		return false, errors.Wrap(err, "failed to check for path equivalence")
	} else if !isPrefix1 || !isPrefix2 {
		return false, nil
	}
	if p1Filename != "" || p2Filename != "" {
		// Compare the filename components, honoring the case-sensitivity
		// of the filesystem that contains them.
		caseSensitive, err := IsCaseSensitiveFilesystem(filepath.Join(p1, p1Filename))
		if err != nil {
			return false, errors.Wrap(err, "could not check for filesystem case-sensitivity")
		}
		if caseSensitive {
			if p1Filename != p2Filename {
				return false, nil
			}
		} else {
			if !strings.EqualFold(p1Filename, p2Filename) {
				return false, nil
			}
		}
	}
	return true, nil
}
// RenameWithFallback attempts to rename a file or directory, but falls back to
// copying in the event of a cross-device link error. If the fallback copy
// succeeds, src is still removed, emulating normal rename behavior.
func RenameWithFallback(src, dst string) error {
	if _, err := os.Stat(src); err != nil {
		return errors.Wrapf(err, "cannot stat %s", src)
	}
	if err := os.Rename(src, dst); err != nil {
		// Rename failed; let renameFallback decide whether a copy+delete
		// can stand in for it.
		return renameFallback(err, src, dst)
	}
	return nil
}
// renameByCopy attempts to rename a file or directory by copying it to the
// destination and then removing the src thus emulating the rename behavior.
func renameByCopy(src, dst string) error {
	isDir, _ := IsDir(src)
	var cerr error
	var what string
	if isDir {
		cerr = CopyDir(src, dst)
		what = "directory"
	} else {
		cerr = copyFile(src, dst)
		what = "file"
	}
	if cerr != nil {
		cerr = errors.Wrap(cerr, "copying "+what+" failed")
		return errors.Wrapf(cerr, "rename fallback failed: cannot rename %s to %s", src, dst)
	}
	// The copy succeeded; delete the source to complete the emulated rename.
	return errors.Wrapf(os.RemoveAll(src), "cannot delete %s", src)
}
// IsCaseSensitiveFilesystem determines if the filesystem where dir
// exists is case sensitive or not.
//
// CAVEAT: this function works by taking the last component of the given
// path and flipping the case of the first letter for which case
// flipping is a reversible operation (/foo/Bar → /foo/bar), then
// testing for the existence of the new filename. There are two
// possibilities:
//
// 1. The alternate filename does not exist. We can conclude that the
// filesystem is case sensitive.
//
// 2. The filename happens to exist. We have to test if the two files
// are the same file (case insensitive file system) or different ones
// (case sensitive filesystem).
//
// If the input directory is such that the last component is composed
// exclusively of case-less codepoints (e.g. numbers), this function will
// return false.
func IsCaseSensitiveFilesystem(dir string) (bool, error) {
	// alt is dir with the case of one rune in its last component flipped.
	alt := filepath.Join(filepath.Dir(dir), genTestFilename(filepath.Base(dir)))
	dInfo, err := os.Stat(dir)
	if err != nil {
		return false, errors.Wrap(err, "could not determine the case-sensitivity of the filesystem")
	}
	aInfo, err := os.Stat(alt)
	if err != nil {
		// If the file doesn't exists, assume we are on a case-sensitive filesystem.
		if os.IsNotExist(err) {
			return true, nil
		}
		return false, errors.Wrap(err, "could not determine the case-sensitivity of the filesystem")
	}
	// Both names resolve; they refer to the same file only on a
	// case-insensitive filesystem.
	return !os.SameFile(dInfo, aInfo), nil
}
// genTestFilename returns str with at most one rune case-flipped.
//
// Only the first rune whose case flip is reversible is changed:
// a lowercase rune r with lower(upper(r)) == r, or an uppercase rune r
// with upper(lower(r)) == r. All other runes pass through unchanged.
func genTestFilename(str string) string {
	flipped := false
	return strings.Map(func(r rune) rune {
		if flipped {
			return r
		}
		switch {
		case unicode.IsLower(r):
			if u := unicode.ToUpper(r); unicode.ToLower(u) == r {
				flipped = true
				return u
			}
		case unicode.IsUpper(r):
			if l := unicode.ToLower(r); unicode.ToUpper(l) == r {
				flipped = true
				return l
			}
		}
		return r
	}, str)
}
// errPathNotDir is returned when a directory was expected at the given path.
var errPathNotDir = errors.New("given path is not a directory")
// ReadActualFilenames is used to determine the actual file names in given directory.
//
// On case sensitive file systems like ext4, it will check if those files exist using
// `os.Stat` and return a map with key and value as filenames which exist in the folder.
//
// Otherwise, it reads the contents of the directory and returns a map which has the
// given file name as the key and actual filename as the value(if it was found).
//
// Names that are not found in the directory (even case-insensitively) have no
// entry in the returned map.
func ReadActualFilenames(dirPath string, names []string) (map[string]string, error) {
	actualFilenames := make(map[string]string, len(names))
	if len(names) == 0 {
		// This isn't expected to happen for current usage. Adding edge case handling,
		// as it may be useful in future.
		return actualFilenames, nil
	}
	// First, check that the given path is valid and it is a directory
	dirStat, err := os.Stat(dirPath)
	if err != nil {
		return nil, errors.Wrap(err, "failed to read actual filenames")
	}
	if !dirStat.IsDir() {
		return nil, errPathNotDir
	}
	// Ideally, we would use `os.Stat` for getting the actual file names but that returns
	// the name we passed in as an argument and not the actual filename. So we are forced
	// to list the directory contents and check against that. Since this check is costly,
	// we do it only if absolutely necessary.
	caseSensitive, err := IsCaseSensitiveFilesystem(dirPath)
	if err != nil {
		return nil, errors.Wrap(err, "failed to read actual filenames")
	}
	if caseSensitive {
		// There will be no difference between actual filename and given filename. So
		// just check if those files exist.
		for _, name := range names {
			_, err := os.Stat(filepath.Join(dirPath, name))
			if err == nil {
				actualFilenames[name] = name
			} else if !os.IsNotExist(err) {
				// Some unexpected err, wrap and return it.
				return nil, errors.Wrap(err, "failed to read actual filenames")
			}
		}
		return actualFilenames, nil
	}

	// Case-insensitive filesystem: list the directory and match names
	// case-insensitively against its contents.
	dir, err := os.Open(dirPath)
	if err != nil {
		return nil, errors.Wrap(err, "failed to read actual filenames")
	}
	defer dir.Close()

	// Pass -1 to read all filenames in directory
	filenames, err := dir.Readdirnames(-1)
	if err != nil {
		return nil, errors.Wrap(err, "failed to read actual filenames")
	}
	// namesMap holds the mapping from lowercase name to search name. Using this, we can
	// avoid repeatedly looping through names.
	namesMap := make(map[string]string, len(names))
	for _, name := range names {
		namesMap[strings.ToLower(name)] = name
	}
	for _, filename := range filenames {
		searchName, ok := namesMap[strings.ToLower(filename)]
		if ok {
			// We are interested in this file, case insensitive match successful.
			actualFilenames[searchName] = filename
			if len(actualFilenames) == len(names) {
				// We found all that we were looking for.
				return actualFilenames, nil
			}
		}
	}
	return actualFilenames, nil
}
var (
	// errSrcNotDir is returned by CopyDir when the source path is not a directory.
	errSrcNotDir = errors.New("source is not a directory")
	// errDstExist is returned by CopyDir when the destination path already exists.
	errDstExist = errors.New("destination already exists")
)
// CopyDir recursively copies a directory tree, attempting to preserve permissions.
// Source directory must exist, destination directory must *not* exist.
// Symlinked entries are handled by copyFile (cloned or, on Windows, copied by
// content).
func CopyDir(src, dst string) error {
	src = filepath.Clean(src)
	dst = filepath.Clean(dst)

	// We use os.Lstat() here to ensure we don't fall in a loop where a symlink
	// actually links to a one of its parent directories.
	fi, err := os.Lstat(src)
	if err != nil {
		return err
	}
	if !fi.IsDir() {
		return errSrcNotDir
	}

	_, err = os.Stat(dst)
	if err != nil && !os.IsNotExist(err) {
		return err
	}
	if err == nil {
		return errDstExist
	}

	if err = os.MkdirAll(dst, fi.Mode()); err != nil {
		return errors.Wrapf(err, "cannot mkdir %s", dst)
	}

	entries, err := ioutil.ReadDir(src)
	if err != nil {
		// BUGFIX: the directory being read is src, not dst; the previous
		// message reported the wrong path.
		return errors.Wrapf(err, "cannot read directory %s", src)
	}

	for _, entry := range entries {
		srcPath := filepath.Join(src, entry.Name())
		dstPath := filepath.Join(dst, entry.Name())

		if entry.IsDir() {
			if err = CopyDir(srcPath, dstPath); err != nil {
				return errors.Wrap(err, "copying directory failed")
			}
		} else {
			// This will include symlinks, which is what we want when
			// copying things.
			if err = copyFile(srcPath, dstPath); err != nil {
				return errors.Wrap(err, "copying file failed")
			}
		}
	}

	return nil
}
// copyFile copies the contents of the file named src to the file named
// by dst. The file will be created if it does not already exist. If the
// destination file exists, all its contents will be replaced by the contents
// of the source file. The file mode will be copied from the source.
//
// If src is a symlink, the link itself is cloned instead; on Windows, a
// failure to create the link due to missing privileges falls back to copying
// the file contents.
func copyFile(src, dst string) (err error) {
	// Note: this err is scoped to the if/else chain and does not touch the
	// named return value.
	if sym, err := IsSymlink(src); err != nil {
		return errors.Wrap(err, "symlink check failed")
	} else if sym {
		if err := cloneSymlink(src, dst); err != nil {
			if runtime.GOOS == "windows" {
				// If cloning the symlink fails on Windows because the user
				// does not have the required privileges, ignore the error and
				// fall back to copying the file contents.
				//
				// ERROR_PRIVILEGE_NOT_HELD is 1314 (0x522):
				// https://msdn.microsoft.com/en-us/library/windows/desktop/ms681385(v=vs.85).aspx
				if lerr, ok := err.(*os.LinkError); ok && lerr.Err != syscall.Errno(1314) {
					return err
				}
			} else {
				return err
			}
		} else {
			return nil
		}
	}

	in, err := os.Open(src)
	if err != nil {
		return
	}
	defer in.Close()

	out, err := os.Create(dst)
	if err != nil {
		return
	}

	if _, err = io.Copy(out, in); err != nil {
		out.Close()
		return
	}

	// Check for write errors on Close
	if err = out.Close(); err != nil {
		return
	}

	si, err := os.Stat(src)
	if err != nil {
		return
	}

	// Temporary fix for Go < 1.9
	//
	// See: https://github.com/golang/dep/issues/774
	// and https://github.com/golang/go/issues/20829
	if runtime.GOOS == "windows" {
		dst = fixLongPath(dst)
	}
	// Mirror the source file's permission bits onto the destination.
	err = os.Chmod(dst, si.Mode())

	return
}
// cloneSymlink will create a new symlink that points to the resolved path of sl.
// If sl is a relative symlink, dst will also be a relative symlink.
func cloneSymlink(sl, dst string) error {
resolved, err := os.Readlink(sl)
if err != nil {
return err
}
return os.Symlink(resolved, dst)
}
// EnsureDir tries to ensure that a directory is present at the given path. It first
// checks if the directory already exists at the given path. If there isn't one, it tries
// to create it with the given permissions. However, it does not try to create the
// directory recursively.
func EnsureDir(path string, perm os.FileMode) error {
	_, err := IsDir(path)
	if !os.IsNotExist(err) {
		// Either the directory exists (err == nil) or something other than
		// absence went wrong; in both cases, report err as-is.
		return err
	}
	if mkErr := os.Mkdir(path, perm); mkErr != nil {
		return errors.Wrapf(mkErr, "failed to ensure directory at %q", path)
	}
	return nil
}
// IsDir determines whether the given path refers to a directory. It returns
// an error if the path cannot be stat'ed or does not denote a directory.
func IsDir(name string) (bool, error) {
	fi, err := os.Stat(name)
	switch {
	case err != nil:
		return false, err
	case fi.IsDir():
		return true, nil
	default:
		return false, errors.Errorf("%q is not a directory", name)
	}
}
// IsNonEmptyDir determines if the path given is a non-empty directory or not.
// A missing path or a non-directory yields (false, nil); only unexpected
// filesystem errors are returned.
func IsNonEmptyDir(name string) (bool, error) {
	isDir, err := IsDir(name)
	if err != nil {
		if os.IsNotExist(err) {
			return false, nil
		}
		return false, err
	}
	if !isDir {
		return false, nil
	}

	// Open the directory and ask for a single entry; io.EOF means empty.
	f, err := os.Open(name)
	if err != nil {
		return false, err
	}
	defer f.Close()

	_, err = f.Readdirnames(1)
	if err == io.EOF {
		return false, nil
	}
	if err != nil {
		return false, err
	}
	return true, nil
}
// IsRegular determines if the path given is a regular file or not.
// A missing path yields (false, nil); a path of any other type (directory,
// symlink, device, ...) yields an error.
func IsRegular(name string) (bool, error) {
	fi, err := os.Stat(name)
	switch {
	case os.IsNotExist(err):
		return false, nil
	case err != nil:
		return false, err
	case fi.Mode()&os.ModeType != 0:
		return false, errors.Errorf("%q is a %v, expected a file", name, fi.Mode())
	}
	return true, nil
}
// IsSymlink determines if the given path is a symbolic link.
func IsSymlink(path string) (bool, error) {
l, err := os.Lstat(path)
if err != nil {
return false, err
}
return l.Mode()&os.ModeSymlink == os.ModeSymlink, nil
}
// fixLongPath returns the extended-length (\\?\-prefixed) form of
// path when needed, in order to avoid the default 260 character file
// path limit imposed by Windows. If path is not easily converted to
// the extended-length form (for example, if path is a relative path
// or contains .. elements), or is short enough, fixLongPath returns
// path unmodified.
//
// See https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx#maxpath
func fixLongPath(path string) string {
	// Do nothing (and don't allocate) if the path is "short".
	// Empirically (at least on the Windows Server 2013 builder),
	// the kernel is arbitrarily okay with < 248 bytes. That
	// matches what the docs above say:
	// "When using an API to create a directory, the specified
	// path cannot be so long that you cannot append an 8.3 file
	// name (that is, the directory name cannot exceed MAX_PATH
	// minus 12)." Since MAX_PATH is 260, 260 - 12 = 248.
	//
	// The MSDN docs appear to say that a normal path that is 248 bytes long
	// will work; empirically the path must be less than 248 bytes long.
	if len(path) < 248 {
		// Don't fix. (This is how Go 1.7 and earlier worked,
		// not automatically generating the \\?\ form)
		return path
	}

	// The extended form begins with \\?\, as in
	// \\?\c:\windows\foo.txt or \\?\UNC\server\share\foo.txt.
	// The extended form disables evaluation of . and .. path
	// elements and disables the interpretation of / as equivalent
	// to \. The conversion here rewrites / to \ and elides
	// . elements as well as trailing or duplicate separators. For
	// simplicity it avoids the conversion entirely for relative
	// paths or paths containing .. elements. For now,
	// \\server\share paths are not converted to
	// \\?\UNC\server\share paths because the rules for doing so
	// are less well-specified.
	if len(path) >= 2 && path[:2] == `\\` {
		// Don't canonicalize UNC paths.
		return path
	}
	if !isAbs(path) {
		// Relative path
		return path
	}

	const prefix = `\\?`

	// pathbuf is sized for the worst case: prefix + path + one extra
	// separator for a drive root.
	pathbuf := make([]byte, len(prefix)+len(path)+len(`\`))
	copy(pathbuf, prefix)
	n := len(path)
	r, w := 0, len(prefix)
	for r < n {
		switch {
		case os.IsPathSeparator(path[r]):
			// Skip separators here; the default case below emits exactly
			// one '\' per path component, which collapses duplicates.
			r++
		case path[r] == '.' && (r+1 == n || os.IsPathSeparator(path[r+1])):
			// Elide "." path elements (/./).
			r++
		case r+1 < n && path[r] == '.' && path[r+1] == '.' && (r+2 == n || os.IsPathSeparator(path[r+2])):
			// /../ is currently unhandled
			return path
		default:
			// Copy one component, preceded by a single backslash.
			pathbuf[w] = '\\'
			w++
			for ; r < n && !os.IsPathSeparator(path[r]); r++ {
				pathbuf[w] = path[r]
				w++
			}
		}
	}
	// A drive's root directory needs a trailing \
	if w == len(`\\?\c:`) {
		pathbuf[w] = '\\'
		w++
	}
	return string(pathbuf[:w])
}
// isAbs reports whether the given Windows path is absolute: it must begin
// with a volume name (drive letter or UNC share) followed by a path
// separator.
func isAbs(path string) (b bool) {
	vol := volumeName(path)
	if vol == "" {
		return false
	}
	rest := path[len(vol):]
	return rest != "" && os.IsPathSeparator(rest[0])
}
// volumeName returns the leading volume name of a Windows path: either a
// drive-letter prefix such as "C:" or a UNC "\\server\share" prefix. It
// returns "" when path carries no volume component.
func volumeName(path string) (v string) {
	if len(path) < 2 {
		return ""
	}
	// with drive letter
	c := path[0]
	if path[1] == ':' &&
		('0' <= c && c <= '9' || 'a' <= c && c <= 'z' ||
			'A' <= c && c <= 'Z') {
		return path[:2]
	}
	// is it UNC
	if l := len(path); l >= 5 && os.IsPathSeparator(path[0]) && os.IsPathSeparator(path[1]) &&
		!os.IsPathSeparator(path[2]) && path[2] != '.' {
		// first, a leading `\\` and the next byte must not be `\`:
		// that's the server name.
		for n := 3; n < l-1; n++ {
			// second, the next '\' must not be repeated.
			if os.IsPathSeparator(path[n]) {
				n++
				// third, more characters must follow: that's the share name.
				if !os.IsPathSeparator(path[n]) {
					if path[n] == '.' {
						break
					}
					// Consume the share name up to the next separator.
					for ; n < l; n++ {
						if os.IsPathSeparator(path[n]) {
							break
						}
					}
					return path[:n]
				}
				break
			}
		}
	}
	return ""
}
|
fs
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/fs/rename_windows.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build windows
package fs
import (
"os"
"syscall"
"github.com/pkg/errors"
)
// renameFallback attempts to determine the appropriate fallback to failed rename
// operation depending on the resulting error.
func renameFallback(err error, src, dst string) error {
	// Rename may fail if src and dst are on different devices; fall back to
	// copy if we detect that case. syscall.EXDEV is the common name for the
	// cross device link error which has varying output text across different
	// operating systems.
	lerr, ok := err.(*os.LinkError)
	if !ok {
		return err
	}

	if lerr.Err != syscall.EXDEV {
		// In windows it can drop down to an operating system call that
		// returns an operating system error with a different number and
		// message. Checking for that as a fall back.
		//
		// 0x11 (ERROR_NOT_SAME_DEVICE) is the windows error.
		// See https://msdn.microsoft.com/en-us/library/cc231199.aspx
		errno, isErrno := lerr.Err.(syscall.Errno)
		if isErrno && errno != 0x11 {
			return errors.Wrapf(lerr, "link error: cannot rename %s to %s", src, dst)
		}
	}

	return renameByCopy(src, dst)
}
|
fs
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/fs/fs_test.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package fs
import (
"io/ioutil"
"os"
"path/filepath"
"reflect"
"runtime"
"strings"
"testing"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// This function tests HasFilepathPrefix. It should test it on both case
// sensitive and insensitive situations. However, the only reliable way to test
// case-insensitive behaviour is if using case-insensitive filesystem. This
// cannot be guaranteed in an automated test. Therefore, the behaviour of the
// tests is not to test case sensitivity on *nix and to assume that Windows is
// case-insensitive. Please see link below for some background.
//
// https://superuser.com/questions/266110/how-do-you-make-windows-7-fully-case-sensitive-with-respect-to-the-filesystem
//
// NOTE: NTFS can be made case-sensitive. However many Windows programs,
// including Windows Explorer do not handle gracefully multiple files that
// differ only in capitalization. It is possible that this can cause these tests
// to fail on some setups.
func TestHasFilepathPrefix(t *testing.T) {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	// dir2 is the same as dir but with different capitalization on Windows to
	// test case insensitivity
	var dir2 string
	if runtime.GOOS == "windows" {
		dir = strings.ToLower(dir)
		dir2 = strings.ToUpper(dir)
	} else {
		dir2 = dir
	}

	// For testing trailing and repeated separators
	sep := string(os.PathSeparator)

	cases := []struct {
		path   string
		prefix string
		want   bool
	}{
		{filepath.Join(dir, "a", "b"), filepath.Join(dir2), true},
		{filepath.Join(dir, "a", "b"), dir2 + sep + sep + "a", true},
		{filepath.Join(dir, "a", "b"), filepath.Join(dir2, "a") + sep, true},
		{filepath.Join(dir, "a", "b") + sep, filepath.Join(dir2), true},
		{dir + sep + sep + filepath.Join("a", "b"), filepath.Join(dir2, "a"), true},
		{filepath.Join(dir, "a", "b"), filepath.Join(dir2, "a"), true},
		{filepath.Join(dir, "a", "b"), filepath.Join(dir2, "a", "b"), true},
		{filepath.Join(dir, "a", "b"), filepath.Join(dir2, "c"), false},
		{filepath.Join(dir, "a", "b"), filepath.Join(dir2, "a", "d", "b"), false},
		{filepath.Join(dir, "a", "b"), filepath.Join(dir2, "a", "b2"), false},
		{filepath.Join(dir), filepath.Join(dir2, "a", "b"), false},
		{filepath.Join(dir, "ab"), filepath.Join(dir2, "a", "b"), false},
		{filepath.Join(dir, "ab"), filepath.Join(dir2, "a"), false},
		{filepath.Join(dir, "123"), filepath.Join(dir2, "123"), true},
		{filepath.Join(dir, "123"), filepath.Join(dir2, "1"), false},
		{filepath.Join(dir, "⌘"), filepath.Join(dir2, "⌘"), true},
		{filepath.Join(dir, "a"), filepath.Join(dir2, "⌘"), false},
		{filepath.Join(dir, "⌘"), filepath.Join(dir2, "a"), false},
	}

	for _, c := range cases {
		// Both paths must exist on disk: HasFilepathPrefix resolves real
		// directory entries, not just strings.
		if err := os.MkdirAll(c.path, 0755); err != nil {
			t.Fatal(err)
		}
		if err = os.MkdirAll(c.prefix, 0755); err != nil {
			t.Fatal(err)
		}

		got, err := HasFilepathPrefix(c.path, c.prefix)
		if err != nil {
			t.Fatalf("unexpected error: %s", err)
		}
		if c.want != got {
			t.Fatalf("dir: %q, prefix: %q, expected: %v, got: %v", c.path, c.prefix, c.want, got)
		}
	}
}
// This function tests HasFilepathPrefix with file arguments. It should test it
// on both case sensitive and insensitive situations. However, the only
// reliable way to test case-insensitive behaviour is if using
// case-insensitive filesystem. This cannot be guaranteed in an automated
// test. Therefore, the behaviour of the tests is not to test case sensitivity
// on *nix and to assume that Windows is case-insensitive. Please see link
// below for some background.
//
// https://superuser.com/questions/266110/how-do-you-make-windows-7-fully-case-sensitive-with-respect-to-the-filesystem
//
// NOTE: NTFS can be made case-sensitive. However many Windows programs,
// including Windows Explorer do not handle gracefully multiple files that
// differ only in capitalization. It is possible that this can cause these tests
// to fail on some setups.
func TestHasFilepathPrefix_Files(t *testing.T) {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	// dir2 is the same as dir but with different capitalization on Windows to
	// test case insensitivity
	var dir2 string
	if runtime.GOOS == "windows" {
		dir = strings.ToLower(dir)
		dir2 = strings.ToUpper(dir)
	} else {
		dir2 = dir
	}

	existingFile := filepath.Join(dir, "exists")
	if err = os.MkdirAll(existingFile, 0755); err != nil {
		t.Fatal(err)
	}

	nonExistingFile := filepath.Join(dir, "does_not_exists")

	cases := []struct {
		path   string
		prefix string
		want   bool
		err    bool
	}{
		{existingFile, filepath.Join(dir2), true, false},
		{nonExistingFile, filepath.Join(dir2), false, true},
	}

	for _, c := range cases {
		got, err := HasFilepathPrefix(c.path, c.prefix)
		if err != nil && !c.err {
			t.Fatalf("unexpected error: %s", err)
		}
		if c.want != got {
			t.Fatalf("dir: %q, prefix: %q, expected: %v, got: %v", c.path, c.prefix, c.want, got)
		}
	}
}
// TestEquivalentPaths checks that EquivalentPaths identifies pairs of paths
// pointing at the same file or directory, with expectations that differ by
// the filesystem's case-sensitivity.
func TestEquivalentPaths(t *testing.T) {
	h := test.NewHelper(t)
	// Consistency fix: every other test using the helper cleans up after
	// itself; this one previously leaked its temp files.
	defer h.Cleanup()
	h.TempDir("dir")
	h.TempDir("dir2")
	h.TempFile("file", "")
	h.TempFile("file2", "")
	h.TempDir("DIR")
	h.TempFile("FILE", "")

	testcases := []struct {
		p1, p2 string
		// Typo fix: field was previously misspelled "caseInensitiveEquivalent".
		caseSensitiveEquivalent   bool
		caseInsensitiveEquivalent bool
		err                       bool
	}{
		{h.Path("dir"), h.Path("dir"), true, true, false},
		{h.Path("file"), h.Path("file"), true, true, false},
		{h.Path("dir"), h.Path("dir2"), false, false, false},
		{h.Path("file"), h.Path("file2"), false, false, false},
		{h.Path("dir"), h.Path("file"), false, false, false},
		{h.Path("dir"), h.Path("DIR"), false, true, false},
		{strings.ToLower(h.Path("dir")), strings.ToUpper(h.Path("dir")), false, true, true},
	}
	caseSensitive, err := IsCaseSensitiveFilesystem(h.Path("dir"))
	if err != nil {
		// Typo fix: message previously read "unexpcted error".
		t.Fatal("unexpected error:", err)
	}
	for _, tc := range testcases {
		got, err := EquivalentPaths(tc.p1, tc.p2)
		if err != nil && !tc.err {
			t.Error("unexpected error:", err)
		}
		if caseSensitive {
			if tc.caseSensitiveEquivalent != got {
				t.Errorf("expected EquivalentPaths(%q, %q) to be %t on case-sensitive filesystem, got %t", tc.p1, tc.p2, tc.caseSensitiveEquivalent, got)
			}
		} else {
			if tc.caseInsensitiveEquivalent != got {
				t.Errorf("expected EquivalentPaths(%q, %q) to be %t on case-insensitive filesystem, got %t", tc.p1, tc.p2, tc.caseInsensitiveEquivalent, got)
			}
		}
	}
}
// TestRenameWithFallback covers the three interesting cases: a missing
// source (error), a plain file rename (success), and renaming a directory
// over an existing directory (error).
func TestRenameWithFallback(t *testing.T) {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	if err = RenameWithFallback(filepath.Join(dir, "does_not_exists"), filepath.Join(dir, "dst")); err == nil {
		t.Fatal("expected an error for non existing file, but got nil")
	}

	srcpath := filepath.Join(dir, "src")

	if srcf, err := os.Create(srcpath); err != nil {
		t.Fatal(err)
	} else {
		srcf.Close()
	}

	if err = RenameWithFallback(srcpath, filepath.Join(dir, "dst")); err != nil {
		t.Fatal(err)
	}

	srcpath = filepath.Join(dir, "a")
	if err = os.MkdirAll(srcpath, 0777); err != nil {
		t.Fatal(err)
	}

	dstpath := filepath.Join(dir, "b")
	if err = os.MkdirAll(dstpath, 0777); err != nil {
		t.Fatal(err)
	}

	if err = RenameWithFallback(srcpath, dstpath); err == nil {
		t.Fatal("expected an error if dst is an existing directory, but got nil")
	}
}
// TestIsCaseSensitiveFilesystem checks the detector against the default
// filesystems of the three platforms where the expectation is known.
func TestIsCaseSensitiveFilesystem(t *testing.T) {
	switch runtime.GOOS {
	case "linux", "windows", "darwin":
		// supported below
	default:
		t.Skip("Run this test on Windows, Linux and macOS only")
	}

	dir, err := ioutil.TempDir("", "TestCaseSensitivity")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	// Of the supported platforms, only Linux defaults to a case-sensitive
	// filesystem.
	want := runtime.GOOS == "linux"

	got, err := IsCaseSensitiveFilesystem(dir)
	if err != nil {
		t.Fatalf("unexpected error message: \n\t(GOT) %+v", err)
	}
	if want != got {
		t.Fatalf("unexpected value returned: \n\t(GOT) %t\n\t(WNT) %t", got, want)
	}
}
// TestReadActualFilenames exercises ReadActualFilenames' error cases and its
// case-insensitive filename resolution.
func TestReadActualFilenames(t *testing.T) {
	// We are trying to skip this test on file systems which are case-sensitive. We could
	// have used `fs.IsCaseSensitiveFilesystem` for this check. However, the code we are
	// testing also relies on `fs.IsCaseSensitiveFilesystem`. So a bug in
	// `fs.IsCaseSensitiveFilesystem` could prevent this test from being run. This is the
	// only scenario where we prefer the OS heuristic over doing the actual work of
	// validating filesystem case sensitivity via `fs.IsCaseSensitiveFilesystem`.
	if runtime.GOOS != "windows" && runtime.GOOS != "darwin" {
		t.Skip("skip this test on non-Windows, non-macOS")
	}

	h := test.NewHelper(t)
	defer h.Cleanup()
	h.TempDir("")

	tmpPath := h.Path(".")

	// First, check the scenarios for which we expect an error.
	_, err := ReadActualFilenames(filepath.Join(tmpPath, "does_not_exists"), []string{""})
	switch {
	case err == nil:
		t.Fatal("expected err for non-existing folder")
	// use `errors.Cause` because the error is wrapped and returned
	case !os.IsNotExist(errors.Cause(err)):
		t.Fatalf("unexpected error: %+v", err)
	}
	h.TempFile("tmpFile", "")
	_, err = ReadActualFilenames(h.Path("tmpFile"), []string{""})
	switch {
	case err == nil:
		t.Fatal("expected err for passing file instead of directory")
	case err != errPathNotDir:
		t.Fatalf("unexpected error: %+v", err)
	}

	cases := []struct {
		createFiles []string
		names       []string
		want        map[string]string
	}{
		// If we supply no filenames to the function, it should return an empty map.
		{nil, nil, map[string]string{}},
		// If the directory contains the given file with different case, it should return
		// a map which has the given filename as the key and actual filename as the value.
		{
			[]string{"test1.txt"},
			[]string{"Test1.txt"},
			map[string]string{"Test1.txt": "test1.txt"},
		},
		// 1. If the given filename is same as the actual filename, map should have the
		//    same key and value for the file.
		// 2. If the given filename is present with different case for file extension,
		//    it should return a map which has the given filename as the key and actual
		//    filename as the value.
		// 3. If the given filename is not present even with a different case, the map
		//    returned should not have an entry for that filename.
		{
			[]string{"test2.txt", "test3.TXT"},
			[]string{"test2.txt", "Test3.txt", "Test4.txt"},
			map[string]string{
				"test2.txt": "test2.txt",
				"Test3.txt": "test3.TXT",
			},
		},
	}
	for _, c := range cases {
		for _, file := range c.createFiles {
			h.TempFile(file, "")
		}
		got, err := ReadActualFilenames(tmpPath, c.names)
		if err != nil {
			t.Fatalf("unexpected error: %+v", err)
		}
		if !reflect.DeepEqual(c.want, got) {
			t.Fatalf("returned value does not match expected: \n\t(GOT) %v\n\t(WNT) %v",
				got, c.want)
		}
	}
}
// TestGenTestFilename verifies that exactly one rune is case-flipped, and
// that strings with no reversibly-flippable rune pass through unchanged.
func TestGenTestFilename(t *testing.T) {
	cases := []struct {
		in, want string
	}{
		{"abc", "Abc"},
		{"ABC", "aBC"},
		{"AbC", "abC"},
		{"αβγ", "Αβγ"},
		{"123", "123"},
		{"1a2", "1A2"},
		{"12a", "12A"},
		{"⌘", "⌘"},
	}

	for _, c := range cases {
		if got := genTestFilename(c.in); got != c.want {
			t.Fatalf("str: %q, expected: %q, got: %q", c.in, c.want, got)
		}
	}
}
// BenchmarkGenTestFilename measures genTestFilename over a spread of inputs:
// all-flippable, all-uppercase, non-ASCII, and caseless strings.
func BenchmarkGenTestFilename(b *testing.B) {
	inputs := []string{
		strings.Repeat("a", 128),
		strings.Repeat("A", 128),
		strings.Repeat("α", 128),
		strings.Repeat("1", 128),
		strings.Repeat("⌘", 128),
	}

	for i := 0; i < b.N; i++ {
		for _, in := range inputs {
			genTestFilename(in)
		}
	}
}
// TestCopyDir creates a small tree, copies it with CopyDir, and verifies the
// copied tree's structure, contents, and file modes.
func TestCopyDir(t *testing.T) {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	srcdir := filepath.Join(dir, "src")
	if err := os.MkdirAll(srcdir, 0755); err != nil {
		t.Fatal(err)
	}

	files := []struct {
		path     string
		contents string
		fi       os.FileInfo
	}{
		{path: "myfile", contents: "hello world"},
		{path: filepath.Join("subdir", "file"), contents: "subdir file"},
	}

	// Create structure indicated in 'files'
	for i, file := range files {
		fn := filepath.Join(srcdir, file.path)
		dn := filepath.Dir(fn)
		if err = os.MkdirAll(dn, 0755); err != nil {
			t.Fatal(err)
		}

		fh, err := os.Create(fn)
		if err != nil {
			t.Fatal(err)
		}

		if _, err = fh.Write([]byte(file.contents)); err != nil {
			t.Fatal(err)
		}
		fh.Close()

		files[i].fi, err = os.Stat(fn)
		if err != nil {
			t.Fatal(err)
		}
	}

	destdir := filepath.Join(dir, "dest")
	if err := CopyDir(srcdir, destdir); err != nil {
		t.Fatal(err)
	}

	// Compare copy against structure indicated in 'files'
	for _, file := range files {
		// BUGFIX: inspect the copy under destdir; the test previously
		// joined against srcdir and so only re-checked the source tree.
		fn := filepath.Join(destdir, file.path)
		dn := filepath.Dir(fn)
		dirOK, err := IsDir(dn)
		if err != nil {
			t.Fatal(err)
		}
		if !dirOK {
			t.Fatalf("expected %s to be a directory", dn)
		}

		got, err := ioutil.ReadFile(fn)
		if err != nil {
			t.Fatal(err)
		}

		if file.contents != string(got) {
			t.Fatalf("expected: %s, got: %s", file.contents, string(got))
		}

		gotinfo, err := os.Stat(fn)
		if err != nil {
			t.Fatal(err)
		}

		if file.fi.Mode() != gotinfo.Mode() {
			t.Fatalf("expected %s: %#v\n to be the same mode as %s: %#v",
				file.path, file.fi.Mode(), fn, gotinfo.Mode())
		}
	}
}
// TestCopyDirFail_SrcInaccessible verifies CopyDir fails when the source
// directory cannot be read.
func TestCopyDirFail_SrcInaccessible(t *testing.T) {
	if runtime.GOOS == "windows" {
		// XXX: setting permissions works differently in
		// Microsoft Windows. Skipping this until a
		// compatible implementation is provided.
		t.Skip("skipping on windows")
	}

	var srcdir, dstdir string

	cleanup := setupInaccessibleDir(t, func(dir string) error {
		srcdir = filepath.Join(dir, "src")
		return os.MkdirAll(srcdir, 0755)
	})
	defer cleanup()

	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	dstdir = filepath.Join(dir, "dst")
	if err = CopyDir(srcdir, dstdir); err == nil {
		t.Fatalf("expected error for CopyDir(%s, %s), got none", srcdir, dstdir)
	}
}
// TestCopyDirFail_DstInaccessible verifies CopyDir fails when the destination
// would live inside an unreachable directory.
func TestCopyDirFail_DstInaccessible(t *testing.T) {
	if runtime.GOOS == "windows" {
		// XXX: setting permissions works differently in
		// Microsoft Windows. Skipping this until a
		// compatible implementation is provided.
		t.Skip("skipping on windows")
	}

	var srcdir, dstdir string

	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	srcdir = filepath.Join(dir, "src")
	if err = os.MkdirAll(srcdir, 0755); err != nil {
		t.Fatal(err)
	}

	// dst is only named here, not created; the parent becomes inaccessible.
	cleanup := setupInaccessibleDir(t, func(dir string) error {
		dstdir = filepath.Join(dir, "dst")
		return nil
	})
	defer cleanup()

	if err := CopyDir(srcdir, dstdir); err == nil {
		t.Fatalf("expected error for CopyDir(%s, %s), got none", srcdir, dstdir)
	}
}
// TestCopyDirFail_SrcIsNotDir verifies CopyDir returns errSrcNotDir when the
// source path is a plain file.
func TestCopyDirFail_SrcIsNotDir(t *testing.T) {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	// Create src as a regular file rather than a directory.
	srcdir := filepath.Join(dir, "src")
	if _, err = os.Create(srcdir); err != nil {
		t.Fatal(err)
	}

	dstdir := filepath.Join(dir, "dst")
	err = CopyDir(srcdir, dstdir)
	if err == nil {
		t.Fatalf("expected error for CopyDir(%s, %s), got none", srcdir, dstdir)
	}
	if err != errSrcNotDir {
		t.Fatalf("expected %v error for CopyDir(%s, %s), got %s", errSrcNotDir, srcdir, dstdir, err)
	}
}
// TestCopyDirFail_DstExists verifies CopyDir returns errDstExist when the
// destination directory is already present.
func TestCopyDirFail_DstExists(t *testing.T) {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	// Pre-create both src and dst; CopyDir must refuse to overwrite dst.
	srcdir := filepath.Join(dir, "src")
	if err = os.MkdirAll(srcdir, 0755); err != nil {
		t.Fatal(err)
	}
	dstdir := filepath.Join(dir, "dst")
	if err = os.MkdirAll(dstdir, 0755); err != nil {
		t.Fatal(err)
	}

	err = CopyDir(srcdir, dstdir)
	if err == nil {
		t.Fatalf("expected error for CopyDir(%s, %s), got none", srcdir, dstdir)
	}
	if err != errDstExist {
		t.Fatalf("expected %v error for CopyDir(%s, %s), got %s", errDstExist, srcdir, dstdir, err)
	}
}
// TestCopyDirFailOpen verifies CopyDir fails when a file in the source tree
// cannot be opened for reading.
func TestCopyDirFailOpen(t *testing.T) {
	if runtime.GOOS == "windows" {
		// XXX: setting permissions works differently in
		// Microsoft Windows. os.Chmod(..., 0222) below is not
		// enough for the file to be readonly, and os.Chmod(...,
		// 0000) returns an invalid argument error. Skipping
		// this until a compatible implementation is
		// provided.
		t.Skip("skipping on windows")
	}

	var srcdir, dstdir string

	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	srcdir = filepath.Join(dir, "src")
	if err = os.MkdirAll(srcdir, 0755); err != nil {
		t.Fatal(err)
	}

	srcfn := filepath.Join(srcdir, "file")
	srcf, err := os.Create(srcfn)
	if err != nil {
		t.Fatal(err)
	}
	srcf.Close()

	// setup source file so that it cannot be read (write-only bits)
	if err = os.Chmod(srcfn, 0222); err != nil {
		t.Fatal(err)
	}

	dstdir = filepath.Join(dir, "dst")
	if err = CopyDir(srcdir, dstdir); err == nil {
		t.Fatalf("expected error for CopyDir(%s, %s), got none", srcdir, dstdir)
	}
}
// TestCopyFile verifies that copyFile reproduces both the contents and the
// file mode of a regular file.
func TestCopyFile(t *testing.T) {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)

	srcf, err := os.Create(filepath.Join(dir, "srcfile"))
	if err != nil {
		t.Fatal(err)
	}

	want := "hello world"
	if _, err := srcf.Write([]byte(want)); err != nil {
		t.Fatal(err)
	}
	srcf.Close()

	destf := filepath.Join(dir, "destf")
	if err := copyFile(srcf.Name(), destf); err != nil {
		t.Fatal(err)
	}

	got, err := ioutil.ReadFile(destf)
	if err != nil {
		t.Fatal(err)
	}

	if want != string(got) {
		t.Fatalf("expected: %s, got: %s", want, string(got))
	}

	wantinfo, err := os.Stat(srcf.Name())
	if err != nil {
		t.Fatal(err)
	}

	gotinfo, err := os.Stat(destf)
	if err != nil {
		t.Fatal(err)
	}

	if wantinfo.Mode() != gotinfo.Mode() {
		t.Fatalf("expected %s: %#v\n to be the same mode as %s: %#v", srcf.Name(), wantinfo.Mode(), destf, gotinfo.Mode())
	}
}
// TestCopyFileSymlink verifies that copyFile clones symlinks from testdata
// (including an intentionally dangling one); on Windows it instead checks the
// content-copy fallback.
func TestCopyFileSymlink(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()
	h.TempDir(".")

	testcases := map[string]string{
		filepath.Join("./testdata/symlinks/file-symlink"):         filepath.Join(h.Path("."), "dst-file"),
		filepath.Join("./testdata/symlinks/windows-file-symlink"): filepath.Join(h.Path("."), "windows-dst-file"),
		filepath.Join("./testdata/symlinks/invalid-symlink"):      filepath.Join(h.Path("."), "invalid-symlink"),
	}

	for symlink, dst := range testcases {
		t.Run(symlink, func(t *testing.T) {
			var err error
			if err = copyFile(symlink, dst); err != nil {
				t.Fatalf("failed to copy symlink: %s", err)
			}

			var want, got string

			if runtime.GOOS == "windows" {
				// Creating symlinks on Windows require an additional permission
				// regular users aren't granted usually. So we copy the file
				// content as a fall back instead of creating a real symlink.
				srcb, err := ioutil.ReadFile(symlink)
				h.Must(err)
				dstb, err := ioutil.ReadFile(dst)
				h.Must(err)

				want = string(srcb)
				got = string(dstb)
			} else {
				// On other platforms the copy must be a symlink resolving to
				// the same target as the original.
				want, err = os.Readlink(symlink)
				h.Must(err)

				got, err = os.Readlink(dst)
				if err != nil {
					t.Fatalf("could not resolve symlink: %s", err)
				}
			}

			if want != got {
				t.Fatalf("resolved path is incorrect. expected %s, got %s", want, got)
			}
		})
	}
}
// TestCopyFileLongFilePath checks the fixLongPath workaround for
// golang/dep#774: os.Chmod on Windows fails for paths longer than MAX_PATH
// unless the \\?\ extended-length form is used.
func TestCopyFileLongFilePath(t *testing.T) {
	if runtime.GOOS != "windows" {
		// We want to ensure the temporary fix actually fixes the issue with
		// os.Chmod and long file paths. This is only applicable on Windows.
		t.Skip("skipping on non-windows")
	}

	h := test.NewHelper(t)
	h.TempDir(".")
	defer h.Cleanup()

	tmpPath := h.Path(".")

	// Create a directory with a long-enough path name to cause the bug in #774.
	dirName := ""
	for len(tmpPath+string(os.PathSeparator)+dirName) <= 300 {
		dirName += "directory"
	}

	h.TempDir(dirName)
	h.TempFile(dirName+string(os.PathSeparator)+"src", "")

	tmpDirPath := tmpPath + string(os.PathSeparator) + dirName + string(os.PathSeparator)

	err := copyFile(tmpDirPath+"src", tmpDirPath+"dst")
	if err != nil {
		t.Fatalf("unexpected error while copying file: %v", err)
	}
}
// Example of a path long enough to trigger the issue above, taken from an
// AppVeyor run of TestCopyFileLongFilePath:
// C:\Users\appveyor\AppData\Local\Temp\1\gotest639065787\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890\dir4567890
// TestCopyFileFail checks that copyFile returns an error when the
// destination file cannot be created (its parent directory is unreadable).
func TestCopyFileFail(t *testing.T) {
	if runtime.GOOS == "windows" {
		// XXX: setting permissions works differently in
		// Microsoft Windows. Skipping this until a
		// compatible implementation is provided.
		t.Skip("skipping on windows")
	}
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	srcf, err := os.Create(filepath.Join(dir, "srcfile"))
	if err != nil {
		t.Fatal(err)
	}
	srcf.Close()
	var dstdir string
	cleanup := setupInaccessibleDir(t, func(dir string) error {
		// The destination directory sits inside a directory that becomes
		// inaccessible once setupInaccessibleDir returns.
		dstdir = filepath.Join(dir, "dir")
		return os.Mkdir(dstdir, 0777)
	})
	defer cleanup()
	fn := filepath.Join(dstdir, "file")
	if err := copyFile(srcf.Name(), fn); err == nil {
		t.Fatalf("expected error for %s, got none", fn)
	}
}
// setupInaccessibleDir creates a temporary location with a single
// directory in it, in such a way that that directory is not accessible
// after this function returns.
//
// op is called with the directory as argument, so that it can create
// files or other test artifacts.
//
// If setupInaccessibleDir fails in its preparation, or op fails, t.Fatal
// will be invoked.
//
// This function returns a cleanup function that removes all the temporary
// files this function creates. It is the caller's responsibility to call
// this function before the test is done running, whether there's an error or not.
func setupInaccessibleDir(t *testing.T, op func(dir string) error) func() {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
		return nil // keep compiler happy
	}
	subdir := filepath.Join(dir, "dir")
	// cleanup first restores permissions so RemoveAll can descend into subdir.
	cleanup := func() {
		if err := os.Chmod(subdir, 0777); err != nil {
			t.Error(err)
		}
		if err := os.RemoveAll(dir); err != nil {
			t.Error(err)
		}
	}
	if err := os.Mkdir(subdir, 0777); err != nil {
		cleanup()
		t.Fatal(err)
		return nil
	}
	if err := op(subdir); err != nil {
		cleanup()
		t.Fatal(err)
		return nil
	}
	// Dropping the execute bit (0666) makes subdir's contents unreachable
	// on POSIX systems, which is what "inaccessible" means here.
	if err := os.Chmod(subdir, 0666); err != nil {
		cleanup()
		t.Fatal(err)
		return nil
	}
	return cleanup
}
// TestEnsureDir exercises EnsureDir against an existing dir, a creatable
// dir, a missing parent, a plain file, and an inaccessible path.
func TestEnsureDir(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()
	h.TempDir(".")
	h.TempFile("file", "")
	tmpPath := h.Path(".")
	var dn string
	cleanup := setupInaccessibleDir(t, func(dir string) error {
		dn = filepath.Join(dir, "dir")
		return os.Mkdir(dn, 0777)
	})
	defer cleanup()
	// Map of path to whether EnsureDir is expected to succeed for it.
	tests := map[string]bool{
		// [success] A dir already exists for the given path.
		tmpPath: true,
		// [success] Dir does not exist but parent dir exists, so should get created.
		filepath.Join(tmpPath, "testdir"): true,
		// [failure] Dir and parent dir do not exist, should return an error.
		filepath.Join(tmpPath, "notexist", "testdir"): false,
		// [failure] Regular file present at given path.
		h.Path("file"): false,
		// [failure] Path inaccessible.
		dn: false,
	}
	if runtime.GOOS == "windows" {
		// This test doesn't work on Microsoft Windows because
		// of the differences in how file permissions are
		// implemented. For this to work, the directory where
		// the directory exists should be inaccessible.
		delete(tests, dn)
	}
	for path, shouldEnsure := range tests {
		err := EnsureDir(path, 0777)
		if shouldEnsure {
			if err != nil {
				t.Fatalf("unexpected error %q for %q", err, path)
			} else if ok, err := IsDir(path); !ok {
				// Fixed: the old separate t.Fatal(err) after t.Fatalf was
				// unreachable; fold the IsDir error into one message.
				t.Fatalf("expected directory to be present at %q: %v", path, err)
			}
		} else if err == nil {
			t.Fatalf("expected error for path %q, got none", path)
		}
	}
}
// TestIsRegular checks IsRegular against directories, a regular file, a
// missing path, and an inaccessible file.
func TestIsRegular(t *testing.T) {
	wd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	var fn string
	cleanup := setupInaccessibleDir(t, func(dir string) error {
		fn = filepath.Join(dir, "file")
		fh, err := os.Create(fn)
		if err != nil {
			return err
		}
		return fh.Close()
	})
	defer cleanup()
	// exists: expected IsRegular result; err: whether an error is expected.
	tests := map[string]struct {
		exists bool
		err    bool
	}{
		wd:                            {false, true},
		filepath.Join(wd, "testdata"): {false, true},
		filepath.Join(wd, "testdata", "test.file"):          {true, false},
		filepath.Join(wd, "this_file_does_not_exist.thing"): {false, false},
		fn: {false, true},
	}
	if runtime.GOOS == "windows" {
		// This test doesn't work on Microsoft Windows because
		// of the differences in how file permissions are
		// implemented. For this to work, the directory where
		// the file exists should be inaccessible.
		delete(tests, fn)
	}
	for f, want := range tests {
		got, err := IsRegular(f)
		if err != nil {
			if want.exists != got {
				t.Fatalf("expected %t for %s, got %t", want.exists, f, got)
			}
			if !want.err {
				t.Fatalf("expected no error, got %v", err)
			}
		} else {
			if want.err {
				t.Fatalf("expected error for %s, got none", f)
			}
		}
		if got != want.exists {
			// Fixed: formerly printed the whole struct with %t; format the
			// exists field instead.
			t.Fatalf("expected %t for %s, got %t", want.exists, f, got)
		}
	}
}
// TestIsDir checks IsDir for directories, a file, a missing path, and an
// inaccessible directory.
func TestIsDir(t *testing.T) {
	wd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}

	var hidden string
	cleanup := setupInaccessibleDir(t, func(dir string) error {
		hidden = filepath.Join(dir, "dir")
		return os.Mkdir(hidden, 0777)
	})
	defer cleanup()

	// exists: expected IsDir result; err: whether an error is expected.
	cases := map[string]struct {
		exists bool
		err    bool
	}{
		wd:                            {true, false},
		filepath.Join(wd, "testdata"): {true, false},
		filepath.Join(wd, "main.go"):  {false, true},
		filepath.Join(wd, "this_file_does_not_exist.thing"): {false, true},
		hidden: {false, true},
	}

	if runtime.GOOS == "windows" {
		// Permissions work differently on Microsoft Windows, so the
		// inaccessible-directory case cannot be exercised there.
		delete(cases, hidden)
	}

	for path, want := range cases {
		got, err := IsDir(path)
		if err != nil && !want.err {
			t.Fatalf("expected no error, got %v", err)
		}
		if got != want.exists {
			t.Fatalf("expected %t for %s, got %t", want.exists, path, got)
		}
	}
}
// TestIsNonEmptyDir table-tests IsNonEmptyDir against populated dirs, a
// file, a missing path, an empty dir, and (on POSIX) an inaccessible dir.
func TestIsNonEmptyDir(t *testing.T) {
	wd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	h := test.NewHelper(t)
	defer h.Cleanup()
	h.TempDir("empty")
	// empty holds the expected return value of IsNonEmptyDir; err says
	// whether an error is expected alongside it.
	testCases := []struct {
		path  string
		empty bool
		err   bool
	}{
		{wd, true, false},
		{"testdata", true, false},
		{filepath.Join(wd, "fs.go"), false, true},
		{filepath.Join(wd, "this_file_does_not_exist.thing"), false, false},
		{h.Path("empty"), false, false},
	}
	// This test case doesn't work on Microsoft Windows because of the
	// differences in how file permissions are implemented.
	if runtime.GOOS != "windows" {
		var inaccessibleDir string
		cleanup := setupInaccessibleDir(t, func(dir string) error {
			inaccessibleDir = filepath.Join(dir, "empty")
			return os.Mkdir(inaccessibleDir, 0777)
		})
		defer cleanup()
		testCases = append(testCases, struct {
			path  string
			empty bool
			err   bool
		}{inaccessibleDir, false, true})
	}
	for _, want := range testCases {
		got, err := IsNonEmptyDir(want.path)
		if want.err && err == nil {
			if got {
				t.Fatalf("wanted false with error for %v, but got true", want.path)
			}
			t.Fatalf("wanted an error for %v, but it was nil", want.path)
		}
		if got != want.empty {
			t.Fatalf("wanted %t for %v, but got %t", want.empty, want.path, got)
		}
	}
}
// TestIsSymlink checks IsSymlink against a plain dir and file, symlinks
// to each, and (on POSIX) an inaccessible file and symlink.
func TestIsSymlink(t *testing.T) {
	dir, err := ioutil.TempDir("", "dep")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	dirPath := filepath.Join(dir, "directory")
	if err = os.MkdirAll(dirPath, 0777); err != nil {
		t.Fatal(err)
	}
	filePath := filepath.Join(dir, "file")
	f, err := os.Create(filePath)
	if err != nil {
		t.Fatal(err)
	}
	f.Close()
	dirSymlink := filepath.Join(dir, "dirSymlink")
	fileSymlink := filepath.Join(dir, "fileSymlink")
	if err = os.Symlink(dirPath, dirSymlink); err != nil {
		t.Fatal(err)
	}
	if err = os.Symlink(filePath, fileSymlink); err != nil {
		t.Fatal(err)
	}
	var (
		inaccessibleFile    string
		inaccessibleSymlink string
	)
	cleanup := setupInaccessibleDir(t, func(dir string) error {
		inaccessibleFile = filepath.Join(dir, "file")
		if fh, err := os.Create(inaccessibleFile); err != nil {
			return err
		} else if err = fh.Close(); err != nil {
			return err
		}
		inaccessibleSymlink = filepath.Join(dir, "symlink")
		return os.Symlink(inaccessibleFile, inaccessibleSymlink)
	})
	defer cleanup()
	// expected: IsSymlink's return value; err: whether an error is expected.
	tests := map[string]struct{ expected, err bool }{
		dirPath:             {false, false},
		filePath:            {false, false},
		dirSymlink:          {true, false},
		fileSymlink:         {true, false},
		inaccessibleFile:    {false, true},
		inaccessibleSymlink: {false, true},
	}
	if runtime.GOOS == "windows" {
		// XXX: setting permissions works differently in Windows. Skipping
		// these cases until a compatible implementation is provided.
		delete(tests, inaccessibleFile)
		delete(tests, inaccessibleSymlink)
	}
	for path, want := range tests {
		got, err := IsSymlink(path)
		if err != nil {
			if !want.err {
				t.Errorf("expected no error, got %v", err)
			}
		}
		if got != want.expected {
			t.Errorf("expected %t for %s, got %t", want.expected, path, got)
		}
	}
}
|
test
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/test/writer.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package test
import (
"strings"
"testing"
"unicode"
)
// Writer adapts a testing.TB to the io.Writer interface.
type Writer struct {
	testing.TB
}

// Write logs each non-blank line of b (with trailing whitespace trimmed)
// through t.Log, and always reports the full input as consumed with a
// nil error.
func (t Writer) Write(b []byte) (n int, err error) {
	if len(b) == 0 {
		return 0, nil
	}
	for _, line := range strings.Split(string(b), "\n") {
		if trimmed := strings.TrimRightFunc(line, unicode.IsSpace); trimmed != "" {
			t.Log(trimmed)
		}
	}
	return len(b), nil
}
|
test
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/test/test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//lint:file-ignore U1000 unused fns we might want to use later.
package test
import (
"bytes"
"flag"
"fmt"
"go/format"
"io"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"regexp"
"runtime"
"strings"
"sync"
"testing"
"github.com/pkg/errors"
)
var (
// ExeSuffix is the suffix of executable files; ".exe" on Windows.
ExeSuffix string
mu sync.Mutex
// PrintLogs controls logging of test commands.
PrintLogs = flag.Bool("logs", false, "log stdin/stdout of test commands")
// UpdateGolden controls updating test fixtures.
UpdateGolden = flag.Bool("update", false, "update golden files")
)
const (
manifestName = "Gopkg.toml"
lockName = "Gopkg.lock"
)
func init() {
switch runtime.GOOS {
case "windows":
ExeSuffix = ".exe"
}
}
// Helper with utilities for testing.
type Helper struct {
t *testing.T
temps []string
wd string
origWd string
env []string
tempdir string
ran bool
inParallel bool
stdout, stderr bytes.Buffer
}
// NewHelper initializes a new helper for testing.
func NewHelper(t *testing.T) *Helper {
wd, err := os.Getwd()
if err != nil {
panic(err)
}
return &Helper{t: t, origWd: wd}
}
// Must gives a fatal error if err is not nil.
func (h *Helper) Must(err error) {
	if err == nil {
		return
	}
	h.t.Fatalf("%+v", err)
}
// check gives a test non-fatal error if err is not nil.
func (h *Helper) check(err error) {
	if err == nil {
		return
	}
	h.t.Errorf("%+v", err)
}
// Parallel runs the test in parallel by calling t.Parallel.
func (h *Helper) Parallel() {
if h.ran {
h.t.Fatalf("%+v", errors.New("internal testsuite error: call to parallel after run"))
}
if h.wd != "" {
h.t.Fatalf("%+v", errors.New("internal testsuite error: call to parallel after cd"))
}
for _, e := range h.env {
if strings.HasPrefix(e, "GOROOT=") || strings.HasPrefix(e, "GOPATH=") || strings.HasPrefix(e, "GOBIN=") {
val := e[strings.Index(e, "=")+1:]
if strings.HasPrefix(val, "testdata") || strings.HasPrefix(val, "./testdata") {
h.t.Fatalf("%+v", errors.Errorf("internal testsuite error: call to parallel with testdata in environment (%s)", e))
}
}
}
h.inParallel = true
h.t.Parallel()
}
// pwd returns the current directory.
func (h *Helper) pwd() string {
wd, err := os.Getwd()
if err != nil {
h.t.Fatalf("%+v", errors.Wrap(err, "could not get working directory"))
}
return wd
}
// Cd changes the current directory to the named directory. Note that
// using this means that the test must not be run in parallel with any
// other tests.
func (h *Helper) Cd(dir string) {
if h.inParallel {
h.t.Fatalf("%+v", errors.New("internal testsuite error: changing directory when running in parallel"))
}
if h.wd == "" {
h.wd = h.pwd()
}
abs, err := filepath.Abs(dir)
if err == nil {
h.Setenv("PWD", abs)
}
err = os.Chdir(dir)
h.Must(errors.Wrapf(err, "Unable to cd to %s", dir))
}
// Setenv sets an environment variable to use when running the test go
// command.
func (h *Helper) Setenv(name, val string) {
if h.inParallel && (name == "GOROOT" || name == "GOPATH" || name == "GOBIN") && (strings.HasPrefix(val, "testdata") || strings.HasPrefix(val, "./testdata")) {
h.t.Fatalf("%+v", errors.Errorf("internal testsuite error: call to setenv with testdata (%s=%s) after parallel", name, val))
}
h.unsetenv(name)
h.env = append(h.env, name+"="+val)
}
// unsetenv removes an environment variable.
func (h *Helper) unsetenv(name string) {
if h.env == nil {
h.env = append([]string(nil), os.Environ()...)
}
for i, v := range h.env {
if strings.HasPrefix(v, name+"=") {
h.env = append(h.env[:i], h.env[i+1:]...)
break
}
}
}
// DoRun runs the testdep binary with the given arguments, recording
// stdout and stderr, and returns the process exit status wrapped with
// the captured stderr.
func (h *Helper) DoRun(args []string) error {
	if h.inParallel {
		// Parallel tests must not touch shared testdata fixtures.
		for _, arg := range args {
			if strings.HasPrefix(arg, "testdata") || strings.HasPrefix(arg, "./testdata") {
				h.t.Fatalf("%+v", errors.New("internal testsuite error: parallel run using testdata"))
			}
		}
	}
	if *PrintLogs {
		h.t.Logf("running testdep %v", args)
	}
	// Resolve the binary relative to the saved working directory, if any.
	var prog string
	if h.wd == "" {
		prog = "./testdep" + ExeSuffix
	} else {
		prog = filepath.Join(h.wd, "testdep"+ExeSuffix)
	}
	// Force verbose output for every subcommand except "check".
	newargs := args
	if args[0] != "check" {
		newargs = append([]string{args[0], "-v"}, args[1:]...)
	}
	cmd := exec.Command(prog, newargs...)
	h.stdout.Reset()
	h.stderr.Reset()
	cmd.Stdout = &h.stdout
	cmd.Stderr = &h.stderr
	cmd.Env = h.env
	status := cmd.Run()
	if *PrintLogs {
		if h.stdout.Len() > 0 {
			h.t.Log("standard output:")
			h.t.Log(h.stdout.String())
		}
		if h.stderr.Len() > 0 {
			h.t.Log("standard error:")
			h.t.Log(h.stderr.String())
		}
	}
	// Mark that a command ran so getStdout/getStderr/grep helpers are legal.
	h.ran = true
	return errors.Wrapf(status, "Error running %s\n%s", strings.Join(newargs, " "), h.stderr.String())
}
// Run runs the test go command, and expects it to succeed.
func (h *Helper) Run(args ...string) {
if runtime.GOOS == "windows" {
mu.Lock()
defer mu.Unlock()
}
if status := h.DoRun(args); status != nil {
h.t.Logf("go %v failed unexpectedly: %v", args, status)
h.t.FailNow()
}
}
// runFail runs the test go command, and expects it to fail.
func (h *Helper) runFail(args ...string) {
if status := h.DoRun(args); status == nil {
h.t.Fatalf("%+v", errors.New("testgo succeeded unexpectedly"))
} else {
h.t.Log("testgo failed as expected:", status)
}
}
// RunGo runs a go command, and expects it to succeed.
func (h *Helper) RunGo(args ...string) {
cmd := exec.Command("go", args...)
h.stdout.Reset()
h.stderr.Reset()
cmd.Stdout = &h.stdout
cmd.Stderr = &h.stderr
cmd.Dir = h.wd
cmd.Env = h.env
status := cmd.Run()
if h.stdout.Len() > 0 {
h.t.Log("go standard output:")
h.t.Log(h.stdout.String())
}
if h.stderr.Len() > 0 {
h.t.Log("go standard error:")
h.t.Log(h.stderr.String())
}
if status != nil {
h.t.Logf("go %v failed unexpectedly: %v", args, status)
h.t.FailNow()
}
}
// NeedsExternalNetwork makes sure the tests needing external network will not
// be run when executing tests in short mode.
func NeedsExternalNetwork(t *testing.T) {
if testing.Short() {
t.Skip("skipping test: no external network in -short mode")
}
}
// NeedsGit will make sure the tests that require git will be skipped if the
// git binary is not available.
func NeedsGit(t *testing.T) {
if _, err := exec.LookPath("git"); err != nil {
t.Skip("skipping because git binary not found")
}
}
// RunGit runs a git command, and expects it to succeed.
func (h *Helper) RunGit(dir string, args ...string) {
cmd := exec.Command("git", args...)
h.stdout.Reset()
h.stderr.Reset()
cmd.Stdout = &h.stdout
cmd.Stderr = &h.stderr
cmd.Dir = dir
cmd.Env = h.env
status := cmd.Run()
if *PrintLogs {
if h.stdout.Len() > 0 {
h.t.Logf("git %v standard output:", args)
h.t.Log(h.stdout.String())
}
if h.stderr.Len() > 0 {
h.t.Logf("git %v standard error:", args)
h.t.Log(h.stderr.String())
}
}
if status != nil {
h.t.Logf("git %v failed unexpectedly: %v", args, status)
h.t.FailNow()
}
}
// getStdout returns standard output of the testgo run as a string.
// Calling it before DoRun has executed a command is a fatal error.
func (h *Helper) getStdout() string {
	if !h.ran {
		h.t.Fatalf("%+v", errors.New("internal testsuite error: stdout called before run"))
	}
	return h.stdout.String()
}
// getStderr returns standard error of the testgo run as a string.
// Calling it before DoRun has executed a command is a fatal error.
func (h *Helper) getStderr() string {
	if !h.ran {
		// Fixed: the message previously said "stdout", copy-pasted from getStdout.
		h.t.Fatalf("%+v", errors.New("internal testsuite error: stderr called before run"))
	}
	return h.stderr.String()
}
// doGrepMatch reports whether any single line of b matches the regular
// expression match, mirroring how grep matches line by line.
func (h *Helper) doGrepMatch(match string, b *bytes.Buffer) bool {
	if !h.ran {
		h.t.Fatalf("%+v", errors.New("internal testsuite error: grep called before run"))
	}
	re := regexp.MustCompile(match)
	for _, line := range bytes.Split(b.Bytes(), []byte("\n")) {
		if re.Match(line) {
			return true
		}
	}
	return false
}
// doGrep looks for a regular expression in a buffer and fails if it
// is not found. The name argument is the name of the output we are
// searching, "output" or "error". The msg argument is logged on
// failure.
func (h *Helper) doGrep(match string, b *bytes.Buffer, name, msg string) {
if !h.doGrepMatch(match, b) {
h.t.Log(msg)
h.t.Logf("pattern %v not found in standard %s", match, name)
h.t.FailNow()
}
}
// grepStdout looks for a regular expression in the test run's
// standard output and fails, logging msg, if it is not found.
func (h *Helper) grepStdout(match, msg string) {
h.doGrep(match, &h.stdout, "output", msg)
}
// grepStderr looks for a regular expression in the test run's
// standard error and fails, logging msg, if it is not found.
func (h *Helper) grepStderr(match, msg string) {
h.doGrep(match, &h.stderr, "error", msg)
}
// grepBoth looks for a regular expression in the test run's standard
// output or stand error and fails, logging msg, if it is not found.
func (h *Helper) grepBoth(match, msg string) {
if !h.doGrepMatch(match, &h.stdout) && !h.doGrepMatch(match, &h.stderr) {
h.t.Log(msg)
h.t.Logf("pattern %v not found in standard output or standard error", match)
h.t.FailNow()
}
}
// doGrepNot looks for a regular expression in a buffer and fails if
// it is found. The name and msg arguments are as for doGrep.
func (h *Helper) doGrepNot(match string, b *bytes.Buffer, name, msg string) {
if h.doGrepMatch(match, b) {
h.t.Log(msg)
h.t.Logf("pattern %v found unexpectedly in standard %s", match, name)
h.t.FailNow()
}
}
// grepStdoutNot looks for a regular expression in the test run's
// standard output and fails, logging msg, if it is found.
func (h *Helper) grepStdoutNot(match, msg string) {
h.doGrepNot(match, &h.stdout, "output", msg)
}
// grepStderrNot looks for a regular expression in the test run's
// standard error and fails, logging msg, if it is found.
func (h *Helper) grepStderrNot(match, msg string) {
h.doGrepNot(match, &h.stderr, "error", msg)
}
// grepBothNot looks for a regular expression in the test run's
// standard output or stand error and fails, logging msg, if it is
// found.
func (h *Helper) grepBothNot(match, msg string) {
if h.doGrepMatch(match, &h.stdout) || h.doGrepMatch(match, &h.stderr) {
h.t.Log(msg)
h.t.Fatalf("%+v", errors.Errorf("pattern %v found unexpectedly in standard output or standard error", match))
}
}
// doGrepCount counts the number of times a regexp is seen in a buffer.
func (h *Helper) doGrepCount(match string, b *bytes.Buffer) int {
if !h.ran {
h.t.Fatalf("%+v", errors.New("internal testsuite error: doGrepCount called before run"))
}
re := regexp.MustCompile(match)
c := 0
for _, ln := range bytes.Split(b.Bytes(), []byte{'\n'}) {
if re.Match(ln) {
c++
}
}
return c
}
// grepCountBoth returns the number of times a regexp is seen in both
// standard output and standard error.
func (h *Helper) grepCountBoth(match string) int {
return h.doGrepCount(match, &h.stdout) + h.doGrepCount(match, &h.stderr)
}
// creatingTemp records that the test plans to create a temporary file
// or directory. If the file or directory exists already, it will be
// removed. When the test completes, the file or directory will be
// removed if it exists.
func (h *Helper) creatingTemp(path string) {
if filepath.IsAbs(path) && !strings.HasPrefix(path, h.tempdir) {
h.t.Fatalf("%+v", errors.Errorf("internal testsuite error: creatingTemp(%q) with absolute path not in temporary directory", path))
}
// If we have changed the working directory, make sure we have
// an absolute path, because we are going to change directory
// back before we remove the temporary.
if h.wd != "" && !filepath.IsAbs(path) {
path = filepath.Join(h.pwd(), path)
}
h.Must(os.RemoveAll(path))
h.temps = append(h.temps, path)
}
// makeTempdir makes a temporary directory for a run of testgo. If
// the temporary directory was already created, this does nothing.
func (h *Helper) makeTempdir() {
if h.tempdir == "" {
var err error
h.tempdir, err = ioutil.TempDir("", "gotest")
h.Must(err)
}
}
// TempFile adds a temporary file for a run of testgo.
func (h *Helper) TempFile(path, contents string) {
h.makeTempdir()
h.Must(os.MkdirAll(filepath.Join(h.tempdir, filepath.Dir(path)), 0755))
bytes := []byte(contents)
if strings.HasSuffix(path, ".go") {
formatted, err := format.Source(bytes)
if err == nil {
bytes = formatted
}
}
h.Must(ioutil.WriteFile(filepath.Join(h.tempdir, path), bytes, 0644))
}
// WriteTestFile writes a file to the testdata directory from memory. src is
// relative to ./testdata.
func (h *Helper) WriteTestFile(src string, content string) error {
err := ioutil.WriteFile(filepath.Join(h.origWd, "testdata", src), []byte(content), 0666)
return err
}
// GetFile reads a file into memory
func (h *Helper) GetFile(path string) io.ReadCloser {
content, err := os.Open(path)
if err != nil {
h.t.Fatalf("%+v", errors.Wrapf(err, "Unable to open file: %s", path))
}
return content
}
// GetTestFile reads a file from the testdata directory into memory. src is
// relative to ./testdata.
func (h *Helper) GetTestFile(src string) io.ReadCloser {
fullPath := filepath.Join(h.origWd, "testdata", src)
return h.GetFile(fullPath)
}
// GetTestFileString reads a file from the testdata directory into memory. src is
// relative to ./testdata.
func (h *Helper) GetTestFileString(src string) string {
srcf := h.GetTestFile(src)
defer srcf.Close()
content, err := ioutil.ReadAll(srcf)
if err != nil {
h.t.Fatalf("%+v", err)
}
return string(content)
}
// TempCopy copies a temporary file from testdata into the temporary directory.
// dest is relative to the temp directory location, and src is relative to
// ./testdata. It panics if the file cannot be created or copied in full.
func (h *Helper) TempCopy(dest, src string) {
	in := h.GetTestFile(src)
	defer in.Close()
	h.TempDir(filepath.Dir(dest))
	out, err := os.Create(filepath.Join(h.tempdir, dest))
	if err != nil {
		panic(err)
	}
	defer out.Close()
	// Fixed: a failed or short copy was previously ignored, which could
	// silently leave a truncated fixture behind.
	if _, err := io.Copy(out, in); err != nil {
		panic(err)
	}
}
// TempDir adds a temporary directory for a run of testgo.
func (h *Helper) TempDir(path string) {
h.makeTempdir()
fullPath := filepath.Join(h.tempdir, path)
if err := os.MkdirAll(fullPath, 0755); err != nil && !os.IsExist(err) {
h.t.Fatalf("%+v", errors.Errorf("Unable to create temp directory: %s", fullPath))
}
}
// Path returns the absolute pathname to file with the temporary
// directory.
func (h *Helper) Path(name string) string {
if h.tempdir == "" {
h.t.Fatalf("%+v", errors.Errorf("internal testsuite error: path(%q) with no tempdir", name))
}
var joined string
if name == "." {
joined = h.tempdir
} else {
joined = filepath.Join(h.tempdir, name)
}
// Ensure it's the absolute, symlink-less path we're returning
abs, err := filepath.EvalSymlinks(joined)
if err != nil {
h.t.Fatalf("%+v", errors.Wrapf(err, "internal testsuite error: could not get absolute path for dir(%q)", joined))
}
return abs
}
// MustExist fails if path does not exist.
func (h *Helper) MustExist(path string) {
if err := h.ShouldExist(path); err != nil {
h.t.Fatalf("%+v", err)
}
}
// ShouldExist returns an error if path does not exist.
func (h *Helper) ShouldExist(path string) error {
if !h.Exist(path) {
return errors.Errorf("%s does not exist but should", path)
}
return nil
}
// Exist returns whether or not a path exists
func (h *Helper) Exist(path string) bool {
if _, err := os.Stat(path); err != nil {
if os.IsNotExist(err) {
return false
}
h.t.Fatalf("%+v", errors.Wrapf(err, "Error checking if path exists: %s", path))
}
return true
}
// MustNotExist fails if path exists.
func (h *Helper) MustNotExist(path string) {
if err := h.ShouldNotExist(path); err != nil {
h.t.Fatalf("%+v", err)
}
}
// ShouldNotExist returns an error if path exists.
func (h *Helper) ShouldNotExist(path string) error {
if h.Exist(path) {
return errors.Errorf("%s exists but should not", path)
}
return nil
}
// Cleanup restores the original working directory and removes every
// temporary file and directory created during the test run.
func (h *Helper) Cleanup() {
	if h.wd != "" {
		if err := os.Chdir(h.wd); err != nil {
			// We are unlikely to be able to continue.
			fmt.Fprintln(os.Stderr, "could not restore working directory, crashing:", err)
			os.Exit(2)
		}
	}
	// NOTE(mattn): It seems that sometimes git.exe is not dead
	// when cleanup() is called. But we do not know any way to wait for it.
	if runtime.GOOS == "windows" {
		mu.Lock()
		exec.Command(`taskkill`, `/F`, `/IM`, `git.exe`).Run()
		mu.Unlock()
	}
	// Remove individually registered temps first, then the root tempdir.
	for _, path := range h.temps {
		h.check(os.RemoveAll(path))
	}
	if h.tempdir != "" {
		h.check(os.RemoveAll(h.tempdir))
	}
}
// ReadManifest returns the manifest in the current directory.
func (h *Helper) ReadManifest() string {
m := filepath.Join(h.pwd(), manifestName)
h.MustExist(m)
f, err := ioutil.ReadFile(m)
h.Must(err)
return string(f)
}
// ReadLock returns the lock in the current directory.
func (h *Helper) ReadLock() string {
l := filepath.Join(h.pwd(), lockName)
h.MustExist(l)
f, err := ioutil.ReadFile(l)
h.Must(err)
return string(f)
}
// GetCommit treats repo as a path to a git repository and returns the current
// revision.
func (h *Helper) GetCommit(repo string) string {
repoPath := h.Path("pkg/dep/sources/https---" + strings.Replace(repo, "/", "-", -1))
cmd := exec.Command("git", "rev-parse", "HEAD")
cmd.Dir = repoPath
out, err := cmd.CombinedOutput()
if err != nil {
h.t.Fatalf("%+v", errors.Wrapf(err, "git commit failed: out -> %s", string(out)))
}
return strings.TrimSpace(string(out))
}
|
integration
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/test/integration/testproj.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package integration
import (
"bytes"
"io"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"runtime"
"sort"
"strings"
"testing"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
const (
projectRoot = "src/github.com/golang/notexist"
)
// RunFunc defines the function signature for an integration test command to execute.
type RunFunc func(prog string, newargs []string, outW, errW io.Writer, dir string, env []string) error
// TestProject manages the "virtual" test project directory structure
// and content
type TestProject struct {
t *testing.T
preImports []string
tempdir string
env []string
origWd string
stdout bytes.Buffer
stderr bytes.Buffer
run RunFunc
}
// NewTestProject initializes a new test's project directory.
func NewTestProject(t *testing.T, initPath, wd string, run RunFunc) *TestProject {
// Cleaning up the GIT_DIR variable is useful when running tests under git
// rebase. In any case, since we're operating with temporary clones,
// no pre-existing value could be useful here.
// We do it globally because the internal runs don't actually use the
// TestProject's environment.
os.Unsetenv("GIT_DIR")
new := &TestProject{
t: t,
origWd: wd,
env: os.Environ(),
run: run,
}
new.makeRootTempDir()
new.TempDir(projectRoot, "vendor")
new.CopyTree(initPath)
new.Setenv("GOPATH", new.tempdir)
return new
}
// Cleanup (remove) the test project's directory.
func (p *TestProject) Cleanup() {
os.RemoveAll(p.tempdir)
}
// Path to the test project directory.
func (p *TestProject) Path(args ...string) string {
return filepath.Join(p.tempdir, filepath.Join(args...))
}
// ProjPath builds an import path for the test project.
func (p *TestProject) ProjPath(args ...string) string {
localPath := append([]string{projectRoot}, args...)
return p.Path(localPath...)
}
// TempDir creates a temporary directory for the test project.
func (p *TestProject) TempDir(args ...string) {
fullPath := p.Path(args...)
if err := os.MkdirAll(fullPath, 0755); err != nil && !os.IsExist(err) {
p.t.Fatalf("%+v", errors.Errorf("Unable to create temp directory: %s", fullPath))
}
}
// TempProjDir builds the path to a package within the test project.
func (p *TestProject) TempProjDir(args ...string) {
localPath := append([]string{projectRoot}, args...)
p.TempDir(localPath...)
}
// VendorPath lists the contents of the test project's vendor directory.
func (p *TestProject) VendorPath(args ...string) string {
localPath := append([]string{projectRoot, "vendor"}, args...)
p.TempDir(localPath...)
return p.Path(localPath...)
}
// RunGo runs a go command, and expects it to succeed.
func (p *TestProject) RunGo(args ...string) {
cmd := exec.Command("go", args...)
p.stdout.Reset()
p.stderr.Reset()
cmd.Stdout = &p.stdout
cmd.Stderr = &p.stderr
cmd.Dir = p.tempdir
cmd.Env = p.env
status := cmd.Run()
if p.stdout.Len() > 0 {
p.t.Log("go standard output:")
p.t.Log(p.stdout.String())
}
if p.stderr.Len() > 0 {
p.t.Log("go standard error:")
p.t.Log(p.stderr.String())
}
if status != nil {
p.t.Logf("go %v failed unexpectedly: %v", args, status)
p.t.FailNow()
}
}
// RunGit runs a git command, and expects it to succeed.
func (p *TestProject) RunGit(dir string, args ...string) {
cmd := exec.Command("git", args...)
p.stdout.Reset()
p.stderr.Reset()
cmd.Stdout = &p.stdout
cmd.Stderr = &p.stderr
cmd.Dir = dir
cmd.Env = p.env
status := cmd.Run()
if *test.PrintLogs {
if p.stdout.Len() > 0 {
p.t.Logf("git %v standard output:", args)
p.t.Log(p.stdout.String())
}
if p.stderr.Len() > 0 {
p.t.Logf("git %v standard error:", args)
p.t.Log(p.stderr.String())
}
}
if status != nil {
p.t.Logf("git %v failed unexpectedly: %v", args, status)
p.t.FailNow()
}
}
// GetStdout gets the Stdout output from test run.
func (p *TestProject) GetStdout() string {
return p.stdout.String()
}
// GetStderr gets the Stderr output from test run.
func (p *TestProject) GetStderr() string {
return p.stderr.String()
}
// GetVendorGit populates the initial vendor directory for a test project.
func (p *TestProject) GetVendorGit(ip string) {
parse := strings.Split(ip, "/")
gitDir := strings.Join(parse[:len(parse)-1], string(filepath.Separator))
p.TempProjDir("vendor", gitDir)
p.RunGit(p.ProjPath("vendor", gitDir), "clone", "http://"+ip)
}
// DoRun executes the integration test command against the test project.
func (p *TestProject) DoRun(args []string) error {
if *test.PrintLogs {
p.t.Logf("running testdep %v", args)
}
prog := filepath.Join(p.origWd, "testdep"+test.ExeSuffix)
newargs := args
if args[0] != "check" {
newargs = append([]string{args[0], "-v"}, args[1:]...)
}
p.stdout.Reset()
p.stderr.Reset()
status := p.run(prog, newargs, &p.stdout, &p.stderr, p.ProjPath(""), p.env)
if *test.PrintLogs {
if p.stdout.Len() > 0 {
p.t.Logf("\nstandard output:%s", p.stdout.String())
}
if p.stderr.Len() > 0 {
p.t.Logf("standard error:\n%s", p.stderr.String())
}
}
return status
}
// CopyTree recursively copies a source directory into the test project's directory.
func (p *TestProject) CopyTree(src string) {
filepath.Walk(src,
func(path string, info os.FileInfo, err error) error {
if path != src {
localpath := path[len(src)+1:]
if info.IsDir() {
p.TempDir(projectRoot, localpath)
} else {
destpath := filepath.Join(p.ProjPath(), localpath)
copyFile(destpath, path)
}
}
return nil
})
}
func copyFile(dest, src string) {
in, err := os.Open(src)
if err != nil {
panic(err)
}
defer in.Close()
out, err := os.Create(dest)
if err != nil {
panic(err)
}
defer out.Close()
io.Copy(out, in)
}
// GetVendorPaths collects final vendor paths at a depth of three levels
// (e.g. "github.com/owner/repo"), returned sorted and slash-separated.
func (p *TestProject) GetVendorPaths() []string {
	vendorPath := p.ProjPath("vendor")
	result := make([]string, 0)
	filepath.Walk(
		vendorPath,
		func(path string, info os.FileInfo, err error) error {
			if err != nil {
				// Previously ignored; a walk error leaves info nil and the
				// IsDir call below would panic. Propagate the error instead.
				return err
			}
			if len(path) > len(vendorPath) && info.IsDir() {
				parse := strings.Split(path[len(vendorPath)+1:], string(filepath.Separator))
				if len(parse) == 3 {
					result = append(result, strings.Join(parse, "/"))
					// Depth reached; don't descend into package contents.
					return filepath.SkipDir
				}
			}
			return nil
		},
	)
	sort.Strings(result)
	return result
}
// GetImportPaths collects final import paths under GOPATH/src at a depth of
// three levels, returned sorted and slash-separated. (The previous comment
// incorrectly said "vendor paths" — it walks the src tree.)
func (p *TestProject) GetImportPaths() []string {
	importPath := p.Path("src")
	result := make([]string, 0)
	filepath.Walk(
		importPath,
		func(path string, info os.FileInfo, err error) error {
			if err != nil {
				// Previously ignored; a walk error leaves info nil and the
				// IsDir call below would panic. Propagate the error instead.
				return err
			}
			if len(path) > len(importPath) && info.IsDir() {
				parse := strings.Split(path[len(importPath)+1:], string(filepath.Separator))
				if len(parse) == 3 {
					result = append(result, strings.Join(parse, "/"))
					// Depth reached; don't descend into package contents.
					return filepath.SkipDir
				}
			}
			return nil
		},
	)
	sort.Strings(result)
	return result
}
// RecordImportPaths takes a snapshot of the import paths before test is run.
// The snapshot is later consumed by CompareImportPaths.
func (p *TestProject) RecordImportPaths() {
	p.preImports = p.GetImportPaths()
}
// CompareImportPaths compares import paths before and after test commands.
// The "before" snapshot must have been captured with RecordImportPaths.
func (p *TestProject) CompareImportPaths() {
	wantImportPaths := p.preImports
	gotImportPaths := p.GetImportPaths()
	if len(gotImportPaths) != len(wantImportPaths) {
		p.t.Fatalf("Import path count changed during command: pre %d post %d", len(wantImportPaths), len(gotImportPaths))
	}
	for ind := range gotImportPaths {
		if gotImportPaths[ind] != wantImportPaths[ind] {
			// Fix: the arguments were previously swapped relative to the
			// "pre"/"post" labels; pre is the recorded (want) snapshot.
			p.t.Errorf("Change in import paths during: pre %s post %s", wantImportPaths, gotImportPaths)
		}
	}
}
// makeRootTempDir lazily creates the temporary directory for a run of
// testgo; once the directory exists, subsequent calls are no-ops.
func (p *TestProject) makeRootTempDir() {
	if p.tempdir != "" {
		return
	}
	var err error
	p.tempdir, err = ioutil.TempDir("", "gotest")
	p.Must(err)
	// On macOS the temp dir sits behind a symlink; resolve it so later
	// path comparisons behave consistently.
	if runtime.GOOS == "darwin" {
		p.tempdir, err = filepath.EvalSymlinks(p.tempdir)
		p.Must(err)
	}
}
// Setenv sets an environment variable to use when running the test go
// command. The variable is appended to p.env as a "name=value" entry.
func (p *TestProject) Setenv(name, val string) {
	p.env = append(p.env, name+"="+val)
}
// Must gives a fatal error if err is not nil.
func (p *TestProject) Must(err error) {
	if err != nil {
		// %+v includes extended detail (e.g. stack traces) for errors
		// that support it.
		p.t.Fatalf("%+v", err)
	}
}
|
integration
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/test/integration/testcase.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package integration
import (
"encoding/json"
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"unicode"
"github.com/golang/dep/internal/test"
)
// TestCase manages a test case directory structure and content
type TestCase struct {
	t           *testing.T // test handle used for error/fatal reporting
	name        string     // name of the test case directory
	rootPath    string     // absolute root of the test case fixture
	initialPath string     // <root>/initial: project state before the test
	finalPath   string     // <root>/final: expected project state after the test
	// The exported fields below are unmarshaled from <root>/testcase.json.
	Commands            [][]string        `json:"commands"`
	ShouldFail          bool              `json:"should-fail"`    // whether the command is expected to fail
	ErrorExpected       string            `json:"error-expected"` // substring expected in the command's error output
	GopathInitial       map[string]string `json:"gopath-initial"`
	VendorInitial       map[string]string `json:"vendor-initial"`
	VendorFinal         []string          `json:"vendor-final"` // expected vendor paths after the test
	InitPath            string            `json:"init-path"`
	RequiredFeatureFlag string            `json:"feature"`
}
// NewTestCase creates a new TestCase by loading the fixture description
// from <dir>/<name>/testcase.json; any load failure is fatal to the test.
func NewTestCase(t *testing.T, dir, name string) *TestCase {
	root := filepath.FromSlash(filepath.Join(dir, name))
	tc := &TestCase{
		t:           t,
		name:        name,
		rootPath:    root,
		initialPath: filepath.Join(root, "initial"),
		finalPath:   filepath.Join(root, "final"),
	}
	raw, err := ioutil.ReadFile(filepath.Join(root, "testcase.json"))
	if err != nil {
		t.Fatal(err)
	}
	if err = json.Unmarshal(raw, tc); err != nil {
		t.Fatal(err)
	}
	// Flip ShouldFail on if it's not set, but there's an expected error.
	if tc.ErrorExpected != "" && !tc.ShouldFail {
		tc.ShouldFail = true
	}
	return tc
}
// InitialPath represents the initial set of files in a project.
// It returns the "<testcase root>/initial" directory computed in NewTestCase.
func (tc *TestCase) InitialPath() string {
	return tc.initialPath
}
// UpdateFile updates the golden file with the working result. If the
// working file no longer exists, any stale golden copy is removed.
func (tc *TestCase) UpdateFile(goldenPath, workingPath string) {
	exists, working, err := getFile(workingPath)
	if err != nil {
		tc.t.Fatalf("Error reading project file %s: %s", goldenPath, err)
	}
	golden := filepath.Join(tc.finalPath, goldenPath)
	if !exists {
		// No working copy: delete the golden file, tolerating its absence.
		if err := os.Remove(golden); err != nil && !os.IsNotExist(err) {
			tc.t.Fatal(err)
		}
		return
	}
	if err := tc.WriteFile(golden, working); err != nil {
		tc.t.Fatal(err)
	}
}
// CompareFile compares the golden file with the working result, reporting
// content mismatches as well as files that exist on only one side.
func (tc *TestCase) CompareFile(goldenPath, working string) {
	golden := filepath.Join(tc.finalPath, goldenPath)
	gotExists, got, err := getFile(working)
	if err != nil {
		tc.t.Fatalf("Error reading project file %q: %s", goldenPath, err)
	}
	wantExists, want, err := getFile(golden)
	if err != nil {
		tc.t.Fatalf("Error reading testcase file %q: %s", goldenPath, err)
	}
	switch {
	case wantExists && gotExists:
		if want != got {
			tc.t.Errorf("%s was not as expected\n(WNT):\n%s\n(GOT):\n%s", filepath.Base(goldenPath), want, got)
		}
	case gotExists:
		tc.t.Errorf("%q created where none was expected", goldenPath)
	case wantExists:
		tc.t.Errorf("%q not created where one was expected", goldenPath)
	}
}
// UpdateOutput updates the golden file for stdout with the working result.
// A testcase that never had a stdout.txt is left alone.
func (tc *TestCase) UpdateOutput(stdout string) {
	stdoutPath := filepath.Join(tc.rootPath, "stdout.txt")
	if _, err := os.Stat(stdoutPath); err != nil {
		if os.IsNotExist(err) {
			// Don't update the stdout.txt file if it doesn't exist.
			return
		}
		panic(err)
	}
	if err := tc.WriteFile(stdoutPath, stdout); err != nil {
		tc.t.Fatal(err)
	}
}
// CompareOutput compares expected and actual stdout output. Both sides are
// normalized (trailing per-line whitespace stripped) before comparison, and
// a missing stdout.txt means there is nothing to verify.
func (tc *TestCase) CompareOutput(stdout string) {
	raw, err := ioutil.ReadFile(filepath.Join(tc.rootPath, "stdout.txt"))
	if err != nil {
		if os.IsNotExist(err) {
			// Nothing to verify
			return
		}
		panic(err)
	}
	want := normalizeLines(string(raw))
	got := normalizeLines(stdout)
	if want != got {
		tc.t.Errorf("stdout was not as expected\n(WNT):\n%s\n(GOT):\n%s\n", want, got)
	}
}
// normalizeLines returns a version of s in which every line has had its
// trailing whitespace (anything unicode.IsSpace matches) stripped.
func normalizeLines(s string) string {
	parts := strings.Split(s, "\n")
	out := make([]string, len(parts))
	for i, line := range parts {
		out[i] = strings.TrimRightFunc(line, unicode.IsSpace)
	}
	return strings.Join(out, "\n")
}
// CompareError compares expected and actual stderr output. The expected
// string must appear exactly once in the actual output; zero or multiple
// matches, or an error on only one side, are reported.
func (tc *TestCase) CompareError(err error, stderr string) {
	want := tc.ErrorExpected
	wantExists := want != ""
	got := stderr
	gotExists := got != "" && err != nil
	switch {
	case wantExists && gotExists:
		switch n := strings.Count(got, want); {
		case n == 0:
			tc.t.Errorf("error did not contain expected string:\n\t(GOT): %s\n\t(WNT): %s", got, want)
		case n > 1:
			tc.t.Errorf("expected error %s matches %d times to actual error %s", want, n, got)
		}
	case gotExists:
		tc.t.Fatalf("error raised where none was expected: \n%v", stderr)
	case wantExists:
		tc.t.Error("error not raised where one was expected:", want)
	}
}
// CompareCmdFailure checks to see if the failure/success (in the sense of an
// exit code) was as expected by the test fixture.
func (tc *TestCase) CompareCmdFailure(gotFail bool) {
	if gotFail != tc.ShouldFail {
		if tc.ShouldFail {
			tc.t.Errorf("expected command to fail, but it did not")
		} else {
			tc.t.Errorf("expected command not to fail, but it did")
		}
	}
}
// CompareVendorPaths validates the vendor directory contents. When golden
// updating is enabled the observed paths become the new expectation instead.
func (tc *TestCase) CompareVendorPaths(gotVendorPaths []string) {
	if *test.UpdateGolden {
		tc.VendorFinal = gotVendorPaths
		return
	}
	want := tc.VendorFinal
	if len(gotVendorPaths) != len(want) {
		tc.t.Fatalf("Wrong number of vendor paths created: want %d got %d", len(want), len(gotVendorPaths))
	}
	for i, got := range gotVendorPaths {
		if got != want[i] {
			tc.t.Errorf("Mismatch in vendor paths created: want %s got %s", want, gotVendorPaths)
		}
	}
}
// WriteFile writes a file using the default file permissions.
// The 0666 mode is subject to the process umask.
func (tc *TestCase) WriteFile(src string, content string) error {
	return ioutil.WriteFile(src, []byte(content), 0666)
}
func getFile(path string) (bool, string, error) {
_, err := os.Stat(path)
if err != nil {
return false, "", nil
}
f, err := ioutil.ReadFile(path)
if err != nil {
return true, "", err
}
return true, string(f), nil
}
|
importers
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/importers.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package importers
import (
"log"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/glide"
"github.com/golang/dep/internal/importers/glock"
"github.com/golang/dep/internal/importers/godep"
"github.com/golang/dep/internal/importers/govend"
"github.com/golang/dep/internal/importers/govendor"
"github.com/golang/dep/internal/importers/gvt"
"github.com/golang/dep/internal/importers/vndr"
)
// Importer handles importing configuration from other dependency managers into
// the dep configuration format.
type Importer interface {
	// Name of the importer, e.g. "glide" or "godep".
	Name() string
	// Import reads the config found in the directory and converts it into a
	// dep manifest and lock for the given project root.
	Import(path string, pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error)
	// HasDepMetadata checks if a directory contains config that the importer can handle.
	HasDepMetadata(dir string) bool
}
// BuildAll returns a slice of all the importers, in a fixed order.
func BuildAll(logger *log.Logger, verbose bool, sm gps.SourceManager) []Importer {
	all := make([]Importer, 0, 7)
	all = append(all, glide.NewImporter(logger, verbose, sm))
	all = append(all, godep.NewImporter(logger, verbose, sm))
	all = append(all, vndr.NewImporter(logger, verbose, sm))
	all = append(all, govend.NewImporter(logger, verbose, sm))
	all = append(all, gvt.NewImporter(logger, verbose, sm))
	all = append(all, govendor.NewImporter(logger, verbose, sm))
	all = append(all, glock.NewImporter(logger, verbose, sm))
	return all
}
|
vndr
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/vndr/importer_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package vndr
import (
"bytes"
"fmt"
"log"
"path/filepath"
"reflect"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/importertest"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// TestVndrConfig_Convert exercises convert() against table-driven fixtures:
// a fully specified package, and the two validation warnings (missing import
// path, missing reference).
func TestVndrConfig_Convert(t *testing.T) {
	testCases := map[string]struct {
		packages []vndrPackage
		importertest.TestCase
	}{
		"package": {
			[]vndrPackage{{
				importPath: importertest.Project,
				reference:  importertest.V1Rev,
				repository: importertest.ProjectSrc,
			}},
			importertest.TestCase{
				WantSourceRepo: importertest.ProjectSrc,
				WantConstraint: importertest.V1Constraint,
				WantRevision:   importertest.V1Rev,
				WantVersion:    importertest.V1Tag,
			},
		},
		"missing importPath": {
			[]vndrPackage{{
				reference: importertest.V1Tag,
			}},
			importertest.TestCase{
				WantWarning: "Warning: Skipping project. Invalid vndr configuration, import path is required",
			},
		},
		"missing reference": {
			[]vndrPackage{{
				importPath: importertest.Project,
			}},
			importertest.TestCase{
				WantWarning: fmt.Sprintf(
					"Warning: Invalid vndr configuration, reference not found for import path %q",
					importertest.Project,
				),
			},
		},
	}
	for name, testCase := range testCases {
		// Shadow the range variables so each subtest closure captures its own.
		name := name
		testCase := testCase
		t.Run(name, func(t *testing.T) {
			err := testCase.Execute(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock) {
				g := NewImporter(logger, true, sm)
				g.packages = testCase.packages
				return g.convert(importertest.RootProject)
			})
			if err != nil {
				t.Fatalf("%#v", err)
			}
		})
	}
}
// TestVndrConfig_Import runs the full Import pipeline against the
// testdata/vendor.conf fixture, asserting the resulting manifest and lock
// and comparing the captured log output against testdata/golden.txt.
func TestVndrConfig_Import(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()
	ctx := importertest.NewTestContext(h)
	sm, err := ctx.SourceManager()
	h.Must(err)
	defer sm.Release()
	h.TempDir(filepath.Join("src", importertest.RootProject))
	h.TempCopy(vndrFile(importertest.RootProject), "vendor.conf")
	projectRoot := h.Path(importertest.RootProject)
	// Capture the importer's log output so it can be diffed against golden.txt.
	logOutput := bytes.NewBuffer(nil)
	ctx.Err = log.New(logOutput, "", 0)
	v := NewImporter(ctx.Err, false, sm)
	if !v.HasDepMetadata(projectRoot) {
		t.Fatal("Expected the importer to detect vndr configuration file")
	}
	m, l, err := v.Import(projectRoot, importertest.RootProject)
	h.Must(err)
	wantM := dep.NewManifest()
	c1, _ := gps.NewSemverConstraint("^0.8.1")
	wantM.Constraints["github.com/sdboyer/deptest"] = gps.ProjectProperties{
		Source:     "https://github.com/sdboyer/deptest.git",
		Constraint: c1,
	}
	c2, _ := gps.NewSemverConstraint("^2.0.0")
	wantM.Constraints["github.com/sdboyer/deptestdos"] = gps.ProjectProperties{
		Constraint: c2,
	}
	if !reflect.DeepEqual(wantM, m) {
		t.Errorf("unexpected manifest\nhave=%+v\nwant=%+v", m, wantM)
	}
	wantL := &dep.Lock{
		P: []gps.LockedProject{
			gps.NewLockedProject(
				gps.ProjectIdentifier{
					ProjectRoot: "github.com/sdboyer/deptest",
					Source:      "https://github.com/sdboyer/deptest.git",
				},
				gps.NewVersion("v0.8.1").Pair("3f4c3bea144e112a69bbe5d8d01c1b09a544253f"),
				nil,
			),
			gps.NewLockedProject(
				gps.ProjectIdentifier{
					ProjectRoot: "github.com/sdboyer/deptestdos",
				},
				gps.NewVersion("v2.0.0").Pair("5c607206be5decd28e6263ffffdcee067266015e"),
				nil,
			),
		},
	}
	if !reflect.DeepEqual(wantL, l) {
		t.Errorf("unexpected lock\nhave=%+v\nwant=%+v", l, wantL)
	}
	// Compare logged output against the golden file, regenerating it when
	// the -update flag is set.
	goldenFile := "golden.txt"
	got := logOutput.String()
	want := h.GetTestFileString(goldenFile)
	if want != got {
		if *test.UpdateGolden {
			if err := h.WriteTestFile(goldenFile, got); err != nil {
				t.Fatalf("%+v", errors.Wrapf(err, "Unable to write updated golden file %s", goldenFile))
			}
		} else {
			t.Fatalf("expected %s, got %s", want, got)
		}
	}
}
// TestParseVndrLine covers the vendor.conf line parser: well-formed lines
// (with and without a repo and trailing comment), blank/comment-only lines
// (nil package, nil error), and the malformed single-field case.
func TestParseVndrLine(t *testing.T) {
	// testcase builds a subtest comparing both the parsed package and the
	// returned error against expectations, tolerating nil on either side.
	testcase := func(in string, wantPkg *vndrPackage, wantErr error) func(*testing.T) {
		return func(t *testing.T) {
			havePkg, haveErr := parseVndrLine(in)
			switch {
			case wantPkg == nil:
				if havePkg != nil {
					t.Errorf("expected nil package, have %v", havePkg)
				}
			case havePkg == nil:
				if wantPkg != nil {
					t.Errorf("expected non-nil package %v, have nil", wantPkg)
				}
			default:
				if !reflect.DeepEqual(havePkg, wantPkg) {
					t.Errorf("unexpected package, have=%v, want=%v", *havePkg, *wantPkg)
				}
			}
			switch {
			case wantErr == nil:
				if haveErr != nil {
					t.Errorf("expected nil err, have %v", haveErr)
				}
			case haveErr == nil:
				if wantErr != nil {
					t.Errorf("expected non-nil err %v, have nil", wantErr)
				}
			default:
				// Errors are compared by message, not identity.
				if haveErr.Error() != wantErr.Error() {
					t.Errorf("expected err=%q, have err=%q", wantErr.Error(), haveErr.Error())
				}
			}
		}
	}
	t.Run("normal line",
		testcase("github.com/golang/notreal v1.0.0",
			&vndrPackage{
				importPath: "github.com/golang/notreal",
				reference:  "v1.0.0",
			}, nil))
	t.Run("with repo",
		testcase("github.com/golang/notreal v1.0.0 https://github.com/golang/notreal",
			&vndrPackage{
				importPath: "github.com/golang/notreal",
				reference:  "v1.0.0",
				repository: "https://github.com/golang/notreal",
			}, nil))
	t.Run("trailing comment",
		testcase("github.com/golang/notreal v1.0.0 https://github.com/golang/notreal # cool comment",
			&vndrPackage{
				importPath: "github.com/golang/notreal",
				reference:  "v1.0.0",
				repository: "https://github.com/golang/notreal",
			}, nil))
	t.Run("empty line", testcase("", nil, nil))
	t.Run("comment line", testcase("# comment", nil, nil))
	t.Run("comment line with leading whitespace", testcase("  # comment", nil, nil))
	t.Run("missing revision",
		testcase("github.com/golang/notreal", nil,
			errors.New("invalid config format: \"github.com/golang/notreal\""),
		))
}
|
vndr
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/vndr/importer.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package vndr
import (
"bufio"
"log"
"os"
"path/filepath"
"strings"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/base"
"github.com/pkg/errors"
)
func vndrFile(dir string) string {
return filepath.Join(dir, "vendor.conf")
}
// Importer imports vndr configuration into the dep configuration format.
type Importer struct {
	*base.Importer
	packages []vndrPackage // entries accumulated from vendor.conf by loadVndrFile
}
// NewImporter for vndr. It wraps the shared base.Importer, which provides
// the common manifest/lock construction logic.
func NewImporter(log *log.Logger, verbose bool, sm gps.SourceManager) *Importer {
	return &Importer{Importer: base.NewImporter(log, verbose, sm)}
}
// Name of the importer, used to identify it in logs and listings.
func (v *Importer) Name() string { return "vndr" }
// HasDepMetadata checks if a directory contains config that the importer
// can handle — for vndr, the presence of a vendor.conf file.
func (v *Importer) HasDepMetadata(dir string) bool {
	if _, err := os.Stat(vndrFile(dir)); err != nil {
		return false
	}
	return true
}
// Import the config found in the directory, converting vendor.conf into a
// dep manifest and lock for the given project root.
func (v *Importer) Import(dir string, pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) {
	v.Logger.Println("Detected vndr configuration file...")
	if err := v.loadVndrFile(dir); err != nil {
		return nil, nil, errors.Wrapf(err, "unable to load vndr file")
	}
	manifest, lock := v.convert(pr)
	return manifest, lock, nil
}
// loadVndrFile reads dir's vendor.conf line by line and appends each parsed
// entry to v.packages. Unparseable lines and scanner errors are logged as
// warnings and skipped; only failure to open the file is returned as an error.
func (v *Importer) loadVndrFile(dir string) error {
	v.Logger.Printf("Converting from vendor.conf...")
	path := vndrFile(dir)
	f, err := os.Open(path)
	if err != nil {
		return errors.Wrapf(err, "unable to open %s", path)
	}
	defer f.Close()
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		pkg, err := parseVndrLine(scanner.Text())
		if err != nil {
			// Best-effort import: a bad line shouldn't abort the whole file.
			v.Logger.Printf(" Warning: Skipping line. Unable to parse: %s\n", err)
			continue
		}
		if pkg == nil {
			// Could be an empty line or one which is just a comment
			continue
		}
		v.packages = append(v.packages, *pkg)
	}
	if err := scanner.Err(); err != nil {
		v.Logger.Printf(" Warning: Ignoring errors found while parsing %s: %s\n", path, err)
	}
	return nil
}
// convert translates the parsed vndr packages into a dep manifest and lock.
// Entries with no import path are skipped entirely; entries with no
// reference are still imported, after logging a warning.
func (v *Importer) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock) {
	packages := make([]base.ImportedPackage, 0, len(v.packages))
	for _, pkg := range v.packages {
		// Validate
		if pkg.importPath == "" {
			v.Logger.Println(
				" Warning: Skipping project. Invalid vndr configuration, import path is required",
			)
			continue
		}
		if pkg.reference == "" {
			v.Logger.Printf(
				" Warning: Invalid vndr configuration, reference not found for import path %q\n",
				pkg.importPath,
			)
		}
		ip := base.ImportedPackage{
			Name:     pkg.importPath,
			Source:   pkg.repository,
			LockHint: pkg.reference,
		}
		packages = append(packages, ip)
	}
	// NOTE(review): second argument appears to control deriving constraints
	// from lock hints — confirm against base.Importer.ImportPackages.
	v.ImportPackages(packages, true)
	return v.Manifest, v.Lock
}
// vndrPackage is one parsed vendor.conf entry of the form
// "<importPath> <reference> [<repository>]".
type vndrPackage struct {
	importPath string // Go import path of the dependency
	reference  string // second field; used as the lock hint in convert
	repository string // optional third field; alternate source location
}
// parseVndrLine parses a single vendor.conf line. It returns (nil, nil) for
// blank or comment-only lines, a populated vndrPackage for lines with two or
// three whitespace-separated fields, and an error otherwise. Anything after
// a "#" is treated as a comment and discarded.
func parseVndrLine(line string) (*vndrPackage, error) {
	if idx := strings.Index(line, "#"); idx >= 0 {
		line = line[:idx]
	}
	line = strings.TrimSpace(line)
	if line == "" {
		return nil, nil
	}
	fields := strings.Fields(line)
	switch len(fields) {
	case 2:
		return &vndrPackage{importPath: fields[0], reference: fields[1]}, nil
	case 3:
		return &vndrPackage{importPath: fields[0], reference: fields[1], repository: fields[2]}, nil
	default:
		return nil, errors.Errorf("invalid config format: %q", line)
	}
}
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/vndr/testdata/vendor.conf
|
github.com/sdboyer/deptest 3f4c3bea144e112a69bbe5d8d01c1b09a544253f https://github.com/sdboyer/deptest.git # trailing comment
# line comment
github.com/sdboyer/deptestdos v2.0.0 # trailing comment
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/vndr/testdata/golden.txt
|
Detected vndr configuration file...
Converting from vendor.conf...
Using ^0.8.1 as initial constraint for imported dep github.com/sdboyer/deptest
Trying v0.8.1 (3f4c3be) as initial lock for imported dep github.com/sdboyer/deptest
Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos
Trying v2.0.0 (5c60720) as initial lock for imported dep github.com/sdboyer/deptestdos
|
glide
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glide/importer_test.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package glide
import (
"bytes"
"log"
"path/filepath"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/importertest"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// TestGlideConfig_Convert exercises convert() against table-driven pairs of
// glide.yaml/glide.lock fixtures, covering the happy path, yaml-only input,
// ignored packages, excluded dirs, validation warnings, and unsupported
// os/arch fields.
func TestGlideConfig_Convert(t *testing.T) {
	testCases := map[string]struct {
		yaml glideYaml
		lock glideLock
		importertest.TestCase
	}{
		"project": {
			glideYaml{
				Imports: []glidePackage{
					{
						Name:       importertest.Project,
						Repository: importertest.ProjectSrc,
						Reference:  importertest.V2Branch,
					},
				},
			},
			glideLock{
				Imports: []glideLockedPackage{
					{
						Name:       importertest.Project,
						Repository: importertest.ProjectSrc,
						Revision:   importertest.V2PatchRev,
					},
				},
			},
			importertest.TestCase{
				WantSourceRepo: importertest.ProjectSrc,
				WantConstraint: importertest.V2Branch,
				WantRevision:   importertest.V2PatchRev,
				WantVersion:    importertest.V2PatchTag,
			},
		},
		"test project": {
			glideYaml{
				Imports: []glidePackage{
					{
						Name:       importertest.Project,
						Repository: importertest.ProjectSrc,
						Reference:  importertest.V2Branch,
					},
				},
			},
			glideLock{
				Imports: []glideLockedPackage{
					{
						Name:       importertest.Project,
						Repository: importertest.ProjectSrc,
						Revision:   importertest.V2PatchRev,
					},
				},
			},
			importertest.TestCase{
				WantSourceRepo: importertest.ProjectSrc,
				WantConstraint: importertest.V2Branch,
				WantRevision:   importertest.V2PatchRev,
				WantVersion:    importertest.V2PatchTag,
			},
		},
		"yaml only": {
			glideYaml{
				Imports: []glidePackage{
					{
						Name:       importertest.Project,
						Repository: importertest.ProjectSrc,
						Reference:  importertest.V2Branch,
					},
				},
			},
			glideLock{},
			importertest.TestCase{
				WantSourceRepo: importertest.ProjectSrc,
				WantConstraint: importertest.V2Branch,
			},
		},
		"ignored package": {
			glideYaml{
				Ignores: []string{importertest.Project},
			},
			glideLock{},
			importertest.TestCase{
				WantIgnored: []string{importertest.Project},
			},
		},
		"exclude dir": {
			glideYaml{
				ExcludeDirs: []string{"samples"},
			},
			glideLock{},
			importertest.TestCase{
				WantIgnored: []string{importertest.RootProject + "/samples"},
			},
		},
		"exclude dir ignores mismatched package name": {
			glideYaml{
				Name:        "github.com/golang/mismatched-package-name",
				ExcludeDirs: []string{"samples"},
			},
			glideLock{},
			importertest.TestCase{
				WantIgnored: []string{importertest.RootProject + "/samples"},
			},
		},
		"missing package name": {
			glideYaml{
				Imports: []glidePackage{{Name: ""}},
			},
			glideLock{},
			importertest.TestCase{
				WantWarning: "Warning: Skipping project. Invalid glide configuration, Name is required",
			},
		},
		"warn unused os field": {
			glideYaml{
				Imports: []glidePackage{
					{
						Name: importertest.Project,
						OS:   "windows",
					},
				}},
			glideLock{},
			importertest.TestCase{
				WantWarning: "specified an os",
			},
		},
		"warn unused arch field": {
			glideYaml{
				Imports: []glidePackage{
					{
						Name: importertest.Project,
						Arch: "i686",
					},
				}},
			glideLock{},
			importertest.TestCase{
				WantWarning: "specified an arch",
			},
		},
	}
	for name, testCase := range testCases {
		// Shadow the range variables so each subtest closure captures its own.
		name := name
		testCase := testCase
		t.Run(name, func(t *testing.T) {
			err := testCase.Execute(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock) {
				g := NewImporter(logger, true, sm)
				g.glideConfig = testCase.yaml
				g.glideLock = testCase.lock
				return g.convert(importertest.RootProject)
			})
			if err != nil {
				t.Fatalf("%#v", err)
			}
		})
	}
}
// TestGlideConfig_Import runs the full Import pipeline against the
// testdata glide.yaml/glide.lock fixtures and compares the captured log
// output against testdata/golden.txt.
func TestGlideConfig_Import(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()
	ctx := importertest.NewTestContext(h)
	sm, err := ctx.SourceManager()
	h.Must(err)
	defer sm.Release()
	h.TempDir(filepath.Join("src", importertest.RootProject))
	h.TempCopy(filepath.Join(importertest.RootProject, glideYamlName), "glide.yaml")
	h.TempCopy(filepath.Join(importertest.RootProject, glideLockName), "glide.lock")
	projectRoot := h.Path(importertest.RootProject)
	// Capture stderr so we can verify output
	verboseOutput := &bytes.Buffer{}
	ctx.Err = log.New(verboseOutput, "", 0)
	g := NewImporter(ctx.Err, false, sm) // Disable verbose so that we don't print values that change each test run
	if !g.HasDepMetadata(projectRoot) {
		t.Fatal("Expected the importer to detect the glide configuration files")
	}
	m, l, err := g.Import(projectRoot, importertest.RootProject)
	h.Must(err)
	if m == nil {
		t.Fatal("Expected the manifest to be generated")
	}
	if l == nil {
		t.Fatal("Expected the lock to be generated")
	}
	// Compare logged output against the golden file, regenerating it when
	// the -update flag is set.
	goldenFile := "golden.txt"
	got := verboseOutput.String()
	want := h.GetTestFileString(goldenFile)
	if want != got {
		if *test.UpdateGolden {
			if err := h.WriteTestFile(goldenFile, got); err != nil {
				t.Fatalf("%+v", errors.Wrapf(err, "Unable to write updated golden file %s", goldenFile))
			}
		} else {
			t.Fatalf("want %s, got %s", want, got)
		}
	}
}
|
glide
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glide/importer.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package glide
import (
"bytes"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/fs"
"github.com/golang/dep/internal/importers/base"
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
)
// Names of the glide configuration files looked for in a project root.
const glideYamlName = "glide.yaml"
const glideLockName = "glide.lock"
// Importer imports glide configuration into the dep configuration format.
type Importer struct {
	*base.Importer
	glideConfig glideYaml // parsed glide.yaml
	glideLock   glideLock // parsed glide.lock, when present
	lockFound   bool      // true once glide.lock has been read and parsed successfully
}
// NewImporter for glide. It wraps the shared base.Importer, which provides
// the common manifest/lock construction logic.
func NewImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *Importer {
	return &Importer{Importer: base.NewImporter(logger, verbose, sm)}
}
// glideYaml models the subset of glide.yaml that dep needs for conversion.
type glideYaml struct {
	Name        string         `yaml:"package"`
	Ignores     []string       `yaml:"ignore"`
	ExcludeDirs []string       `yaml:"excludeDirs"`
	Imports     []glidePackage `yaml:"import"`
	TestImports []glidePackage `yaml:"testImport"`
}
// glideLock models the subset of glide.lock that dep needs for conversion.
type glideLock struct {
	Imports     []glideLockedPackage `yaml:"imports"`
	TestImports []glideLockedPackage `yaml:"testImports"`
}
// glidePackage is a single dependency entry from glide.yaml.
type glidePackage struct {
	Name      string `yaml:"package"`
	Reference string `yaml:"version"` // could contain a semver, tag or branch
	Repository string `yaml:"repo"`
	// Unsupported fields that we will warn if used
	Subpackages []string `yaml:"subpackages"`
	OS          string   `yaml:"os"`
	Arch        string   `yaml:"arch"`
}
// glideLockedPackage is a single pinned dependency entry from glide.lock.
type glideLockedPackage struct {
	Name       string `yaml:"name"`
	Revision   string `yaml:"version"`
	Repository string `yaml:"repo"`
}
// Name of the importer, used to identify it in logs and listings.
func (g *Importer) Name() string {
	return "glide"
}
// HasDepMetadata checks if a directory contains config that the importer can
// handle. Only glide.yaml is required; glide.lock is optional.
func (g *Importer) HasDepMetadata(dir string) bool {
	_, err := os.Stat(filepath.Join(dir, glideYamlName))
	return err == nil
}
// Import the config found in the directory, converting glide.yaml (and
// glide.lock when present) into a dep manifest and lock.
func (g *Importer) Import(dir string, pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) {
	if err := g.load(dir); err != nil {
		return nil, nil, err
	}
	manifest, lock := g.convert(pr)
	return manifest, lock, nil
}
// load the glide configuration files. Failure to load `glide.yaml` is considered
// unrecoverable and an error is returned for it. But if there is any error while trying
// to load the lock file, only a warning is logged.
func (g *Importer) load(projectDir string) error {
	g.Logger.Println("Detected glide configuration files...")
	y := filepath.Join(projectDir, glideYamlName)
	if g.Verbose {
		g.Logger.Printf(" Loading %s", y)
	}
	yb, err := ioutil.ReadFile(y)
	if err != nil {
		return errors.Wrapf(err, "unable to read %s", y)
	}
	err = yaml.Unmarshal(yb, &g.glideConfig)
	if err != nil {
		return errors.Wrapf(err, "unable to parse %s", y)
	}
	l := filepath.Join(projectDir, glideLockName)
	if exists, _ := fs.IsRegular(l); exists {
		if g.Verbose {
			g.Logger.Printf(" Loading %s", l)
		}
		lb, err := ioutil.ReadFile(l)
		if err != nil {
			// Lock problems are non-fatal: constraints can still be imported
			// from glide.yaml alone.
			g.Logger.Printf(" Warning: Ignoring lock file. Unable to read %s: %s\n", l, err)
			return nil
		}
		lock := glideLock{}
		err = yaml.Unmarshal(lb, &lock)
		if err != nil {
			g.Logger.Printf(" Warning: Ignoring lock file. Unable to parse %s: %s\n", l, err)
			return nil
		}
		// Only mark the lock as found once it has parsed successfully.
		g.lockFound = true
		g.glideLock = lock
	}
	return nil
}
// convert the glide configuration files into dep configuration files.
// Constraints come from glide.yaml imports/testImports, lock hints from
// glide.lock; ignored packages and excluded dirs become manifest ignores.
func (g *Importer) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock) {
	projectName := string(pr)
	task := bytes.NewBufferString("Converting from glide.yaml")
	if g.lockFound {
		task.WriteString(" and glide.lock")
	}
	task.WriteString("...")
	g.Logger.Println(task)
	numPkgs := len(g.glideConfig.Imports) + len(g.glideConfig.TestImports) + len(g.glideLock.Imports) + len(g.glideLock.TestImports)
	packages := make([]base.ImportedPackage, 0, numPkgs)
	// Constraints
	for _, pkg := range append(g.glideConfig.Imports, g.glideConfig.TestImports...) {
		// Validate
		if pkg.Name == "" {
			g.Logger.Println(
				" Warning: Skipping project. Invalid glide configuration, Name is required",
			)
			continue
		}
		// Warn about fields glide supports but dep does not.
		if g.Verbose {
			if pkg.OS != "" {
				g.Logger.Printf(" The %s package specified an os, but that isn't supported by dep yet, and will be ignored. See https://github.com/golang/dep/issues/291.\n", pkg.Name)
			}
			if pkg.Arch != "" {
				g.Logger.Printf(" The %s package specified an arch, but that isn't supported by dep yet, and will be ignored. See https://github.com/golang/dep/issues/291.\n", pkg.Name)
			}
		}
		ip := base.ImportedPackage{
			Name:           pkg.Name,
			Source:         pkg.Repository,
			ConstraintHint: pkg.Reference,
		}
		packages = append(packages, ip)
	}
	// Locks
	for _, pkg := range append(g.glideLock.Imports, g.glideLock.TestImports...) {
		// Validate
		if pkg.Name == "" {
			g.Logger.Println(" Warning: Skipping project. Invalid glide lock, Name is required")
			continue
		}
		ip := base.ImportedPackage{
			Name:     pkg.Name,
			Source:   pkg.Repository,
			LockHint: pkg.Revision,
		}
		packages = append(packages, ip)
	}
	g.ImportPackages(packages, false)
	// Ignores
	g.Manifest.Ignored = append(g.Manifest.Ignored, g.glideConfig.Ignores...)
	if len(g.glideConfig.ExcludeDirs) > 0 {
		// Excluded dirs are relative to the project root; dep's own notion of
		// the root wins over the name recorded in glide.yaml.
		if g.glideConfig.Name != "" && g.glideConfig.Name != projectName {
			g.Logger.Printf(" Glide thinks the package is '%s' but dep thinks it is '%s', using dep's value.\n", g.glideConfig.Name, projectName)
		}
		for _, dir := range g.glideConfig.ExcludeDirs {
			pkg := path.Join(projectName, dir)
			g.Manifest.Ignored = append(g.Manifest.Ignored, pkg)
		}
	}
	return g.Manifest, g.Lock
}
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glide/testdata/glide.yaml
|
package: github.com/golang/notexist
homepage: http://example.com
license: MIT
owners:
- name: Sam Boyer
email: sdboyer@example.com
homepage: http://sdboyer.io
ignore:
- github.com/sdboyer/dep-test
excludeDirs:
- samples
import:
- package: github.com/sdboyer/deptest
repo: https://github.com/sdboyer/deptest.git
vcs: git
version: master
- package: github.com/sdboyer/deptestdos
version: v2.0.0
testImport:
- package: github.com/golang/lint
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glide/testdata/golden.txt
|
Detected glide configuration files...
Converting from glide.yaml and glide.lock...
Using master as initial constraint for imported dep github.com/sdboyer/deptest
Trying v0.8.1 (3f4c3be) as initial lock for imported dep github.com/sdboyer/deptest
Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos
Trying v2.0.0 (5c60720) as initial lock for imported dep github.com/sdboyer/deptestdos
Trying * (cb00e56) as initial lock for imported dep github.com/golang/lint
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glide/testdata/glide.lock
|
hash: 16053c82a71f9bd509b05a4523df6bc418aed2083e4b8bd97a870bbc003256f8
updated: 2017-03-07T17:02:32.214383898-06:00
imports:
- name: github.com/sdboyer/deptest
repo: https://github.com/sdboyer/deptest.git
vcs: git
version: 3f4c3bea144e112a69bbe5d8d01c1b09a544253f
- name: github.com/sdboyer/deptestdos
version: 5c607206be5decd28e6263ffffdcee067266015e
testImports:
- name: github.com/golang/lint
version: cb00e5669539f047b2f4c53a421a01b0c8e172c6
|
godep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/godep/importer_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package godep
import (
"bytes"
"fmt"
"log"
"path/filepath"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/importertest"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// TestGodepConfig_Convert exercises convert with a table of godep
// configurations: packages with and without a Comment (branch hint),
// invalid entries that should only produce warnings, and the mapping of
// godep's Packages list onto dep's required list.
func TestGodepConfig_Convert(t *testing.T) {
	testCases := map[string]struct {
		importertest.TestCase
		json godepJSON
	}{
		"package without comment": {
			importertest.TestCase{
				WantConstraint: importertest.V1Constraint,
				WantRevision:   importertest.V1Rev,
				WantVersion:    importertest.V1Tag,
			},
			godepJSON{
				Imports: []godepPackage{
					{
						ImportPath: importertest.Project,
						Rev:        importertest.V1Rev,
					},
				},
			},
		},
		"package with comment": {
			importertest.TestCase{
				WantConstraint: importertest.V2Branch,
				WantRevision:   importertest.V2PatchRev,
				WantVersion:    importertest.V2PatchTag,
			},
			godepJSON{
				Imports: []godepPackage{
					{
						ImportPath: importertest.Project,
						Rev:        importertest.V2PatchRev,
						Comment:    importertest.V2Branch,
					},
				},
			},
		},
		// An empty ImportPath is skipped entirely with a warning.
		"missing package name": {
			importertest.TestCase{
				WantWarning: "Warning: Skipping project. Invalid godep configuration, ImportPath is required",
			},
			godepJSON{
				Imports: []godepPackage{{ImportPath: ""}},
			},
		},
		// A missing Rev only warns; the package is still imported.
		"missing revision": {
			importertest.TestCase{
				WantWarning: fmt.Sprintf(
					"Warning: Invalid godep configuration, Rev not found for ImportPath %q",
					importertest.Project,
				),
			},
			godepJSON{
				Imports: []godepPackage{
					{
						ImportPath: importertest.Project,
					},
				},
			},
		},
		"package with requirements": {
			importertest.TestCase{
				WantRequired: []string{importertest.Project},
			},
			godepJSON{
				Required: []string{importertest.Project},
			},
		},
		// Relative requirements ("./...") are project-local and dropped.
		"package with local requirements": {
			importertest.TestCase{
				WantRequired: nil,
			},
			godepJSON{
				Required: []string{"./..."},
			},
		},
	}

	for name, testCase := range testCases {
		// Shadow the range variables so the subtest closure captures this
		// iteration's values (required before Go 1.22 loop-var semantics).
		name := name
		testCase := testCase
		t.Run(name, func(t *testing.T) {
			err := testCase.Execute(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock) {
				g := NewImporter(logger, true, sm)
				g.json = testCase.json
				return g.convert(importertest.RootProject)
			})
			if err != nil {
				t.Fatalf("%#v", err)
			}
		})
	}
}
// TestGodepConfig_Import runs a full Import against the testdata
// Godeps.json and compares the captured log output against the golden
// file (refresh the golden file by running with the -update flag).
func TestGodepConfig_Import(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()

	cacheDir := "gps-repocache"
	h.TempDir(cacheDir)
	h.TempDir("src")
	h.TempDir(filepath.Join("src", importertest.RootProject))
	h.TempCopy(filepath.Join(importertest.RootProject, godepPath), "Godeps.json")

	projectRoot := h.Path(importertest.RootProject)
	sm, err := gps.NewSourceManager(gps.SourceManagerConfig{
		Cachedir: h.Path(cacheDir),
		Logger:   log.New(test.Writer{TB: t}, "", 0),
	})
	h.Must(err)
	defer sm.Release()

	// Capture stderr so we can verify output
	verboseOutput := &bytes.Buffer{}
	logger := log.New(verboseOutput, "", 0)
	g := NewImporter(logger, false, sm) // Disable Verbose so that we don't print values that change each test run
	if !g.HasDepMetadata(projectRoot) {
		t.Fatal("Expected the importer to detect godep configuration file")
	}

	m, l, err := g.Import(projectRoot, importertest.RootProject)
	h.Must(err)
	if m == nil {
		t.Fatal("Expected the manifest to be generated")
	}
	if l == nil {
		t.Fatal("Expected the lock to be generated")
	}

	// Compare the log output against the golden file, optionally
	// rewriting the golden file when -update is passed.
	goldenFile := "golden.txt"
	got := verboseOutput.String()
	want := h.GetTestFileString(goldenFile)
	if want != got {
		if *test.UpdateGolden {
			if err := h.WriteTestFile(goldenFile, got); err != nil {
				t.Fatalf("%+v", errors.Wrapf(err, "Unable to write updated golden file %s", goldenFile))
			}
		} else {
			t.Fatalf("want %s, got %s", want, got)
		}
	}
}
// TestGodepConfig_JsonLoad verifies that load parses the testdata
// Godeps.json into the expected godepJSON value.
func TestGodepConfig_JsonLoad(t *testing.T) {
	// This is same as cmd/dep/testdata/init/Godeps.json
	wantJSON := godepJSON{
		Imports: []godepPackage{
			{
				ImportPath: "github.com/sdboyer/deptest",
				Rev:        "3f4c3bea144e112a69bbe5d8d01c1b09a544253f",
			},
			{
				ImportPath: "github.com/sdboyer/deptestdos",
				Rev:        "5c607206be5decd28e6263ffffdcee067266015e",
				Comment:    "v2.0.0",
			},
		},
	}

	h := test.NewHelper(t)
	defer h.Cleanup()

	ctx := importertest.NewTestContext(h)
	h.TempCopy(filepath.Join(importertest.RootProject, godepPath), "Godeps.json")

	projectRoot := h.Path(importertest.RootProject)

	// load only reads the file, so no source manager is needed (nil).
	g := NewImporter(ctx.Err, true, nil)
	err := g.load(projectRoot)
	if err != nil {
		t.Fatalf("Error while loading... %v", err)
	}

	if !equalImports(g.json.Imports, wantJSON.Imports) {
		t.Fatalf("Expected imports to be equal. \n\t(GOT): %v\n\t(WNT): %v", g.json.Imports, wantJSON.Imports)
	}
}
// equalImports reports whether two godepPackage slices hold the same
// elements in the same order. Two nil slices are considered equal, but a
// nil slice never equals a non-nil one (even when both are empty).
func equalImports(a, b []godepPackage) bool {
	switch {
	case a == nil && b == nil:
		return true
	case a == nil || b == nil:
		return false
	case len(a) != len(b):
		return false
	}
	for i, pkg := range a {
		if pkg != b[i] {
			return false
		}
	}
	return true
}
|
godep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/godep/importer.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package godep
import (
"encoding/json"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/base"
"github.com/pkg/errors"
)
// godepPath is the location of godep's configuration file, relative to
// the project root.
const godepPath = "Godeps" + string(os.PathSeparator) + "Godeps.json"

// Importer imports godep configuration into the dep configuration format.
type Importer struct {
	*base.Importer
	json godepJSON // parsed contents of Godeps/Godeps.json, populated by load
}

// NewImporter for godep.
func NewImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *Importer {
	return &Importer{Importer: base.NewImporter(logger, verbose, sm)}
}

// godepJSON mirrors the subset of Godeps.json that dep consumes.
type godepJSON struct {
	Required []string       `json:"Packages"`
	Imports  []godepPackage `json:"Deps"`
}

// godepPackage is a single dependency entry in Godeps.json.
type godepPackage struct {
	ImportPath string `json:"ImportPath"`
	Rev        string `json:"Rev"`
	Comment    string `json:"Comment"`
}
// Name returns the identifier of this importer.
func (g *Importer) Name() string {
	const importerName = "godep"
	return importerName
}
// HasDepMetadata checks if a directory contains config that the importer
// can handle, i.e. whether Godeps/Godeps.json exists under dir.
func (g *Importer) HasDepMetadata(dir string) bool {
	_, err := os.Stat(filepath.Join(dir, godepPath))
	return err == nil
}
// Import the config found in the directory: parse Godeps/Godeps.json and
// convert it to a dep manifest and lock.
func (g *Importer) Import(dir string, pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) {
	if err := g.load(dir); err != nil {
		return nil, nil, err
	}
	manifest, lock := g.convert(pr)
	return manifest, lock, nil
}
// load reads and parses Godeps/Godeps.json from projectDir into g.json.
func (g *Importer) load(projectDir string) error {
	g.Logger.Println("Detected godep configuration files...")
	configPath := filepath.Join(projectDir, godepPath)
	if g.Verbose {
		g.Logger.Printf(" Loading %s", configPath)
	}

	raw, err := ioutil.ReadFile(configPath)
	if err != nil {
		return errors.Wrapf(err, "unable to read %s", configPath)
	}
	if err := json.Unmarshal(raw, &g.json); err != nil {
		return errors.Wrapf(err, "unable to parse %s", configPath)
	}
	return nil
}
// convert maps the parsed godep configuration onto dep's manifest and
// lock. Each Deps entry becomes an imported package (Rev as the lock
// hint, Comment as the constraint hint), and non-relative Packages
// entries become the manifest's required list.
func (g *Importer) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock) {
	g.Logger.Println("Converting from Godeps.json ...")

	packages := make([]base.ImportedPackage, 0, len(g.json.Imports))
	for _, pkg := range g.json.Imports {
		// Validate
		if pkg.ImportPath == "" {
			g.Logger.Println(
				" Warning: Skipping project. Invalid godep configuration, ImportPath is required",
			)
			continue
		}
		// A missing Rev only warns; the package is still imported (no
		// continue) and the solver picks a version later.
		if pkg.Rev == "" {
			g.Logger.Printf(
				" Warning: Invalid godep configuration, Rev not found for ImportPath %q\n",
				pkg.ImportPath,
			)
		}
		ip := base.ImportedPackage{
			Name:           pkg.ImportPath,
			LockHint:       pkg.Rev,
			ConstraintHint: pkg.Comment,
		}
		packages = append(packages, ip)
	}
	g.ImportPackages(packages, true)

	// Map godep's Packages list to dep's required list, dropping entries
	// relative to the current project (e.g. "./...").
	required := make([]string, 0, len(g.json.Required))
	for _, req := range g.json.Required {
		if !strings.HasPrefix(req, ".") { // ignore project packages
			required = append(required, req)
		}
	}
	if len(required) > 0 {
		g.Manifest.Required = required
	}

	return g.Manifest, g.Lock
}
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/godep/testdata/golden.txt
|
Detected godep configuration files...
Converting from Godeps.json ...
Using ^0.8.1 as initial constraint for imported dep github.com/sdboyer/deptest
Trying v0.8.1 (3f4c3be) as initial lock for imported dep github.com/sdboyer/deptest
Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos
Trying v2.0.0 (5c60720) as initial lock for imported dep github.com/sdboyer/deptestdos
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/godep/testdata/Godeps.json
|
{
"ImportPath": "github.com/golang/notexist",
"GoVersion": "go1.8",
"GodepVersion": "vXYZ",
"Deps": [
{
"ImportPath": "github.com/sdboyer/deptest",
"Rev": "3f4c3bea144e112a69bbe5d8d01c1b09a544253f"
},
{
"ImportPath": "github.com/sdboyer/deptestdos",
"Comment": "v2.0.0",
"Rev": "5c607206be5decd28e6263ffffdcee067266015e"
}
]
}
|
importertest
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/importertest/testcase.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package importertest
import (
"bytes"
"io/ioutil"
"log"
"sort"
"strings"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// TestCase is a common set of validations applied to the result
// of an importer converting from an external config format to dep's.
// Empty Want* fields mean "expect none of that output" (see validate).
type TestCase struct {
	// NOTE(review): not referenced by validate in this file; presumably
	// consumed by importer-specific tests — confirm against callers.
	DefaultConstraintFromLock bool
	WantSourceRepo            string       // expected alternate source for the locked project ("" = none)
	WantConstraint            string       // expected manifest constraint ("" = no constraint expected)
	WantRevision              gps.Revision // expected locked revision ("" = no lock entry expected)
	WantVersion               string       // expected locked version paired with WantRevision
	WantIgnored               []string     // expected manifest ignore list
	WantRequired              []string     // expected manifest required list
	WantWarning               string       // substring expected in the captured log output
}
// NewTestContext creates a unique context with its own GOPATH for a
// single test. Both output streams are discarded by default.
func NewTestContext(h *test.Helper) *dep.Ctx {
	h.TempDir("src")
	gopath := h.Path(".")
	quiet := log.New(ioutil.Discard, "", 0)

	return &dep.Ctx{
		GOPATH: gopath,
		Out:    quiet,
		Err:    quiet,
	}
}
// Execute and validate the test case. convert is invoked with a logger
// whose output is captured and a real source manager; the resulting
// manifest/lock pair and the captured output are then checked against the
// TestCase's Want* fields via validate.
func (tc TestCase) Execute(t *testing.T, convert func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock)) error {
	h := test.NewHelper(t)
	defer h.Cleanup()
	// Disable parallel tests until we can resolve this error on the Windows builds:
	// "remote repository at https://github.com/carolynvs/deptest-importers does not exist, or is inaccessible"
	//h.Parallel()

	ctx := NewTestContext(h)
	sm, err := ctx.SourceManager()
	h.Must(err)
	defer sm.Release()

	// Capture stderr so we can verify warnings
	output := &bytes.Buffer{}
	ctx.Err = log.New(output, "", 0)

	manifest, lock := convert(ctx.Err, sm)
	return tc.validate(manifest, lock, output)
}
// validate returns an error if any of the testcase validations failed.
// It checks, in order: the manifest's ignored/required lists, the
// constraint on Project, the lock entry (project root, source, revision,
// version), and finally that the captured output contains WantWarning.
func (tc TestCase) validate(manifest *dep.Manifest, lock *dep.Lock, output *bytes.Buffer) error {
	if !equalSlice(manifest.Ignored, tc.WantIgnored) {
		return errors.Errorf("unexpected set of ignored projects: \n\t(GOT) %#v \n\t(WNT) %#v",
			manifest.Ignored, tc.WantIgnored)
	}

	if !equalSlice(manifest.Required, tc.WantRequired) {
		return errors.Errorf("unexpected set of required projects: \n\t(GOT) %#v \n\t(WNT) %#v",
			manifest.Required, tc.WantRequired)
	}

	// At most one constraint is expected: the one on Project.
	wantConstraintCount := 0
	if tc.WantConstraint != "" {
		wantConstraintCount = 1
	}
	gotConstraintCount := len(manifest.Constraints)
	if gotConstraintCount != wantConstraintCount {
		return errors.Errorf("unexpected number of constraints: \n\t(GOT) %v \n\t(WNT) %v",
			gotConstraintCount, wantConstraintCount)
	}

	if tc.WantConstraint != "" {
		d, ok := manifest.Constraints[Project]
		if !ok {
			return errors.Errorf("Expected the manifest to have a dependency for '%v'",
				Project)
		}

		gotConstraint := d.Constraint.String()
		if gotConstraint != tc.WantConstraint {
			return errors.Errorf("unexpected constraint: \n\t(GOT) %v \n\t(WNT) %v",
				gotConstraint, tc.WantConstraint)
		}

	}

	// Lock checks.
	wantLockCount := 0
	if tc.WantRevision != "" {
		wantLockCount = 1
	}
	gotLockCount := 0
	if lock != nil {
		gotLockCount = len(lock.P)
	}
	if gotLockCount != wantLockCount {
		return errors.Errorf("unexpected number of locked projects: \n\t(GOT) %v \n\t(WNT) %v",
			gotLockCount, wantLockCount)
	}

	if tc.WantRevision != "" {
		lp := lock.P[0]

		gotProjectRoot := lp.Ident().ProjectRoot
		if gotProjectRoot != Project {
			return errors.Errorf("unexpected root project in lock: \n\t(GOT) %v \n\t(WNT) %v",
				gotProjectRoot, Project)
		}

		gotSource := lp.Ident().Source
		if gotSource != tc.WantSourceRepo {
			return errors.Errorf("unexpected source repository: \n\t(GOT) %v \n\t(WNT) %v",
				gotSource, tc.WantSourceRepo)
		}

		// Break down the locked "version" into a version (optional) and revision
		var gotVersion string
		var gotRevision gps.Revision
		if lpv, ok := lp.Version().(gps.PairedVersion); ok {
			gotVersion = lpv.String()
			gotRevision = lpv.Revision()
		} else if lr, ok := lp.Version().(gps.Revision); ok {
			gotRevision = lr
		} else {
			return errors.New("could not determine the type of the locked version")
		}

		if gotRevision != tc.WantRevision {
			return errors.Errorf("unexpected locked revision: \n\t(GOT) %v \n\t(WNT) %v",
				gotRevision,
				tc.WantRevision)
		}
		if gotVersion != tc.WantVersion {
			return errors.Errorf("unexpected locked version: \n\t(GOT) %v \n\t(WNT) %v",
				gotVersion,
				tc.WantVersion)
		}
	}

	// WantWarning is matched as a substring of everything logged.
	if tc.WantWarning != "" {
		gotWarning := output.String()
		if !strings.Contains(gotWarning, tc.WantWarning) {
			return errors.Errorf("Expected the output to include the warning '%s' but got '%s'\n", tc.WantWarning, gotWarning)
		}
	}

	return nil
}
// equalSlice reports whether a and b contain the same strings, ignoring
// order. Two nil slices are equal; a nil slice never equals a non-nil
// slice (even an empty one).
//
// Fix: the previous implementation called sort.Strings directly on a and
// b, silently reordering the callers' slices (e.g. manifest.Ignored) as a
// side effect of a comparison. Sort local copies instead.
func equalSlice(a, b []string) bool {
	if a == nil && b == nil {
		return true
	}

	if a == nil || b == nil {
		return false
	}

	if len(a) != len(b) {
		return false
	}

	// Sort copies so the comparison is order-insensitive without
	// mutating the callers' slices.
	as := append([]string(nil), a...)
	bs := append([]string(nil), b...)
	sort.Strings(as)
	sort.Strings(bs)

	for i := range as {
		if as[i] != bs[i] {
			return false
		}
	}

	return true
}
|
importertest
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/importertest/testdata.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package importertest
// Shared fixture constants describing the repositories and revisions used
// by the importer test suites. The revisions refer to real commits in the
// referenced GitHub repositories.
const (
	// RootProject is the containing project performing the import.
	RootProject = "github.com/golang/notexist"

	// Project being imported.
	Project = "github.com/carolynvs/deptest-importers"

	// ProjectSrc is an alternate source for the imported project.
	ProjectSrc = "https://github.com/carolynvs/deptest-importers.git"

	// UntaggedRev is a revision without any tags.
	UntaggedRev = "9b670d143bfb4a00f7461451d5c4a62f80e9d11d"

	// UntaggedRevAbbrv is the result of running `git describe` on UntaggedRev
	UntaggedRevAbbrv = "v1.0.0-1-g9b670d1"

	// Beta1Tag is a non-semver tag.
	Beta1Tag = "beta1"

	// Beta1Rev is the revision of Beta1Tag
	Beta1Rev = "7913ab26988c6fb1e16225f845a178e8849dd254"

	// V2Branch is a branch that could be interpreted as a semver tag (but shouldn't).
	V2Branch = "v2"

	// V2Rev is the HEAD revision of V2Branch.
	V2Rev = "45dcf5a09c64b48b6e836028a3bc672b19b9d11d"

	// V2PatchTag is a prerelease semver tag on the non-default branch.
	V2PatchTag = "v2.0.0-alpha1"

	// V2PatchRev is the revision of V2PatchTag.
	V2PatchRev = "347760b50204948ea63e531dd6560e56a9adde8f"

	// V1Tag is a semver tag that matches V1Constraint.
	V1Tag = "v1.0.0"

	// V1Rev is the revision of V1Tag.
	V1Rev = "d0c29640b17f77426b111f4c1640d716591aa70e"

	// V1PatchTag is a semver tag that matches V1Constraint.
	V1PatchTag = "v1.0.2"

	// V1PatchRev is the revision of V1PatchTag
	V1PatchRev = "788963efe22e3e6e24c776a11a57468bb2fcd780"

	// V1Constraint is a constraint that matches multiple semver tags.
	V1Constraint = "^1.0.0"

	// MultiTaggedRev is a revision with multiple tags.
	MultiTaggedRev = "34cf993cc346f65601fe4356dd68bd54d20a1bfe"

	// MultiTaggedSemverTag is a semver tag on MultiTaggedRev.
	MultiTaggedSemverTag = "v1.0.4"

	// MultiTaggedPlainTag is a non-semver tag on MultiTaggedRev.
	MultiTaggedPlainTag = "stable"

	// NonexistentPrj is a dummy project which does not exist on Github.
	NonexistentPrj = "github.com/nonexistent/project"
)
|
govend
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/govend/importer_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package govend
import (
"bytes"
"fmt"
"log"
"path/filepath"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/importertest"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// TestGovendConfig_Convert exercises convert with a table of govend
// configurations: a valid pinned package, a missing path (skipped with a
// warning), and a missing revision (skipped with a warning — govend
// entries without a rev are not imported at all).
func TestGovendConfig_Convert(t *testing.T) {
	testCases := map[string]struct {
		yaml govendYAML
		importertest.TestCase
	}{
		"package": {
			govendYAML{
				Imports: []govendPackage{
					{
						Path:     importertest.Project,
						Revision: importertest.V1Rev,
					},
				},
			},
			importertest.TestCase{
				WantConstraint: importertest.V1Constraint,
				WantRevision:   importertest.V1Rev,
				WantVersion:    importertest.V1Tag,
			},
		},
		"missing package name": {
			govendYAML{
				Imports: []govendPackage{
					{
						Path: "",
					},
				},
			},
			importertest.TestCase{
				WantWarning: "Warning: Skipping project. Invalid govend configuration, path is required",
			},
		},
		"missing revision": {
			govendYAML{
				Imports: []govendPackage{
					{
						Path: importertest.Project,
					},
				},
			},
			importertest.TestCase{
				WantWarning: fmt.Sprintf(
					" Warning: Skipping import with empty constraints. "+
						"The solve step will add the dependency to the lock if needed: %q\n",
					importertest.Project,
				),
			},
		},
	}

	for name, testCase := range testCases {
		// Shadow the range variables so the subtest closure captures this
		// iteration's values (required before Go 1.22 loop-var semantics).
		name := name
		testCase := testCase
		t.Run(name, func(t *testing.T) {
			err := testCase.Execute(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock) {
				g := NewImporter(logger, true, sm)
				g.yaml = testCase.yaml
				return g.convert(importertest.RootProject)
			})
			if err != nil {
				t.Fatalf("%#v", err)
			}
		})
	}
}
// TestGovendConfig_Import runs a full Import against the testdata
// vendor.yml and compares the captured log output against the golden
// file (refresh the golden file by running with the -update flag).
func TestGovendConfig_Import(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()

	cacheDir := "gps-repocache"
	h.TempDir(cacheDir)
	h.TempDir("src")
	h.TempDir(filepath.Join("src", importertest.RootProject))
	h.TempCopy(filepath.Join(importertest.RootProject, govendYAMLName), "vendor.yml")

	projectRoot := h.Path(importertest.RootProject)
	sm, err := gps.NewSourceManager(gps.SourceManagerConfig{Cachedir: h.Path(cacheDir)})
	h.Must(err)
	defer sm.Release()

	// Capture stderr so we can verify the import output
	verboseOutput := &bytes.Buffer{}
	logger := log.New(verboseOutput, "", 0)

	// Disable Verbose so that we don't print values that change each test run
	g := NewImporter(logger, false, sm)
	if !g.HasDepMetadata(projectRoot) {
		t.Fatal("Expected the importer to detect govend configuration file")
	}

	m, l, err := g.Import(projectRoot, importertest.RootProject)
	h.Must(err)
	if m == nil {
		t.Fatal("Expected the manifest to be generated")
	}
	if l == nil {
		t.Fatal("Expected the lock to be generated")
	}

	// Compare the log output against the golden file, optionally
	// rewriting the golden file when -update is passed.
	govendImportOutputFile := "golden.txt"
	got := verboseOutput.String()
	want := h.GetTestFileString(govendImportOutputFile)
	if want != got {
		if *test.UpdateGolden {
			if err := h.WriteTestFile(govendImportOutputFile, got); err != nil {
				t.Fatalf("%+v", errors.Wrapf(err, "Unable to write updated golden file %s", govendImportOutputFile))
			}
		} else {
			t.Fatalf("want %s, got %s", want, got)
		}
	}
}
// TestGovendConfig_YAMLLoad verifies that load parses the testdata
// vendor.yml into the expected govendYAML value.
func TestGovendConfig_YAMLLoad(t *testing.T) {
	// This is same as cmd/testdata/init/govend/vendor.yml
	wantYaml := govendYAML{
		Imports: []govendPackage{
			{
				Path:     "github.com/sdboyer/deptest",
				Revision: "3f4c3bea144e112a69bbe5d8d01c1b09a544253f",
			},
			{
				Path:     "github.com/sdboyer/deptestdos",
				Revision: "5c607206be5decd28e6263ffffdcee067266015e",
			},
		},
	}

	h := test.NewHelper(t)
	defer h.Cleanup()

	ctx := importertest.NewTestContext(h)
	h.TempCopy(filepath.Join(importertest.RootProject, govendYAMLName), "vendor.yml")

	projectRoot := h.Path(importertest.RootProject)

	// load only reads the file, so no source manager is needed (nil).
	g := NewImporter(ctx.Err, true, nil)
	err := g.load(projectRoot)
	if err != nil {
		t.Fatalf("Error while loading %v", err)
	}

	if !equalGovendImports(g.yaml.Imports, wantYaml.Imports) {
		t.Fatalf("Expected import to be equal. \n\t(GOT): %v\n\t(WNT): %v", g.yaml.Imports, wantYaml.Imports)
	}
}
// equalGovendImports reports whether two govendPackage slices hold the
// same elements in the same order. Two nil slices are considered equal,
// but a nil slice never equals a non-nil one (even when both are empty).
func equalGovendImports(a, b []govendPackage) bool {
	switch {
	case a == nil && b == nil:
		return true
	case a == nil || b == nil:
		return false
	case len(a) != len(b):
		return false
	}
	for i, pkg := range a {
		if pkg != b[i] {
			return false
		}
	}
	return true
}
|
govend
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/govend/importer.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package govend
import (
"io/ioutil"
"log"
"os"
"path/filepath"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/base"
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
)
// ToDo: govend supports json and xml formats as well and we will add support for other formats in next PR - @RaviTezu
// govend don't have a separate lock file.

// govendYAMLName is the name of govend's configuration file, relative to
// the project root.
const govendYAMLName = "vendor.yml"

// Importer imports govend configuration in to the dep configuration format.
type Importer struct {
	*base.Importer
	yaml govendYAML // parsed contents of vendor.yml, populated by load
}

// NewImporter for govend.
func NewImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *Importer {
	return &Importer{Importer: base.NewImporter(logger, verbose, sm)}
}

// govendYAML mirrors the structure of vendor.yml.
type govendYAML struct {
	Imports []govendPackage `yaml:"vendors"`
}

// govendPackage is a single pinned dependency in vendor.yml.
type govendPackage struct {
	Path     string `yaml:"path"`
	Revision string `yaml:"rev"`
}
// Name returns the identifier of this importer.
func (g *Importer) Name() string {
	const importerName = "govend"
	return importerName
}
// HasDepMetadata checks if a directory contains config that the importer
// can handle, i.e. whether vendor.yml exists under dir.
func (g *Importer) HasDepMetadata(dir string) bool {
	_, err := os.Stat(filepath.Join(dir, govendYAMLName))
	return err == nil
}
// Import the config found in the directory: parse vendor.yml and convert
// it to a dep manifest and lock.
func (g *Importer) Import(dir string, pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) {
	if err := g.load(dir); err != nil {
		return nil, nil, err
	}
	manifest, lock := g.convert(pr)
	return manifest, lock, nil
}
// load reads and parses vendor.yml from projectDir into g.yaml.
func (g *Importer) load(projectDir string) error {
	g.Logger.Println("Detected govend configuration files...")
	configPath := filepath.Join(projectDir, govendYAMLName)
	if g.Verbose {
		g.Logger.Printf(" Loading %s", configPath)
	}

	raw, err := ioutil.ReadFile(configPath)
	if err != nil {
		return errors.Wrapf(err, "unable to read %s", configPath)
	}
	if err := yaml.Unmarshal(raw, &g.yaml); err != nil {
		return errors.Wrapf(err, "unable to parse %s", configPath)
	}
	return nil
}
// convert the govend configuration files into dep configuration files.
// Each vendors entry with both a path and a rev becomes an imported
// package (rev as the lock hint); entries missing either field are
// skipped with a warning.
func (g *Importer) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock) {
	// NOTE(review): the file is named vendor.yml but this message says
	// "vendor.yaml"; the message is pinned by testdata/golden.txt, so
	// changing it requires regenerating the golden file.
	g.Logger.Println("Converting from vendor.yaml...")

	packages := make([]base.ImportedPackage, 0, len(g.yaml.Imports))
	for _, pkg := range g.yaml.Imports {
		// Path must not be empty
		if pkg.Path == "" {
			g.Logger.Println(
				" Warning: Skipping project. Invalid govend configuration, path is required",
			)
			continue
		}

		if pkg.Revision == "" {
			// Do not add 'empty constraints' to the manifest. Solve will add to lock if required.
			g.Logger.Printf(
				" Warning: Skipping import with empty constraints. "+
					"The solve step will add the dependency to the lock if needed: %q\n",
				pkg.Path,
			)
			continue
		}

		ip := base.ImportedPackage{
			Name:     pkg.Path,
			LockHint: pkg.Revision,
		}
		packages = append(packages, ip)
	}

	g.ImportPackages(packages, true)
	return g.Manifest, g.Lock
}
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/govend/testdata/golden.txt
|
Detected govend configuration files...
Converting from vendor.yaml...
Using ^0.8.1 as initial constraint for imported dep github.com/sdboyer/deptest
Trying v0.8.1 (3f4c3be) as initial lock for imported dep github.com/sdboyer/deptest
Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos
Trying v2.0.0 (5c60720) as initial lock for imported dep github.com/sdboyer/deptestdos
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/govend/testdata/vendor.yml
|
vendors:
- path: github.com/sdboyer/deptest
rev: 3f4c3bea144e112a69bbe5d8d01c1b09a544253f
- path: github.com/sdboyer/deptestdos
rev: 5c607206be5decd28e6263ffffdcee067266015e
|
govendor
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/govendor/importer_test.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package govendor
import (
"bytes"
"log"
"path/filepath"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/importertest"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// testGovendorProjectRoot is the root project used by the govendor
// import fixtures.
const testGovendorProjectRoot = "github.com/golang/notexist"

// TestGovendorConfig_Import runs a full Import against the testdata
// vendor/vendor.json and compares the captured log output against the
// golden file (refresh the golden file by running with -update).
func TestGovendorConfig_Import(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()

	ctx := importertest.NewTestContext(h)
	sm, err := ctx.SourceManager()
	h.Must(err)
	defer sm.Release()

	h.TempDir(filepath.Join("src", testGovendorProjectRoot))
	h.TempCopy(filepath.Join(testGovendorProjectRoot, govendorDir, govendorName), "vendor.json")
	projectRoot := h.Path(testGovendorProjectRoot)

	// Capture stderr so we can verify output
	verboseOutput := &bytes.Buffer{}
	ctx.Err = log.New(verboseOutput, "", 0)

	g := NewImporter(ctx.Err, false, sm) // Disable verbose so that we don't print values that change each test run
	if !g.HasDepMetadata(projectRoot) {
		t.Fatal("Expected the importer to detect the govendor configuration files")
	}

	m, l, err := g.Import(projectRoot, testGovendorProjectRoot)
	h.Must(err)
	if m == nil {
		t.Fatal("Expected the manifest to be generated")
	}
	if l == nil {
		t.Fatal("Expected the lock to be generated")
	}

	// Compare the log output against the golden file, optionally
	// rewriting the golden file when -update is passed.
	goldenFile := "golden.txt"
	got := verboseOutput.String()
	want := h.GetTestFileString(goldenFile)
	if want != got {
		if *test.UpdateGolden {
			if err := h.WriteTestFile(goldenFile, got); err != nil {
				t.Fatalf("%+v", errors.Wrapf(err, "Unable to write updated golden file %s", goldenFile))
			}
		} else {
			t.Fatalf("expected %s, got %s", want, got)
		}
	}
}
// TestGovendorConfig_Convert exercises convert with a table of govendor
// configurations: a pinned package with an alternate origin, the various
// Ignore forms (build tags, external packages, internal paths), a missing
// path (skipped with a warning), and a missing revision (imported
// without error — govendor does not require a revision).
func TestGovendorConfig_Convert(t *testing.T) {
	testCases := map[string]struct {
		file govendorFile
		importertest.TestCase
	}{
		"project": {
			govendorFile{
				Package: []*govendorPackage{
					{
						Path:     importertest.Project,
						Origin:   importertest.ProjectSrc,
						Revision: importertest.V1Rev,
					},
				},
			},
			importertest.TestCase{
				WantSourceRepo: importertest.ProjectSrc,
				WantConstraint: importertest.V1Constraint,
				WantRevision:   importertest.V1Rev,
				WantVersion:    importertest.V1Tag,
			},
		},
		// Build tags (no slash) cannot be expressed in dep and are dropped.
		"skipped build tags": {
			govendorFile{
				Ignore: "test linux_amd64",
			},
			importertest.TestCase{
				WantIgnored: nil,
			},
		},
		"ignored external package": {
			govendorFile{
				Ignore: "github.com/sdboyer/deptest k8s.io/apimachinery",
			},
			importertest.TestCase{
				WantIgnored: []string{"github.com/sdboyer/deptest*", "k8s.io/apimachinery*"},
			},
		},
		// Relative paths are rooted at the importing project.
		"ignored internal package": {
			govendorFile{
				Ignore: "samples/ foo/bar",
			},
			importertest.TestCase{
				WantIgnored: []string{importertest.RootProject + "/samples*", importertest.RootProject + "/foo/bar*"},
			},
		},
		"missing package path": {
			govendorFile{
				Package: []*govendorPackage{
					{
						Revision: importertest.V2PatchRev,
					},
				},
			},
			importertest.TestCase{
				WantWarning: "Warning: Skipping project. Invalid govendor configuration, Path is required",
			},
		},
		"missing package revision doesn't cause an error": {
			govendorFile{
				Package: []*govendorPackage{
					{
						Path: importertest.Project,
					},
				},
			},
			importertest.TestCase{
				WantRevision: "",
			},
		},
	}

	for name, testCase := range testCases {
		// Shadow the range variables so the subtest closure captures this
		// iteration's values (required before Go 1.22 loop-var semantics).
		name := name
		testCase := testCase
		t.Run(name, func(t *testing.T) {
			err := testCase.Execute(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock) {
				g := NewImporter(logger, true, sm)
				g.file = testCase.file
				return g.convert(importertest.RootProject)
			})
			if err != nil {
				t.Fatalf("%#v", err)
			}
		})
	}
}
|
govendor
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/govendor/importer.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package govendor
import (
"encoding/json"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/base"
"github.com/pkg/errors"
)
// govendorDir is the directory holding govendor's configuration file.
const govendorDir = "vendor"

// govendorName is the name of govendor's configuration file.
const govendorName = "vendor.json"

// Importer imports govendor configuration into the dep configuration format.
type Importer struct {
	*base.Importer
	file govendorFile // parsed contents of vendor/vendor.json, populated by load
}

// NewImporter for govendor.
func NewImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *Importer {
	return &Importer{Importer: base.NewImporter(logger, verbose, sm)}
}

// govendorFile is the structure of the vendor file.
type govendorFile struct {
	RootPath string // Import path of vendor folder
	// Ignore is a space-separated list of build tags, package paths, and
	// path prefixes to ignore (see convert for how each form is handled).
	Ignore  string
	Package []*govendorPackage
}

// govendorPackage represents each package.
type govendorPackage struct {
	// See the vendor spec for definitions.
	Origin   string
	Path     string
	Revision string
	Version  string
}
// Name returns the identifier of this importer.
func (g *Importer) Name() string {
	const importerName = "govendor"
	return importerName
}
// HasDepMetadata checks if a directory contains config that the importer
// can handle, i.e. whether vendor/vendor.json exists under dir.
func (g *Importer) HasDepMetadata(dir string) bool {
	_, err := os.Stat(filepath.Join(dir, govendorDir, govendorName))
	return err == nil
}
// Import the config found in the directory: parse vendor/vendor.json and
// convert it to a dep manifest and lock.
func (g *Importer) Import(dir string, pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) {
	if err := g.load(dir); err != nil {
		return nil, nil, err
	}
	manifest, lock := g.convert(pr)
	return manifest, lock, nil
}
// load reads and parses vendor/vendor.json from projectDir into g.file.
func (g *Importer) load(projectDir string) error {
	g.Logger.Println("Detected govendor configuration file...")
	configPath := filepath.Join(projectDir, govendorDir, govendorName)
	if g.Verbose {
		g.Logger.Printf(" Loading %s", configPath)
	}

	raw, err := ioutil.ReadFile(configPath)
	if err != nil {
		return errors.Wrapf(err, "unable to read %s", configPath)
	}
	if err := json.Unmarshal(raw, &g.file); err != nil {
		return errors.Wrapf(err, "unable to parse %s", configPath)
	}
	return nil
}
// convert maps the parsed govendor configuration onto dep's manifest and
// lock. Each package becomes an imported package (Origin as the source,
// Revision as the lock hint); the Ignore field is translated into dep's
// wildcard ignore patterns where possible.
func (g *Importer) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock) {
	g.Logger.Println("Converting from vendor.json...")

	packages := make([]base.ImportedPackage, 0, len(g.file.Package))
	for _, pkg := range g.file.Package {
		// Path must not be empty
		if pkg.Path == "" {
			g.Logger.Println(
				" Warning: Skipping project. Invalid govendor configuration, Path is required",
			)
			continue
		}

		// There are valid govendor configs in the wild that don't have a revision set
		// so we are not requiring it to be set during import
		ip := base.ImportedPackage{
			Name:     pkg.Path,
			Source:   pkg.Origin,
			LockHint: pkg.Revision,
		}
		packages = append(packages, ip)
	}

	g.ImportPackages(packages, true)

	if len(g.file.Ignore) > 0 {
		// Govendor has three use cases here
		// 1. 'test' - special case for ignoring test files
		// 2. build tags - any string without a slash (/) in it
		// 3. path and path prefix - any string with a slash (/) in it.
		//    The path case could be a full path or just a prefix.
		// Dep doesn't support build tags right now: https://github.com/golang/dep/issues/120
		for _, i := range strings.Split(g.file.Ignore, " ") {
			if !strings.Contains(i, "/") {
				g.Logger.Printf("  Govendor was configured to ignore the %s build tag, but that isn't supported by dep yet, and will be ignored. See https://github.com/golang/dep/issues/291.", i)
				continue
			}

			var ignorePattern string
			_, err := g.SourceManager.DeduceProjectRoot(i)
			if err == nil { // external package
				ignorePattern = i
			} else { // relative package path in the current project
				ignorePattern = path.Join(string(pr), i)
			}

			// Convert to a wildcard ignore
			ignorePattern = strings.TrimRight(ignorePattern, "/")
			ignorePattern += "*"

			g.Manifest.Ignored = append(g.Manifest.Ignored, ignorePattern)
		}
	}

	return g.Manifest, g.Lock
}
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/govendor/testdata/golden.txt
|
Detected govendor configuration file...
Converting from vendor.json...
Using ^0.8.1 as initial constraint for imported dep github.com/sdboyer/deptest
Trying v0.8.1 (3f4c3be) as initial lock for imported dep github.com/sdboyer/deptest
Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos
Trying v2.0.0 (5c60720) as initial lock for imported dep github.com/sdboyer/deptestdos
Govendor was configured to ignore the test build tag, but that isn't supported by dep yet, and will be ignored. See https://github.com/golang/dep/issues/291.
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/govendor/testdata/vendor.json
|
{
"comment": "",
"ignore": "test github.com/sdboyer/dep-test",
"package": [
{
"checksumSHA1": "4R6TQcq0/gI/I2kKeUunuO/pEec=",
"origin": "github.com/carolynvs/deptest",
"path": "github.com/sdboyer/deptest",
"revision": "3f4c3bea144e112a69bbe5d8d01c1b09a544253f",
"revisionTime": "2017-02-22T03:31:47Z"
},
{
"checksumSHA1": "96YwrJjpE07ENey/eDWWnCWKQOw=",
"path": "github.com/sdboyer/deptestdos",
"revision": "5c607206be5decd28e6263ffffdcee067266015e",
"revisionTime": "2017-02-22T03:34:58Z",
"version": "v2",
"versionExact": "v2.0.0"
}
],
"rootPath": "github.com/golang/notexist"
}
|
base
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/base/importer_test.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package base
import (
"fmt"
"log"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/importertest"
"github.com/golang/dep/internal/test"
)
// TestBaseImporter_IsTag verifies that isTag recognizes plain and semver
// tags (returning the paired version) and rejects revisions, branches, and
// empty input.
func TestBaseImporter_IsTag(t *testing.T) {
	testcases := map[string]struct {
		input     string
		wantIsTag bool
		wantTag   gps.Version
	}{
		"non-semver tag": {
			input:     importertest.Beta1Tag,
			wantIsTag: true,
			wantTag:   gps.NewVersion(importertest.Beta1Tag).Pair(importertest.Beta1Rev),
		},
		"semver-tag": {
			input:     importertest.V1PatchTag,
			wantIsTag: true,
			wantTag:   gps.NewVersion(importertest.V1PatchTag).Pair(importertest.V1PatchRev)},
		"untagged revision": {
			input:     importertest.UntaggedRev,
			wantIsTag: false,
		},
		"branch name": {
			input:     importertest.V2Branch,
			wantIsTag: false,
		},
		"empty": {
			input:     "",
			wantIsTag: false,
		},
	}

	pi := gps.ProjectIdentifier{ProjectRoot: importertest.Project}

	for name, tc := range testcases {
		name := name
		tc := tc
		t.Run(name, func(t *testing.T) {
			h := test.NewHelper(t)
			defer h.Cleanup()
			// Disable parallel tests until we can resolve this error on the Windows builds:
			// "remote repository at https://github.com/carolynvs/deptest-importers does not exist, or is inaccessible"
			//h.Parallel()

			ctx := importertest.NewTestContext(h)
			sm, err := ctx.SourceManager()
			h.Must(err)
			defer sm.Release()

			i := NewImporter(ctx.Err, ctx.Verbose, sm)
			gotIsTag, gotTag, err := i.isTag(pi, tc.input)
			h.Must(err)

			if tc.wantIsTag != gotIsTag {
				t.Fatalf("unexpected isTag result for %v: \n\t(GOT) %v \n\t(WNT) %v",
					tc.input, gotIsTag, tc.wantIsTag)
			}

			if tc.wantTag != gotTag {
				t.Fatalf("unexpected tag for %v: \n\t(GOT) %v \n\t(WNT) %v",
					tc.input, gotTag, tc.wantTag)
			}
		})
	}
}
// TestBaseImporter_LookupVersionForLockedProject verifies the fallback order
// used when resolving a locked revision to a version: matching tag (narrowed
// by the constraint when several tags share the revision), then a branch
// constraint, then the bare revision.
func TestBaseImporter_LookupVersionForLockedProject(t *testing.T) {
	testcases := map[string]struct {
		revision    gps.Revision
		constraint  gps.Constraint
		wantVersion string
	}{
		"match revision to tag": {
			revision:    importertest.V1PatchRev,
			wantVersion: importertest.V1PatchTag,
		},
		"match revision with multiple tags using constraint": {
			revision:    importertest.MultiTaggedRev,
			constraint:  gps.NewVersion(importertest.MultiTaggedPlainTag),
			wantVersion: importertest.MultiTaggedPlainTag,
		},
		"revision with multiple tags with no constraint defaults to best match": {
			revision:    importertest.MultiTaggedRev,
			wantVersion: importertest.MultiTaggedSemverTag,
		},
		"revision with multiple tags with nonmatching constraint defaults to best match": {
			revision:    importertest.MultiTaggedRev,
			constraint:  gps.NewVersion("thismatchesnothing"),
			wantVersion: importertest.MultiTaggedSemverTag,
		},
		"untagged revision fallback to branch constraint": {
			revision:    importertest.UntaggedRev,
			constraint:  gps.NewBranch("master"),
			wantVersion: "master",
		},
		"fallback to revision": {
			revision:    importertest.UntaggedRev,
			wantVersion: importertest.UntaggedRev,
		},
	}

	pi := gps.ProjectIdentifier{ProjectRoot: importertest.Project}

	for name, tc := range testcases {
		name := name
		tc := tc
		t.Run(name, func(t *testing.T) {
			h := test.NewHelper(t)
			defer h.Cleanup()
			// Disable parallel tests until we can resolve this error on the Windows builds:
			// "remote repository at https://github.com/carolynvs/deptest-importers does not exist, or is inaccessible"
			//h.Parallel()

			ctx := importertest.NewTestContext(h)
			sm, err := ctx.SourceManager()
			h.Must(err)
			defer sm.Release()

			i := NewImporter(ctx.Err, ctx.Verbose, sm)
			v, err := i.lookupVersionForLockedProject(pi, tc.constraint, tc.revision)
			h.Must(err)

			gotVersion := v.String()
			if gotVersion != tc.wantVersion {
				t.Fatalf("unexpected locked version: \n\t(GOT) %v\n\t(WNT) %v", gotVersion, tc.wantVersion)
			}
		})
	}
}
// TestBaseImporter_ImportProjects exercises the constraint/lock defaulting
// rules documented on Importer.ImportPackages: tag/revision constraints are
// dropped, semver locks default to ^VERSION, HEAD revisions default to the
// matching branch, subpackages consolidate under their project root, and
// non-default/vendored/invalid sources are handled or skipped with warnings.
func TestBaseImporter_ImportProjects(t *testing.T) {
	testcases := map[string]struct {
		importertest.TestCase
		projects []ImportedPackage
	}{
		"tag constraints are skipped": {
			importertest.TestCase{
				WantVersion:  importertest.Beta1Tag,
				WantRevision: importertest.Beta1Rev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.Beta1Rev,
					ConstraintHint: importertest.Beta1Tag,
				},
			},
		},
		"tag lock hints Lock to tagged revision": {
			importertest.TestCase{
				WantVersion:  importertest.Beta1Tag,
				WantRevision: importertest.Beta1Rev,
			},
			[]ImportedPackage{
				{
					Name:     importertest.Project,
					LockHint: importertest.Beta1Tag,
				},
			},
		},
		"untagged revision ignores range constraint": {
			importertest.TestCase{
				WantRevision: importertest.UntaggedRev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.UntaggedRev,
					ConstraintHint: importertest.V1Constraint,
				},
			},
		},
		"untagged revision keeps branch constraint": {
			importertest.TestCase{
				WantConstraint: "master",
				WantVersion:    "master",
				WantRevision:   importertest.UntaggedRev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.UntaggedRev,
					ConstraintHint: "master",
				},
			},
		},
		"HEAD revisions default constraint to the matching branch": {
			importertest.TestCase{
				DefaultConstraintFromLock: true,
				WantConstraint:            importertest.V2Branch,
				WantVersion:               importertest.V2Branch,
				WantRevision:              importertest.V2Rev,
			},
			[]ImportedPackage{
				{
					Name:     importertest.Project,
					LockHint: importertest.V2Rev,
				},
			},
		},
		"Semver tagged revisions default to ^VERSION": {
			importertest.TestCase{
				DefaultConstraintFromLock: true,
				WantConstraint:            importertest.V1Constraint,
				WantVersion:               importertest.V1Tag,
				WantRevision:              importertest.V1Rev,
			},
			[]ImportedPackage{
				{
					Name:     importertest.Project,
					LockHint: importertest.V1Rev,
				},
			},
		},
		"Semver lock hint defaults constraint to ^VERSION": {
			importertest.TestCase{
				DefaultConstraintFromLock: true,
				WantConstraint:            importertest.V1Constraint,
				WantVersion:               importertest.V1Tag,
				WantRevision:              importertest.V1Rev,
			},
			[]ImportedPackage{
				{
					Name:     importertest.Project,
					LockHint: importertest.V1Tag,
				},
			},
		},
		"Semver constraint hint": {
			importertest.TestCase{
				WantConstraint: importertest.V1Constraint,
				WantVersion:    importertest.V1PatchTag,
				WantRevision:   importertest.V1PatchRev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.V1PatchRev,
					ConstraintHint: importertest.V1Constraint,
				},
			},
		},
		"Semver prerelease lock hint": {
			importertest.TestCase{
				WantConstraint: importertest.V2Branch,
				WantVersion:    importertest.V2PatchTag,
				WantRevision:   importertest.V2PatchRev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.V2PatchRev,
					ConstraintHint: importertest.V2Branch,
				},
			},
		},
		"Revision constraints are skipped": {
			importertest.TestCase{
				WantVersion:  importertest.V1Tag,
				WantRevision: importertest.V1Rev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.V1Rev,
					ConstraintHint: importertest.V1Rev,
				},
			},
		},
		"Branch constraint hint": {
			importertest.TestCase{
				WantConstraint: "master",
				WantVersion:    importertest.V1Tag,
				WantRevision:   importertest.V1Rev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.V1Rev,
					ConstraintHint: "master",
				},
			},
		},
		"Non-matching semver constraint is skipped": {
			importertest.TestCase{
				WantVersion:  importertest.V1Tag,
				WantRevision: importertest.V1Rev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.V1Rev,
					ConstraintHint: "^2.0.0",
				},
			},
		},
		"git describe constraint is skipped": {
			importertest.TestCase{
				WantRevision: importertest.UntaggedRev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project,
					LockHint:       importertest.UntaggedRev,
					ConstraintHint: importertest.UntaggedRevAbbrv,
				},
			},
		},
		"consolidate subpackages under root": {
			importertest.TestCase{
				WantConstraint: "master",
				WantVersion:    "master",
				WantRevision:   importertest.UntaggedRev,
			},
			[]ImportedPackage{
				{
					Name:           importertest.Project + "/subpkA",
					ConstraintHint: "master",
				},
				{
					Name:     importertest.Project,
					LockHint: importertest.UntaggedRev,
				},
			},
		},
		"skip duplicate packages": {
			importertest.TestCase{
				WantRevision: importertest.UntaggedRev,
			},
			[]ImportedPackage{
				{
					Name:     importertest.Project + "/subpkgA",
					LockHint: importertest.UntaggedRev, // first wins
				},
				{
					Name:     importertest.Project + "/subpkgB",
					LockHint: importertest.V1Rev,
				},
			},
		},
		"skip empty lock hints": {
			importertest.TestCase{
				WantRevision: "",
			},
			[]ImportedPackage{
				{
					Name:     importertest.Project,
					LockHint: "",
				},
			},
		},
		"alternate source": {
			importertest.TestCase{
				WantConstraint: "*",
				WantSourceRepo: importertest.ProjectSrc,
			},
			[]ImportedPackage{
				{
					Name:   importertest.Project,
					Source: importertest.ProjectSrc,
				},
			},
		},
		"skip default source": {
			importertest.TestCase{
				WantSourceRepo: "",
			},
			[]ImportedPackage{
				{
					Name:   importertest.Project,
					Source: "https://" + importertest.Project,
				},
			},
		},
		"skip vendored source": {
			importertest.TestCase{
				WantSourceRepo: "",
				WantWarning:    "vendored sources aren't supported",
			},
			[]ImportedPackage{
				{
					Name:   importertest.Project,
					Source: "example.com/vendor/" + importertest.Project,
				},
			},
		},
		"invalid project root": {
			importertest.TestCase{
				WantSourceRepo: "",
				WantWarning:    "Warning: Skipping project. Cannot determine the project root for invalid-project",
			},
			[]ImportedPackage{
				{
					Name: "invalid-project",
				},
			},
		},
		"nonexistent project": {
			importertest.TestCase{
				WantSourceRepo: "",
				WantWarning: fmt.Sprintf(
					"Warning: Skipping project. Unable to import lock %q for %s",
					importertest.V1Tag, importertest.NonexistentPrj,
				),
			},
			[]ImportedPackage{
				{
					Name:     importertest.NonexistentPrj,
					LockHint: importertest.V1Tag,
				},
			},
		},
	}

	for name, tc := range testcases {
		name := name
		tc := tc
		t.Run(name, func(t *testing.T) {
			err := tc.Execute(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock) {
				i := NewImporter(logger, true, sm)
				i.ImportPackages(tc.projects, tc.DefaultConstraintFromLock)
				return i.Manifest, i.Lock
			})
			if err != nil {
				t.Fatalf("%#v", err)
			}
		})
	}
}
|
base
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/base/importer.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package base
import (
"log"
"strings"
"github.com/golang/dep"
"github.com/golang/dep/gps"
fb "github.com/golang/dep/internal/feedback"
"github.com/pkg/errors"
)
// Importer provides a common implementation for importing from other
// dependency managers.
type Importer struct {
	SourceManager gps.SourceManager // used to deduce project roots and enumerate versions
	Logger        *log.Logger       // destination for user-facing progress and warnings
	Verbose       bool              // when true, extra detail is logged
	Manifest      *dep.Manifest     // constraints accumulated by ImportPackages
	Lock          *dep.Lock         // locked projects accumulated by ImportPackages
}
// NewImporter creates a new Importer for embedding in an importer.
// The returned Importer starts with an empty manifest and lock.
func NewImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *Importer {
	i := &Importer{
		SourceManager: sm,
		Logger:        logger,
		Verbose:       verbose,
		Manifest:      dep.NewManifest(),
		Lock:          &dep.Lock{},
	}
	return i
}
// isTag determines if the specified value is a tag (plain or semver).
// When it is, the matched version is returned alongside true.
func (i *Importer) isTag(pi gps.ProjectIdentifier, value string) (bool, gps.Version, error) {
	versions, err := i.SourceManager.ListVersions(pi)
	if err != nil {
		return false, nil, errors.Wrapf(err, "unable to list versions for %s(%s)", pi.ProjectRoot, pi.Source)
	}

	for _, v := range versions {
		switch v.Type() {
		case gps.IsVersion, gps.IsSemver:
			// Only tag-typed versions qualify; compare by string form.
			if v.String() == value {
				return true, v, nil
			}
		}
	}

	return false, nil, nil
}
// lookupVersionForLockedProject figures out the appropriate version for a locked
// project based on the locked revision and the constraint from the manifest.
// First try matching the revision to a version, then try the constraint from the
// manifest, then finally the revision.
func (i *Importer) lookupVersionForLockedProject(pi gps.ProjectIdentifier, c gps.Constraint, rev gps.Revision) (gps.Version, error) {
	// Find the version that goes with this revision, if any
	versions, err := i.SourceManager.ListVersions(pi)
	if err != nil {
		// Note: the revision is still returned as a usable fallback value here.
		return rev, errors.Wrapf(err, "Unable to lookup the version represented by %s in %s(%s). Falling back to locking the revision only.", rev, pi.ProjectRoot, pi.Source)
	}

	var branchConstraint gps.PairedVersion
	gps.SortPairedForUpgrade(versions) // Sort versions in asc order
	var matches []gps.Version
	for _, v := range versions {
		// Collect every version paired with the locked revision.
		if v.Revision() == rev {
			matches = append(matches, v)
		}
		// Remember a branch whose name equals the constraint, for fallback below.
		if c != nil && v.Type() == gps.IsBranch && v.String() == c.String() {
			branchConstraint = v
		}
	}

	// Try to narrow down the matches with the constraint. Otherwise return the first match.
	if len(matches) > 0 {
		if c != nil {
			for _, v := range matches {
				if i.testConstraint(c, v) {
					return v, nil
				}
			}
		}
		// No constraint (or none matched): the sort above makes this the best match.
		return matches[0], nil
	}

	// Use branch constraint from the manifest
	if branchConstraint != nil {
		return branchConstraint.Unpair().Pair(rev), nil
	}

	// Give up and lock only to a revision
	return rev, nil
}
// ImportedPackage is a common intermediate representation of a package imported
// from an external tool's configuration.
type ImportedPackage struct {
	// Required. The package path, not necessarily the project root.
	Name string

	// Required. Text representing a revision or tag.
	LockHint string

	// Optional. Alternative source, or fork, for the project.
	Source string

	// Optional. Text representing a branch or version.
	ConstraintHint string
}
// importedProject is a consolidated representation of a set of imported packages
// for the same project root. The embedded ImportedPackage carries the merged
// hints for the whole project.
type importedProject struct {
	Root gps.ProjectRoot
	ImportedPackage
}
// loadPackages consolidates all package references into a set of project roots.
func (i *Importer) loadPackages(packages []ImportedPackage) []importedProject {
	// preserve the original order of the packages so that messages that
	// are printed as they are processed are in a consistent order.
	orderedProjects := make([]importedProject, 0, len(packages))

	projects := make(map[gps.ProjectRoot]*importedProject, len(packages))
	for _, pkg := range packages {
		pr, err := i.SourceManager.DeduceProjectRoot(pkg.Name)
		if err != nil {
			i.Logger.Printf(
				"  Warning: Skipping project. Cannot determine the project root for %s: %s\n",
				pkg.Name, err,
			)
			continue
		}
		// Normalize the package path to its project root.
		pkg.Name = string(pr)

		prj, exists := projects[pr]
		if !exists {
			prj := importedProject{pr, pkg}
			orderedProjects = append(orderedProjects, prj)
			// Safe to store a pointer into the slice: its capacity was
			// preallocated to len(packages) above, so append never reallocates.
			projects[pr] = &orderedProjects[len(orderedProjects)-1]
			continue
		}

		// The config found first "wins", though we allow for incrementally
		// setting each field because some importers have a config and lock file.
		if prj.Source == "" && pkg.Source != "" {
			prj.Source = pkg.Source
		}

		if prj.ConstraintHint == "" && pkg.ConstraintHint != "" {
			prj.ConstraintHint = pkg.ConstraintHint
		}

		if prj.LockHint == "" && pkg.LockHint != "" {
			prj.LockHint = pkg.LockHint
		}
	}

	return orderedProjects
}
// ImportPackages loads imported packages into the manifest and lock.
// - defaultConstraintFromLock specifies if a constraint should be defaulted
//   based on the locked version when there wasn't a constraint hint.
//
// Rules:
// * When a constraint is ignored, default to *.
// * HEAD revisions default to the matching branch.
// * Semantic versions default to ^VERSION.
// * Revision constraints are ignored.
// * Versions that don't satisfy the constraint, drop the constraint.
// * Untagged revisions ignore non-branch constraint hints.
func (i *Importer) ImportPackages(packages []ImportedPackage, defaultConstraintFromLock bool) {
	projects := i.loadPackages(packages)

	for _, prj := range projects {
		// Drop sources that are the default, unusable, or vendored.
		source := prj.Source
		if len(source) > 0 {
			isDefault, err := i.isDefaultSource(prj.Root, source)
			if err != nil {
				i.Logger.Printf("  Ignoring imported source %s for %s: %s", source, prj.Root, err.Error())
				source = ""
			} else if isDefault {
				source = ""
			} else if strings.Contains(source, "/vendor/") {
				i.Logger.Printf("  Ignoring imported source %s for %s because vendored sources aren't supported", source, prj.Root)
				source = ""
			}
		}

		pc := gps.ProjectConstraint{
			Ident: gps.ProjectIdentifier{
				ProjectRoot: prj.Root,
				Source:      source,
			},
		}

		// An unparseable constraint hint falls back to "any".
		var err error
		pc.Constraint, err = i.SourceManager.InferConstraint(prj.ConstraintHint, pc.Ident)
		if err != nil {
			pc.Constraint = gps.Any()
		}

		var version gps.Version
		if prj.LockHint != "" {
			var isTag bool
			// Determine if the lock hint is a revision or tag
			isTag, version, err = i.isTag(pc.Ident, prj.LockHint)
			if err != nil {
				i.Logger.Printf(
					"  Warning: Skipping project. Unable to import lock %q for %v: %s\n",
					prj.LockHint, pc.Ident, err,
				)
				continue
			}

			// If the hint is a revision, check if it is tagged
			if !isTag {
				revision := gps.Revision(prj.LockHint)
				version, err = i.lookupVersionForLockedProject(pc.Ident, pc.Constraint, revision)
				if err != nil {
					version = nil
					i.Logger.Println(err)
				}
			}

			// Default the constraint based on the locked version
			if defaultConstraintFromLock && prj.ConstraintHint == "" && version != nil {
				c := i.convertToConstraint(version)
				if c != nil {
					pc.Constraint = c
				}
			}
		}

		// Ignore pinned constraints
		if i.isConstraintPinned(pc.Constraint) {
			if i.Verbose {
				i.Logger.Printf("  Ignoring pinned constraint %v for %v.\n", pc.Constraint, pc.Ident)
			}
			pc.Constraint = gps.Any()
		}

		// Ignore constraints which conflict with the locked revision, so that
		// solve doesn't later change the revision to satisfy the constraint.
		// NOTE(review): version may be nil here when no lock hint was given;
		// this relies on Constraint.Matches tolerating a nil version — confirm.
		if !i.testConstraint(pc.Constraint, version) {
			if i.Verbose {
				i.Logger.Printf("  Ignoring constraint %v for %v because it would invalidate the locked version %v.\n", pc.Constraint, pc.Ident, version)
			}
			pc.Constraint = gps.Any()
		}

		// Add constraint to manifest that is not empty (has a branch, version or source)
		if !gps.IsAny(pc.Constraint) || pc.Ident.Source != "" {
			i.Manifest.Constraints[pc.Ident.ProjectRoot] = gps.ProjectProperties{
				Source:     pc.Ident.Source,
				Constraint: pc.Constraint,
			}
			fb.NewConstraintFeedback(pc, fb.DepTypeImported).LogFeedback(i.Logger)
		}

		if version != nil {
			lp := gps.NewLockedProject(pc.Ident, version, nil)
			i.Lock.P = append(i.Lock.P, lp)
			fb.NewLockedProjectFeedback(lp, fb.DepTypeImported).LogFeedback(i.Logger)
		}
	}
}
// isConstraintPinned returns if a constraint is pinned to a specific revision.
func (i *Importer) isConstraintPinned(c gps.Constraint) bool {
	version, ok := c.(gps.Version)
	if !ok {
		return false
	}
	t := version.Type()
	return t == gps.IsRevision || t == gps.IsVersion
}
// testConstraint verifies that the constraint won't invalidate the locked version.
func (i *Importer) testConstraint(c gps.Constraint, v gps.Version) bool {
	// Branch constraints are assumed to be satisfied.
	version, isVersion := c.(gps.Version)
	if isVersion && version.Type() == gps.IsBranch {
		return true
	}

	return c.Matches(v)
}
// convertToConstraint turns a version into a constraint.
// Semver tags are converted to a range with the caret operator.
func (i *Importer) convertToConstraint(v gps.Version) gps.Constraint {
	if v.Type() != gps.IsSemver {
		// Non-semver versions are used as-is.
		return v
	}

	c, err := gps.NewSemverConstraintIC(v.String())
	if err != nil {
		// This should never fail, because the type is semver.
		// If it does fail somehow, don't let that impact the import.
		return nil
	}
	return c
}
// isDefaultSource reports whether sourceURL is the default source for
// projectRoot, in which case it is redundant and can be dropped.
func (i *Importer) isDefaultSource(projectRoot gps.ProjectRoot, sourceURL string) (bool, error) {
	root := string(projectRoot)

	// This condition is mainly for gopkg.in imports: some importers specify
	// the repository url as https://gopkg.in/..., but
	// SourceManager.SourceURLsForPath() returns https://github.com/... urls
	// for gopkg.in.
	if sourceURL == "https://"+root {
		return true, nil
	}

	urls, err := i.SourceManager.SourceURLsForPath(root)
	if err != nil {
		return false, err
	}

	// The first url in the slice is the default one (usually https://...).
	return len(urls) > 0 && urls[0].String() == sourceURL, nil
}
|
gvt
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/gvt/importer_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gvt
import (
"bytes"
"fmt"
"log"
"path/filepath"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/importertest"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// TestGvtConfig_Convert verifies the translation of gvt/gb-vendor manifest
// entries into dep constraints: master branches become ranges, non-master
// branches become branch constraints, HEAD passes the revision as the hint,
// and missing ImportPath/Revision produce warnings.
func TestGvtConfig_Convert(t *testing.T) {
	testCases := map[string]struct {
		importertest.TestCase
		gvtConfig gvtManifest
	}{
		"package with master branch": {
			importertest.TestCase{
				WantConstraint: importertest.V1Constraint,
				WantRevision:   importertest.V1Rev,
				WantVersion:    importertest.V1Tag,
			},
			gvtManifest{
				Deps: []gvtPkg{
					{
						ImportPath: importertest.Project,
						Revision:   importertest.V1Rev,
						Branch:     "master",
					},
				},
			},
		},
		"package with non-master branch": {
			importertest.TestCase{
				WantConstraint: importertest.V2Branch,
				WantRevision:   importertest.V2PatchRev,
				WantVersion:    importertest.V2PatchTag,
			},
			gvtManifest{
				Deps: []gvtPkg{
					{
						ImportPath: importertest.Project,
						Revision:   importertest.V2PatchRev,
						Branch:     importertest.V2Branch,
					},
				},
			},
		},
		"package with HEAD branch": {
			importertest.TestCase{
				WantRevision: importertest.V1Rev,
				WantVersion:  importertest.V1Tag,
			},
			gvtManifest{
				Deps: []gvtPkg{
					{
						ImportPath: importertest.Project,
						Revision:   importertest.V1Rev,
						Branch:     "HEAD",
					},
				},
			},
		},
		"package with alternate repository": {
			importertest.TestCase{
				WantConstraint: importertest.V1Constraint,
				WantRevision:   importertest.V1Rev,
				WantVersion:    importertest.V1Tag,
				WantSourceRepo: importertest.ProjectSrc,
			},
			gvtManifest{
				Deps: []gvtPkg{
					{
						ImportPath: importertest.Project,
						Repository: importertest.ProjectSrc,
						Revision:   importertest.V1Rev,
						Branch:     "master",
					},
				},
			},
		},
		"missing package name": {
			importertest.TestCase{
				WantWarning: "Warning: Skipping project. Invalid gvt configuration, ImportPath is required",
			},
			gvtManifest{
				Deps: []gvtPkg{{ImportPath: ""}},
			},
		},
		"missing revision": {
			importertest.TestCase{
				WantWarning: fmt.Sprintf(
					"Warning: Invalid gvt configuration, Revision not found for ImportPath %q",
					importertest.Project,
				),
			},
			gvtManifest{
				Deps: []gvtPkg{
					{
						ImportPath: importertest.Project,
					},
				},
			},
		},
	}

	for name, testCase := range testCases {
		name := name
		testCase := testCase
		t.Run(name, func(t *testing.T) {
			err := testCase.Execute(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock) {
				g := NewImporter(logger, true, sm)
				g.gvtConfig = testCase.gvtConfig
				return g.convert(importertest.RootProject)
			})
			if err != nil {
				t.Fatalf("%#v", err)
			}
		})
	}
}
// TestGvtConfig_Import runs a full end-to-end import against the testdata
// manifest and compares the logged output against the golden file.
func TestGvtConfig_Import(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()

	cacheDir := "gps-repocache"
	h.TempDir(cacheDir)
	h.TempDir("src")
	h.TempDir(filepath.Join("src", importertest.RootProject))
	h.TempCopy(filepath.Join(importertest.RootProject, gvtPath), "manifest")

	projectRoot := h.Path(importertest.RootProject)
	sm, err := gps.NewSourceManager(gps.SourceManagerConfig{
		Cachedir: h.Path(cacheDir),
		Logger:   log.New(test.Writer{TB: t}, "", 0),
	})
	h.Must(err)
	defer sm.Release()

	// Capture stderr so we can verify output
	verboseOutput := &bytes.Buffer{}
	logger := log.New(verboseOutput, "", 0)
	g := NewImporter(logger, false, sm) // Disable verbose so that we don't print values that change each test run
	if !g.HasDepMetadata(projectRoot) {
		t.Fatal("Expected the importer to detect gvt configuration file")
	}

	m, l, err := g.Import(projectRoot, importertest.RootProject)
	h.Must(err)

	if m == nil {
		t.Fatal("Expected the manifest to be generated")
	}

	if l == nil {
		t.Fatal("Expected the lock to be generated")
	}

	// Compare captured log output with the golden fixture; -update regenerates it.
	goldenFile := "golden.txt"
	got := verboseOutput.String()
	want := h.GetTestFileString(goldenFile)
	if want != got {
		if *test.UpdateGolden {
			if err := h.WriteTestFile(goldenFile, got); err != nil {
				t.Fatalf("%+v", errors.Wrapf(err, "Unable to write updated golden file %s", goldenFile))
			}
		} else {
			t.Fatalf("want %s, got %s", want, got)
		}
	}
}
// TestGvtConfig_JsonLoad verifies that load parses the testdata manifest
// into the expected set of dependencies.
func TestGvtConfig_JsonLoad(t *testing.T) {
	// This is same as testdata/manifest
	wantConfig := gvtManifest{
		Deps: []gvtPkg{
			{
				ImportPath: "github.com/sdboyer/deptest",
				Revision:   "3f4c3bea144e112a69bbe5d8d01c1b09a544253f",
				Branch:     "HEAD",
			},
			{
				ImportPath: "github.com/sdboyer/deptestdos",
				Revision:   "5c607206be5decd28e6263ffffdcee067266015e",
				Branch:     "master",
			},
			{
				ImportPath: "github.com/carolynvs/deptest-importers",
				Revision:   "b79bc9482da8bb7402cdc3e3fd984db250718dd7",
				Branch:     "v2",
			},
		},
	}

	h := test.NewHelper(t)
	defer h.Cleanup()

	ctx := importertest.NewTestContext(h)

	h.TempCopy(filepath.Join(importertest.RootProject, gvtPath), "manifest")

	projectRoot := h.Path(importertest.RootProject)

	// load only reads from disk, so no source manager is needed here.
	g := NewImporter(ctx.Err, true, nil)
	err := g.load(projectRoot)
	if err != nil {
		t.Fatalf("Error while loading... %v", err)
	}

	if !equalImports(g.gvtConfig.Deps, wantConfig.Deps) {
		t.Fatalf("Expected imports to be equal. \n\t(GOT): %v\n\t(WNT): %v", g.gvtConfig.Deps, wantConfig.Deps)
	}
}
// equalImports compares two slices of gvtPkg and checks if they are
// equal. Two nil slices are equal; a nil and a non-nil slice are not.
func equalImports(a, b []gvtPkg) bool {
	switch {
	case a == nil && b == nil:
		return true
	case a == nil || b == nil:
		return false
	case len(a) != len(b):
		return false
	}

	for idx, pkg := range a {
		if b[idx] != pkg {
			return false
		}
	}
	return true
}
|
gvt
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/gvt/importer.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gvt
import (
"encoding/json"
"io/ioutil"
"log"
"os"
"path/filepath"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/base"
"github.com/pkg/errors"
)
const gvtPath = "vendor" + string(os.PathSeparator) + "manifest"
// Importer imports gvt configuration into the dep configuration format.
type Importer struct {
	*base.Importer
	gvtConfig gvtManifest // parsed contents of vendor/manifest, populated by load
}
// NewImporter for gvt. It handles gb (gb-vendor) too as they share a common
// manifest file & format.
func NewImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *Importer {
	return &Importer{Importer: base.NewImporter(logger, verbose, sm)}
}
// gvtManifest mirrors the JSON structure of a gvt/gb-vendor vendor/manifest file.
type gvtManifest struct {
	Deps []gvtPkg `json:"dependencies"`
}

// gvtPkg is a single dependency entry in the manifest. The JSON keys are
// all-lowercase ("importpath", etc.), matched case-insensitively by encoding/json.
type gvtPkg struct {
	ImportPath string
	Repository string
	Revision   string
	Branch     string
}
// Name of the importer.
func (g *Importer) Name() string {
	return "gvt"
}
// HasDepMetadata checks if a directory contains config that the importer can handle.
func (g *Importer) HasDepMetadata(dir string) bool {
	// The manifest file merely has to exist; its contents are validated later.
	_, err := os.Stat(filepath.Join(dir, gvtPath))
	return err == nil
}
// Import the config found in the directory.
func (g *Importer) Import(dir string, pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) {
	if err := g.load(dir); err != nil {
		return nil, nil, err
	}

	manifest, lock := g.convert(pr)
	return manifest, lock, nil
}
// load reads the vendor/manifest file under projectDir and unmarshals it
// into g.gvtConfig.
func (g *Importer) load(projectDir string) error {
	g.Logger.Println("Detected gb/gvt configuration files...")

	manifestPath := filepath.Join(projectDir, gvtPath)
	if g.Verbose {
		g.Logger.Printf("  Loading %s", manifestPath)
	}

	raw, err := ioutil.ReadFile(manifestPath)
	if err != nil {
		return errors.Wrapf(err, "unable to read %s", manifestPath)
	}

	if err = json.Unmarshal(raw, &g.gvtConfig); err != nil {
		return errors.Wrapf(err, "unable to parse %s", manifestPath)
	}
	return nil
}
// convert translates the loaded gvt/gb-vendor manifest into dep's manifest
// and lock for the project rooted at pr.
func (g *Importer) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock) {
	g.Logger.Println("Converting from vendor/manifest ...")

	packages := make([]base.ImportedPackage, 0, len(g.gvtConfig.Deps))
	for _, pkg := range g.gvtConfig.Deps {
		// Validate
		if pkg.ImportPath == "" {
			g.Logger.Println(
				"  Warning: Skipping project. Invalid gvt configuration, ImportPath is required",
			)
			continue
		}
		if pkg.Revision == "" {
			g.Logger.Printf(
				"  Warning: Invalid gvt configuration, Revision not found for ImportPath %q\n",
				pkg.ImportPath,
			)
		}

		var constraintHint string
		switch pkg.Branch {
		case "HEAD":
			// gb-vendor sets "branch" to "HEAD" if the package was fetched
			// via -tag or -revision; we pass the revision as the constraint hint.
			constraintHint = pkg.Revision
		case "master":
			// Both gvt & gb-vendor set "branch" to "master" unless a different
			// branch was requested, so it's not really a constraint then.
		default:
			constraintHint = pkg.Branch
		}

		packages = append(packages, base.ImportedPackage{
			Name:           pkg.ImportPath,
			Source:         pkg.Repository,
			LockHint:       pkg.Revision,
			ConstraintHint: constraintHint,
		})
	}

	g.ImportPackages(packages, true)
	return g.Manifest, g.Lock
}
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/gvt/testdata/golden.txt
|
Detected gb/gvt configuration files...
Converting from vendor/manifest ...
Trying v0.8.1 (3f4c3be) as initial lock for imported dep github.com/sdboyer/deptest
Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos
Trying v2.0.0 (5c60720) as initial lock for imported dep github.com/sdboyer/deptestdos
Using v2 as initial constraint for imported dep github.com/carolynvs/deptest-importers
Trying v2 (b79bc94) as initial lock for imported dep github.com/carolynvs/deptest-importers
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/gvt/testdata/manifest
|
{
"dependencies": [
{
"importpath": "github.com/sdboyer/deptest",
"revision": "3f4c3bea144e112a69bbe5d8d01c1b09a544253f",
"branch": "HEAD"
},
{
"importpath": "github.com/sdboyer/deptestdos",
"revision": "5c607206be5decd28e6263ffffdcee067266015e",
"branch": "master"
},
{
"importpath": "github.com/carolynvs/deptest-importers",
"revision": "b79bc9482da8bb7402cdc3e3fd984db250718dd7",
"branch": "v2"
}
]
}
|
glock
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glock/importer_test.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package glock
import (
"bytes"
"fmt"
"log"
"path/filepath"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/importertest"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// TestGlockConfig_Convert exercises Importer.convert with a table of
// pre-parsed glock packages, covering the happy path plus the warnings
// emitted for entries with a missing import path or missing revision.
func TestGlockConfig_Convert(t *testing.T) {
	testCases := map[string]struct {
		importertest.TestCase
		packages []glockPackage
	}{
		"package": {
			importertest.TestCase{
				WantConstraint: importertest.V1Constraint,
				WantRevision:   importertest.V1Rev,
				WantVersion:    importertest.V1Tag,
			},
			[]glockPackage{
				{
					importPath: importertest.Project,
					revision:   importertest.V1Rev,
				},
			},
		},
		"missing package name": {
			importertest.TestCase{
				WantWarning: "Warning: Skipping project. Invalid glock configuration, import path is required",
			},
			[]glockPackage{{importPath: ""}},
		},
		"missing revision": {
			importertest.TestCase{
				WantWarning: fmt.Sprintf(
					" Warning: Skipping import with empty constraints. "+
						"The solve step will add the dependency to the lock if needed: %q",
					importertest.Project,
				),
			},
			[]glockPackage{{importPath: importertest.Project}},
		},
	}
	for name, testCase := range testCases {
		// Re-bind the range variables so each subtest closure captures
		// its own copy (required before Go 1.22 loop-var semantics).
		name := name
		testCase := testCase
		t.Run(name, func(t *testing.T) {
			err := testCase.Execute(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock) {
				// Inject the pre-parsed packages directly, bypassing load().
				g := NewImporter(logger, true, sm)
				g.packages = testCase.packages
				return g.convert(importertest.RootProject)
			})
			if err != nil {
				t.Fatalf("%#v", err)
			}
		})
	}
}
// TestGlockConfig_LoadInvalid verifies that a GLOCKFILE line with more
// than two fields is rejected with a descriptive parse error.
func TestGlockConfig_LoadInvalid(t *testing.T) {
	const testLine = "github.com/sdboyer/deptest 3f4c3bea144e112a69bbe5d8d01c1b09a544253f invalid"
	_, err := parseGlockLine(testLine)
	expected := fmt.Errorf("invalid glock configuration: %s", testLine)
	if err.Error() != expected.Error() {
		// Bug fix: the arguments were previously swapped, printing the
		// actual error as "want" and the expectation as "got".
		t.Errorf("want error %s, got %s", expected, err)
	}
}
// TestGlockConfig_LoadEmptyLine confirms that an empty GLOCKFILE line
// parses to a nil package with no error.
func TestGlockConfig_LoadEmptyLine(t *testing.T) {
	pkg, err := parseGlockLine("")
	switch {
	case err != nil:
		t.Fatalf("%#v", err)
	case pkg != nil:
		t.Errorf("want package nil, got %+v", pkg)
	}
}
// TestGlockConfig_Import is an end-to-end test: it copies a fixture
// GLOCKFILE into a temp project, runs the importer against a real
// source manager, and compares the logged output against a golden file.
func TestGlockConfig_Import(t *testing.T) {
	h := test.NewHelper(t)
	defer h.Cleanup()
	ctx := importertest.NewTestContext(h)
	sm, err := ctx.SourceManager()
	h.Must(err)
	defer sm.Release()
	h.TempDir(filepath.Join("src", importertest.RootProject))
	h.TempCopy(filepath.Join(importertest.RootProject, glockfile), glockfile)
	projectRoot := h.Path(importertest.RootProject)
	// Capture stderr so we can verify output
	verboseOutput := &bytes.Buffer{}
	ctx.Err = log.New(verboseOutput, "", 0)
	g := NewImporter(ctx.Err, false, sm) // Disable verbose so that we don't print values that change each test run
	if !g.HasDepMetadata(projectRoot) {
		t.Fatal("Expected the importer to detect the glock configuration files")
	}
	m, l, err := g.Import(projectRoot, importertest.RootProject)
	h.Must(err)
	if m == nil {
		t.Fatal("Expected the manifest to be generated")
	}
	if l == nil {
		t.Fatal("Expected the lock to be generated")
	}
	goldenFile := "golden.txt"
	got := verboseOutput.String()
	want := h.GetTestFileString(goldenFile)
	if want != got {
		// With -update, rewrite the golden file instead of failing.
		if *test.UpdateGolden {
			if err := h.WriteTestFile(goldenFile, got); err != nil {
				t.Fatalf("%+v", errors.Wrapf(err, "Unable to write updated golden file %s", goldenFile))
			}
		} else {
			t.Fatalf("want %s, got %s", want, got)
		}
	}
}
|
glock
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glock/importer.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package glock
import (
"bufio"
"fmt"
"log"
"os"
"path/filepath"
"strings"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/importers/base"
"github.com/pkg/errors"
)
// glockfile is the configuration file name written by the glock tool.
const glockfile = "GLOCKFILE"
// Importer imports glock configuration into the dep configuration format.
type Importer struct {
	*base.Importer
	packages []glockPackage // entries parsed from the project's GLOCKFILE
}
// NewImporter for glock.
func NewImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *Importer {
	i := &Importer{}
	i.Importer = base.NewImporter(logger, verbose, sm)
	return i
}
// Name of the importer.
func (g *Importer) Name() string {
	const importerName = "glock"
	return importerName
}
// HasDepMetadata checks if a directory contains config that the importer can handle.
func (g *Importer) HasDepMetadata(dir string) bool {
	// The importer can act whenever a GLOCKFILE exists in the directory.
	_, err := os.Stat(filepath.Join(dir, glockfile))
	return err == nil
}
// Import the config found in the directory.
func (g *Importer) Import(dir string, pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) {
	// Parse the GLOCKFILE first; conversion runs on the parsed entries.
	if err := g.load(dir); err != nil {
		return nil, nil, err
	}
	manifest, lock := g.convert(pr)
	return manifest, lock, nil
}
// glockPackage is one dependency entry from a GLOCKFILE: an import
// path pinned to a specific VCS revision.
type glockPackage struct {
	importPath string
	revision   string
}
// load reads the GLOCKFILE in projectDir line by line, accumulating
// parsed entries into g.packages. Unparseable lines and scanner errors
// are logged as warnings rather than aborting the import.
func (g *Importer) load(projectDir string) error {
	g.Logger.Println("Detected glock configuration files...")
	path := filepath.Join(projectDir, glockfile)
	if g.Verbose {
		g.Logger.Printf("  Loading %s", path)
	}
	f, err := os.Open(path)
	if err != nil {
		return errors.Wrapf(err, "unable to open %s", path)
	}
	defer f.Close()
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		pkg, err := parseGlockLine(scanner.Text())
		if err != nil {
			// Best-effort: skip malformed lines instead of failing.
			g.Logger.Printf("  Warning: Skipping line. Unable to parse: %s\n", err)
			continue
		}
		// A nil package with nil error means the line was intentionally
		// skipped (blank line or a "cmd" entry).
		if pkg == nil {
			continue
		}
		g.packages = append(g.packages, *pkg)
	}
	if err := scanner.Err(); err != nil {
		g.Logger.Printf("  Warning: Ignoring errors found while parsing %s: %s\n", path, err)
	}
	return nil
}
// parseGlockLine parses a single GLOCKFILE line. It returns (nil, nil)
// for lines that should be silently skipped (blank lines and "cmd"
// entries), an error for lines that are not exactly two fields, and a
// populated package otherwise.
func parseGlockLine(line string) (*glockPackage, error) {
	fields := strings.Fields(line)
	// Blank lines carry no information; skip them without error.
	if len(fields) == 0 {
		return nil, nil
	}
	if len(fields) != 2 {
		return nil, fmt.Errorf("invalid glock configuration: %s", line)
	}
	// "cmd" lines record glock's tracked commands, not dependencies.
	if fields[0] == "cmd" {
		return nil, nil
	}
	pkg := glockPackage{
		importPath: fields[0],
		revision:   fields[1],
	}
	return &pkg, nil
}
// convert translates the parsed glock packages into dep's manifest and
// lock via the shared base importer, skipping entries that lack an
// import path or a revision (with a logged warning for each).
func (g *Importer) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock) {
	g.Logger.Println("Converting from GLOCKFILE ...")
	packages := make([]base.ImportedPackage, 0, len(g.packages))
	for _, pkg := range g.packages {
		// Validate
		if pkg.importPath == "" {
			g.Logger.Println(
				"  Warning: Skipping project. Invalid glock configuration, import path is required",
			)
			continue
		}
		if pkg.revision == "" {
			// Do not add 'empty constraints' to the manifest. Solve will add to lock if required.
			g.Logger.Printf(
				"  Warning: Skipping import with empty constraints. "+
					"The solve step will add the dependency to the lock if needed: %q\n",
				pkg.importPath,
			)
			continue
		}
		packages = append(packages, base.ImportedPackage{
			Name:     pkg.importPath,
			LockHint: pkg.revision,
		})
	}
	// Defer manifest/lock construction to the shared importer logic.
	g.ImportPackages(packages, true)
	return g.Manifest, g.Lock
}
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glock/testdata/golden.txt
|
Detected glock configuration files...
Converting from GLOCKFILE ...
Using ^0.8.1 as initial constraint for imported dep github.com/sdboyer/deptest
Trying v0.8.1 (3f4c3be) as initial lock for imported dep github.com/sdboyer/deptest
Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos
Trying v2.0.0 (5c60720) as initial lock for imported dep github.com/sdboyer/deptestdos
|
testdata
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/internal/importers/glock/testdata/GLOCKFILE
|
cmd github.com/golang/lint
github.com/sdboyer/deptest 3f4c3bea144e112a69bbe5d8d01c1b09a544253f
github.com/sdboyer/deptestdos 5c607206be5decd28e6263ffffdcee067266015e
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/status.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"context"
"encoding/json"
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"sort"
"strings"
"sync"
"text/tabwriter"
"text/template"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/paths"
"github.com/golang/dep/gps/verify"
"github.com/pkg/errors"
)
// availableTemplateVariables lists the fields usable in a custom -f
// template (basic mode); it is interpolated into the help text below.
const availableTemplateVariables = "ProjectRoot, Constraint, Version, Revision, Latest, and PackageCount."
// availableDefaultTemplateVariables describes the document structure
// exposed to -detail templates.
const availableDefaultTemplateVariables = `.Projects[]{
.ProjectRoot,.Source,.Constraint,.PackageCount,.Packages[],
.PruneOpts,.Digest,.Locked{.Branch,.Revision,.Version},
.Latest{.Revision,.Version}
},
.Metadata{
.AnalyzerName,.AnalyzerVersion,.InputImports,.SolverName,
.SolverVersion
}`
// statusShortHelp is the one-line summary shown in command listings.
const statusShortHelp = `Report the status of the project's dependencies`
// statusLongHelp is the full help text for `dep status`.
const statusLongHelp = `
With no arguments, print the status of each dependency of the project.
PROJECT Import path
CONSTRAINT Version constraint, from the manifest
VERSION Version chosen, from the lock
REVISION VCS revision of the chosen version
LATEST Latest VCS revision available
PKGS USED Number of packages from this project that are actually used
You may use the -f flag to create a custom format for the output of the
dep status command. The available fields you can utilize are as follows:
` + availableTemplateVariables + `
Status returns exit code zero if all dependencies are in a "good state".
`
// statusExamples is printed when the -examples flag is set.
const statusExamples = `
dep status
Displays a table of the various dependencies in the project along with
their properties such as the constraints they are bound by and the
revision they are at.
dep status -detail
Displays a detailed table of the dependencies in the project including
the value of any source rules used and full list of packages used from
each project (instead of simply a count). Text wrapping may make this
output hard to read.
dep status -f='{{if eq .Constraint "master"}}{{.ProjectRoot}} {{end}}'
Displays the list of package names constrained on the master branch.
The -f flag allows you to use Go templates along with it's various
constructs for formatting output data. Available flags are as follows:
` + availableTemplateVariables + `
dep status -detail -f='{{range $i, $p := .Projects}}{{if ne .Source "" -}}
{{- if $i}},{{end}}{{$p.ProjectRoot}}:{{$p.Source}}{{end}}{{end}}'
Displays the package name and source for each package with a source
rule defined, with a comma between each name-source pair.
When used with -detail, the -f flag applies the supplied Go templates
to the full output document, instead of to packages one at a time.
Available flags are as follows: ` + availableDefaultTemplateVariables + `
dep status -json
Displays the dependency information in JSON format as a list of
project objects. Each project object contains keys which correspond
to the table column names from the standard 'dep status' command.
Linux: dep status -dot | dot -T png | display
MacOS: dep status -dot | dot -T png | open -f -a /Applications/Preview.app
Windows: dep status -dot | dot -T png -o status.png; start status.png
Generates a visual representation of the dependency tree using GraphViz.
(Note: in order for this example to work you must first have graphviz
installed on your system)
`
// Revision display sizes: shortRev abbreviates, longRev prints in full.
const (
	shortRev uint8 = iota
	longRev
)
// Sentinel errors used to classify partial failures during a status run.
var (
	errFailedUpdate = errors.New("failed to fetch updates")
	errFailedListPkg = errors.New("failed to list packages")
	errMultipleFailures = errors.New("multiple sources of failure")
	errInputDigestMismatch = errors.New("input-digest mismatch")
)
// Name returns the subcommand name used on the command line.
func (cmd *statusCommand) Name() string { return "status" }
// Args describes the positional arguments in usage output.
func (cmd *statusCommand) Args() string { return "[package...]" }
// ShortHelp returns the one-line command summary.
func (cmd *statusCommand) ShortHelp() string { return statusShortHelp }
// LongHelp returns the full command help text.
func (cmd *statusCommand) LongHelp() string { return statusLongHelp }
// Hidden reports whether the command is omitted from command listings.
func (cmd *statusCommand) Hidden() bool { return false }
// Register binds the status command's flags onto the given FlagSet.
func (cmd *statusCommand) Register(fs *flag.FlagSet) {
	fs.BoolVar(&cmd.examples, "examples", false, "print detailed usage examples")
	fs.BoolVar(&cmd.json, "json", false, "output in JSON format")
	fs.StringVar(&cmd.template, "f", "", "output in text/template format")
	fs.BoolVar(&cmd.lock, "lock", false, "output in the lock file format (assumes -detail)")
	fs.BoolVar(&cmd.dot, "dot", false, "output the dependency graph in GraphViz format")
	fs.BoolVar(&cmd.old, "old", false, "only show out-of-date dependencies")
	fs.BoolVar(&cmd.missing, "missing", false, "only show missing dependencies")
	fs.StringVar(&cmd.outFilePath, "out", "", "path to a file to which to write the output. Blank value will be ignored")
	fs.BoolVar(&cmd.detail, "detail", false, "include more detail in the chosen format")
}
// statusCommand holds the parsed flag values for `dep status`.
type statusCommand struct {
	examples bool // -examples: print usage examples and exit
	json bool // -json: JSON output
	template string // -f: custom text/template format
	lock bool // -lock: lock-file-shaped output (implies -detail)
	output string
	dot bool // -dot: GraphViz dependency graph
	old bool // -old: only out-of-date dependencies
	missing bool // -missing: only missing dependencies (not implemented)
	outFilePath string // -out: write output to this file instead of stdout
	detail bool // -detail: richer per-project output
}
// outputter is the sink for status results; each output format (table,
// JSON, dot, template) implements header/line/footer hooks for the
// basic, detail, and missing report modes.
type outputter interface {
	BasicHeader() error
	BasicLine(*BasicStatus) error
	BasicFooter() error
	DetailHeader(*dep.SolveMeta) error
	DetailLine(*DetailStatus) error
	DetailFooter(*dep.SolveMeta) error
	MissingHeader() error
	MissingLine(*MissingStatus) error
	MissingFooter() error
}
// Only a subset of the outputters should be able to output old statuses.
type oldOutputter interface {
	OldHeader() error
	OldLine(*OldStatus) error
	OldFooter() error
}
// tableOutput renders status rows as an aligned text table (the
// default output format) through a tabwriter.
type tableOutput struct{ w *tabwriter.Writer }
// BasicHeader writes the column headers for the basic table.
func (out *tableOutput) BasicHeader() error {
	_, err := fmt.Fprintf(out.w, "PROJECT\tCONSTRAINT\tVERSION\tREVISION\tLATEST\tPKGS USED\n")
	return err
}
// BasicFooter flushes the tabwriter so columns are aligned.
func (out *tableOutput) BasicFooter() error {
	return out.w.Flush()
}
// BasicLine writes one project row in basic mode.
func (out *tableOutput) BasicLine(bs *BasicStatus) error {
	_, err := fmt.Fprintf(out.w,
		"%s\t%s\t%s\t%s\t%s\t%d\t\n",
		bs.ProjectRoot,
		bs.getConsolidatedConstraint(),
		formatVersion(bs.Version),
		formatVersion(bs.Revision),
		bs.getConsolidatedLatest(shortRev),
		bs.PackageCount,
	)
	return err
}
// DetailHeader writes the column headers for -detail mode (adds SOURCE).
func (out *tableOutput) DetailHeader(metadata *dep.SolveMeta) error {
	_, err := fmt.Fprintf(out.w, "PROJECT\tSOURCE\tCONSTRAINT\tVERSION\tREVISION\tLATEST\tPKGS USED\n")
	return err
}
// DetailFooter flushes the table, same as BasicFooter.
func (out *tableOutput) DetailFooter(metadata *dep.SolveMeta) error {
	return out.BasicFooter()
}
// DetailLine writes one project row in -detail mode, listing the full
// package set instead of a count.
func (out *tableOutput) DetailLine(ds *DetailStatus) error {
	_, err := fmt.Fprintf(out.w,
		"%s\t%s\t%s\t%s\t%s\t%s\t[%s]\t\n",
		ds.ProjectRoot,
		ds.Source,
		ds.getConsolidatedConstraint(),
		formatVersion(ds.Version),
		formatVersion(ds.Revision),
		ds.getConsolidatedLatest(shortRev),
		strings.Join(ds.Packages, ", "),
	)
	return err
}
// MissingHeader writes the header for the missing-packages report.
func (out *tableOutput) MissingHeader() error {
	_, err := fmt.Fprintln(out.w, "PROJECT\tMISSING PACKAGES")
	return err
}
// MissingLine writes one row of the missing-packages report.
func (out *tableOutput) MissingLine(ms *MissingStatus) error {
	_, err := fmt.Fprintf(out.w,
		"%s\t%s\t\n",
		ms.ProjectRoot,
		ms.MissingPackages,
	)
	return err
}
// MissingFooter flushes the table.
func (out *tableOutput) MissingFooter() error {
	return out.w.Flush()
}
// OldHeader writes the header for the -old report.
func (out *tableOutput) OldHeader() error {
	_, err := fmt.Fprintf(out.w, "PROJECT\tCONSTRAINT\tREVISION\tLATEST\n")
	return err
}
// OldLine writes one row of the -old report.
func (out *tableOutput) OldLine(os *OldStatus) error {
	_, err := fmt.Fprintf(out.w,
		"%s\t%s\t%s\t%s\t\n",
		os.ProjectRoot,
		os.getConsolidatedConstraint(),
		formatVersion(os.Revision),
		os.getConsolidatedLatest(shortRev),
	)
	return err
}
// OldFooter flushes the table.
func (out *tableOutput) OldFooter() error {
	return out.w.Flush()
}
// jsonOutput accumulates rows in memory and encodes the whole report as
// JSON in the corresponding footer call.
type jsonOutput struct {
	w io.Writer
	basic []*rawStatus
	detail []rawDetailProject
	missing []*MissingStatus
	old []*rawOldStatus
}
// BasicHeader resets the basic accumulator.
func (out *jsonOutput) BasicHeader() error {
	out.basic = []*rawStatus{}
	return nil
}
// BasicFooter encodes all accumulated basic rows as a JSON array.
func (out *jsonOutput) BasicFooter() error {
	return json.NewEncoder(out.w).Encode(out.basic)
}
// BasicLine buffers one basic row.
func (out *jsonOutput) BasicLine(bs *BasicStatus) error {
	out.basic = append(out.basic, bs.marshalJSON())
	return nil
}
// DetailHeader resets the detail accumulator.
func (out *jsonOutput) DetailHeader(metadata *dep.SolveMeta) error {
	out.detail = []rawDetailProject{}
	return nil
}
// DetailFooter wraps the accumulated projects with solve metadata and
// encodes the whole document.
func (out *jsonOutput) DetailFooter(metadata *dep.SolveMeta) error {
	doc := rawDetail{
		Projects: out.detail,
		Metadata: newRawMetadata(metadata),
	}
	return json.NewEncoder(out.w).Encode(doc)
}
// DetailLine buffers one detail row.
func (out *jsonOutput) DetailLine(ds *DetailStatus) error {
	out.detail = append(out.detail, *ds.marshalJSON())
	return nil
}
// MissingHeader resets the missing accumulator.
func (out *jsonOutput) MissingHeader() error {
	out.missing = []*MissingStatus{}
	return nil
}
// MissingLine buffers one missing-packages row.
func (out *jsonOutput) MissingLine(ms *MissingStatus) error {
	out.missing = append(out.missing, ms)
	return nil
}
// MissingFooter encodes the missing-packages report.
func (out *jsonOutput) MissingFooter() error {
	return json.NewEncoder(out.w).Encode(out.missing)
}
// OldHeader resets the old-statuses accumulator.
func (out *jsonOutput) OldHeader() error {
	out.old = []*rawOldStatus{}
	return nil
}
// OldLine buffers one out-of-date row.
func (out *jsonOutput) OldLine(os *OldStatus) error {
	out.old = append(out.old, os.marshalJSON())
	return nil
}
// OldFooter encodes the out-of-date report.
func (out *jsonOutput) OldFooter() error {
	return json.NewEncoder(out.w).Encode(out.old)
}
// dotOutput renders the dependency graph in GraphViz dot format. Nodes
// are added per status line and the document is emitted in the footer.
type dotOutput struct {
	w io.Writer
	o string
	g *graphviz
	p *dep.Project
}
// BasicHeader initializes the graph and adds the root project node with
// its direct (non-stdlib) children.
func (out *dotOutput) BasicHeader() error {
	out.g = new(graphviz).New()
	ptree := out.p.RootPackageTree
	// TODO(sdboyer) should be true, true, false, out.p.Manifest.IgnoredPackages()
	prm, _ := ptree.ToReachMap(true, false, false, nil)
	out.g.createNode(string(out.p.ImportRoot), "", prm.FlattenFn(paths.IsStandardImportPath))
	return nil
}
// BasicFooter serializes the accumulated graph to the writer.
func (out *dotOutput) BasicFooter() error {
	gvo := out.g.output("")
	_, err := fmt.Fprint(out.w, gvo.String())
	return err
}
// BasicLine adds a node (with its children) for one project.
func (out *dotOutput) BasicLine(bs *BasicStatus) error {
	out.g.createNode(bs.ProjectRoot, bs.getConsolidatedVersion(), bs.Children)
	return nil
}
// Detail mode is identical to basic mode for the dot format.
func (out *dotOutput) DetailHeader(metadata *dep.SolveMeta) error {
	return out.BasicHeader()
}
func (out *dotOutput) DetailFooter(metadata *dep.SolveMeta) error {
	return out.BasicFooter()
}
func (out *dotOutput) DetailLine(ds *DetailStatus) error {
	return out.BasicLine(&ds.BasicStatus)
}
// The missing report has no graph representation; these are no-ops.
func (out *dotOutput) MissingHeader() error { return nil }
func (out *dotOutput) MissingLine(ms *MissingStatus) error { return nil }
func (out *dotOutput) MissingFooter() error { return nil }
// templateOutput renders rows through a user-supplied text/template.
// Basic mode executes the template once per row; detail mode buffers
// rows and executes the template once over the whole document.
type templateOutput struct {
	w io.Writer
	tmpl *template.Template
	detail []rawDetailProject
}
func (out *templateOutput) BasicHeader() error { return nil }
func (out *templateOutput) BasicFooter() error { return nil }
// BasicLine applies the template to one project's raw status.
func (out *templateOutput) BasicLine(bs *BasicStatus) error {
	data := rawStatus{
		ProjectRoot: bs.ProjectRoot,
		Constraint: bs.getConsolidatedConstraint(),
		Version: bs.getConsolidatedVersion(),
		Revision: bs.Revision.String(),
		Latest: bs.getConsolidatedLatest(shortRev),
		PackageCount: bs.PackageCount,
	}
	return out.tmpl.Execute(out.w, data)
}
// DetailHeader resets the detail row buffer.
func (out *templateOutput) DetailHeader(metadata *dep.SolveMeta) error {
	out.detail = []rawDetailProject{}
	return nil
}
// DetailFooter applies the template once to the full document
// (projects plus solve metadata).
func (out *templateOutput) DetailFooter(metadata *dep.SolveMeta) error {
	raw := rawDetail{
		Projects: out.detail,
		Metadata: newRawMetadata(metadata),
	}
	return out.tmpl.Execute(out.w, raw)
}
// DetailLine buffers one project for the footer-time template run.
func (out *templateOutput) DetailLine(ds *DetailStatus) error {
	data := rawDetailProject{
		ProjectRoot: ds.ProjectRoot,
		Constraint: ds.getConsolidatedConstraint(),
		Locked: formatDetailVersion(ds.Version, ds.Revision),
		Latest: formatDetailLatestVersion(ds.Latest, ds.hasError),
		PruneOpts: ds.getPruneOpts(),
		Digest: ds.Digest.String(),
		PackageCount: ds.PackageCount,
		Source: ds.Source,
		Packages: ds.Packages,
	}
	out.detail = append(out.detail, data)
	return nil
}
func (out *templateOutput) OldHeader() error { return nil }
func (out *templateOutput) OldFooter() error { return nil }
// OldLine applies the template directly to the OldStatus value.
func (out *templateOutput) OldLine(os *OldStatus) error {
	return out.tmpl.Execute(out.w, os)
}
func (out *templateOutput) MissingHeader() error { return nil }
func (out *templateOutput) MissingFooter() error { return nil }
// MissingLine applies the template directly to the MissingStatus value.
func (out *templateOutput) MissingLine(ms *MissingStatus) error {
	return out.tmpl.Execute(out.w, ms)
}
// Run executes `dep status`: it validates flags, loads the project,
// selects an outputter for the requested format, runs the status
// analysis, and writes the buffered report to stdout or the -out file.
func (cmd *statusCommand) Run(ctx *dep.Ctx, args []string) error {
	if cmd.examples {
		ctx.Err.Println(strings.TrimSpace(statusExamples))
		return nil
	}
	if err := cmd.validateFlags(); err != nil {
		return err
	}
	p, err := ctx.LoadProject()
	if err != nil {
		return err
	}
	sm, err := ctx.SourceManager()
	if err != nil {
		return err
	}
	sm.UseDefaultSignalHandling()
	defer sm.Release()
	if err := dep.ValidateProjectRoots(ctx, p.Manifest, sm); err != nil {
		return err
	}
	// All output is buffered so errors can be reported before any
	// partial report reaches the user.
	var buf bytes.Buffer
	var out outputter
	switch {
	case cmd.missing:
		return errors.Errorf("not implemented")
	case cmd.json:
		out = &jsonOutput{
			w: &buf,
		}
	case cmd.dot:
		out = &dotOutput{
			p: p,
			o: cmd.output,
			w: &buf,
		}
	case cmd.template != "":
		tmpl, err := parseStatusTemplate(cmd.template)
		if err != nil {
			return err
		}
		out = &templateOutput{
			w: &buf,
			tmpl: tmpl,
		}
	case cmd.lock:
		// -lock is implemented as a canned template.
		tmpl, err := parseStatusTemplate(statusLockTemplate)
		if err != nil {
			return err
		}
		out = &templateOutput{
			w: &buf,
			tmpl: tmpl,
		}
	default:
		out = &tableOutput{
			w: tabwriter.NewWriter(&buf, 0, 4, 2, ' ', 0),
		}
	}
	// Check if the lock file exists.
	if p.Lock == nil {
		return errors.Errorf("no Gopkg.lock found. Run `dep ensure` to generate lock file")
	}
	if cmd.old {
		// -old requires an outputter that supports the old report.
		if _, ok := out.(oldOutputter); !ok {
			return errors.Errorf("invalid output format used")
		}
		err = cmd.runOld(ctx, out.(oldOutputter), p, sm)
		ctx.Out.Print(buf.String())
		return err
	}
	_, errCount, runerr := cmd.runStatusAll(ctx, out, p, sm)
	if runerr != nil {
		switch runerr {
		case errFailedUpdate:
			// Print the help when in non-verbose mode
			if !ctx.Verbose {
				ctx.Out.Printf("The status of %d projects are unknown due to errors. Rerun with `-v` flag to see details.\n", errCount)
			}
		case errInputDigestMismatch:
			ctx.Err.Printf("Gopkg.lock is out of sync with imports and/or Gopkg.toml. Run `dep check` for details.\n")
		default:
			return runerr
		}
	}
	if cmd.outFilePath == "" {
		// Print the status output
		ctx.Out.Print(buf.String())
	} else {
		file, err := os.Create(cmd.outFilePath)
		if err != nil {
			return fmt.Errorf("error creating output file: %v", err)
		}
		defer file.Close()
		if _, err := io.Copy(file, bytes.NewReader(buf.Bytes())); err != nil {
			return fmt.Errorf("error writing output file: %v", err)
		}
	}
	// Propagate the (possibly non-fatal) run error as the exit status.
	return runerr
}
// validateFlags rejects incompatible flag combinations: -dot excludes
// all other modes/formats, -lock excludes -f (and implies -detail), and
// at most one operating-mode flag (-old/-missing/-detail) is allowed.
func (cmd *statusCommand) validateFlags() error {
	// Operating mode flags.
	var opModes []string
	if cmd.old {
		opModes = append(opModes, "-old")
	}
	if cmd.missing {
		opModes = append(opModes, "-missing")
	}
	if cmd.detail {
		opModes = append(opModes, "-detail")
	}
	// Check if any other flags are passed with -dot.
	if cmd.dot {
		if cmd.template != "" {
			return errors.New("cannot pass template string with -dot")
		}
		if cmd.json {
			return errors.New("cannot pass multiple output format flags")
		}
		if len(opModes) > 0 {
			return errors.New("-dot generates dependency graph; cannot pass other flags")
		}
	}
	if cmd.lock {
		if cmd.template != "" {
			return errors.New("cannot pass template string with -lock")
		}
		// -lock output needs the detail fields.
		if !cmd.detail {
			cmd.detail = true
		}
	}
	if len(opModes) > 1 {
		// List the flags because which flags are for operation mode might not
		// be apparent to the users.
		return errors.Wrapf(errors.New("cannot pass multiple operating mode flags"), "%v", opModes)
	}
	return nil
}
// OldStatus contains information about all the out of date packages in a project.
type OldStatus struct {
	ProjectRoot string
	Constraint gps.Constraint // constraint from the manifest (or override)
	Revision gps.Revision // revision currently locked
	Latest gps.Version // newest revision allowed by the constraint
}
// rawOldStatus is the string-only form of OldStatus used for JSON output.
type rawOldStatus struct {
	ProjectRoot, Constraint, Revision, Latest string
}
// getConsolidatedConstraint renders the constraint for display: plain
// versions go through formatVersion, other constraints through their
// own String method, and a nil constraint yields "".
func (os OldStatus) getConsolidatedConstraint() string {
	if os.Constraint == nil {
		return ""
	}
	if v, ok := os.Constraint.(gps.Version); ok {
		return formatVersion(v)
	}
	return os.Constraint.String()
}
// getConsolidatedLatest renders the latest version for display,
// abbreviated (shortRev) or in full (longRev). A nil Latest — or an
// unrecognized revSize — yields "".
func (os OldStatus) getConsolidatedLatest(revSize uint8) string {
	if os.Latest == nil {
		return ""
	}
	switch revSize {
	case shortRev:
		return formatVersion(os.Latest)
	case longRev:
		return os.Latest.String()
	}
	return ""
}
// marshalJSON converts the status to its string-only JSON form, using
// the full-length revision for Latest.
func (os OldStatus) marshalJSON() *rawOldStatus {
	return &rawOldStatus{
		ProjectRoot: os.ProjectRoot,
		Constraint: os.getConsolidatedConstraint(),
		Revision: string(os.Revision),
		Latest: os.getConsolidatedLatest(longRev),
	}
}
// runOld implements the -old report: it solves the dependency graph
// with ChangeAll to find the newest admissible revisions, then emits a
// row for every locked project whose revision differs from the
// solution and which has an explicit constraint in the manifest.
func (cmd *statusCommand) runOld(ctx *dep.Ctx, out oldOutputter, p *dep.Project, sm gps.SourceManager) error {
	// While the network churns on ListVersions() requests, statically analyze
	// code from the current project.
	ptree := p.RootPackageTree
	// Set up a solver in order to check the InputHash.
	params := gps.SolveParameters{
		ProjectAnalyzer: dep.Analyzer{},
		RootDir: p.AbsRoot,
		RootPackageTree: ptree,
		Manifest: p.Manifest,
		// Locks aren't a part of the input hash check, so we can omit it.
	}
	logger := ctx.Err
	if ctx.Verbose {
		params.TraceLogger = ctx.Err
	} else {
		// Suppress solver chatter unless -v was passed.
		logger = log.New(ioutil.Discard, "", 0)
	}
	// Check update for all the projects.
	params.ChangeAll = true
	solver, err := gps.Prepare(params, sm)
	if err != nil {
		return errors.Wrap(err, "fastpath solver prepare")
	}
	logger.Println("Solving dependency graph to determine which dependencies can be updated.")
	solution, err := solver.Solve(context.TODO())
	if err != nil {
		return errors.Wrap(err, "runOld")
	}
	var oldStatuses []OldStatus
	solutionProjects := solution.Projects()
	for _, proj := range p.Lock.Projects() {
		for _, sProj := range solutionProjects {
			// Look for the same project in solution and lock.
			if sProj.Ident().ProjectRoot != proj.Ident().ProjectRoot {
				continue
			}
			// If revisions are not the same then it is old and we should display it.
			latestRev, _, _ := gps.VersionComponentStrings(sProj.Version())
			atRev, _, _ := gps.VersionComponentStrings(proj.Version())
			if atRev == latestRev {
				continue
			}
			var constraint gps.Constraint
			// Getting Constraint.
			if pp, has := p.Manifest.Ovr[proj.Ident().ProjectRoot]; has && pp.Constraint != nil {
				// manifest has override for project.
				constraint = pp.Constraint
			} else if pp, has := p.Manifest.Constraints[proj.Ident().ProjectRoot]; has && pp.Constraint != nil {
				// manifest has normal constraint.
				constraint = pp.Constraint
			} else {
				// No constraint exists. No need to worry about displaying it.
				continue
			}
			// Generate the old status data and append it.
			os := OldStatus{
				ProjectRoot: proj.Ident().String(),
				Revision: gps.Revision(atRev),
				Latest: gps.Revision(latestRev),
				Constraint: constraint,
			}
			oldStatuses = append(oldStatuses, os)
		}
	}
	out.OldHeader()
	for _, ostat := range oldStatuses {
		out.OldLine(&ostat)
	}
	out.OldFooter()
	return nil
}
// rawStatus is the string-only form of BasicStatus used for JSON and
// template output.
type rawStatus struct {
	ProjectRoot string
	Constraint string
	Version string
	Revision string
	Latest string
	PackageCount int
}
// rawDetail is additional information used for the status when the
// -detail flag is specified
type rawDetail struct {
	Projects []rawDetailProject
	Metadata rawDetailMetadata
}
// rawDetailVersion holds exactly one of revision/version/branch for a
// locked or latest version.
type rawDetailVersion struct {
	Revision string `json:"Revision,omitempty"`
	Version string `json:"Version,omitempty"`
	Branch string `json:"Branch,omitempty"`
}
// rawDetailProject is the per-project record in -detail output.
type rawDetailProject struct {
	ProjectRoot string
	Packages []string
	Locked rawDetailVersion
	Latest rawDetailVersion
	PruneOpts string
	Digest string
	Source string `json:"Source,omitempty"`
	Constraint string
	PackageCount int
}
// rawDetailMetadata mirrors dep.SolveMeta for -detail output.
type rawDetailMetadata struct {
	AnalyzerName string
	AnalyzerVersion int
	InputsDigest string // deprecated
	InputImports []string
	SolverName string
	SolverVersion int
}
// newRawMetadata copies solve metadata into its raw output form; a nil
// input yields the zero value.
func newRawMetadata(metadata *dep.SolveMeta) rawDetailMetadata {
	var raw rawDetailMetadata
	if metadata != nil {
		raw.AnalyzerName = metadata.AnalyzerName
		raw.AnalyzerVersion = metadata.AnalyzerVersion
		raw.InputImports = metadata.InputImports
		raw.SolverName = metadata.SolverName
		raw.SolverVersion = metadata.SolverVersion
	}
	return raw
}
// BasicStatus contains all the information reported about a single dependency
// in the summary/list status output mode.
type BasicStatus struct {
	ProjectRoot string
	Children []string // direct dependencies; only populated for dot output
	Constraint gps.Constraint
	Version gps.UnpairedVersion // version from the lock, if any
	Revision gps.Revision // revision from the lock
	Latest gps.Version // newest version matching the constraint
	PackageCount int
	hasOverride bool // constraint came from an [[override]]
	hasError bool // a network/listing error occurred for this project
}
// DetailStatus contains all information reported about a single dependency
// in the detailed status output mode. The included information matches the
// information included about a project in a lock file.
type DetailStatus struct {
	BasicStatus
	Packages []string
	Source string
	PruneOpts gps.PruneOptions
	Digest verify.VersionedDigest
}
// getConsolidatedConstraint renders the constraint for display,
// appending " (override)" when the constraint came from an override.
func (bs *BasicStatus) getConsolidatedConstraint() string {
	var out string
	switch c := bs.Constraint.(type) {
	case nil:
		// No constraint: leave the string empty.
	case gps.Version:
		out = formatVersion(c)
	default:
		out = c.String()
	}
	if bs.hasOverride {
		out += " (override)"
	}
	return out
}
// getConsolidatedVersion renders the locked version, preferring the
// semantic/branch version over the raw revision when one exists.
func (bs *BasicStatus) getConsolidatedVersion() string {
	if bs.Version != nil {
		return formatVersion(bs.Version)
	}
	return formatVersion(bs.Revision)
}
// getConsolidatedLatest renders the latest version, abbreviated
// (shortRev) or full (longRev), and appends "unknown" when version
// listing failed for this project.
func (bs *BasicStatus) getConsolidatedLatest(revSize uint8) string {
	var latest string
	if bs.Latest != nil {
		if revSize == longRev {
			latest = bs.Latest.String()
		} else if revSize == shortRev {
			latest = formatVersion(bs.Latest)
		}
	}
	if bs.hasError {
		latest += "unknown"
	}
	return latest
}
// getPruneOpts renders the prune options, masking out the always-on
// PruneNestedVendorDirs bit so only user-chosen options appear.
func (ds *DetailStatus) getPruneOpts() string {
	return (ds.PruneOpts & ^gps.PruneNestedVendorDirs).String()
}
// marshalJSON converts the basic status to its string-only JSON form,
// using the full-length revision for Latest.
func (bs *BasicStatus) marshalJSON() *rawStatus {
	return &rawStatus{
		ProjectRoot: bs.ProjectRoot,
		Constraint: bs.getConsolidatedConstraint(),
		Version: formatVersion(bs.Version),
		Revision: string(bs.Revision),
		Latest: bs.getConsolidatedLatest(longRev),
		PackageCount: bs.PackageCount,
	}
}
// marshalJSON converts the detail status to its raw JSON form, reusing
// the embedded BasicStatus conversion for the shared fields.
func (ds *DetailStatus) marshalJSON() *rawDetailProject {
	rawStatus := ds.BasicStatus.marshalJSON()
	return &rawDetailProject{
		ProjectRoot: rawStatus.ProjectRoot,
		Constraint: rawStatus.Constraint,
		Locked: formatDetailVersion(ds.Version, ds.Revision),
		Latest: formatDetailLatestVersion(ds.Latest, ds.hasError),
		PruneOpts: ds.getPruneOpts(),
		Digest: ds.Digest.String(),
		Source: ds.Source,
		Packages: ds.Packages,
		PackageCount: ds.PackageCount,
	}
}
// MissingStatus contains information about all the missing packages in a project.
type MissingStatus struct {
	ProjectRoot string
	MissingPackages []string
}
func (cmd *statusCommand) runStatusAll(ctx *dep.Ctx, out outputter, p *dep.Project, sm gps.SourceManager) (hasMissingPkgs bool, errCount int, err error) {
// While the network churns on ListVersions() requests, statically analyze
// code from the current project.
ptree := p.RootPackageTree
// Set up a solver in order to check the InputHash.
params := gps.SolveParameters{
ProjectAnalyzer: dep.Analyzer{},
RootDir: p.AbsRoot,
RootPackageTree: ptree,
Manifest: p.Manifest,
// Locks aren't a part of the input hash check, so we can omit it.
}
logger := ctx.Err
if ctx.Verbose {
params.TraceLogger = ctx.Err
} else {
logger = log.New(ioutil.Discard, "", 0)
}
if err := ctx.ValidateParams(sm, params); err != nil {
return false, 0, err
}
// Errors while collecting constraints should not fail the whole status run.
// It should count the error and tell the user about incomplete results.
cm, ccerrs := collectConstraints(ctx, p, sm)
if len(ccerrs) > 0 {
errCount += len(ccerrs)
}
// Get the project list and sort it so that the printed output users see is
// deterministically ordered. (This may be superfluous if the lock is always
// written in alpha order, but it doesn't hurt to double down.)
slp := p.Lock.Projects()
sort.Slice(slp, func(i, j int) bool {
return slp[i].Ident().Less(slp[j].Ident())
})
slcp := p.ChangedLock.Projects()
sort.Slice(slcp, func(i, j int) bool {
return slcp[i].Ident().Less(slcp[j].Ident())
})
lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree)
if lsat.Satisfied() {
// If the lock satisfies the inputs, we're guaranteed (barring manual
// meddling, about which we can do nothing) that the lock is a
// transitively complete picture of all deps. That eliminates the need
// for some checks.
logger.Println("Checking upstream projects:")
// DetailStatus channel to collect all the DetailStatus.
dsCh := make(chan *DetailStatus, len(slp))
// Error channels to collect different errors.
errListPkgCh := make(chan error, len(slp))
errListVerCh := make(chan error, len(slp))
var wg sync.WaitGroup
for i, proj := range slp {
wg.Add(1)
logger.Printf("(%d/%d) %s\n", i+1, len(slp), proj.Ident().ProjectRoot)
go func(proj verify.VerifiableProject) {
bs := BasicStatus{
ProjectRoot: string(proj.Ident().ProjectRoot),
PackageCount: len(proj.Packages()),
}
// Get children only for specific outputers
// in order to avoid slower status process.
switch out.(type) {
case *dotOutput:
ptr, err := sm.ListPackages(proj.Ident(), proj.Version())
if err != nil {
bs.hasError = true
errListPkgCh <- err
}
prm, _ := ptr.ToReachMap(true, true, false, p.Manifest.IgnoredPackages())
bs.Children = prm.FlattenFn(paths.IsStandardImportPath)
}
// Split apart the version from the lock into its constituent parts.
switch tv := proj.Version().(type) {
case gps.UnpairedVersion:
bs.Version = tv
case gps.Revision:
bs.Revision = tv
case gps.PairedVersion:
bs.Version = tv.Unpair()
bs.Revision = tv.Revision()
}
// Check if the manifest has an override for this project. If so,
// set that as the constraint.
if pp, has := p.Manifest.Ovr[proj.Ident().ProjectRoot]; has && pp.Constraint != nil {
bs.hasOverride = true
bs.Constraint = pp.Constraint
} else if pp, has := p.Manifest.Constraints[proj.Ident().ProjectRoot]; has && pp.Constraint != nil {
// If the manifest has a constraint then set that as the constraint.
bs.Constraint = pp.Constraint
} else {
bs.Constraint = gps.Any()
for _, c := range cm[bs.ProjectRoot] {
bs.Constraint = c.Constraint.Intersect(bs.Constraint)
}
}
// Only if we have a non-rev and non-plain version do/can we display
// anything wrt the version's updateability.
if bs.Version != nil && bs.Version.Type() != gps.IsVersion {
c, has := p.Manifest.Constraints[proj.Ident().ProjectRoot]
if !has {
// Get constraint for locked project
for _, lockedP := range p.Lock.P {
if lockedP.Ident().ProjectRoot == proj.Ident().ProjectRoot {
// Use the unpaired version as the constraint for checking updates.
c.Constraint = bs.Version
}
}
}
// TODO: This constraint is only the constraint imposed by the
// current project, not by any transitive deps. As a result,
// transitive project deps will always show "any" here.
bs.Constraint = c.Constraint
vl, err := sm.ListVersions(proj.Ident())
if err == nil {
gps.SortPairedForUpgrade(vl)
for _, v := range vl {
// Because we've sorted the version list for
// upgrade, the first version we encounter that
// matches our constraint will be what we want.
if c.Constraint.Matches(v) {
// Latest should be of the same type as the Version.
if bs.Version.Type() == gps.IsSemver {
bs.Latest = v
} else {
bs.Latest = v.Revision()
}
break
}
}
} else {
// Failed to fetch version list (could happen due to
// network issue).
bs.hasError = true
errListVerCh <- err
}
}
ds := DetailStatus{
BasicStatus: bs,
}
if cmd.detail {
ds.Source = proj.Ident().Source
ds.Packages = proj.Packages()
ds.PruneOpts = proj.PruneOpts
ds.Digest = proj.Digest
}
dsCh <- &ds
wg.Done()
}(proj.(verify.VerifiableProject))
}
wg.Wait()
close(dsCh)
close(errListPkgCh)
close(errListVerCh)
// Newline after printing the status progress output.
logger.Println()
// List Packages errors. This would happen only for dot output.
if len(errListPkgCh) > 0 {
err = errFailedListPkg
if ctx.Verbose {
for err := range errListPkgCh {
ctx.Err.Println(err.Error())
}
ctx.Err.Println()
}
}
// List Version errors.
if len(errListVerCh) > 0 {
if err == nil {
err = errFailedUpdate
} else {
err = errMultipleFailures
}
// Count ListVersions error because we get partial results when
// this happens.
errCount += len(errListVerCh)
if ctx.Verbose {
for err := range errListVerCh {
ctx.Err.Println(err.Error())
}
ctx.Err.Println()
}
}
if cmd.detail {
// A map of ProjectRoot and *DetailStatus. This is used in maintain the
// order of DetailStatus in output by collecting all the DetailStatus and
// then using them in order.
dsMap := make(map[string]*DetailStatus)
for ds := range dsCh {
dsMap[ds.ProjectRoot] = ds
}
if err := detailOutputAll(out, slp, dsMap, &p.Lock.SolveMeta); err != nil {
return false, 0, err
}
} else {
// A map of ProjectRoot and *BasicStatus. This is used in maintain the
// order of BasicStatus in output by collecting all the BasicStatus and
// then using them in order.
bsMap := make(map[string]*BasicStatus)
for bs := range dsCh {
bsMap[bs.ProjectRoot] = &bs.BasicStatus
}
if err := basicOutputAll(out, slp, bsMap); err != nil {
return false, 0, err
}
}
return false, errCount, err
}
rm, _ := ptree.ToReachMap(true, true, false, p.Manifest.IgnoredPackages())
external := rm.FlattenFn(paths.IsStandardImportPath)
roots := make(map[gps.ProjectRoot][]string, len(external))
type fail struct {
ex string
err error
}
var errs []fail
for _, e := range external {
root, err := sm.DeduceProjectRoot(e)
if err != nil {
errs = append(errs, fail{
ex: e,
err: err,
})
continue
}
roots[root] = append(roots[root], e)
}
if len(errs) != 0 {
// TODO this is just a fix quick so staticcheck doesn't complain.
// Visually reconciling failure to deduce project roots with the rest of
// the mismatch output is a larger problem.
ctx.Err.Printf("Failed to deduce project roots for import paths:\n")
for _, fail := range errs {
ctx.Err.Printf("\t%s: %s\n", fail.ex, fail.err.Error())
}
return false, 0, errors.New("address issues with undeducible import paths to get more status information")
}
if err = out.MissingHeader(); err != nil {
return false, 0, err
}
outer:
for root, pkgs := range roots {
// TODO also handle the case where the project is present, but there
// are items missing from just the package list
for _, lp := range slp {
if lp.Ident().ProjectRoot == root {
continue outer
}
}
hasMissingPkgs = true
err := out.MissingLine(&MissingStatus{ProjectRoot: string(root), MissingPackages: pkgs})
if err != nil {
return false, 0, err
}
}
if err = out.MissingFooter(); err != nil {
return false, 0, err
}
// We are here because of an input-digest mismatch. Return error.
return hasMissingPkgs, 0, errInputDigestMismatch
}
// basicOutputAll renders the basic status report: a header, one body line
// per project in slp (in the order given), and a footer. bsMap supplies the
// precomputed *BasicStatus for each project, keyed by project root string.
func basicOutputAll(out outputter, slp []gps.LockedProject, bsMap map[string]*BasicStatus) (err error) {
	if err = out.BasicHeader(); err != nil {
		return err
	}

	// Emit one line per project, preserving slp's ordering.
	for i := range slp {
		root := string(slp[i].Ident().ProjectRoot)
		if err = out.BasicLine(bsMap[root]); err != nil {
			return err
		}
	}

	return out.BasicFooter()
}
// detailOutputAll renders the detailed status report: a header carrying the
// solve metadata, one body line per project in slp (in the order given), and
// a footer. dsMap supplies the precomputed *DetailStatus for each project,
// keyed by project root string.
func detailOutputAll(out outputter, slp []gps.LockedProject, dsMap map[string]*DetailStatus, metadata *dep.SolveMeta) (err error) {
	if err = out.DetailHeader(metadata); err != nil {
		return err
	}

	// Emit one line per project, preserving slp's ordering.
	for i := range slp {
		root := string(slp[i].Ident().ProjectRoot)
		if err = out.DetailLine(dsMap[root]); err != nil {
			return err
		}
	}

	return out.DetailFooter(metadata)
}
// formatVersion produces the human-readable form of a version for status
// output: branches are prefixed with "branch ", revisions are truncated to
// seven characters, and a nil version renders as the empty string.
func formatVersion(v gps.Version) string {
	if v == nil {
		return ""
	}

	s := v.String()
	switch v.Type() {
	case gps.IsBranch:
		return "branch " + s
	case gps.IsRevision:
		// Abbreviate long revisions (e.g. git SHAs) to seven characters.
		if len(s) > 7 {
			return s[:7]
		}
		return s
	}
	return s
}
// formatDetailVersion splits a version/revision pair into the string fields
// of a rawDetailVersion for the detailed (JSON/template) status output. A nil
// version yields revision-only output; branches populate Branch, plain
// revisions populate Revision, and anything else populates Version.
func formatDetailVersion(v gps.Version, r gps.Revision) rawDetailVersion {
	if v == nil {
		return rawDetailVersion{Revision: r.String()}
	}

	var out rawDetailVersion
	switch v.Type() {
	case gps.IsBranch:
		out.Branch = v.String()
		out.Revision = r.String()
	case gps.IsRevision:
		out.Revision = v.String()
	default:
		out.Version = v.String()
		out.Revision = r.String()
	}
	return out
}
// formatDetailLatestVersion formats the "latest" version column for detailed
// status output. When the version lookup errored upstream, the revision is
// reported as "unknown" rather than formatting a possibly-bogus value.
func formatDetailLatestVersion(v gps.Version, hasError bool) rawDetailVersion {
	if !hasError {
		return formatDetailVersion(v, "")
	}
	return rawDetailVersion{Revision: "unknown"}
}
// projectConstraint stores ProjectRoot and Constraint for that project.
// It pairs the project that declares a constraint with the constraint
// itself, so status output can show which project imposes which bound.
type projectConstraint struct {
	Project    gps.ProjectRoot // project declaring the constraint ("root" for the root manifest)
	Constraint gps.Constraint  // the version constraint that project imposes
}
// constraintsCollection is a map of ProjectRoot(dependency) and a collection of
// projectConstraint for the dependencies. This can be used to find constraints
// on a dependency and the projects that apply those constraints.
// Keys are the dependency's project root as a plain string.
type constraintsCollection map[string][]projectConstraint
// collectConstraints collects constraints declared by all the dependencies and
// constraints from the root project. It returns constraintsCollection and
// a slice of errors encountered while collecting the constraints, if any.
//
// Each locked project's manifest is fetched concurrently; results are merged
// into the shared collection under a mutex, and per-slice ordering is kept
// deterministic by sorting on the declaring project.
func collectConstraints(ctx *dep.Ctx, p *dep.Project, sm gps.SourceManager) (constraintsCollection, []error) {
	logger := ctx.Err
	if !ctx.Verbose {
		// Discard progress output unless -v was passed.
		logger = log.New(ioutil.Discard, "", 0)
	}

	logger.Println("Collecting project constraints:")

	var mutex sync.Mutex // guards constraintCollection across the worker goroutines
	constraintCollection := make(constraintsCollection)

	// Collect the complete set of direct project dependencies, incorporating
	// requireds and ignores appropriately.
	directDeps, err := p.GetDirectDependencyNames(sm)
	if err != nil {
		// Return empty collection, not nil, if we fail here.
		return constraintCollection, []error{errors.Wrap(err, "failed to get direct dependencies")}
	}

	// Create a root analyzer.
	rootAnalyzer := newRootAnalyzer(true, ctx, directDeps, sm)

	lp := p.Lock.Projects()

	// Channel for receiving all the errors. Buffered to len(lp) so that
	// worker goroutines never block on send.
	errCh := make(chan error, len(lp))

	var wg sync.WaitGroup

	// Iterate through the locked projects and collect constraints of all the projects.
	for i, proj := range lp {
		wg.Add(1)
		logger.Printf("(%d/%d) %s\n", i+1, len(lp), proj.Ident().ProjectRoot)

		go func(proj gps.LockedProject) {
			defer wg.Done()

			manifest, _, err := sm.GetManifestAndLock(proj.Ident(), proj.Version(), rootAnalyzer)
			if err != nil {
				errCh <- errors.Wrap(err, "error getting manifest and lock")
				return
			}

			// Get project constraints.
			pc := manifest.DependencyConstraints()

			// Obtain a lock for constraintCollection.
			mutex.Lock()
			defer mutex.Unlock()
			// Iterate through the project constraints to get individual dependency
			// project and constraint values.
			for pr, pp := range pc {
				// Check if the project constraint is imported in the root project
				if _, ok := directDeps[pr]; !ok {
					continue
				}

				tempCC := append(
					constraintCollection[string(pr)],
					projectConstraint{proj.Ident().ProjectRoot, pp.Constraint},
				)

				// Sort the inner projectConstraint slice by Project string.
				// Required for consistent returned value.
				sort.Sort(byProject(tempCC))
				constraintCollection[string(pr)] = tempCC
			}
		}(proj)
	}

	wg.Wait()
	close(errCh)

	// Drain any worker errors; safe to range since the channel is closed.
	var errs []error
	if len(errCh) > 0 {
		for e := range errCh {
			errs = append(errs, e)
			logger.Println(e.Error())
		}
	}

	// Incorporate constraints set in the manifest of the root project.
	if p.Manifest != nil {
		// Iterate through constraints in the manifest, append if it is a
		// direct dependency
		for pr, pp := range p.Manifest.Constraints {
			if _, ok := directDeps[pr]; !ok {
				continue
			}

			// Mark constraints coming from the manifest as "root"
			tempCC := append(
				constraintCollection[string(pr)],
				projectConstraint{"root", pp.Constraint},
			)

			// Sort the inner projectConstraint slice by Project string.
			// Required for consistent returned value.
			sort.Sort(byProject(tempCC))
			constraintCollection[string(pr)] = tempCC
		}
	}

	return constraintCollection, errs
}
// byProject implements sort.Interface over projectConstraint slices, ordering
// lexically by the declaring project's root. collectConstraints uses it to
// make its returned slices deterministic.
type byProject []projectConstraint

func (p byProject) Len() int           { return len(p) }
func (p byProject) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p byProject) Less(i, j int) bool { return p[i].Project < p[j].Project }
// parseStatusTemplate compiles a user-supplied status output template,
// installing the helper functions the built-in status templates rely on.
func parseStatusTemplate(format string) (*template.Template, error) {
	helpers := template.FuncMap{
		// dec decrements an int; useful for last-index checks in templates.
		"dec": func(i int) int {
			return i - 1
		},
		"tomlStrSplit": tomlStrSplit,
		// tomlStrSplit2 renders a string list as a TOML array at the given
		// indentation level.
		"tomlStrSplit2": func(strlist []string, level int) string {
			// Hardcode to two spaces.
			outer := strings.Repeat(" ", level)
			inner := strings.Repeat(" ", level+1)
			switch len(strlist) {
			case 0:
				return "[]"
			case 1:
				return fmt.Sprintf("[\"%s\"]", strlist[0])
			default:
				var b bytes.Buffer
				fmt.Fprintf(&b, "[\n")
				for _, s := range strlist {
					fmt.Fprintf(&b, "%s\"%s\",\n", inner, s)
				}
				fmt.Fprintf(&b, "%s]", outer)
				return b.String()
			}
		},
	}
	return template.New("status").Funcs(helpers).Parse(format)
}
// tomlStrSplit renders a string slice as a TOML array the way Gopkg.lock
// formats its list values: empty and single-element slices on one line,
// longer slices with one quoted element per line.
func tomlStrSplit(strlist []string) string {
	switch len(strlist) {
	case 0:
		return "[]"
	case 1:
		return fmt.Sprintf("[\"%s\"]", strlist[0])
	}

	// Hardcode to two spaces.
	var b bytes.Buffer
	fmt.Fprintf(&b, "[\n")
	for _, s := range strlist {
		fmt.Fprintf(&b, " \"%s\",\n", s)
	}
	fmt.Fprintf(&b, " ]")
	return b.String()
}
// statusLockTemplate is a text/template that renders collected status data in
// the shape of a Gopkg.lock file: one [[projects]] stanza per project plus a
// trailing [solve-meta] section. It uses the tomlStrSplit helper installed by
// parseStatusTemplate.
const statusLockTemplate = `# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
{{range $p := .Projects}}[[projects]]
{{- if $p.Locked.Branch}}
  branch = "{{$p.Locked.Branch}}"
{{- end}}
  digest = "{{$p.Digest}}"
  name = "{{$p.ProjectRoot}}"
  packages = {{(tomlStrSplit $p.Packages)}}
  pruneopts = "{{$p.PruneOpts}}"
  revision = "{{$p.Locked.Revision}}"
{{- if $p.Source}}
  source = "{{$p.Source}}"
{{- end}}
{{- if $p.Locked.Version}}
  version = "{{$p.Locked.Version}}"
{{- end}}
{{end}}[solve-meta]
  analyzer-name = "{{.Metadata.AnalyzerName}}"
  analyzer-version = {{.Metadata.AnalyzerVersion}}
  input-imports = {{(tomlStrSplit .Metadata.InputImports)}}
  solver-name = "{{.Metadata.SolverName}}"
  solver-version = {{.Metadata.SolverVersion}}
`
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/check.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"sort"
"strings"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/verify"
"github.com/pkg/errors"
)
// checkShortHelp is the one-line summary for 'dep check' shown in help listings.
const checkShortHelp = `Check if imports, Gopkg.toml, and Gopkg.lock are in sync`

// checkLongHelp is the full help text for 'dep check'.
const checkLongHelp = `
Check determines if your project is in a good state. If problems are found, it
prints a description of each issue, then exits 1. Passing -q suppresses output.
Flags control which specific checks will be run. By default, dep check verifies
that Gopkg.lock is in sync with Gopkg.toml and the imports in your project's .go
files, and that the vendor directory is in sync with Gopkg.lock. These checks
can be disabled with -skip-lock and -skip-vendor, respectively.
(See https://golang.github.io/dep/docs/ensure-mechanics.html#staying-in-sync for
more information on what it means to be "in sync.")
If your workflow necessitates that you modify the contents of vendor, you can
force check to ignore hash mismatches on a per-project basis by naming
project roots in Gopkg.toml's "noverify" list.
`
// checkCommand implements 'dep check': it validates that Gopkg.toml,
// Gopkg.lock, the project's imports, and vendor/ are mutually in sync.
type checkCommand struct {
	quiet                bool // -q: suppress non-error output
	skiplock, skipvendor bool // -skip-lock / -skip-vendor: disable the respective check
}
// Name returns the subcommand name used on the command line.
func (cmd *checkCommand) Name() string { return "check" }

// Args describes the accepted flags for usage output.
func (cmd *checkCommand) Args() string {
	return "[-q] [-skip-lock] [-skip-vendor]"
}

// ShortHelp returns the one-line description of the command.
func (cmd *checkCommand) ShortHelp() string { return checkShortHelp }

// LongHelp returns the full help text of the command.
func (cmd *checkCommand) LongHelp() string { return checkLongHelp }

// Hidden reports whether the command is omitted from help listings.
func (cmd *checkCommand) Hidden() bool { return false }

// Register wires the command's flags into the given FlagSet.
func (cmd *checkCommand) Register(fs *flag.FlagSet) {
	fs.BoolVar(&cmd.skiplock, "skip-lock", false, "Skip checking that imports and Gopkg.toml are in sync with Gopkg.lock")
	fs.BoolVar(&cmd.skipvendor, "skip-vendor", false, "Skip checking that vendor is in sync with Gopkg.lock")
	fs.BoolVar(&cmd.quiet, "q", false, "Suppress non-error output")
}
// Run executes 'dep check'. Unless skipped via flags, it verifies that
// Gopkg.lock satisfies the manifest/imports, and that vendor/ matches
// Gopkg.lock. On any failure it returns silentfail{} (nonzero exit with no
// extra output); hard errors (missing lock, I/O failures) are returned
// directly.
func (cmd *checkCommand) Run(ctx *dep.Ctx, args []string) error {
	logger := ctx.Out
	if cmd.quiet {
		// -q: swallow all non-error output.
		logger = log.New(ioutil.Discard, "", 0)
	}

	p, err := ctx.LoadProject()
	if err != nil {
		return err
	}

	sm, err := ctx.SourceManager()
	if err != nil {
		return err
	}
	sm.UseDefaultSignalHandling()
	defer sm.Release()

	var fail bool
	if !cmd.skiplock {
		if p.Lock == nil {
			return errors.New("Gopkg.lock does not exist, cannot check it against imports and Gopkg.toml")
		}

		lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, p.RootPackageTree)
		delta := verify.DiffLocks(p.Lock, p.ChangedLock)
		sat, changed := lsat.Satisfied(), delta.Changed(verify.PruneOptsChanged|verify.HashVersionChanged)

		if changed || !sat {
			fail = true
			logger.Println("# Gopkg.lock is out of sync:")
			if !sat {
				logger.Printf("%s\n", sprintLockUnsat(lsat))
			}
			if changed {
				// Sort, for deterministic output.
				var ordered []string
				for pr := range delta.ProjectDeltas {
					ordered = append(ordered, string(pr))
				}
				sort.Strings(ordered)

				for _, pr := range ordered {
					lpd := delta.ProjectDeltas[gps.ProjectRoot(pr)]
					// Only two possible changes right now are prune opts
					// changing or a missing hash digest (for old Gopkg.lock
					// files)
					if lpd.PruneOptsChanged() {
						// Override what's on the lockdiff with the extra info we have;
						// this lets us excise PruneNestedVendorDirs and get the real
						// value from the input param in place.
						old := lpd.PruneOptsBefore & ^gps.PruneNestedVendorDirs
						new := lpd.PruneOptsAfter & ^gps.PruneNestedVendorDirs
						logger.Printf("%s: prune options changed (%s -> %s)\n", pr, old, new)
					}
					if lpd.HashVersionWasZero() {
						logger.Printf("%s: no hash digest in lock\n", pr)
					}
				}
			}
		}
	}

	if !cmd.skipvendor {
		if p.Lock == nil {
			return errors.New("Gopkg.lock does not exist, cannot check vendor against it")
		}

		statuses, err := p.VerifyVendor()
		if err != nil {
			return errors.Wrap(err, "error while verifying vendor")
		}

		if fail {
			// Blank line separating this section from the lock report above.
			logger.Println()
		}

		// Build the set of project roots the user asked us to skip hash
		// verification for (Gopkg.toml "noverify").
		noverify := make(map[string]bool)
		for _, skip := range p.Manifest.NoVerify {
			noverify[skip] = true
		}

		var vendorfail, hasnoverify bool
		// One full pass through, to see if we need to print the header, and to
		// create an array of names to sort for deterministic output.
		var ordered []string
		for path, status := range statuses {
			ordered = append(ordered, path)

			switch status {
			case verify.DigestMismatchInLock, verify.HashVersionMismatch, verify.EmptyDigestInLock, verify.NotInLock:
				if noverify[path] {
					hasnoverify = true
					continue
				}
				fallthrough
			case verify.NotInTree:
				// NoVerify cannot be used to make dep check ignore the absence
				// of a project entirely.
				if noverify[path] {
					delete(noverify, path)
				}

				fail = true

				if !vendorfail {
					vendorfail = true
				}
			}
		}
		sort.Strings(ordered)

		// Two buffers: real failures vs. mismatches ignored via noverify.
		var vfbuf, novbuf bytes.Buffer
		var bufptr *bytes.Buffer

		fmt.Fprintf(&vfbuf, "# vendor is out of sync:\n")
		fmt.Fprintf(&novbuf, "# out of sync, but ignored, due to noverify in Gopkg.toml:\n")

		for _, pr := range ordered {
			if noverify[pr] {
				bufptr = &novbuf
			} else {
				bufptr = &vfbuf
			}

			status := statuses[pr]
			switch status {
			case verify.NotInTree:
				fmt.Fprintf(bufptr, "%s: missing from vendor\n", pr)
			case verify.NotInLock:
				fi, err := os.Stat(filepath.Join(p.AbsRoot, "vendor", pr))
				if err != nil {
					return errors.Wrap(err, "could not stat file that VerifyVendor claimed existed")
				}
				if fi.IsDir() {
					fmt.Fprintf(bufptr, "%s: unused project\n", pr)
				} else {
					fmt.Fprintf(bufptr, "%s: orphaned file\n", pr)
				}
			case verify.DigestMismatchInLock:
				fmt.Fprintf(bufptr, "%s: hash of vendored tree not equal to digest in Gopkg.lock\n", pr)
			case verify.EmptyDigestInLock:
				fmt.Fprintf(bufptr, "%s: no digest in Gopkg.lock to compare against hash of vendored tree\n", pr)
			case verify.HashVersionMismatch:
				// This will double-print if the hash version is zero, but
				// that's a rare case that really only occurs before the first
				// run with a version of dep >=0.5.0, so it's fine.
				fmt.Fprintf(bufptr, "%s: hash algorithm mismatch, want version %v\n", pr, verify.HashVersion)
			}
		}

		if vendorfail {
			logger.Print(vfbuf.String())
			if hasnoverify {
				logger.Println()
			}
		}
		if hasnoverify {
			logger.Print(novbuf.String())
		}
	}

	if fail {
		return silentfail{}
	}
	return nil
}
// sprintLockUnsat renders a human-readable report of every way in which the
// lock fails to satisfy the project's inputs: missing/excess input imports
// and projects violating overrides or constraints. Each section is sorted
// for deterministic output; the result has surrounding whitespace trimmed.
func sprintLockUnsat(lsat verify.LockSatisfaction) string {
	var b bytes.Buffer

	sort.Strings(lsat.MissingImports)
	for _, ip := range lsat.MissingImports {
		fmt.Fprintf(&b, "%s: imported or required, but missing from Gopkg.lock's input-imports\n", ip)
	}

	sort.Strings(lsat.ExcessImports)
	for _, ip := range lsat.ExcessImports {
		fmt.Fprintf(&b, "%s: in Gopkg.lock's input-imports, but neither imported nor required\n", ip)
	}

	// Map iteration order is random; collect and sort keys first.
	var keys []string
	for pr := range lsat.UnmetOverrides {
		keys = append(keys, string(pr))
	}
	sort.Strings(keys)
	for _, pr := range keys {
		mismatch := lsat.UnmetOverrides[gps.ProjectRoot(pr)]
		fmt.Fprintf(&b, "%s@%s: not allowed by override %s\n", pr, mismatch.V, mismatch.C)
	}

	keys = keys[:0]
	for pr := range lsat.UnmetConstraints {
		keys = append(keys, string(pr))
	}
	sort.Strings(keys)
	for _, pr := range keys {
		mismatch := lsat.UnmetConstraints[gps.ProjectRoot(pr)]
		fmt.Fprintf(&b, "%s@%s: not allowed by constraint %s\n", pr, mismatch.V, mismatch.C)
	}

	return strings.TrimSpace(b.String())
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/init.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"context"
"flag"
"log"
"os"
"path/filepath"
"time"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/fs"
"github.com/pkg/errors"
)
// initShortHelp is the one-line summary for 'dep init' shown in help listings.
const initShortHelp = `Set up a new Go project, or migrate an existing one`

// initLongHelp is the full help text for 'dep init'.
const initLongHelp = `
Initialize the project at filepath root by parsing its dependencies, writing
manifest and lock files, and vendoring the dependencies. If root isn't
specified, use the current directory.
When configuration for another dependency management tool is detected, it is
imported into the initial manifest and lock. Use the -skip-tools flag to
disable this behavior. The following external tools are supported:
glide, godep, vndr, govend, gb, gvt, govendor, glock.
Any dependencies that are not constrained by external configuration use the
GOPATH analysis below.
By default, the dependencies are resolved over the network. A version will be
selected from the versions available from the upstream source per the following
algorithm:
 - Tags conforming to semver (sorted by semver rules)
 - Default branch(es) (sorted lexicographically)
 - Non-semver tags (sorted lexicographically)
An alternate mode can be activated by passing -gopath. In this mode, the version
of each dependency will reflect the current state of the GOPATH. If a dependency
doesn't exist in the GOPATH, a version will be selected based on the above
network version selection algorithm.
A Gopkg.toml file will be written with inferred version constraints for all
direct dependencies. Gopkg.lock will be written with precise versions, and
vendor/ will be populated with the precise versions written to Gopkg.lock.
`
// Name returns the subcommand name used on the command line.
func (cmd *initCommand) Name() string { return "init" }

// Args describes the accepted positional arguments for usage output.
func (cmd *initCommand) Args() string { return "[root]" }

// ShortHelp returns the one-line description of the command.
func (cmd *initCommand) ShortHelp() string { return initShortHelp }

// LongHelp returns the full help text of the command.
func (cmd *initCommand) LongHelp() string { return initLongHelp }

// Hidden reports whether the command is omitted from help listings.
func (cmd *initCommand) Hidden() bool { return false }

// Register wires the command's flags into the given FlagSet.
func (cmd *initCommand) Register(fs *flag.FlagSet) {
	fs.BoolVar(&cmd.noExamples, "no-examples", false, "don't include example in Gopkg.toml")
	fs.BoolVar(&cmd.skipTools, "skip-tools", false, "skip importing configuration from other dependency managers")
	fs.BoolVar(&cmd.gopath, "gopath", false, "search in GOPATH for dependencies")
}
// initCommand implements 'dep init': bootstrap a project's manifest, lock,
// and vendor directory.
type initCommand struct {
	noExamples bool // -no-examples: omit the example stanza from Gopkg.toml
	skipTools  bool // -skip-tools: don't import config from other dependency managers
	gopath     bool // -gopath: prefer versions found in GOPATH when selecting
}
// Run executes 'dep init': establish the project root, import configuration
// from external tools (unless -skip-tools), optionally consult the GOPATH
// (-gopath), solve the dependency graph, and write Gopkg.toml, Gopkg.lock,
// and vendor/ to disk.
func (cmd *initCommand) Run(ctx *dep.Ctx, args []string) error {
	if len(args) > 1 {
		return errors.Errorf("too many args (%d)", len(args))
	}

	// Resolve the target root: default to the working directory, otherwise
	// absolutize the given path and make sure the directory exists.
	var root string
	if len(args) == 0 {
		root = ctx.WorkingDir
	} else {
		root = args[0]
		if !filepath.IsAbs(args[0]) {
			root = filepath.Join(ctx.WorkingDir, args[0])
		}
		if err := os.MkdirAll(root, os.FileMode(0777)); err != nil {
			return errors.Wrapf(err, "init failed: unable to create a directory at %s", root)
		}
	}

	p, err := cmd.establishProjectAt(root, ctx)
	if err != nil {
		return err
	}

	sm, err := ctx.SourceManager()
	if err != nil {
		return errors.Wrap(err, "init failed: unable to create a source manager")
	}
	sm.UseDefaultSignalHandling()
	defer sm.Release()

	if ctx.Verbose {
		ctx.Out.Println("Getting direct dependencies...")
	}

	directDeps, err := p.GetDirectDependencyNames(sm)
	if err != nil {
		return errors.Wrap(err, "init failed: unable to determine direct dependencies")
	}
	if ctx.Verbose {
		ctx.Out.Printf("Checked %d directories for packages.\nFound %d direct dependencies.\n", len(p.RootPackageTree.Packages), len(directDeps))
	}

	// Initialize with imported data, then fill in the gaps using the GOPATH
	rootAnalyzer := newRootAnalyzer(cmd.skipTools, ctx, directDeps, sm)
	p.Manifest, p.Lock, err = rootAnalyzer.InitializeRootManifestAndLock(root, p.ImportRoot)
	if err != nil {
		return errors.Wrap(err, "init failed: unable to prepare an initial manifest and lock for the solver")
	}

	// Set default prune options for go-tests and unused-packages
	p.Manifest.PruneOptions.DefaultOptions = gps.PruneNestedVendorDirs | gps.PruneGoTestFiles | gps.PruneUnusedPackages

	if cmd.gopath {
		gs := newGopathScanner(ctx, directDeps, sm)
		err = gs.InitializeRootManifestAndLock(p.Manifest, p.Lock)
		if err != nil {
			return errors.Wrap(err, "init failed: unable to scan the GOPATH for dependencies")
		}
	}

	rootAnalyzer.skipTools = importDuringSolve()

	copyLock := *p.Lock // Copy lock before solving. Use this to separate new lock projects from solved lock

	params := gps.SolveParameters{
		RootDir:         root,
		RootPackageTree: p.RootPackageTree,
		Manifest:        p.Manifest,
		Lock:            p.Lock,
		ProjectAnalyzer: rootAnalyzer,
	}

	if ctx.Verbose {
		params.TraceLogger = ctx.Err
	}

	if err := ctx.ValidateParams(sm, params); err != nil {
		return errors.Wrapf(err, "init failed: validation of solve parameters failed")
	}

	s, err := gps.Prepare(params, sm)
	if err != nil {
		return errors.Wrap(err, "init failed: unable to prepare the solver")
	}

	soln, err := s.Solve(context.TODO())
	if err != nil {
		err = handleAllTheFailuresOfTheWorld(err)
		return errors.Wrap(err, "init failed: unable to solve the dependency graph")
	}

	// Replace the pre-solve lock with the solver's result.
	p.Lock = dep.LockFromSolution(soln, p.Manifest.PruneOptions)

	rootAnalyzer.FinalizeRootManifestAndLock(p.Manifest, p.Lock, copyLock)

	// Pass timestamp (yyyyMMddHHmmss format) as suffix to backup name.
	vendorbak, err := dep.BackupVendor(filepath.Join(root, "vendor"), time.Now().Format("20060102150405"))
	if err != nil {
		return errors.Wrap(err, "init failed: first backup vendor/, delete it, and then retry the previous command: failed to backup existing vendor directory")
	}
	if vendorbak != "" {
		ctx.Err.Printf("Old vendor backed up to %v", vendorbak)
	}

	sw, err := dep.NewSafeWriter(p.Manifest, nil, p.Lock, dep.VendorAlways, p.Manifest.PruneOptions, nil)
	if err != nil {
		return errors.Wrap(err, "init failed: unable to create a SafeWriter")
	}

	var logger *log.Logger
	if ctx.Verbose {
		logger = ctx.Err
	}
	if err := sw.Write(root, sm, !cmd.noExamples, logger); err != nil {
		return errors.Wrap(err, "init failed: unable to write the manifest, lock and vendor directory to disk")
	}

	return nil
}
// establishProjectAt attempts to set up the provided path as the root for the
// project to be created.
//
// It checks for being within a GOPATH, that there is no pre-existing manifest
// and lock, and that we can successfully infer the root import path from
// GOPATH.
//
// If successful, it returns a dep.Project, ready for further use.
func (cmd *initCommand) establishProjectAt(root string, ctx *dep.Ctx) (*dep.Project, error) {
	var err error
	p := new(dep.Project)
	if err = p.SetRoot(root); err != nil {
		return nil, errors.Wrapf(err, "init failed: unable to set the root project to %s", root)
	}

	ctx.GOPATH, err = ctx.DetectProjectGOPATH(p)
	if err != nil {
		return nil, errors.Wrapf(err, "init failed: unable to detect the containing GOPATH")
	}

	mf := filepath.Join(root, dep.ManifestName)
	lf := filepath.Join(root, dep.LockName)

	// Refuse to init over an existing manifest or lock.
	mok, err := fs.IsRegular(mf)
	if err != nil {
		return nil, errors.Wrapf(err, "init failed: unable to check for an existing manifest at %s", mf)
	}
	if mok {
		return nil, errors.Errorf("init aborted: manifest already exists at %s", mf)
	}

	lok, err := fs.IsRegular(lf)
	if err != nil {
		return nil, errors.Wrapf(err, "init failed: unable to check for an existing lock at %s", lf)
	}
	if lok {
		// Fixed message: previously read "invalid aborted", inconsistent with
		// the "init aborted" wording used for the manifest case above.
		return nil, errors.Errorf("init aborted: lock already exists at %s", lf)
	}

	ip, err := ctx.ImportForAbs(root)
	if err != nil {
		return nil, errors.Wrapf(err, "init failed: unable to determine the import path for the root project %s", root)
	}
	p.ImportRoot = gps.ProjectRoot(ip)

	return p, nil
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/root_analyzer.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"context"
"io/ioutil"
"log"
"github.com/golang/dep"
"github.com/golang/dep/gps"
fb "github.com/golang/dep/internal/feedback"
"github.com/golang/dep/internal/importers"
"golang.org/x/sync/errgroup"
)
// rootAnalyzer supplies manifest/lock data from both dep and external tool's
// configuration files.
// * When used on the root project, it imports only from external tools.
// * When used by the solver for dependencies, it first looks for dep config,
//   then external tools.
type rootAnalyzer struct {
	skipTools  bool                     // when true, ignore external tool configs
	ctx        *dep.Ctx                 // command context (logging, verbosity)
	sm         gps.SourceManager        // used to fetch/cache dependency sources
	directDeps map[gps.ProjectRoot]bool // set of the root project's direct dependencies
}
// newRootAnalyzer constructs a rootAnalyzer over the given command context,
// direct-dependency set, and source manager.
func newRootAnalyzer(skipTools bool, ctx *dep.Ctx, directDeps map[gps.ProjectRoot]bool, sm gps.SourceManager) *rootAnalyzer {
	a := rootAnalyzer{
		ctx:        ctx,
		sm:         sm,
		skipTools:  skipTools,
		directDeps: directDeps,
	}
	return &a
}
// InitializeRootManifestAndLock builds the starting manifest and lock for the
// root project. Unless skipTools is set, it first tries to import data from
// external dependency managers; whatever remains nil afterward is replaced
// with an empty manifest/lock. When nothing was imported, it prefetches all
// direct dependencies to warm dep's cache.
func (a *rootAnalyzer) InitializeRootManifestAndLock(dir string, pr gps.ProjectRoot) (rootM *dep.Manifest, rootL *dep.Lock, err error) {
	if !a.skipTools {
		rootM, rootL = a.importManifestAndLock(dir, pr, false)
	}

	if rootM == nil {
		rootM = dep.NewManifest()

		// Since we didn't find anything to import, dep's cache is empty.
		// We are prefetching dependencies and logging so that the subsequent solve step
		// doesn't spend a long time retrieving dependencies without feedback for the user.
		if err := a.cacheDeps(pr); err != nil {
			return nil, nil, err
		}
	}
	if rootL == nil {
		rootL = &dep.Lock{}
	}

	// Naked return: rootM/rootL/err are the named results populated above.
	return
}
// cacheDeps prefetches the sources of all direct dependencies into dep's
// local cache, logging progress so the user gets feedback before the
// (potentially long) solve step. A failed sync aborts the whole prefetch and
// its error is returned.
func (a *rootAnalyzer) cacheDeps(pr gps.ProjectRoot) error {
	logger := a.ctx.Err
	// Keep the derived ctx: it is cancelled as soon as any worker fails,
	// which the producer uses to avoid blocking forever (see below).
	g, ctx := errgroup.WithContext(context.TODO())
	concurrency := 4

	// syncDep fetches a single project's source into the cache.
	syncDep := func(pr gps.ProjectRoot, sm gps.SourceManager) error {
		if err := sm.SyncSourceFor(gps.ProjectIdentifier{ProjectRoot: pr}); err != nil {
			logger.Printf("Unable to cache %s - %s", pr, err)
			return err
		}
		return nil
	}

	deps := make(chan gps.ProjectRoot)

	// Fixed-size worker pool draining the deps channel.
	for i := 0; i < concurrency; i++ {
		g.Go(func() error {
			for d := range deps {
				if err := syncDep(d, a.sm); err != nil {
					return err
				}
			}
			return nil
		})
	}

	// Producer: feed every direct dependency to the workers. Selecting on
	// ctx.Done fixes a latent deadlock — previously, if all workers had
	// exited with errors, the unbuffered send blocked forever and g.Wait
	// never returned.
	g.Go(func() error {
		defer close(deps)
		for pr := range a.directDeps {
			logger.Printf("Caching package %q", pr)
			select {
			case deps <- pr:
			case <-ctx.Done():
				return ctx.Err()
			}
		}
		return nil
	})

	if err := g.Wait(); err != nil {
		return err
	}
	logger.Printf("Successfully cached all deps.")
	return nil
}
// importManifestAndLock probes dir with every known external-tool importer
// and converts the first matching tool's configuration into a dep manifest
// and lock. Constraints on non-direct dependencies are stripped from the
// imported manifest. If no tool matches (or import fails unrecoverably), an
// empty manifest and a nil lock are returned. suppressLogs silences importer
// progress output.
func (a *rootAnalyzer) importManifestAndLock(dir string, pr gps.ProjectRoot, suppressLogs bool) (*dep.Manifest, *dep.Lock) {
	logger := a.ctx.Err
	if suppressLogs {
		logger = log.New(ioutil.Discard, "", 0)
	}

	for _, i := range importers.BuildAll(logger, a.ctx.Verbose, a.sm) {
		if i.HasDepMetadata(dir) {
			a.ctx.Err.Printf("Importing configuration from %s. These are only initial constraints, and are further refined during the solve process.", i.Name())
			m, l, err := i.Import(dir, pr)
			if err != nil {
				a.ctx.Err.Printf(
					"Warning: Encountered an unrecoverable error while trying to import %s config from %q: %s",
					i.Name(), dir, err,
				)
				// Fall through to the empty-manifest return below.
				break
			}
			a.removeTransitiveDependencies(m)
			return m, l
		}
	}

	var emptyManifest = dep.NewManifest()

	return emptyManifest, nil
}
// removeTransitiveDependencies deletes every constraint in m whose project
// is not among the root project's direct dependencies; the solver rederives
// transitive requirements itself.
func (a *rootAnalyzer) removeTransitiveDependencies(m *dep.Manifest) {
	for root := range m.Constraints {
		// Membership (not the stored bool) decides directness.
		if _, direct := a.directDeps[root]; direct {
			continue
		}
		delete(m.Constraints, root)
	}
}
// DeriveManifestAndLock evaluates a dependency for existing dependency manager
// configuration (ours or external) and passes any configuration found back
// to the solver.
func (a *rootAnalyzer) DeriveManifestAndLock(dir string, pr gps.ProjectRoot) (gps.Manifest, gps.Lock, error) {
	// Ignore other tools if we find dep configuration
	var depAnalyzer dep.Analyzer
	if depAnalyzer.HasDepMetadata(dir) {
		return depAnalyzer.DeriveManifestAndLock(dir, pr)
	}

	if !a.skipTools {
		// The assignment back to an interface prevents interface-based nil checks from failing later
		var manifest gps.Manifest = gps.SimpleManifest{}
		var lock gps.Lock
		im, il := a.importManifestAndLock(dir, pr, true)
		if im != nil {
			manifest = im
		}
		if il != nil {
			lock = il
		}
		return manifest, lock, nil
	}

	// No dep metadata and tools skipped: report an empty manifest, no lock.
	return gps.SimpleManifest{}, nil, nil
}
// FinalizeRootManifestAndLock reconciles the post-solve lock l with the
// pre-solve lock ol: it adds manifest constraints for newly locked direct
// dependencies that have none yet, and logs user feedback for broken imports,
// new constraints, and newly locked projects.
func (a *rootAnalyzer) FinalizeRootManifestAndLock(m *dep.Manifest, l *dep.Lock, ol dep.Lock) {
	// Iterate through the new projects in solved lock and add them to manifest
	// if they are direct deps and log feedback for all the new projects.
	diff := fb.DiffLocks(&ol, l)
	bi := fb.NewBrokenImportFeedback(diff)
	bi.LogFeedback(a.ctx.Err)
	for _, y := range l.Projects() {
		var f *fb.ConstraintFeedback
		pr := y.Ident().ProjectRoot
		// New constraints: in new lock and dir dep but not in manifest
		if _, ok := a.directDeps[pr]; ok {
			if _, ok := m.Constraints[pr]; !ok {
				pp := getProjectPropertiesFromVersion(y.Version())
				if pp.Constraint != nil {
					m.Constraints[pr] = pp
					pc := gps.ProjectConstraint{Ident: y.Ident(), Constraint: pp.Constraint}
					f = fb.NewConstraintFeedback(pc, fb.DepTypeDirect)
					f.LogFeedback(a.ctx.Err)
				}
				f = fb.NewLockedProjectFeedback(y, fb.DepTypeDirect)
				f.LogFeedback(a.ctx.Err)
			}
		} else {
			// New locked projects: in new lock but not in old lock
			newProject := true
			for _, opl := range ol.Projects() {
				if pr == opl.Ident().ProjectRoot {
					newProject = false
				}
			}
			if newProject {
				f = fb.NewLockedProjectFeedback(y, fb.DepTypeTransitive)
				f.LogFeedback(a.ctx.Err)
			}
		}
	}
}
// Info provides metadata on the analyzer algorithm used during solve.
func (a *rootAnalyzer) Info() gps.ProjectAnalyzerInfo {
	info := gps.ProjectAnalyzerInfo{
		Name:    "dep",
		Version: 1,
	}
	return info
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/ensure.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"context"
"flag"
"fmt"
"go/build"
"log"
"os"
"path/filepath"
"sort"
"strings"
"sync"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/paths"
"github.com/golang/dep/gps/pkgtree"
"github.com/golang/dep/gps/verify"
"github.com/pkg/errors"
)
const ensureShortHelp = `Ensure a dependency is safely vendored in the project`
// ensureLongHelp is the full help text printed for `dep help ensure`.
const ensureLongHelp = `
Project spec:
  <import path>[:alt source URL][@<constraint>]
Ensure gets a project into a complete, reproducible, and likely compilable state:
  * All imports are fulfilled
  * All rules in Gopkg.toml are respected
  * Gopkg.lock records immutable versions for all dependencies
  * vendor/ is populated according to Gopkg.lock
Ensure has fast techniques to determine that some of these steps may be
unnecessary. If that determination is made, ensure may skip some steps. Flags
may be passed to bypass these checks; -vendor-only will allow an out-of-date
Gopkg.lock to populate vendor/, and -no-vendor will update Gopkg.lock (if
needed), but never touch vendor/.
The effect of passing project spec arguments varies slightly depending on the
combination of flags that are passed.
Examples:
  dep ensure                                 Populate vendor from existing Gopkg.toml and Gopkg.lock
  dep ensure -add github.com/pkg/foo         Introduce a named dependency at its newest version
  dep ensure -add github.com/pkg/foo@^1.0.1  Introduce a named dependency with a particular constraint
For more detailed usage examples, see dep ensure -examples.
`
// ensureExamples is the extended usage text printed by `dep ensure -examples`.
// Fix: "strategically layering a Docker images" had a grammatical error
// (singular article before a plural noun).
const ensureExamples = `
dep ensure
    Solve the project's dependency graph, and place all dependencies in the
    vendor folder. If a dependency is in the lock file, use the version
    specified there. Otherwise, use the most recent version that can satisfy the
    constraints in the manifest file.
dep ensure -vendor-only
    Write vendor/ from an existing Gopkg.lock file, without first verifying that
    the lock is in sync with imports and Gopkg.toml. (This may be useful for
    e.g. strategically layering Docker images)
dep ensure -add github.com/pkg/foo github.com/pkg/foo/bar
    Introduce one or more dependencies, at their newest version, ensuring that
    specific packages are present in Gopkg.lock and vendor/. Also, append a
    corresponding constraint to Gopkg.toml.
    Note: packages introduced in this way will disappear on the next "dep
    ensure" if an import statement is not added first.
dep ensure -add github.com/pkg/foo/subpkg@1.0.0 bitbucket.org/pkg/bar/baz@master
    Append version constraints to Gopkg.toml for one or more packages, if no
    such rules already exist.
    If the named packages are not already imported, also ensure they are present
    in Gopkg.lock and vendor/. As in the preceding example, packages introduced
    in this way will disappear on the next "dep ensure" if an import statement
    is not added first.
dep ensure -add github.com/pkg/foo:git.internal.com/alt/foo
    Specify an alternate location to treat as the upstream source for a dependency.
dep ensure -update github.com/pkg/foo github.com/pkg/bar
    Update a list of dependencies to the latest versions allowed by Gopkg.toml,
    ignoring any versions recorded in Gopkg.lock. Write the results to
    Gopkg.lock and vendor/.
dep ensure -update
    Update all dependencies to the latest versions allowed by Gopkg.toml,
    ignoring any versions recorded in Gopkg.lock. Update the lock file with any
    changes. (NOTE: Not recommended. Updating one/some dependencies at a time is
    preferred.)
dep ensure -update -no-vendor
    As above, but only modify Gopkg.lock; leave vendor/ unchanged.
dep ensure -no-vendor -dry-run
    This fails with a non zero exit code if Gopkg.lock is not up to date with
    the Gopkg.toml or the project imports. It can be useful to run this during
    CI to check if Gopkg.lock is up to date.
`
// Sentinel errors returned after the detailed per-argument failures have
// already been printed to the user's terminal.
var (
	errUpdateArgsValidation = errors.New("update arguments validation failed")
	errAddDepsFailed        = errors.New("adding dependencies failed")
)
// Command-framework metadata: name, arg synopsis, help text, and visibility.
func (cmd *ensureCommand) Name() string { return "ensure" }
func (cmd *ensureCommand) Args() string {
	return "[-update | -add] [-no-vendor | -vendor-only] [-dry-run] [-v] [<spec>...]"
}
func (cmd *ensureCommand) ShortHelp() string { return ensureShortHelp }
func (cmd *ensureCommand) LongHelp() string  { return ensureLongHelp }
func (cmd *ensureCommand) Hidden() bool      { return false }
// Register binds all ensure command-line flags onto fs, storing their values
// in the receiver's fields. Mutually exclusive combinations are rejected
// later by validateFlags, not here.
func (cmd *ensureCommand) Register(fs *flag.FlagSet) {
	fs.BoolVar(&cmd.examples, "examples", false, "print detailed usage examples")
	fs.BoolVar(&cmd.update, "update", false, "update the named dependencies (or all, if none are named) in Gopkg.lock to the latest allowed by Gopkg.toml")
	fs.BoolVar(&cmd.add, "add", false, "add new dependencies, or populate Gopkg.toml with constraints for existing dependencies")
	fs.BoolVar(&cmd.vendorOnly, "vendor-only", false, "populate vendor/ from Gopkg.lock without updating it first")
	fs.BoolVar(&cmd.noVendor, "no-vendor", false, "update Gopkg.lock (if needed), but do not update vendor/")
	fs.BoolVar(&cmd.dryRun, "dry-run", false, "only report the changes that would be made")
}
// ensureCommand holds the flag state for `dep ensure`; see Register for the
// meaning of each flag.
type ensureCommand struct {
	examples   bool // print extended examples and exit
	update     bool // -update mode: refresh locked versions
	add        bool // -add mode: introduce new dependencies
	noVendor   bool // write Gopkg.lock only; leave vendor/ alone
	vendorOnly bool // write vendor/ from the existing lock only
	dryRun     bool // report planned changes without writing
}
// Run executes `dep ensure`: after shared validation and project/source
// setup it dispatches to the -vendor-only, -add, -update, or default flow.
func (cmd *ensureCommand) Run(ctx *dep.Ctx, args []string) error {
	// -examples short-circuits all other behavior.
	if cmd.examples {
		ctx.Err.Println(strings.TrimSpace(ensureExamples))
		return nil
	}
	if err := cmd.validateFlags(); err != nil {
		return err
	}
	p, err := ctx.LoadProject()
	if err != nil {
		return err
	}
	sm, err := ctx.SourceManager()
	if err != nil {
		return err
	}
	sm.UseDefaultSignalHandling()
	defer sm.Release()
	// Reject manifests whose constraint/override keys are not real roots.
	if err := dep.ValidateProjectRoots(ctx, p.Manifest, sm); err != nil {
		return err
	}
	params := p.MakeParams()
	if ctx.Verbose {
		params.TraceLogger = ctx.Err
	}
	// -vendor-only never solves, so it skips the package-tree checks below.
	if cmd.vendorOnly {
		return cmd.runVendorOnly(ctx, args, p, sm, params)
	}
	// Fatal package-tree problems abort; non-fatal ones are shown only in
	// verbose mode.
	if fatal, err := checkErrors(params.RootPackageTree.Packages, p.Manifest.IgnoredPackages()); err != nil {
		if fatal {
			return err
		} else if ctx.Verbose {
			ctx.Out.Println(err)
		}
	}
	if ineffs := p.FindIneffectualConstraints(sm); len(ineffs) > 0 {
		ctx.Err.Printf("Warning: the following project(s) have [[constraint]] stanzas in %s:\n\n", dep.ManifestName)
		for _, ineff := range ineffs {
			ctx.Err.Println(" ✗ ", ineff)
		}
		// TODO(sdboyer) lazy wording, it does not mention ignores at all
		ctx.Err.Printf("\nHowever, these projects are not direct dependencies of the current project:\n")
		ctx.Err.Printf("they are not imported in any .go files, nor are they in the 'required' list in\n")
		ctx.Err.Printf("%s. Dep only applies [[constraint]] rules to direct dependencies, so\n", dep.ManifestName)
		ctx.Err.Printf("these rules will have no effect.\n\n")
		ctx.Err.Printf("Either import/require packages from these projects so that they become direct\n")
		ctx.Err.Printf("dependencies, or convert each [[constraint]] to an [[override]] to enforce rules\n")
		ctx.Err.Printf("on these projects, if they happen to be transitive dependencies.\n\n")
	}
	// Kick off vendor verification in the background. All of the remaining
	// paths from here will need it, whether or not they end up solving.
	go p.VerifyVendor()
	if cmd.add {
		return cmd.runAdd(ctx, args, p, sm, params)
	} else if cmd.update {
		return cmd.runUpdate(ctx, args, p, sm, params)
	}
	return cmd.runDefault(ctx, args, p, sm, params)
}
// validateFlags rejects mutually exclusive flag combinations before any
// project loading happens.
func (cmd *ensureCommand) validateFlags() error {
	if cmd.add && cmd.update {
		return errors.New("cannot pass both -add and -update")
	}

	if !cmd.vendorOnly {
		return nil
	}

	// -vendor-only conflicts with every mode flag below.
	switch {
	case cmd.update:
		return errors.New("-vendor-only makes -update a no-op; cannot pass them together")
	case cmd.add:
		return errors.New("-vendor-only makes -add a no-op; cannot pass them together")
	case cmd.noVendor:
		// TODO(sdboyer) can't think of anything not snarky right now
		return errors.New("really?")
	}
	return nil
}
// vendorBehavior maps the -no-vendor flag onto the writer's vendor policy.
func (cmd *ensureCommand) vendorBehavior() dep.VendorBehavior {
	behavior := dep.VendorOnChanged
	if cmd.noVendor {
		behavior = dep.VendorNever
	}
	return behavior
}
// runDefault implements bare `dep ensure`: re-solve only when the existing
// lock no longer satisfies the manifest/imports, then write the lock and
// (unless -no-vendor) vendor/.
func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error {
	// Bare ensure doesn't take any args.
	if len(args) != 0 {
		return errors.New("dep ensure only takes spec arguments with -add or -update")
	}
	if err := ctx.ValidateParams(sm, params); err != nil {
		return err
	}
	var solve bool
	lock := p.ChangedLock
	if lock != nil {
		// A lock exists; check whether it still satisfies the inputs.
		lsat := verify.LockSatisfiesInputs(p.Lock, p.Manifest, params.RootPackageTree)
		if !lsat.Satisfied() {
			if ctx.Verbose {
				ctx.Out.Printf("# Gopkg.lock is out of sync with Gopkg.toml and project imports:\n%s\n\n", sprintLockUnsat(lsat))
			}
			solve = true
		} else if cmd.noVendor {
			// The user said not to touch vendor/, so definitely nothing to do.
			return nil
		}
	} else {
		solve = true
	}
	if solve {
		solver, err := gps.Prepare(params, sm)
		if err != nil {
			return errors.Wrap(err, "prepare solver")
		}
		solution, err := solver.Solve(context.TODO())
		if err != nil {
			return handleAllTheFailuresOfTheWorld(err)
		}
		lock = dep.LockFromSolution(solution, p.Manifest.PruneOptions)
	}
	dw, err := dep.NewDeltaWriter(p, lock, cmd.vendorBehavior())
	if err != nil {
		return err
	}
	// -dry-run prints what would be written instead of writing it.
	if cmd.dryRun {
		return dw.PrintPreparedActions(ctx.Out, ctx.Verbose)
	}
	var logger *log.Logger
	if ctx.Verbose {
		logger = ctx.Err
	}
	return errors.WithMessage(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor")
}
// runVendorOnly implements `dep ensure -vendor-only`: it repopulates vendor/
// straight from the existing Gopkg.lock without solving or verifying sync.
func (cmd *ensureCommand) runVendorOnly(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error {
	if len(args) != 0 {
		return errors.Errorf("dep ensure -vendor-only only populates vendor/ from %s; it takes no spec arguments", dep.LockName)
	}
	if p.Lock == nil {
		return errors.Errorf("no %s exists from which to populate vendor/", dep.LockName)
	}
	// Pass the same lock as old and new so that the writer will observe no
	// difference, and write out only necessary vendor/ changes.
	dw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorAlways, p.Manifest.PruneOptions, nil)
	//dw, err := dep.NewDeltaWriter(p.Lock, p.Lock, p.Manifest.PruneOptions, filepath.Join(p.AbsRoot, "vendor"), dep.VendorAlways)
	if err != nil {
		return err
	}
	// -dry-run prints what would be written instead of writing it.
	if cmd.dryRun {
		return dw.PrintPreparedActions(ctx.Out, ctx.Verbose)
	}
	var logger *log.Logger
	if ctx.Verbose {
		logger = ctx.Err
	}
	return errors.WithMessage(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor")
}
// runUpdate implements `dep ensure -update`: it re-solves with the named
// projects (or all, when none are named) freed from their locked versions,
// then writes the new lock and vendor/.
func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error {
	if p.Lock == nil {
		return errors.Errorf("-update works by updating the versions recorded in %s, but %s does not exist", dep.LockName, dep.LockName)
	}
	if err := ctx.ValidateParams(sm, params); err != nil {
		return err
	}
	// When -update is specified without args, allow every dependency to change
	// versions, regardless of the lock file.
	if len(args) == 0 {
		params.ChangeAll = true
	}
	// Validates args and appends them to params.ToChange.
	if err := validateUpdateArgs(ctx, args, p, sm, &params); err != nil {
		return err
	}
	// Re-prepare a solver now that our params are complete.
	solver, err := gps.Prepare(params, sm)
	if err != nil {
		return errors.Wrap(err, "fastpath solver prepare")
	}
	solution, err := solver.Solve(context.TODO())
	if err != nil {
		// TODO(sdboyer) special handling for warning cases as described in spec
		// - e.g., named projects did not upgrade even though newer versions
		// were available.
		return handleAllTheFailuresOfTheWorld(err)
	}
	dw, err := dep.NewDeltaWriter(p, dep.LockFromSolution(solution, p.Manifest.PruneOptions), cmd.vendorBehavior())
	if err != nil {
		return err
	}
	// -dry-run prints what would be written instead of writing it.
	if cmd.dryRun {
		return dw.PrintPreparedActions(ctx.Out, ctx.Verbose)
	}
	var logger *log.Logger
	if ctx.Verbose {
		logger = ctx.Err
	}
	return errors.Wrap(dw.Write(p.AbsRoot, sm, false, logger), "grouped write of manifest, lock and vendor")
}
// runAdd implements `dep ensure -add`. It concurrently validates the given
// project specs, merges the resulting constraints and temporary requires
// into the in-memory manifest, solves, writes lock/vendor, and finally
// appends newly derived constraint stanzas to Gopkg.toml on disk.
func (cmd *ensureCommand) runAdd(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params gps.SolveParameters) error {
	if len(args) == 0 {
		return errors.New("must specify at least one project or package to -add")
	}
	if err := ctx.ValidateParams(sm, params); err != nil {
		return err
	}
	// Compile unique sets of 1) all external packages imported or required, and
	// 2) the project roots under which they fall.
	exmap := make(map[string]bool)
	if p.ChangedLock != nil {
		for _, imp := range p.ChangedLock.InputImports() {
			exmap[imp] = true
		}
	} else {
		// We'll only hit this branch if Gopkg.lock did not exist.
		rm, _ := p.RootPackageTree.ToReachMap(true, true, false, p.Manifest.IgnoredPackages())
		for _, imp := range rm.FlattenFn(paths.IsStandardImportPath) {
			exmap[imp] = true
		}
		for imp := range p.Manifest.RequiredPackages() {
			exmap[imp] = true
		}
	}
	// Note: these flags are only partially used by the latter parts of the
	// algorithm; rather, it relies on inference. However, they remain in their
	// entirety as future needs may make further use of them, being a handy,
	// terse way of expressing the original context of the arg inputs.
	type addType uint8
	const (
		// Straightforward case - this induces a temporary require, and thus
		// a warning message about it being ephemeral.
		isInManifest addType = 1 << iota
		// If solving works, we'll pull this constraint from the in-memory
		// manifest (where we recorded it earlier) and then append it to the
		// manifest on disk.
		isInImportsWithConstraint
		// If solving works, we'll extract a constraint from the lock and
		// append it into the manifest on disk, similar to init's behavior.
		isInImportsNoConstraint
		// This gets a message AND a hoist from the solution up into the
		// manifest on disk.
		isInNeither
	)
	// addInstruction accumulates everything learned about one project root
	// across all args that referenced it.
	type addInstruction struct {
		id         gps.ProjectIdentifier
		ephReq     map[string]bool
		constraint gps.Constraint
		typ        addType
	}
	addInstructions := make(map[gps.ProjectRoot]addInstruction)
	// A mutex for limited access to addInstructions by goroutines.
	var mutex sync.Mutex
	// Channel for receiving all the errors.
	errCh := make(chan error, len(args))
	var wg sync.WaitGroup
	ctx.Out.Println("Fetching sources...")
	// Each arg is validated and classified in its own goroutine.
	for i, arg := range args {
		wg.Add(1)
		if ctx.Verbose {
			ctx.Err.Printf("(%d/%d) %s\n", i+1, len(args), arg)
		}
		go func(arg string) {
			defer wg.Done()
			pc, path, err := getProjectConstraint(arg, sm)
			if err != nil {
				// TODO(sdboyer) ensure these errors are contextualized in a sensible way for -add
				errCh <- err
				return
			}
			// check if the parsed path is the current root path
			if strings.EqualFold(string(p.ImportRoot), string(pc.Ident.ProjectRoot)) {
				errCh <- errors.New("cannot add current project to itself")
				return
			}
			inManifest := p.Manifest.HasConstraintsOn(pc.Ident.ProjectRoot)
			inImports := exmap[string(pc.Ident.ProjectRoot)]
			if inManifest && inImports {
				errCh <- errors.Errorf("nothing to -add, %s is already in %s and the project's direct imports or required list", pc.Ident.ProjectRoot, dep.ManifestName)
				return
			}
			err = sm.SyncSourceFor(pc.Ident)
			if err != nil {
				errCh <- errors.Wrapf(err, "failed to fetch source for %s", pc.Ident.ProjectRoot)
				return
			}
			// someConstraint: the arg carried a version rule or alt source.
			someConstraint := !gps.IsAny(pc.Constraint) || pc.Ident.Source != ""
			// Obtain a lock for addInstructions
			mutex.Lock()
			defer mutex.Unlock()
			instr, has := addInstructions[pc.Ident.ProjectRoot]
			if has {
				// Multiple packages from the same project were specified as
				// arguments; make sure they agree on declared constraints.
				// TODO(sdboyer) until we have a general method for checking constraint equality, only allow one to declare
				if someConstraint {
					if !gps.IsAny(instr.constraint) || instr.id.Source != "" {
						errCh <- errors.Errorf("can only specify rules once per project being added; rules were given at least twice for %s", pc.Ident.ProjectRoot)
						return
					}
					instr.constraint = pc.Constraint
					instr.id = pc.Ident
				}
			} else {
				instr.ephReq = make(map[string]bool)
				instr.constraint = pc.Constraint
				instr.id = pc.Ident
			}
			if inManifest {
				if someConstraint {
					errCh <- errors.Errorf("%s already contains rules for %s, cannot specify a version constraint or alternate source", dep.ManifestName, path)
					return
				}
				instr.ephReq[path] = true
				instr.typ |= isInManifest
			} else if inImports {
				if !someConstraint {
					if exmap[path] {
						errCh <- errors.Errorf("%s is already imported or required, so -add is only valid with a constraint", path)
						return
					}
					// No constraints, but the package isn't imported; require it.
					// TODO(sdboyer) this case seems like it's getting overly specific and risks muddying the water more than it helps
					instr.ephReq[path] = true
					instr.typ |= isInImportsNoConstraint
				} else {
					// Don't require on this branch if the path was a ProjectRoot;
					// most common here will be the user adding constraints to
					// something they already imported, and if they specify the
					// root, there's a good chance they don't actually want to
					// require the project's root package, but are just trying to
					// indicate which project should receive the constraints.
					if !exmap[path] && string(pc.Ident.ProjectRoot) != path {
						instr.ephReq[path] = true
					}
					instr.typ |= isInImportsWithConstraint
				}
			} else {
				instr.typ |= isInNeither
				instr.ephReq[path] = true
			}
			addInstructions[pc.Ident.ProjectRoot] = instr
		}(arg)
	}
	wg.Wait()
	close(errCh)
	// Newline after printing the fetching source output.
	ctx.Err.Println()
	// Log all the errors.
	if len(errCh) > 0 {
		ctx.Err.Printf("Failed to add the dependencies:\n\n")
		for err := range errCh {
			ctx.Err.Println("  ✗", err.Error())
		}
		ctx.Err.Println()
		return errAddDepsFailed
	}
	// We're now sure all of our add instructions are individually and mutually
	// valid, so it's safe to begin modifying the input parameters.
	for pr, instr := range addInstructions {
		// The arg processing logic above only adds to the ephReq list if
		// that package definitely needs to be on that list, so we don't
		// need to check instr.typ here - if it's in instr.ephReq, it
		// definitely needs to be added to the manifest's required list.
		for path := range instr.ephReq {
			p.Manifest.Required = append(p.Manifest.Required, path)
		}
		// Only two branches can possibly be adding rules, though the
		// isInNeither case may or may not have an empty constraint.
		if instr.typ&(isInNeither|isInImportsWithConstraint) != 0 {
			p.Manifest.Constraints[pr] = gps.ProjectProperties{
				Source:     instr.id.Source,
				Constraint: instr.constraint,
			}
		}
	}
	// Re-prepare a solver now that our params are complete.
	solver, err := gps.Prepare(params, sm)
	if err != nil {
		return errors.Wrap(err, "fastpath solver prepare")
	}
	solution, err := solver.Solve(context.TODO())
	if err != nil {
		// TODO(sdboyer) detect if the failure was specifically about some of the -add arguments
		return handleAllTheFailuresOfTheWorld(err)
	}
	// Prep post-actions and feedback from adds.
	var reqlist []string
	// appender holds only the constraint stanzas to append to Gopkg.toml.
	appender := dep.NewManifest()
	for pr, instr := range addInstructions {
		for path := range instr.ephReq {
			reqlist = append(reqlist, path)
		}
		if instr.typ&isInManifest == 0 {
			var pp gps.ProjectProperties
			var found bool
			for _, proj := range solution.Projects() {
				// We compare just ProjectRoot instead of the whole
				// ProjectIdentifier here because an empty source on the input side
				// could have been converted into a source by the solver.
				if proj.Ident().ProjectRoot == pr {
					found = true
					pp = getProjectPropertiesFromVersion(proj.Version())
					break
				}
			}
			if !found {
				panic(fmt.Sprintf("unreachable: solution did not contain -add argument %s, but solver did not fail", pr))
			}
			pp.Source = instr.id.Source
			if !gps.IsAny(instr.constraint) {
				pp.Constraint = instr.constraint
			}
			appender.Constraints[pr] = pp
		}
	}
	extra, err := appender.MarshalTOML()
	if err != nil {
		return errors.Wrap(err, "could not marshal manifest into TOML")
	}
	sort.Strings(reqlist)
	dw, err := dep.NewDeltaWriter(p, dep.LockFromSolution(solution, p.Manifest.PruneOptions), cmd.vendorBehavior())
	if err != nil {
		return err
	}
	// -dry-run prints what would be written instead of writing it.
	if cmd.dryRun {
		return dw.PrintPreparedActions(ctx.Out, ctx.Verbose)
	}
	var logger *log.Logger
	if ctx.Verbose {
		logger = ctx.Err
	}
	if err := errors.Wrap(dw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor"); err != nil {
		return err
	}
	// FIXME(sdboyer) manifest writes ABSOLUTELY need verification - follow up!
	f, err := os.OpenFile(filepath.Join(p.AbsRoot, dep.ManifestName), os.O_APPEND|os.O_WRONLY, 0666)
	if err != nil {
		return errors.Wrapf(err, "opening %s failed", dep.ManifestName)
	}
	if _, err := f.Write(extra); err != nil {
		f.Close()
		return errors.Wrapf(err, "writing to %s failed", dep.ManifestName)
	}
	// Tell the user which packages were temporarily required, with wording
	// that depends on count and on whether vendor/ was written.
	switch len(reqlist) {
	case 0:
		// nothing to tell the user
	case 1:
		if cmd.noVendor {
			ctx.Out.Printf("%q is not imported by your project, and has been temporarily added to %s.\n", reqlist[0], dep.LockName)
			ctx.Out.Printf("If you run \"dep ensure\" again before actually importing it, it will disappear from %s. Running \"dep ensure -vendor-only\" is safe, and will guarantee it is present in vendor/.", dep.LockName)
		} else {
			ctx.Out.Printf("%q is not imported by your project, and has been temporarily added to %s and vendor/.\n", reqlist[0], dep.LockName)
			ctx.Out.Printf("If you run \"dep ensure\" again before actually importing it, it will disappear from %s and vendor/.", dep.LockName)
		}
	default:
		if cmd.noVendor {
			ctx.Out.Printf("The following packages are not imported by your project, and have been temporarily added to %s:\n", dep.LockName)
			ctx.Out.Printf("\t%s\n", strings.Join(reqlist, "\n\t"))
			ctx.Out.Printf("If you run \"dep ensure\" again before actually importing them, they will disappear from %s. Running \"dep ensure -vendor-only\" is safe, and will guarantee they are present in vendor/.", dep.LockName)
		} else {
			ctx.Out.Printf("The following packages are not imported by your project, and have been temporarily added to %s and vendor/:\n", dep.LockName)
			ctx.Out.Printf("\t%s\n", strings.Join(reqlist, "\n\t"))
			ctx.Out.Printf("If you run \"dep ensure\" again before actually importing them, they will disappear from %s and vendor/.", dep.LockName)
		}
	}
	return errors.Wrapf(f.Close(), "closing %s", dep.ManifestName)
}
// getProjectConstraint parses a project spec of the form
// <path>[:alt source][@<constraint>], deduces the project root for the
// path, and infers a constraint from the version text. It returns the
// constraint, the parsed path, and any error.
func getProjectConstraint(arg string, sm gps.SourceManager) (gps.ProjectConstraint, string, error) {
	emptyPC := gps.ProjectConstraint{
		Constraint: gps.Any(), // default to any; avoids panics later
	}

	// An "@" suffix carries a version constraint; without one, any version
	// is acceptable.
	var versionStr string
	if at := strings.Index(arg, "@"); at > 0 {
		versionStr = arg[at+1:]
		arg = arg[:at]
	}

	// TODO: if we decide to keep equals.....

	// A ":" suffix names an alternate network location for the source.
	var source string
	if colon := strings.Index(arg, ":"); colon > 0 {
		source = arg[colon+1:]
		arg = arg[:colon]
	}

	pr, err := sm.DeduceProjectRoot(arg)
	if err != nil {
		return emptyPC, "", errors.Wrapf(err, "could not infer project root from dependency path: %s", arg) // this should go through to the user
	}

	pi := gps.ProjectIdentifier{ProjectRoot: pr, Source: source}
	c, err := sm.InferConstraint(versionStr, pi)
	if err != nil {
		return emptyPC, "", err
	}
	return gps.ProjectConstraint{Ident: pi, Constraint: c}, arg, nil
}
// checkErrors classifies the outcome of a package-tree walk. It returns
// fatal == true with an error when nothing in m is usable: no directory had
// Go code, every directory had a build error, or every directory had one or
// the other. When only some packages had errors, it returns fatal == false
// with the collected errors as a warning; ignored import paths are excluded
// from all tallies.
func checkErrors(m map[string]pkgtree.PackageOrErr, ignore *pkgtree.IgnoredRuleset) (fatal bool, err error) {
	var (
		noGoErrors    int
		pkgtreeErrors = make(pkgtreeErrs, 0, len(m))
	)
	for ip, poe := range m {
		// Ignored packages do not count toward any error tally.
		if ignore.IsIgnored(ip) {
			continue
		}
		if poe.Err != nil {
			switch poe.Err.(type) {
			case *build.NoGoError:
				noGoErrors++
			default:
				pkgtreeErrors = append(pkgtreeErrors, poe.Err)
			}
		}
	}
	// If pkgtree was empty or all dirs lacked any Go code, return an error.
	if len(m) == 0 || len(m) == noGoErrors {
		return true, errors.New("no dirs contained any Go code")
	}
	// If all dirs contained build errors, return an error.
	if len(m) == len(pkgtreeErrors) {
		return true, errors.New("all dirs contained build errors")
	}
	// If all directories either had no Go files or caused a build error, return an error.
	if len(m) == len(pkgtreeErrors)+noGoErrors {
		return true, pkgtreeErrors
	}
	// If m contained some errors, return a warning with those errors.
	if len(pkgtreeErrors) > 0 {
		return false, pkgtreeErrors
	}
	return false, nil
}
// pkgtreeErrs aggregates multiple package-tree errors into one error value.
type pkgtreeErrs []error

// Error joins every collected error into a single multi-line message.
func (e pkgtreeErrs) Error() string {
	var b strings.Builder
	fmt.Fprintf(&b, "found %d errors in the package tree:\n", len(e))
	for i, err := range e {
		if i > 0 {
			b.WriteString("\n")
		}
		b.WriteString(err.Error())
	}
	return b.String()
}
// validateUpdateArgs checks each `-update` argument concurrently: it must
// deduce to a project root, already be present in the lock, and carry no
// alternate source or version constraint. Valid args are appended to
// params.ToChange; any invalid args are printed and a sentinel error is
// returned.
func validateUpdateArgs(ctx *dep.Ctx, args []string, p *dep.Project, sm gps.SourceManager, params *gps.SolveParameters) error {
	// Channel for receiving all the valid arguments.
	argsCh := make(chan string, len(args))
	// Channel for receiving all the validation errors.
	errCh := make(chan error, len(args))
	var wg sync.WaitGroup
	// Allow any of specified project versions to change, regardless of the lock
	// file.
	for _, arg := range args {
		wg.Add(1)
		go func(arg string) {
			defer wg.Done()
			// Ensure the provided path has a deducible project root.
			pc, path, err := getProjectConstraint(arg, sm)
			if err != nil {
				// TODO(sdboyer) ensure these errors are contextualized in a sensible way for -update
				errCh <- err
				return
			}
			if path != string(pc.Ident.ProjectRoot) {
				// TODO(sdboyer): does this really merit an abortive error?
				errCh <- errors.Errorf("%s is not a project root, try %s instead", path, pc.Ident.ProjectRoot)
				return
			}
			if !p.Lock.HasProjectWithRoot(pc.Ident.ProjectRoot) {
				errCh <- errors.Errorf("%s is not present in %s, cannot -update it", pc.Ident.ProjectRoot, dep.LockName)
				return
			}
			if pc.Ident.Source != "" {
				errCh <- errors.Errorf("cannot specify alternate sources on -update (%s)", pc.Ident.Source)
				return
			}
			if !gps.IsAny(pc.Constraint) {
				// TODO(sdboyer) constraints should be allowed to allow solves that
				// target particular versions while remaining within declared constraints.
				errCh <- errors.Errorf("version constraint %s passed for %s, but -update follows constraints declared in %s, not CLI arguments", pc.Constraint, pc.Ident.ProjectRoot, dep.ManifestName)
				return
			}
			// Valid argument.
			argsCh <- arg
		}(arg)
	}
	wg.Wait()
	close(errCh)
	close(argsCh)
	// Log all the errors.
	if len(errCh) > 0 {
		ctx.Err.Printf("Invalid arguments passed to ensure -update:\n\n")
		for err := range errCh {
			ctx.Err.Println("  ✗", err.Error())
		}
		ctx.Err.Println()
		return errUpdateArgsValidation
	}
	// Add all the valid arguments to solve params.
	for arg := range argsCh {
		params.ToChange = append(params.ToChange, gps.ProjectRoot(arg))
	}
	return nil
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/feature_flags.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"fmt"
"strconv"
)
// Keys under which feature flags are registered in featureFlags.
const (
	flagImportDuringSolveKey = "ImportDuringSolve"
)
var (
	// flagImportDuringSolve holds the raw textual value for the
	// ImportDuringSolve flag, parsed by parseFeatureFlag below.
	// NOTE(review): presumably overridden at build time (e.g. via
	// -ldflags "-X"); confirm against the build scripts.
	flagImportDuringSolve = "false"
)
// featureFlags maps each feature-flag key to its parsed boolean value.
var featureFlags = map[string]bool{
	flagImportDuringSolveKey: parseFeatureFlag(flagImportDuringSolve),
}
// parseFeatureFlag interprets a flag's textual value as a boolean,
// treating anything strconv.ParseBool cannot parse as false.
func parseFeatureFlag(flag string) bool {
	if enabled, err := strconv.ParseBool(flag); err == nil {
		return enabled
	}
	return false
}
// readFeatureFlag looks up a feature flag by name, returning an error for
// flags that were never registered in featureFlags.
func readFeatureFlag(flag string) (bool, error) {
	flagValue, ok := featureFlags[flag]
	if !ok {
		return false, fmt.Errorf("undefined feature flag: %s", flag)
	}
	return flagValue, nil
}
// importDuringSolve reports whether the ImportDuringSolve feature flag is on.
func importDuringSolve() bool {
	enabled := featureFlags[flagImportDuringSolveKey]
	return enabled
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/graphviz.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"fmt"
"hash/fnv"
"sort"
"strings"
)
// graphviz accumulates project nodes and package subgraphs and renders them
// as DOT-format text via output.
type graphviz struct {
	// ps holds every node registered through createNode.
	ps []*gvnode
	// b accumulates the generated DOT text.
	b bytes.Buffer
	// h maps a project name to its stable node hash (see gvnode.hash).
	h map[string]uint32
	// clusters is a map of project name and subgraph object. This can be used
	// to refer the subgraph by project name.
	clusters map[string]*gvsubgraph
}
// gvnode is a single node in the dependency graph.
type gvnode struct {
	// project is the project (or package) import path; it keys the node.
	project string
	// version is the resolved version label; may be empty (see label).
	version string
	// children holds the import paths this node depends on.
	children []string
}
// byGvnode implements sort.Interface, ordering gvnodes by project name.
type byGvnode []gvnode
func (n byGvnode) Len() int { return len(n) }
func (n byGvnode) Swap(i, j int) { n[i], n[j] = n[j], n[i] }
func (n byGvnode) Less(i, j int) bool { return n[i].project < n[j].project }
// New returns a fresh graphviz accumulator with all collections initialized.
func (g graphviz) New() *graphviz {
	return &graphviz{
		ps:       make([]*gvnode, 0),
		h:        map[string]uint32{},
		clusters: map[string]*gvsubgraph{},
	}
}
// output renders the accumulated graph as DOT text and returns the buffer.
// With an empty project it emits a plain project-relations digraph; with a
// project name it emits a project-package graph where each project's
// packages are grouped into a labeled subgraph cluster.
func (g *graphviz) output(project string) bytes.Buffer {
	if project == "" {
		// Project relations graph.
		g.b.WriteString("digraph {\n\tnode [shape=box];")
		for _, gvp := range g.ps {
			// Create node string
			g.b.WriteString(fmt.Sprintf("\n\t%d [label=\"%s\"];", gvp.hash(), gvp.label()))
		}
		g.createProjectRelations()
	} else {
		// Project-Package relations graph.
		g.b.WriteString("digraph {\n\tnode [shape=box];\n\tcompound=true;\n\tedge [minlen=2];")
		// Declare all the nodes with labels.
		for _, gvp := range g.ps {
			g.b.WriteString(fmt.Sprintf("\n\t%d [label=\"%s\"];", gvp.hash(), gvp.label()))
		}
		// Sort the clusters for a consistent output.
		clusters := sortClusters(g.clusters)
		// Declare all the subgraphs with labels.
		for _, gsg := range clusters {
			g.b.WriteString(fmt.Sprintf("\n\tsubgraph cluster_%d {", gsg.index))
			g.b.WriteString(fmt.Sprintf("\n\t\tlabel = \"%s\";", gsg.project))
			// List the member package node IDs inside the cluster body.
			nhashes := []string{}
			for _, pkg := range gsg.packages {
				nhashes = append(nhashes, fmt.Sprint(g.h[pkg]))
			}
			g.b.WriteString(fmt.Sprintf("\n\t\t%s;", strings.Join(nhashes, " ")))
			g.b.WriteString("\n\t}")
		}
		g.createProjectPackageRelations(project, clusters)
	}
	g.b.WriteString("\n}\n")
	return g.b
}
// createProjectRelations emits one DOT edge for every project-to-project
// dependency, deduplicating so each edge appears at most once.
func (g *graphviz) createProjectRelations() {
	// Track emitted edges so no relation is written twice.
	seen := make(map[string]bool)

	for _, node := range g.ps {
		for _, imp := range node.children {
			for project, hash := range g.h {
				if !isPathPrefix(imp, project) {
					continue
				}
				edge := fmt.Sprintf("\n\t%d -> %d", g.h[node.project], hash)
				if !seen[edge] {
					g.b.WriteString(edge + ";")
					seen[edge] = true
				}
			}
		}
	}
}
// createProjectPackageRelations emits DOT edges from every node and cluster
// to the packages of the target project, redirecting edges that land on a
// cluster's root through lhead/ltail so graphviz draws them against the
// cluster boundary.
func (g *graphviz) createProjectPackageRelations(project string, clusters []*gvsubgraph) {
	// This function takes a child package/project, target project, subgraph meta, from
	// and to of the edge and write a relation.
	linkRelation := func(child, project string, meta []string, from, to uint32) {
		if child == project {
			// Check if it's a cluster.
			target, ok := g.clusters[project]
			if ok {
				// It's a cluster. Point to the Project Root. Use lhead.
				meta = append(meta, fmt.Sprintf("lhead=cluster_%d", target.index))
				// When the head points to a cluster root, use the first
				// node in the cluster as to.
				to = g.h[target.packages[0]]
			}
		}
		if len(meta) > 0 {
			g.b.WriteString(fmt.Sprintf("\n\t%d -> %d [%s];", from, to, strings.Join(meta, " ")))
		} else {
			g.b.WriteString(fmt.Sprintf("\n\t%d -> %d;", from, to))
		}
	}
	// Create relations from nodes.
	for _, node := range g.ps {
		for _, child := range node.children {
			// Only if it points to the target project, proceed further.
			if isPathPrefix(child, project) {
				meta := []string{}
				from := g.h[node.project]
				to := g.h[child]
				linkRelation(child, project, meta, from, to)
			}
		}
	}
	// Create relations from clusters.
	for _, cluster := range clusters {
		for _, child := range cluster.children {
			// Only if it points to the target project, proceed further.
			if isPathPrefix(child, project) {
				meta := []string{fmt.Sprintf("ltail=cluster_%d", cluster.index)}
				// When the tail is from a cluster, use the first node in the
				// cluster as from.
				from := g.h[cluster.packages[0]]
				to := g.h[child]
				linkRelation(child, project, meta, from, to)
			}
		}
	}
}
// createNode registers a single graph node for the given project at the
// given version, recording its child imports and caching its hash for
// later edge generation.
func (g *graphviz) createNode(project, version string, children []string) {
	node := &gvnode{
		project:  project,
		version:  version,
		children: children,
	}

	g.ps = append(g.ps, node)
	g.h[node.project] = node.hash()
}
// hash derives the node's dot-file identifier as the 32-bit FNV-1a hash
// of its project name.
func (dp gvnode) hash() uint32 {
	hasher := fnv.New32a()
	// hash.Hash writes are documented never to return an error.
	_, _ = hasher.Write([]byte(dp.project))
	return hasher.Sum32()
}
// label renders the node's display label: the project name, joined with
// the version on a second line (dot's "\n" escape) when one is set.
func (dp gvnode) label() string {
	if dp.version == "" {
		return dp.project
	}
	return dp.project + "\\n" + dp.version
}
// isPathPrefix reports whether pre is a path-tree prefix of path. A bare
// string-prefix match is not enough, since it would wrongly accept cases
// like:
//
//  github.com/sdboyer/foo
//  github.com/sdboyer/foobar/baz
//
// The prefix must therefore be followed either by the end of the input
// (the two are equal) or by a "/". (Import paths are defined to always
// use "/", not the OS-specific path separator.)
func isPathPrefix(path, pre string) bool {
	if !strings.HasPrefix(path, pre) {
		return false
	}
	rest := path[len(pre):]
	return rest == "" || rest[0] == '/'
}
// gvsubgraph is a graphviz subgraph with at least one node (package) in it.
type gvsubgraph struct {
	project  string   // Project root name of a project.
	packages []string // List of subpackages in the project, excluding the root package itself.
	index    int      // Index of the subgraph cluster (its creation order). This is used to refer the subgraph in the dot file.
	children []string // Dependencies of the project root package.
}
// hash derives the subgraph's dot-file identifier as the 32-bit FNV-1a
// hash of its project root name.
func (sg gvsubgraph) hash() uint32 {
	hasher := fnv.New32a()
	// hash.Hash writes are documented never to return an error.
	_, _ = hasher.Write([]byte(sg.project))
	return hasher.Sum32()
}
// createSubgraph renders a multi-package project as a graphviz cluster.
// It should only be used when a project has more than one package; a
// project whose sole package is its root is emitted as a plain node
// instead. Nodes are first created for every non-root package, then
// recorded together in a cluster keyed by the project root.
func (g *graphviz) createSubgraph(project string, packages map[string][]string) {
	// A project consisting of nothing but its root package needs no
	// cluster — just a single node.
	if rootOnly, ok := packages[project]; ok && len(packages) == 1 {
		g.createNode(project, "", rootOnly)
		return
	}

	// Sort the packages so node creation (and thus output) is stable.
	nodes := make([]gvnode, 0, len(packages))
	for name, children := range packages {
		nodes = append(nodes, gvnode{project: name, children: children})
	}
	sort.Sort(byGvnode(nodes))

	members := []string{}
	rootChildren := []string{}
	for _, n := range nodes {
		if n.project == project {
			// Fold the root package into the cluster itself rather than
			// giving it a separate node.
			rootChildren = append(rootChildren, n.children...)
			continue
		}
		g.createNode(n.project, "", n.children)
		members = append(members, n.project)
	}

	sg := &gvsubgraph{
		project:  project,
		packages: members,
		index:    len(g.clusters),
		children: rootChildren,
	}
	g.h[project] = sg.hash()
	g.clusters[project] = sg
}
// sortClusters flattens the given map of clusters into a slice of
// subgraphs sorted by cluster index, yielding a deterministic order for
// output generation. (The previous comment misnamed the function and
// claimed it returned cluster names; it returns the subgraphs.)
func sortClusters(clusters map[string]*gvsubgraph) []*gvsubgraph {
	// Preallocate: the result always holds exactly one entry per cluster.
	result := make([]*gvsubgraph, 0, len(clusters))
	for _, cluster := range clusters {
		result = append(result, cluster)
	}
	sort.Slice(result, func(i, j int) bool {
		return result[i].index < result[j].index
	})
	return result
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/version.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"flag"
"runtime"
"github.com/golang/dep"
)
// Build metadata reported by `dep version`. version defaults to "devel";
// buildDate and commitHash are empty unless set externally — presumably
// injected at build time via -ldflags; TODO confirm against the build scripts.
var (
	version    = "devel"
	buildDate  string
	commitHash string
)
// versionHelp is the help text shown for the version subcommand.
const versionHelp = `Show the dep version information`

// Name returns the subcommand name as used on the command line.
func (cmd *versionCommand) Name() string { return "version" }

// Args returns the argument usage string; version takes no arguments.
func (cmd *versionCommand) Args() string {
	return ""
}

// ShortHelp returns the one-line description of the subcommand.
func (cmd *versionCommand) ShortHelp() string { return versionHelp }

// LongHelp returns the full description; identical to the short help here.
func (cmd *versionCommand) LongHelp() string { return versionHelp }

// Hidden reports whether the subcommand is hidden from help output.
func (cmd *versionCommand) Hidden() bool { return false }

// Register adds the subcommand's flags; version defines none.
func (cmd *versionCommand) Register(fs *flag.FlagSet) {}

// versionCommand implements the `dep version` subcommand. It is stateless.
type versionCommand struct{}
// Run prints the dep version information: version string, build date,
// commit hash, Go runtime/compiler details, target platform, and the
// ImportDuringSolve feature flag. It always succeeds.
func (cmd *versionCommand) Run(ctx *dep.Ctx, args []string) error {
	ctx.Out.Printf(`dep:
 version     : %s
 build date  : %s
 git hash    : %s
 go version  : %s
 go compiler : %s
 platform    : %s/%s
 features    : ImportDuringSolve=%v
`, version, buildDate, commitHash,
		runtime.Version(), runtime.Compiler, runtime.GOOS, runtime.GOARCH,
		importDuringSolve())
	return nil
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/gopath_scanner.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"fmt"
"os"
"path/filepath"
"strings"
"sync"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/paths"
"github.com/golang/dep/gps/pkgtree"
fb "github.com/golang/dep/internal/feedback"
"github.com/golang/dep/internal/fs"
"github.com/pkg/errors"
)
// gopathScanner supplies manifest/lock data by scanning the contents of GOPATH
// It uses its results to fill-in any missing details left by the rootAnalyzer.
type gopathScanner struct {
	ctx        *dep.Ctx
	directDeps map[gps.ProjectRoot]bool // project roots of the direct dependencies
	sm         gps.SourceManager
	pd         projectData   // results of the GOPATH scan
	origM      *dep.Manifest // manifest synthesized from the scan results
	origL      *dep.Lock     // lock synthesized from the scan results
}
// newGopathScanner returns a gopathScanner operating on the given context,
// set of direct dependency roots, and source manager.
func newGopathScanner(ctx *dep.Ctx, directDeps map[gps.ProjectRoot]bool, sm gps.SourceManager) *gopathScanner {
	gs := &gopathScanner{ctx: ctx, directDeps: directDeps, sm: sm}
	return gs
}
// InitializeRootManifestAndLock performs analysis of the filesystem tree rooted
// at path, with the root import path importRoot, to determine the project's
// constraints. Respect any initial constraints defined in the root manifest and
// lock.
func (g *gopathScanner) InitializeRootManifestAndLock(rootM *dep.Manifest, rootL *dep.Lock) error {
	var err error

	g.ctx.Err.Println("Searching GOPATH for projects...")
	g.pd, err = g.scanGopathForDependencies()
	if err != nil {
		return err
	}

	// Build an in-memory manifest and lock from the scan results; they are
	// merged into the caller's rootM/rootL by overlay below.
	g.origM = dep.NewManifest()
	g.origM.Constraints = g.pd.constraints

	g.origL = &dep.Lock{
		P: make([]gps.LockedProject, 0, len(g.pd.ondisk)),
	}

	for pr, v := range g.pd.ondisk {
		// That we have to chop off these path prefixes is a symptom of
		// a problem in gps itself
		pkgs := make([]string, 0, len(g.pd.dependencies[pr]))
		prslash := string(pr) + "/"
		for _, pkg := range g.pd.dependencies[pr] {
			if pkg == string(pr) {
				// The project root itself is recorded as ".".
				pkgs = append(pkgs, ".")
			} else {
				pkgs = append(pkgs, trimPathPrefix(pkg, prslash))
			}
		}

		g.origL.P = append(g.origL.P, gps.NewLockedProject(
			gps.ProjectIdentifier{ProjectRoot: pr}, v, pkgs),
		)
	}

	g.overlay(rootM, rootL)

	return nil
}
// Fill in gaps in the root manifest/lock with data found from the GOPATH.
// Existing entries in rootM/rootL always win; GOPATH-derived entries are
// only added where the root is silent, with feedback logged per addition.
func (g *gopathScanner) overlay(rootM *dep.Manifest, rootL *dep.Lock) {
	// Add any GOPATH-derived constraint that the root manifest lacks.
	for pkg, prj := range g.origM.Constraints {
		if _, has := rootM.Constraints[pkg]; has {
			continue
		}
		rootM.Constraints[pkg] = prj
		v := g.pd.ondisk[pkg]

		pi := gps.ProjectIdentifier{ProjectRoot: pkg, Source: prj.Source}
		f := fb.NewConstraintFeedback(gps.ProjectConstraint{Ident: pi, Constraint: v}, fb.DepTypeDirect)
		f.LogFeedback(g.ctx.Err)
		f = fb.NewLockedProjectFeedback(gps.NewLockedProject(pi, v, nil), fb.DepTypeDirect)
		f.LogFeedback(g.ctx.Err)
	}

	// Keep track of which projects have been locked
	lockedProjects := map[gps.ProjectRoot]bool{}
	for _, lp := range rootL.P {
		lockedProjects[lp.Ident().ProjectRoot] = true
	}

	// Append GOPATH-derived locked projects that the root lock lacks.
	for _, lp := range g.origL.P {
		pkg := lp.Ident().ProjectRoot
		if _, isLocked := lockedProjects[pkg]; isLocked {
			continue
		}
		rootL.P = append(rootL.P, lp)
		lockedProjects[pkg] = true

		// Log lock feedback only for transitive dependencies.
		if _, isDirect := g.directDeps[pkg]; !isDirect {
			f := fb.NewLockedProjectFeedback(lp, fb.DepTypeTransitive)
			f.LogFeedback(g.ctx.Err)
		}
	}

	// Identify projects whose version is unknown and will have to be solved for
	var missing []string    // all project roots missing from GOPATH
	var missingVCS []string // all project roots missing VCS information
	for pr := range g.pd.notondisk {
		if _, isLocked := lockedProjects[pr]; isLocked {
			continue
		}
		if g.pd.invalidSVC[pr] {
			missingVCS = append(missingVCS, string(pr))
		} else {
			missing = append(missing, string(pr))
		}
	}

	missingStr := ""
	missingVCSStr := ""
	if len(missing) > 0 {
		missingStr = fmt.Sprintf("The following dependencies were not found in GOPATH:\n %s\n\n",
			strings.Join(missing, "\n "))
	}
	if len(missingVCS) > 0 {
		missingVCSStr = fmt.Sprintf("The following dependencies found in GOPATH were missing VCS information (a remote source is required):\n %s\n\n",
			strings.Join(missingVCS, "\n "))
	}
	if len(missingVCS)+len(missing) > 0 {
		g.ctx.Err.Printf("\n%s%sThe most recent version of these projects will be used.\n\n", missingStr, missingVCSStr)
	}
}
// trimPathPrefix returns p1 with the filepath prefix p2 removed. If p2 is
// not a filepath prefix of p1, p1 is returned unchanged.
func trimPathPrefix(p1, p2 string) string {
	isPrefix, _ := fs.HasFilepathPrefix(p1, p2)
	if !isPrefix {
		return p1
	}
	return p1[len(p2):]
}
// contains reports whether the slice a includes the string b.
func contains(a []string, b string) bool {
	for i := 0; i < len(a); i++ {
		if a[i] == b {
			return true
		}
	}
	return false
}
// getProjectPropertiesFromVersion derives a ProjectProperties whose
// Constraint corresponds to the provided version. A bare revision yields
// empty properties, since a revision alone carries no constraint.
func getProjectPropertiesFromVersion(v gps.Version) gps.ProjectProperties {
	var pp gps.ProjectProperties

	// Reduce a paired version to its unpaired form; bail out entirely on
	// a plain revision.
	switch tv := v.(type) {
	case gps.PairedVersion:
		v = tv.Unpair()
	case gps.Revision:
		return pp
	}

	switch v.Type() {
	case gps.IsBranch, gps.IsVersion:
		pp.Constraint = v
	case gps.IsSemver:
		c, err := gps.NewSemverConstraintIC(v.String())
		if err != nil {
			// An unparseable semver string here indicates a programmer
			// error; there is no caller-facing recovery.
			panic(err)
		}
		pp.Constraint = c
	}

	return pp
}
// projectData holds the accumulated results of scanning GOPATH for the
// project's dependencies.
type projectData struct {
	constraints  gps.ProjectConstraints          // constraints that could be found
	dependencies map[gps.ProjectRoot][]string    // all dependencies (imports) found by project root
	notondisk    map[gps.ProjectRoot]bool        // projects that were not found on disk
	invalidSVC   map[gps.ProjectRoot]bool        // projects that were found on disk but VCS data could not be read
	ondisk       map[gps.ProjectRoot]gps.Version // projects that were found on disk
}
// scanGopathForDependencies scans GOPATH for all direct dependencies and
// their transitive imports, recording for each project root whether it is
// present on disk, its VCS-derived version (when readable), and any
// inferred constraints. Source-manager cache warming happens concurrently
// in the background and is awaited before returning.
func (g *gopathScanner) scanGopathForDependencies() (projectData, error) {
	constraints := make(gps.ProjectConstraints)
	dependencies := make(map[gps.ProjectRoot][]string)
	packages := make(map[string]bool)
	notondisk := make(map[gps.ProjectRoot]bool)
	invalidSVC := make(map[gps.ProjectRoot]bool)
	ondisk := make(map[gps.ProjectRoot]gps.Version)

	var syncDepGroup sync.WaitGroup
	// syncDep warms the source manager's cache for one project root in the
	// background; failures are logged but do not abort the scan.
	syncDep := func(pr gps.ProjectRoot, sm gps.SourceManager) {
		if err := sm.SyncSourceFor(gps.ProjectIdentifier{ProjectRoot: pr}); err != nil {
			g.ctx.Err.Printf("%+v", errors.Wrapf(err, "Unable to cache %s", pr))
		}
		syncDepGroup.Done()
	}

	if len(g.directDeps) == 0 {
		return projectData{}, nil
	}

	// First pass: classify each direct dependency as on-disk (with a
	// version), on-disk-but-unreadable-VCS, or missing.
	for ippr := range g.directDeps {
		// TODO(sdboyer) these are not import paths by this point, they've
		// already been worked down to project roots.
		ip := string(ippr)
		pr, err := g.sm.DeduceProjectRoot(ip)
		if err != nil {
			return projectData{}, errors.Wrap(err, "sm.DeduceProjectRoot")
		}

		packages[ip] = true
		if _, has := dependencies[pr]; has {
			dependencies[pr] = append(dependencies[pr], ip)
			continue
		}
		syncDepGroup.Add(1)
		go syncDep(pr, g.sm)

		dependencies[pr] = []string{ip}
		abs, err := g.ctx.AbsForImport(string(pr))
		if err != nil {
			notondisk[pr] = true
			continue
		}
		v, err := gps.VCSVersion(abs)
		if err != nil {
			// On disk but VCS data unreadable: record both conditions so
			// the caller can report them distinctly.
			invalidSVC[pr] = true
			notondisk[pr] = true
			continue
		}
		ondisk[pr] = v
		pp := getProjectPropertiesFromVersion(v)
		if pp.Constraint != nil || pp.Source != "" {
			constraints[pr] = pp
		}
	}

	// Explore the packages we've found for transitive deps, either
	// completing the lock or identifying (more) missing projects that we'll
	// need to ask gps to solve for us.
	colors := make(map[string]uint8)
	const (
		white uint8 = iota // not yet visited
		grey               // on the current DFS stack (cycle sentinel)
		black              // fully processed
	)

	// cache of PackageTrees, so we don't parse projects more than once
	ptrees := make(map[gps.ProjectRoot]pkgtree.PackageTree)

	// depth-first traverser
	var dft func(string) error
	dft = func(pkg string) error {
		switch colors[pkg] {
		case white:
			colors[pkg] = grey

			pr, err := g.sm.DeduceProjectRoot(pkg)
			if err != nil {
				return errors.Wrap(err, "could not deduce project root for "+pkg)
			}

			// We already visited this project root earlier via some other
			// pkg within it, and made the decision that it's not on disk.
			// Respect that decision, and pop the stack.
			if notondisk[pr] {
				colors[pkg] = black
				return nil
			}

			ptree, has := ptrees[pr]
			if !has {
				// It's fine if the root does not exist - it indicates that this
				// project is not present in the workspace, and so we need to
				// solve to deal with this dep.
				r := filepath.Join(g.ctx.GOPATH, "src", string(pr))
				fi, err := os.Stat(r)
				// NOTE(review): if Stat fails with an error other than
				// "not exist" (e.g. permissions), fi is nil and fi.IsDir()
				// would panic — TODO confirm whether that case can occur here.
				if os.IsNotExist(err) || !fi.IsDir() {
					colors[pkg] = black
					notondisk[pr] = true
					return nil
				}

				// We know the project is on disk; the question is whether we're
				// first seeing it here, in the transitive exploration, or if it
				// was found in the initial pass on direct imports. We know it's
				// the former if there's no entry for it in the ondisk map.
				if _, in := ondisk[pr]; !in {
					abs, err := g.ctx.AbsForImport(string(pr))
					if err != nil {
						colors[pkg] = black
						notondisk[pr] = true
						return nil
					}
					v, err := gps.VCSVersion(abs)
					if err != nil {
						// Even if we know it's on disk, errors are still
						// possible when trying to deduce version. If we
						// encounter such an error, just treat the project as
						// not being on disk; the solver will work it out.
						colors[pkg] = black
						notondisk[pr] = true
						return nil
					}
					ondisk[pr] = v
				}

				ptree, err = pkgtree.ListPackages(r, string(pr))
				if err != nil {
					// Any error here other than an a nonexistent dir (which
					// can't happen because we covered that case above) is
					// probably critical, so bail out.
					return errors.Wrap(err, "gps.ListPackages")
				}
				ptrees[pr] = ptree
			}

			// Get a reachmap that includes main pkgs (even though importing
			// them is an error, what we're checking right now is simply whether
			// there's a package with go code present on disk), and does not
			// backpropagate errors (again, because our only concern right now
			// is package existence).
			rm, errmap := ptree.ToReachMap(true, false, false, nil)
			reached, ok := rm[pkg]
			if !ok {
				colors[pkg] = black
				// not on disk...
				notondisk[pr] = true
				return nil
			}
			if _, ok := errmap[pkg]; ok {
				// The package is on disk, but contains some errors.
				colors[pkg] = black
				return nil
			}

			if deps, has := dependencies[pr]; has {
				if !contains(deps, pkg) {
					dependencies[pr] = append(deps, pkg)
				}
			} else {
				dependencies[pr] = []string{pkg}
				syncDepGroup.Add(1)
				go syncDep(pr, g.sm)
			}

			// recurse
			for _, rpkg := range reached.External {
				if paths.IsStandardImportPath(rpkg) {
					continue
				}

				err := dft(rpkg)
				if err != nil {
					// Bubble up any errors we encounter
					return err
				}
			}

			colors[pkg] = black
		case grey:
			return errors.Errorf("Import cycle detected on %s", pkg)
		}
		return nil
	}

	// run the depth-first traversal from the set of immediate external
	// package imports we found in the current project
	for pkg := range packages {
		err := dft(pkg)
		if err != nil {
			return projectData{}, err // already errors.Wrap()'d internally
		}
	}

	// Wait for all background cache-warming goroutines before returning.
	syncDepGroup.Wait()

	pd := projectData{
		constraints:  constraints,
		dependencies: dependencies,
		invalidSVC:   invalidSVC,
		notondisk:    notondisk,
		ondisk:       ondisk,
	}
	return pd, nil
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/doc.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// DO NOT EDIT THIS FILE. GENERATED BY mkdoc.sh.
// Edit the documentation in other files and rerun mkdoc.sh to generate this one.
// Dep is a tool for managing dependencies for Go projects
//
// Usage: "dep [command]"
//
// Commands:
//
// init Initialize a new project with manifest and lock files
// status Report the status of the project's dependencies
// ensure Ensure a dependency is safely vendored in the project
// prune Prune the vendor tree of unused packages
// version Show the dep version information
//
// Examples:
// dep init set up a new project
// dep ensure install the project's dependencies
// dep ensure -update update the locked versions of all dependencies
// dep ensure -add github.com/pkg/errors add a dependency to the project
//
// Use "dep help [command]" for more information about a command.
//
// Initialize a new project with manifest and lock files
//
// Usage:
//
// init [root]
//
// Initialize the project at filepath root by parsing its dependencies, writing
// manifest and lock files, and vendoring the dependencies. If root isn't
// specified, use the current directory.
//
// When configuration for another dependency management tool is detected, it is
// imported into the initial manifest and lock. Use the -skip-tools flag to
// disable this behavior. The following external tools are supported:
// glide, godep, vndr, govend, gb, gvt, glock.
//
// Any dependencies that are not constrained by external configuration use the
// GOPATH analysis below.
//
// By default, the dependencies are resolved over the network. A version will be
// selected from the versions available from the upstream source per the following
// algorithm:
//
// - Tags conforming to semver (sorted by semver rules)
// - Default branch(es) (sorted lexicographically)
// - Non-semver tags (sorted lexicographically)
//
// An alternate mode can be activated by passing -gopath. In this mode, the version
// of each dependency will reflect the current state of the GOPATH. If a dependency
// doesn't exist in the GOPATH, a version will be selected based on the above
// network version selection algorithm.
//
// A Gopkg.toml file will be written with inferred version constraints for all
// direct dependencies. Gopkg.lock will be written with precise versions, and
// vendor/ will be populated with the precise versions written to Gopkg.lock.
//
//
// Report the status of the project's dependencies
//
// Usage:
//
// status [package...]
//
// With no arguments, print the status of each dependency of the project.
//
// PROJECT Import path
// CONSTRAINT Version constraint, from the manifest
// VERSION Version chosen, from the lock
// REVISION VCS revision of the chosen version
// LATEST Latest VCS revision available
// PKGS USED Number of packages from this project that are actually used
//
// With one or more explicitly specified packages, or with the -detailed flag,
// print an extended status output for each dependency of the project.
//
// TODO Another column description
// FOOBAR Another column description
//
// Status returns exit code zero if all dependencies are in a "good state".
//
//
// Ensure a dependency is safely vendored in the project
//
// Usage:
//
// ensure [-update | -add] [-no-vendor | -vendor-only] [-dry-run] [<spec>...]
//
// Project spec:
//
// <import path>[:alt source URL][@<constraint>]
//
//
// Ensure gets a project into a complete, reproducible, and likely compilable state:
//
// * All non-stdlib imports are fulfilled
// * All rules in Gopkg.toml are respected
// * Gopkg.lock records precise versions for all dependencies
// * vendor/ is populated according to Gopkg.lock
//
// Ensure has fast techniques to determine that some of these steps may be
// unnecessary. If that determination is made, ensure may skip some steps. Flags
// may be passed to bypass these checks; -vendor-only will allow an out-of-date
// Gopkg.lock to populate vendor/, and -no-vendor will update Gopkg.lock (if
// needed), but never touch vendor/.
//
// The effect of passing project spec arguments varies slightly depending on the
// combination of flags that are passed.
//
//
// Examples:
//
// dep ensure Populate vendor from existing Gopkg.toml and Gopkg.lock
// dep ensure -add github.com/pkg/foo Introduce a named dependency at its newest version
// dep ensure -add github.com/pkg/foo@^1.0.1 Introduce a named dependency with a particular constraint
//
// For more detailed usage examples, see dep ensure -examples.
//
//
// Prune the vendor tree of unused packages
//
// Usage:
//
// prune
//
// Prune is used to remove unused packages from your vendor tree.
//
// STABILITY NOTICE: this command creates problems for vendor/ verification. As
// such, it may be removed and/or moved out into a separate project later on.
//
//
// Show the dep version information
//
// Usage:
//
// version
//
package main
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/graphviz_test.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"reflect"
"testing"
"github.com/golang/dep/internal/test"
)
// TestEmptyProject verifies the dot output for a graph with no nodes
// against the empty golden file.
func TestEmptyProject(t *testing.T) {
	h := test.NewHelper(t)
	h.Parallel()
	defer h.Cleanup()

	b := new(graphviz).New().output("")
	want := h.GetTestFileString("graphviz/empty.dot")
	if got := b.String(); got != want {
		t.Fatalf("expected '%v', got '%v'", want, got)
	}
}
// TestSimpleProject verifies the dot output for a small graph of three
// interlinked nodes against its golden file.
func TestSimpleProject(t *testing.T) {
	h := test.NewHelper(t)
	h.Parallel()
	defer h.Cleanup()

	g := new(graphviz).New()
	g.createNode("project", "", []string{"foo", "bar"})
	g.createNode("foo", "master", []string{"bar"})
	g.createNode("bar", "dev", []string{})

	b := g.output("")
	want := h.GetTestFileString("graphviz/case1.dot")
	if got := b.String(); got != want {
		t.Fatalf("expected '%v', got '%v'", want, got)
	}
}
// TestNoLinks verifies the dot output for a graph containing a single
// node with no edges.
func TestNoLinks(t *testing.T) {
	h := test.NewHelper(t)
	h.Parallel()
	defer h.Cleanup()

	g := new(graphviz).New()
	g.createNode("project", "", []string{})

	b := g.output("")
	want := h.GetTestFileString("graphviz/case2.dot")
	if got := b.String(); got != want {
		t.Fatalf("expected '%v', got '%v'", want, got)
	}
}
// TestIsPathPrefix exercises isPathPrefix with matching subpackages,
// lookalike prefixes (foo vs foobar), and entirely different roots.
func TestIsPathPrefix(t *testing.T) {
	t.Parallel()

	cases := []struct {
		path, pre string
		want      bool
	}{
		{"github.com/sdboyer/foo/bar", "github.com/sdboyer/foo", true},
		{"github.com/sdboyer/foobar", "github.com/sdboyer/foo", false},
		{"github.com/sdboyer/bar/foo", "github.com/sdboyer/foo", false},
		{"golang.org/sdboyer/bar/foo", "github.com/sdboyer/foo", false},
		{"golang.org/sdboyer/FOO", "github.com/sdboyer/foo", false},
	}

	for _, c := range cases {
		if got := isPathPrefix(c.path, c.pre); got != c.want {
			t.Fatalf("expected '%v', got '%v'", c.want, got)
		}
	}
}
// TestSimpleSubgraphs drives createSubgraph and output end to end over
// several multi-project fixtures, comparing the dot output rendered for a
// chosen target project against golden files.
func TestSimpleSubgraphs(t *testing.T) {
	// testProject pairs a project name with its packages and their
	// imports, as fed to createSubgraph.
	type testProject struct {
		name     string
		packages map[string][]string
	}

	testCases := []struct {
		name          string
		projects      []testProject
		targetProject string // project whose incoming edges are rendered
		outputfile    string // golden file holding the expected dot output
	}{
		{
			name: "simple graph",
			projects: []testProject{
				{
					name: "ProjectA",
					packages: map[string][]string{
						"ProjectA/pkgX": []string{"ProjectC/pkgZ", "ProjectB/pkgX"},
						"ProjectA/pkgY": []string{"ProjectC/pkgX"},
					},
				},
				{
					name: "ProjectB",
					packages: map[string][]string{
						"ProjectB/pkgX": []string{},
						"ProjectB/pkgY": []string{"ProjectA/pkgY", "ProjectC/pkgZ"},
					},
				},
				{
					name: "ProjectC",
					packages: map[string][]string{
						"ProjectC/pkgX": []string{},
						"ProjectC/pkgY": []string{},
						"ProjectC/pkgZ": []string{},
					},
				},
			},
			targetProject: "ProjectC",
			outputfile:    "graphviz/subgraph1.dot",
		},
		{
			name: "edges from and to root projects",
			projects: []testProject{
				{
					name: "ProjectB",
					packages: map[string][]string{
						"ProjectB":      []string{"ProjectC/pkgX", "ProjectC"},
						"ProjectB/pkgX": []string{},
						"ProjectB/pkgY": []string{"ProjectA/pkgY", "ProjectC/pkgZ"},
						"ProjectB/pkgZ": []string{"ProjectC"},
					},
				},
				{
					name: "ProjectC",
					packages: map[string][]string{
						"ProjectC/pkgX": []string{},
						"ProjectC/pkgY": []string{},
						"ProjectC/pkgZ": []string{},
					},
				},
			},
			targetProject: "ProjectC",
			outputfile:    "graphviz/subgraph2.dot",
		},
		{
			name: "multi and single package projects",
			projects: []testProject{
				{
					name: "ProjectA",
					packages: map[string][]string{
						"ProjectA": []string{"ProjectC/pkgX"},
					},
				},
				{
					name: "ProjectB",
					packages: map[string][]string{
						"ProjectB":      []string{"ProjectC/pkgX", "ProjectC"},
						"ProjectB/pkgX": []string{},
						"ProjectB/pkgY": []string{"ProjectA/pkgY", "ProjectC/pkgZ"},
						"ProjectB/pkgZ": []string{"ProjectC"},
					},
				},
				{
					name: "ProjectC",
					packages: map[string][]string{
						"ProjectC/pkgX": []string{},
						"ProjectC/pkgY": []string{},
						"ProjectC/pkgZ": []string{},
					},
				},
			},
			targetProject: "ProjectC",
			outputfile:    "graphviz/subgraph3.dot",
		},
		{
			name: "relation from a cluster to a node",
			projects: []testProject{
				{
					name: "ProjectB",
					packages: map[string][]string{
						"ProjectB":      []string{"ProjectC/pkgX", "ProjectA"},
						"ProjectB/pkgX": []string{},
						"ProjectB/pkgY": []string{"ProjectA", "ProjectC/pkgZ"},
						"ProjectB/pkgZ": []string{"ProjectC"},
					},
				},
				{
					name: "ProjectA",
					packages: map[string][]string{
						"ProjectA": []string{"ProjectC/pkgX"},
					},
				},
			},
			targetProject: "ProjectA",
			outputfile:    "graphviz/subgraph4.dot",
		},
	}

	h := test.NewHelper(t)
	h.Parallel()
	defer h.Cleanup()

	for _, tc := range testCases {
		g := new(graphviz).New()
		for _, project := range tc.projects {
			g.createSubgraph(project.name, project.packages)
		}
		output := g.output(tc.targetProject)
		want := h.GetTestFileString(tc.outputfile)
		if output.String() != want {
			t.Fatalf("expected '%v', got '%v'", want, output.String())
		}
	}
}
// TestCreateSubgraph verifies createSubgraph's bookkeeping directly: the
// nodes registered for subpackages and the cluster map entries (compared
// with reflect.DeepEqual, so empty vs nil slices matter).
func TestCreateSubgraph(t *testing.T) {
	testCases := []struct {
		name         string
		project      string
		pkgs         map[string][]string
		wantNodes    []*gvnode
		wantClusters map[string]*gvsubgraph
	}{
		{
			name:    "Project with subpackages",
			project: "ProjectA",
			pkgs: map[string][]string{
				"ProjectA/pkgX": []string{"ProjectC/pkgZ", "ProjectB/pkgX"},
				"ProjectA/pkgY": []string{"ProjectC/pkgX"},
			},
			wantNodes: []*gvnode{
				&gvnode{
					project:  "ProjectA/pkgX",
					children: []string{"ProjectC/pkgZ", "ProjectB/pkgX"},
				},
				&gvnode{
					project:  "ProjectA/pkgY",
					children: []string{"ProjectC/pkgX"},
				},
			},
			wantClusters: map[string]*gvsubgraph{
				"ProjectA": &gvsubgraph{
					project:  "ProjectA",
					packages: []string{"ProjectA/pkgX", "ProjectA/pkgY"},
					index:    0,
					children: []string{},
				},
			},
		},
		{
			name:    "Project with single subpackage at root",
			project: "ProjectA",
			pkgs: map[string][]string{
				"ProjectA": []string{"ProjectC/pkgZ", "ProjectB/pkgX"},
			},
			wantNodes: []*gvnode{
				&gvnode{
					project:  "ProjectA",
					children: []string{"ProjectC/pkgZ", "ProjectB/pkgX"},
				},
			},
			// Root-only projects become a plain node; no cluster entry.
			wantClusters: map[string]*gvsubgraph{},
		},
		{
			name:    "Project with subpackages and no children",
			project: "ProjectX",
			pkgs: map[string][]string{
				"ProjectX/pkgA": []string{},
			},
			wantNodes: []*gvnode{
				&gvnode{
					project:  "ProjectX/pkgA",
					children: []string{},
				},
			},
			wantClusters: map[string]*gvsubgraph{
				"ProjectX": &gvsubgraph{
					project:  "ProjectX",
					packages: []string{"ProjectX/pkgA"},
					index:    0,
					children: []string{},
				},
			},
		},
		{
			name:    "Project with subpackage and root package with children",
			project: "ProjectA",
			pkgs: map[string][]string{
				"ProjectA":      []string{"ProjectC/pkgZ", "ProjectB/pkgX"},
				"ProjectA/pkgX": []string{"ProjectC/pkgA"},
			},
			wantNodes: []*gvnode{
				&gvnode{
					project:  "ProjectA/pkgX",
					children: []string{"ProjectC/pkgA"},
				},
			},
			wantClusters: map[string]*gvsubgraph{
				"ProjectA": &gvsubgraph{
					project:  "ProjectA",
					packages: []string{"ProjectA/pkgX"},
					index:    0,
					children: []string{"ProjectC/pkgZ", "ProjectB/pkgX"},
				},
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			g := new(graphviz).New()
			g.createSubgraph(tc.project, tc.pkgs)

			// Check the number of created nodes.
			if len(g.ps) != len(tc.wantNodes) {
				t.Errorf("unexpected number of nodes: \n\t(GOT) %v\n\t(WNT) %v", len(g.ps), len(tc.wantNodes))
			}

			// Check if the expected nodes are created.
			for i, v := range tc.wantNodes {
				if v.project != g.ps[i].project {
					t.Errorf("found unexpected node: \n\t(GOT) %v\n\t(WNT) %v", g.ps[i].project, v.project)
				}
			}

			// Check the number of created clusters.
			if len(g.clusters) != len(tc.wantClusters) {
				t.Errorf("unexpected number of clusters: \n\t(GOT) %v\n\t(WNT) %v", len(g.clusters), len(tc.wantClusters))
			}

			// Check if the expected clusters are created.
			if !reflect.DeepEqual(g.clusters, tc.wantClusters) {
				t.Errorf("unexpected clusters: \n\t(GOT) %v\n\t(WNT) %v", g.clusters, tc.wantClusters)
			}
		})
	}
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/status_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"log"
"path/filepath"
"reflect"
"strings"
"testing"
"text/tabwriter"
"text/template"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/test"
"github.com/pkg/errors"
)
// TestStatusFormatVersion checks the human-readable rendering of each
// gps.Version flavor, including the nil case.
func TestStatusFormatVersion(t *testing.T) {
	t.Parallel()

	tests := map[gps.Version]string{
		nil:                            "",
		gps.NewBranch("master"):        "branch master",
		gps.NewVersion("1.0.0"):        "1.0.0",
		gps.Revision("flooboofoobooo"): "flooboo",
	}

	for version, want := range tests {
		if got := formatVersion(version); got != want {
			t.Fatalf("expected '%v', got '%v'", want, got)
		}
	}
}
func TestBasicLine(t *testing.T) {
project := dep.Project{}
aSemverConstraint, _ := gps.NewSemverConstraint("1.2.3")
templateString := "PR:{{.ProjectRoot}}, Const:{{.Constraint}}, Ver:{{.Version}}, Rev:{{.Revision}}, Lat:{{.Latest}}, PkgCt:{{.PackageCount}}"
equalityTestTemplate := `{{if eq .Constraint "1.2.3"}}Constraint is 1.2.3{{end}}|{{if eq .Version "flooboo"}}Version is flooboo{{end}}|{{if eq .Latest "unknown"}}Latest is unknown{{end}}`
tests := []struct {
name string
status BasicStatus
wantDotStatus []string
wantJSONStatus []string
wantTableStatus []string
wantTemplateStatus []string
wantEqTemplateStatus []string
}{
{
name: "BasicStatus with ProjectRoot only",
status: BasicStatus{
ProjectRoot: "github.com/foo/bar",
},
wantDotStatus: []string{`[label="github.com/foo/bar"];`},
wantJSONStatus: []string{`"Version":""`, `"Revision":""`},
wantTableStatus: []string{`github.com/foo/bar 0`},
wantTemplateStatus: []string{`PR:github.com/foo/bar, Const:, Ver:, Rev:, Lat:, PkgCt:0`},
wantEqTemplateStatus: []string{`||`},
},
{
name: "BasicStatus with Revision",
status: BasicStatus{
ProjectRoot: "github.com/foo/bar",
Revision: gps.Revision("flooboofoobooo"),
},
wantDotStatus: []string{`[label="github.com/foo/bar\nflooboo"];`},
wantJSONStatus: []string{`"Version":""`, `"Revision":"flooboofoobooo"`, `"Constraint":""`},
wantTableStatus: []string{`github.com/foo/bar flooboo 0`},
wantTemplateStatus: []string{`PR:github.com/foo/bar, Const:, Ver:flooboo, Rev:flooboofoobooo, Lat:, PkgCt:0`},
wantEqTemplateStatus: []string{`|Version is flooboo|`},
},
{
name: "BasicStatus with Version and Revision",
status: BasicStatus{
ProjectRoot: "github.com/foo/bar",
Version: gps.NewVersion("1.0.0"),
Revision: gps.Revision("flooboofoobooo"),
},
wantDotStatus: []string{`[label="github.com/foo/bar\n1.0.0"];`},
wantJSONStatus: []string{`"Version":"1.0.0"`, `"Revision":"flooboofoobooo"`, `"Constraint":""`},
wantTableStatus: []string{`github.com/foo/bar 1.0.0 flooboo 0`},
wantTemplateStatus: []string{`PR:github.com/foo/bar, Const:, Ver:1.0.0, Rev:flooboofoobooo, Lat:, PkgCt:0`},
wantEqTemplateStatus: []string{`||`},
},
{
name: "BasicStatus with Constraint, Version and Revision",
status: BasicStatus{
ProjectRoot: "github.com/foo/bar",
Constraint: aSemverConstraint,
Version: gps.NewVersion("1.0.0"),
Revision: gps.Revision("revxyz"),
},
wantDotStatus: []string{`[label="github.com/foo/bar\n1.0.0"];`},
wantJSONStatus: []string{`"Revision":"revxyz"`, `"Constraint":"1.2.3"`, `"Version":"1.0.0"`},
wantTableStatus: []string{`github.com/foo/bar 1.2.3 1.0.0 revxyz 0`},
wantTemplateStatus: []string{`PR:github.com/foo/bar, Const:1.2.3, Ver:1.0.0, Rev:revxyz, Lat:, PkgCt:0`},
wantEqTemplateStatus: []string{`Constraint is 1.2.3||`},
},
{
name: "BasicStatus with update error",
status: BasicStatus{
ProjectRoot: "github.com/foo/bar",
hasError: true,
},
wantDotStatus: []string{`[label="github.com/foo/bar"];`},
wantJSONStatus: []string{`"Version":""`, `"Revision":""`, `"Latest":"unknown"`},
wantTableStatus: []string{`github.com/foo/bar unknown 0`},
wantTemplateStatus: []string{`PR:github.com/foo/bar, Const:, Ver:, Rev:, Lat:unknown, PkgCt:0`},
wantEqTemplateStatus: []string{`||Latest is unknown`},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
var buf bytes.Buffer
dotout := &dotOutput{
p: &project,
w: &buf,
}
dotout.BasicHeader()
dotout.BasicLine(&test.status)
dotout.BasicFooter()
for _, wantStatus := range test.wantDotStatus {
if ok := strings.Contains(buf.String(), wantStatus); !ok {
t.Errorf("Did not find expected node status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
}
}
buf.Reset()
jsonout := &jsonOutput{w: &buf}
jsonout.BasicHeader()
jsonout.BasicLine(&test.status)
jsonout.BasicFooter()
for _, wantStatus := range test.wantJSONStatus {
if ok := strings.Contains(buf.String(), wantStatus); !ok {
t.Errorf("Did not find expected JSON status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
}
}
buf.Reset()
tabw := tabwriter.NewWriter(&buf, 0, 4, 2, ' ', 0)
tableout := &tableOutput{w: tabw}
tableout.BasicHeader()
tableout.BasicLine(&test.status)
tableout.BasicFooter()
for _, wantStatus := range test.wantTableStatus {
if ok := strings.Contains(buf.String(), wantStatus); !ok {
t.Errorf("Did not find expected Table status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
}
}
buf.Reset()
template, _ := template.New("status").Parse(templateString)
templateout := &templateOutput{w: &buf, tmpl: template}
templateout.BasicHeader()
templateout.BasicLine(&test.status)
templateout.BasicFooter()
for _, wantStatus := range test.wantTemplateStatus {
if ok := strings.Contains(buf.String(), wantStatus); !ok {
t.Errorf("Did not find expected template status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
}
}
// The following test is to ensure that certain fields usable with string operations such as .eq
buf.Reset()
template, _ = template.New("status").Parse(equalityTestTemplate)
templateout = &templateOutput{w: &buf, tmpl: template}
templateout.BasicHeader()
templateout.BasicLine(&test.status)
templateout.BasicFooter()
for _, wantStatus := range test.wantEqTemplateStatus {
if ok := strings.Contains(buf.String(), wantStatus); !ok {
t.Errorf("Did not find expected template status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
}
}
})
}
}
// TestDetailLine renders single DetailStatus fixtures through each supported
// output writer (dot graph, JSON, tab-aligned table, and text/template) and
// checks that every format contains the expected fragments. It also verifies
// that the fields exposed to user templates support string eq comparisons.
func TestDetailLine(t *testing.T) {
	project := dep.Project{}
	aSemverConstraint, _ := gps.NewSemverConstraint("1.2.3")

	// templateString exercises every field exposed on a detail-status row.
	templateString := "{{range $p := .Projects}}PR:{{$p.ProjectRoot}}, Src:{{$p.Source}}, Const:{{$p.Constraint}}, Ver:{{$p.Locked.Version}}, Rev:{{$p.Locked.Revision}}, Lat:{{$p.Latest.Revision}}, PkgCt:{{$p.PackageCount}}, Pkgs:{{$p.Packages}}{{end}}"
	// equalityTestTemplate proves the same fields behave as plain strings.
	equalityTestTemplate := `{{range $p := .Projects}}{{if eq $p.Constraint "1.2.3"}}Constraint is 1.2.3{{end}}|{{if eq $p.Locked.Version "flooboo"}}Version is flooboo{{end}}|{{if eq $p.Locked.Revision "flooboofoobooo"}}Revision is flooboofoobooo{{end}}|{{if eq $p.Latest.Revision "unknown"}}Latest is unknown{{end}}{{end}}`

	tests := []struct {
		name                 string
		status               DetailStatus
		wantDotStatus        []string // fragments expected in dot output
		wantJSONStatus       []string // fragments expected in JSON output
		wantTableStatus      []string // fragments expected in table output
		wantTemplateStatus   []string // fragments expected from templateString
		wantEqTemplateStatus []string // fragments expected from equalityTestTemplate
	}{
		{
			name: "DetailStatus with ProjectRoot only",
			status: DetailStatus{
				BasicStatus: BasicStatus{
					ProjectRoot: "github.com/foo/bar",
				},
				Packages: []string{},
			},
			wantDotStatus:        []string{`[label="github.com/foo/bar"];`},
			wantJSONStatus:       []string{`"Locked":{}`},
			wantTableStatus:      []string{`github.com/foo/bar []`},
			wantTemplateStatus:   []string{`PR:github.com/foo/bar, Src:, Const:, Ver:, Rev:, Lat:, PkgCt:0, Pkgs:[]`},
			wantEqTemplateStatus: []string{`||`},
		},
		{
			name: "DetailStatus with Revision",
			status: DetailStatus{
				BasicStatus: BasicStatus{
					ProjectRoot: "github.com/foo/bar",
					Revision:    gps.Revision("flooboofoobooo"),
				},
				Packages: []string{},
			},
			wantDotStatus:        []string{`[label="github.com/foo/bar\nflooboo"];`},
			wantJSONStatus:       []string{`"Locked":{"Revision":"flooboofoobooo"}`, `"Constraint":""`},
			wantTableStatus:      []string{`github.com/foo/bar flooboo []`},
			wantTemplateStatus:   []string{`PR:github.com/foo/bar, Src:, Const:, Ver:, Rev:flooboofoobooo, Lat:, PkgCt:0, Pkgs:[]`},
			wantEqTemplateStatus: []string{`|Revision is flooboofoobooo|`},
		},
		{
			name: "DetailStatus with Source",
			status: DetailStatus{
				BasicStatus: BasicStatus{
					ProjectRoot: "github.com/foo/bar",
				},
				Packages: []string{},
				Source:   "github.com/baz/bar",
			},
			wantDotStatus:        []string{`[label="github.com/foo/bar"];`},
			wantJSONStatus:       []string{`"Locked":{}`, `"Source":"github.com/baz/bar"`, `"Constraint":""`},
			wantTableStatus:      []string{`github.com/foo/bar github.com/baz/bar []`},
			wantTemplateStatus:   []string{`PR:github.com/foo/bar, Src:github.com/baz/bar, Const:, Ver:, Rev:, Lat:, PkgCt:0, Pkgs:[]`},
			wantEqTemplateStatus: []string{`||`},
		},
		{
			name: "DetailStatus with Version and Revision",
			status: DetailStatus{
				BasicStatus: BasicStatus{
					ProjectRoot: "github.com/foo/bar",
					Version:     gps.NewVersion("1.0.0"),
					Revision:    gps.Revision("flooboofoobooo"),
				},
				Packages: []string{},
			},
			wantDotStatus:        []string{`[label="github.com/foo/bar\n1.0.0"];`},
			wantJSONStatus:       []string{`"Version":"1.0.0"`, `"Revision":"flooboofoobooo"`, `"Constraint":""`},
			wantTableStatus:      []string{`github.com/foo/bar 1.0.0 flooboo []`},
			wantTemplateStatus:   []string{`PR:github.com/foo/bar, Src:, Const:, Ver:1.0.0, Rev:flooboofoobooo, Lat:, PkgCt:0, Pkgs:[]`},
			wantEqTemplateStatus: []string{`||`},
		},
		{
			name: "DetailStatus with Constraint, Version and Revision",
			status: DetailStatus{
				BasicStatus: BasicStatus{
					ProjectRoot: "github.com/foo/bar",
					Constraint:  aSemverConstraint,
					Version:     gps.NewVersion("1.0.0"),
					Revision:    gps.Revision("revxyz"),
				},
				Packages: []string{},
			},
			wantDotStatus:        []string{`[label="github.com/foo/bar\n1.0.0"];`},
			wantJSONStatus:       []string{`"Revision":"revxyz"`, `"Constraint":"1.2.3"`, `"Version":"1.0.0"`},
			wantTableStatus:      []string{`github.com/foo/bar 1.2.3 1.0.0 revxyz []`},
			wantTemplateStatus:   []string{`PR:github.com/foo/bar, Src:, Const:1.2.3, Ver:1.0.0, Rev:revxyz, Lat:, PkgCt:0, Pkgs:[]`},
			wantEqTemplateStatus: []string{`Constraint is 1.2.3||`},
		},
		{
			name: "DetailStatus with Constraint, Version, Revision, and Package",
			status: DetailStatus{
				BasicStatus: BasicStatus{
					ProjectRoot:  "github.com/foo/bar",
					Constraint:   aSemverConstraint,
					Version:      gps.NewVersion("1.0.0"),
					Revision:     gps.Revision("revxyz"),
					PackageCount: 1,
				},
				Packages: []string{"."},
			},
			wantDotStatus:        []string{`[label="github.com/foo/bar\n1.0.0"];`},
			wantJSONStatus:       []string{`"Revision":"revxyz"`, `"Constraint":"1.2.3"`, `"Version":"1.0.0"`, `"PackageCount":1`, `"Packages":["."]`},
			wantTableStatus:      []string{`github.com/foo/bar 1.2.3 1.0.0 revxyz [.]`},
			wantTemplateStatus:   []string{`PR:github.com/foo/bar, Src:, Const:1.2.3, Ver:1.0.0, Rev:revxyz, Lat:, PkgCt:1, Pkgs:[.]`},
			wantEqTemplateStatus: []string{`Constraint is 1.2.3||`},
		},
		{
			name: "DetailStatus with Constraint, Version, Revision, and Packages",
			status: DetailStatus{
				BasicStatus: BasicStatus{
					ProjectRoot:  "github.com/foo/bar",
					Constraint:   aSemverConstraint,
					Version:      gps.NewVersion("1.0.0"),
					Revision:     gps.Revision("revxyz"),
					PackageCount: 3,
				},
				Packages: []string{".", "foo", "bar"},
			},
			wantDotStatus:        []string{`[label="github.com/foo/bar\n1.0.0"];`},
			wantJSONStatus:       []string{`"Revision":"revxyz"`, `"Constraint":"1.2.3"`, `"Version":"1.0.0"`, `"PackageCount":3`, `"Packages":[".","foo","bar"]`},
			wantTableStatus:      []string{`github.com/foo/bar 1.2.3 1.0.0 revxyz [., foo, bar]`},
			wantTemplateStatus:   []string{`PR:github.com/foo/bar, Src:, Const:1.2.3, Ver:1.0.0, Rev:revxyz, Lat:, PkgCt:3, Pkgs:[. foo bar]`},
			wantEqTemplateStatus: []string{`Constraint is 1.2.3||`},
		},
		{
			name: "DetailStatus with update error",
			status: DetailStatus{
				BasicStatus: BasicStatus{
					ProjectRoot: "github.com/foo/bar",
					hasError:    true,
				},
				Packages: []string{},
			},
			wantDotStatus:        []string{`[label="github.com/foo/bar"];`},
			wantJSONStatus:       []string{`"Locked":{}`, `"Latest":{"Revision":"unknown"}`},
			wantTableStatus:      []string{`github.com/foo/bar unknown []`},
			wantTemplateStatus:   []string{`PR:github.com/foo/bar, Src:, Const:, Ver:, Rev:, Lat:unknown, PkgCt:0, Pkgs:[]`},
			wantEqTemplateStatus: []string{`||Latest is unknown`},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			var buf bytes.Buffer

			// Dot (graphviz) output.
			dotout := &dotOutput{
				p: &project,
				w: &buf,
			}
			dotout.DetailHeader(nil)
			dotout.DetailLine(&test.status)
			dotout.DetailFooter(nil)

			for _, wantStatus := range test.wantDotStatus {
				if ok := strings.Contains(buf.String(), wantStatus); !ok {
					t.Errorf("Did not find expected node status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
				}
			}

			buf.Reset()

			// JSON output.
			jsonout := &jsonOutput{w: &buf}
			jsonout.DetailHeader(nil)
			jsonout.DetailLine(&test.status)
			jsonout.DetailFooter(nil)

			for _, wantStatus := range test.wantJSONStatus {
				if ok := strings.Contains(buf.String(), wantStatus); !ok {
					t.Errorf("Did not find expected JSON status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
				}
			}

			buf.Reset()

			// Tab-aligned table output.
			tabw := tabwriter.NewWriter(&buf, 0, 4, 2, ' ', 0)
			tableout := &tableOutput{w: tabw}
			tableout.DetailHeader(nil)
			tableout.DetailLine(&test.status)
			tableout.DetailFooter(nil)

			for _, wantStatus := range test.wantTableStatus {
				if ok := strings.Contains(buf.String(), wantStatus); !ok {
					t.Errorf("Did not find expected Table status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
				}
			}

			buf.Reset()

			// User-supplied template output.
			template, _ := template.New("status").Parse(templateString)
			templateout := &templateOutput{w: &buf, tmpl: template}
			templateout.DetailHeader(nil)
			templateout.DetailLine(&test.status)
			templateout.DetailFooter(nil)

			for _, wantStatus := range test.wantTemplateStatus {
				if ok := strings.Contains(buf.String(), wantStatus); !ok {
					t.Errorf("Did not find expected template status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
				}
			}

			// The following test is to ensure that certain fields usable with string operations such as .eq
			buf.Reset()

			template, _ = template.New("status").Parse(equalityTestTemplate)
			templateout = &templateOutput{w: &buf, tmpl: template}
			templateout.DetailHeader(nil)
			templateout.DetailLine(&test.status)
			templateout.DetailFooter(nil)

			for _, wantStatus := range test.wantEqTemplateStatus {
				if ok := strings.Contains(buf.String(), wantStatus); !ok {
					t.Errorf("Did not find expected template status: \n\t(GOT) %v \n\t(WNT) %v", buf.String(), wantStatus)
				}
			}
		})
	}
}
// TestBasicStatusGetConsolidatedConstraint checks the human-readable
// constraint string produced for various BasicStatus configurations.
func TestBasicStatusGetConsolidatedConstraint(t *testing.T) {
	semverC, _ := gps.NewSemverConstraint("1.2.1")

	cases := []struct {
		label string
		bs    BasicStatus
		want  string
	}{
		{
			label: "empty BasicStatus",
			bs:    BasicStatus{},
			want:  "",
		},
		{
			label: "BasicStatus with Any Constraint",
			bs:    BasicStatus{Constraint: gps.Any()},
			want:  "*",
		},
		{
			label: "BasicStatus with Semver Constraint",
			bs:    BasicStatus{Constraint: semverC},
			want:  "1.2.1",
		},
		{
			label: "BasicStatus with Override",
			bs: BasicStatus{
				Constraint:  semverC,
				hasOverride: true,
			},
			want: "1.2.1 (override)",
		},
		{
			label: "BasicStatus with Revision Constraint",
			bs: BasicStatus{
				Constraint: gps.Revision("ddeb6f5d27091ff291b16232e99076a64fb375b8"),
			},
			want: "ddeb6f5",
		},
	}

	for _, c := range cases {
		t.Run(c.label, func(t *testing.T) {
			got := c.bs.getConsolidatedConstraint()
			if got != c.want {
				t.Errorf("unexpected consolidated constraint: \n\t(GOT) %v \n\t(WNT) %v", got, c.want)
			}
		})
	}
}
// TestBasicStatusGetConsolidatedVersion checks that the consolidated version
// string prefers the semantic version and falls back to the revision.
func TestBasicStatusGetConsolidatedVersion(t *testing.T) {
	cases := []struct {
		label string
		bs    BasicStatus
		want  string
	}{
		{
			label: "empty BasicStatus",
			bs:    BasicStatus{},
			want:  "",
		},
		{
			label: "BasicStatus with Version and Revision",
			bs: BasicStatus{
				Version:  gps.NewVersion("1.0.0"),
				Revision: gps.Revision("revxyz"),
			},
			want: "1.0.0",
		},
		{
			label: "BasicStatus with only Revision",
			bs:    BasicStatus{Revision: gps.Revision("revxyz")},
			want:  "revxyz",
		},
	}

	for _, c := range cases {
		t.Run(c.label, func(t *testing.T) {
			got := c.bs.getConsolidatedVersion()
			if got != c.want {
				t.Errorf("unexpected consolidated version: \n\t(GOT) %v \n\t(WNT) %v", got, c.want)
			}
		})
	}
}
// TestBasicStatusGetConsolidatedLatest checks the latest-revision string,
// including truncation to the short form and the "unknown" error marker.
func TestBasicStatusGetConsolidatedLatest(t *testing.T) {
	cases := []struct {
		label   string
		bs      BasicStatus
		revSize uint8
		want    string
	}{
		{
			label:   "empty BasicStatus",
			bs:      BasicStatus{},
			revSize: shortRev,
			want:    "",
		},
		{
			label:   "nil latest",
			bs:      BasicStatus{Latest: nil},
			revSize: shortRev,
			want:    "",
		},
		{
			label:   "with error",
			bs:      BasicStatus{hasError: true},
			revSize: shortRev,
			want:    "unknown",
		},
		{
			label:   "short latest",
			bs:      BasicStatus{Latest: gps.Revision("adummylonglongrevision")},
			revSize: shortRev,
			want:    "adummyl",
		},
		{
			label:   "long latest",
			bs:      BasicStatus{Latest: gps.Revision("adummylonglongrevision")},
			revSize: longRev,
			want:    "adummylonglongrevision",
		},
	}

	for _, c := range cases {
		t.Run(c.label, func(t *testing.T) {
			got := c.bs.getConsolidatedLatest(c.revSize)
			if got != c.want {
				t.Errorf("unexpected consolidated latest: \n\t(GOT) %v \n\t(WNT) %v", got, c.want)
			}
		})
	}
}
// TestCollectConstraints builds a throwaway GOPATH project and verifies that
// collectConstraints gathers, per imported package, the constraints declared
// by each locked dependency's manifest — and by the root manifest when one is
// supplied. Requires network access to fetch the fixture dependencies.
func TestCollectConstraints(t *testing.T) {
	ver1, _ := gps.NewSemverConstraintIC("v1.0.0")
	ver08, _ := gps.NewSemverConstraintIC("v0.8.0")
	ver2, _ := gps.NewSemverConstraintIC("v2.0.0")

	cases := []struct {
		name            string
		lock            dep.Lock
		manifest        dep.Manifest
		wantConstraints constraintsCollection
		wantErr         bool // true when collectConstraints should report errors
	}{
		{
			name: "without any constraints",
			lock: dep.Lock{
				P: []gps.LockedProject{
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")},
						gps.NewVersion("v1.0.0"),
						[]string{"."},
					),
				},
			},
			wantConstraints: constraintsCollection{},
		},
		{
			name: "with multiple constraints from dependencies",
			lock: dep.Lock{
				P: []gps.LockedProject{
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")},
						gps.NewVersion("v1.0.0"),
						[]string{"."},
					),
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/darkowlzz/deptest-project-1")},
						gps.NewVersion("v0.1.0"),
						[]string{"."},
					),
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/darkowlzz/deptest-project-2")},
						gps.NewBranch("master").Pair(gps.Revision("824a8d56a4c6b2f4718824a98cd6d70d3dbd4c3e")),
						[]string{"."},
					),
				},
			},
			wantConstraints: constraintsCollection{
				"github.com/sdboyer/deptestdos": []projectConstraint{
					{"github.com/darkowlzz/deptest-project-2", ver2},
				},
				"github.com/sdboyer/dep-test": []projectConstraint{
					{"github.com/darkowlzz/deptest-project-2", ver1},
				},
				"github.com/sdboyer/deptest": []projectConstraint{
					{"github.com/darkowlzz/deptest-project-1", ver1},
					{"github.com/darkowlzz/deptest-project-2", ver08},
				},
			},
		},
		{
			name: "with multiple constraints from dependencies and root project",
			lock: dep.Lock{
				P: []gps.LockedProject{
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")},
						gps.NewVersion("v1.0.0"),
						[]string{"."},
					),
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/darkowlzz/deptest-project-1")},
						gps.NewVersion("v0.1.0"),
						[]string{"."},
					),
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/darkowlzz/deptest-project-2")},
						gps.NewBranch("master").Pair(gps.Revision("824a8d56a4c6b2f4718824a98cd6d70d3dbd4c3e")),
						[]string{"."},
					),
				},
			},
			manifest: dep.Manifest{
				Constraints: map[gps.ProjectRoot]gps.ProjectProperties{
					gps.ProjectRoot("github.com/sdboyer/deptest"): {
						Constraint: gps.Revision("3f4c3bea144e112a69bbe5d8d01c1b09a544253f"),
					},
				},
				Ovr: make(gps.ProjectConstraints),
				PruneOptions: gps.CascadingPruneOptions{
					DefaultOptions:    gps.PruneNestedVendorDirs,
					PerProjectOptions: make(map[gps.ProjectRoot]gps.PruneOptionSet),
				},
			},
			wantConstraints: constraintsCollection{
				"github.com/sdboyer/deptestdos": []projectConstraint{
					{"github.com/darkowlzz/deptest-project-2", ver2},
				},
				"github.com/sdboyer/dep-test": []projectConstraint{
					{"github.com/darkowlzz/deptest-project-2", ver1},
				},
				// The root manifest's constraint appears under the name "root".
				"github.com/sdboyer/deptest": []projectConstraint{
					{"github.com/darkowlzz/deptest-project-1", ver1},
					{"github.com/darkowlzz/deptest-project-2", ver08},
					{"root", gps.Revision("3f4c3bea144e112a69bbe5d8d01c1b09a544253f")},
				},
			},
		},
		{
			name: "skip projects with invalid versions",
			lock: dep.Lock{
				P: []gps.LockedProject{
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/darkowlzz/deptest-project-1")},
						gps.NewVersion("v0.1.0"),
						[]string{"."},
					),
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/darkowlzz/deptest-project-2")},
						// v1.0.0 is not a valid version of this project.
						gps.NewVersion("v1.0.0"),
						[]string{"."},
					),
				},
			},
			wantConstraints: constraintsCollection{
				"github.com/sdboyer/deptest": []projectConstraint{
					{"github.com/darkowlzz/deptest-project-1", ver1},
				},
			},
			wantErr: true,
		},
		{
			name: "collect only applicable constraints",
			lock: dep.Lock{
				P: []gps.LockedProject{
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/darkowlzz/dep-applicable-constraints")},
						gps.NewVersion("v1.0.0"),
						[]string{"."},
					),
				},
			},
			wantConstraints: constraintsCollection{
				"github.com/boltdb/bolt": []projectConstraint{
					{"github.com/darkowlzz/dep-applicable-constraints", gps.NewBranch("master")},
				},
				"github.com/sdboyer/deptest": []projectConstraint{
					{"github.com/darkowlzz/dep-applicable-constraints", ver08},
				},
			},
		},
		{
			name: "skip ineffective constraint from manifest",
			lock: dep.Lock{
				P: []gps.LockedProject{
					gps.NewLockedProject(
						gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")},
						gps.NewVersion("v1.0.0"),
						[]string{"."},
					),
				},
			},
			manifest: dep.Manifest{
				// Constraint on a project that is never imported; must be ignored.
				Constraints: map[gps.ProjectRoot]gps.ProjectProperties{
					gps.ProjectRoot("github.com/darkowlzz/deptest-project-1"): {
						Constraint: ver1,
					},
				},
				Ovr: make(gps.ProjectConstraints),
				PruneOptions: gps.CascadingPruneOptions{
					DefaultOptions:    gps.PruneNestedVendorDirs,
					PerProjectOptions: make(map[gps.ProjectRoot]gps.PruneOptionSet),
				},
			},
			wantConstraints: constraintsCollection{},
		},
	}

	h := test.NewHelper(t)
	defer h.Cleanup()

	// Materialize a minimal project under a throwaway GOPATH so package
	// listing runs against real files.
	testdir := filepath.Join("src", "collect_constraints_test")
	h.TempDir(testdir)
	h.TempCopy(filepath.Join(testdir, "main.go"), filepath.Join("status", "collect_constraints", "main.go"))
	testProjPath := h.Path(testdir)

	discardLogger := log.New(ioutil.Discard, "", 0)

	ctx := &dep.Ctx{
		GOPATH: testProjPath,
		Out:    discardLogger,
		Err:    discardLogger,
	}

	sm, err := ctx.SourceManager()
	h.Must(err)
	defer sm.Release()

	// Create new project and set root. Setting root is required for PackageList
	// to run properly.
	p := new(dep.Project)
	p.SetRoot(testProjPath)

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			p.Lock = &c.lock
			p.Manifest = &c.manifest

			gotConstraints, err := collectConstraints(ctx, p, sm)
			// collectConstraints reports a slice of errors; its presence must
			// line up with the case's wantErr expectation.
			if len(err) > 0 && !c.wantErr {
				t.Fatalf("unexpected errors while collecting constraints: %v", err)
			} else if len(err) == 0 && c.wantErr {
				t.Fatalf("expected errors while collecting constraints, but got none")
			}

			if !reflect.DeepEqual(gotConstraints, c.wantConstraints) {
				t.Fatalf("unexpected collected constraints: \n\t(GOT): %v\n\t(WNT): %v", gotConstraints, c.wantConstraints)
			}
		})
	}
}
// TestValidateFlags checks statusCommand.validateFlags over the supported
// combinations of output-format and operating-mode flags, including the
// mutually exclusive cases that must produce an error.
func TestValidateFlags(t *testing.T) {
	testCases := []struct {
		name    string
		cmd     statusCommand
		wantErr error
	}{
		{
			name:    "no flags",
			cmd:     statusCommand{},
			wantErr: nil,
		},
		{
			name:    "-dot only",
			cmd:     statusCommand{dot: true},
			wantErr: nil,
		},
		{
			name:    "-dot with template",
			cmd:     statusCommand{dot: true, template: "foo"},
			wantErr: errors.New("cannot pass template string with -dot"),
		},
		{
			name:    "-dot with -json",
			cmd:     statusCommand{dot: true, json: true},
			wantErr: errors.New("cannot pass multiple output format flags"),
		},
		{
			name:    "-dot with operating mode",
			cmd:     statusCommand{dot: true, old: true},
			wantErr: errors.New("-dot generates dependency graph; cannot pass other flags"),
		},
		{
			name:    "single operating mode",
			cmd:     statusCommand{old: true},
			wantErr: nil,
		},
		{
			name:    "multiple operating modes",
			cmd:     statusCommand{missing: true, old: true},
			wantErr: errors.Wrapf(errors.New("cannot pass multiple operating mode flags"), "[-old -missing]"),
		},
		{
			name:    "old with -dot",
			cmd:     statusCommand{dot: true, old: true},
			wantErr: errors.New("-dot generates dependency graph; cannot pass other flags"),
		},
		{
			name:    "old with template",
			cmd:     statusCommand{old: true, template: "foo"},
			wantErr: nil,
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			err := tc.cmd.validateFlags()

			// Compare got vs. want without dereferencing a nil error. The
			// previous version called tc.wantErr.Error() unconditionally when
			// err was non-nil, which panicked (instead of failing cleanly)
			// whenever an unexpected error appeared in a nil-wantErr case.
			switch {
			case err == nil && tc.wantErr == nil:
				// ok
			case err == nil || tc.wantErr == nil:
				t.Errorf("unexpected error: \n\t(GOT): %v\n\t(WNT): %v", err, tc.wantErr)
			case err.Error() != tc.wantErr.Error():
				t.Errorf("unexpected error: \n\t(GOT): %v\n\t(WNT): %v", err, tc.wantErr)
			}
		})
	}
}
// execStatusTemplate parses format as a status template and renders data
// into w, returning any parse or execution error.
func execStatusTemplate(w io.Writer, format string, data interface{}) error {
	tmpl, parseErr := parseStatusTemplate(format)
	if parseErr != nil {
		return parseErr
	}
	return tmpl.Execute(w, data)
}
// expectedStatusDetail is the skeleton of a Gopkg.lock as rendered by the
// detailed status templates: the first %s receives the [[projects]] stanzas
// and the second the TOML-formatted input-imports list.
const expectedStatusDetail = `# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
%s[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = %s
solver-name = "gps-cdcl"
solver-version = 1
`
// TestStatusDetailTemplates renders rawDetail fixtures through the detailed
// status templates and compares the output byte-for-byte against the
// expected Gopkg.lock-style text.
func TestStatusDetailTemplates(t *testing.T) {
	expectedStatusMetadata := rawDetailMetadata{
		AnalyzerName:    "dep",
		AnalyzerVersion: 1,
		SolverName:      "gps-cdcl",
		SolverVersion:   1,
	}
	// A copy of the metadata with the input-imports list populated.
	expectWithInputs := expectedStatusMetadata
	expectWithInputs.InputImports = []string{"github.com/akutz/one", "github.com/akutz/three/a"}

	testCases := []struct {
		name string
		tpl  string    // template under test
		exp  string    // exact expected rendering
		data rawDetail // fixture fed to the template
	}{
		{
			name: "Lock Template No Projects",
			tpl:  statusLockTemplate,
			exp:  fmt.Sprintf(expectedStatusDetail, "", tomlStrSplit(nil)),
			data: rawDetail{
				Metadata: expectedStatusMetadata,
			},
		},
		{
			name: "Lock Template",
			tpl:  statusLockTemplate,
			exp: fmt.Sprintf(expectedStatusDetail, `[[projects]]
branch = "master"
digest = "1:cbcdef1234"
name = "github.com/akutz/one"
packages = ["."]
pruneopts = "UT"
revision = "b78744579491c1ceeaaa3b40205e56b0591b93a3"
[[projects]]
digest = "1:dbcdef1234"
name = "github.com/akutz/two"
packages = [
".",
"helloworld",
]
pruneopts = "NUT"
revision = "12bd96e66386c1960ab0f74ced1362f66f552f7b"
version = "v1.0.0"
[[projects]]
branch = "feature/morning"
digest = "1:abcdef1234"
name = "github.com/akutz/three"
packages = [
"a",
"b",
"c",
]
pruneopts = "NUT"
revision = "890a5c3458b43e6104ff5da8dfa139d013d77544"
source = "https://github.com/mandy/three"
`, tomlStrSplit([]string{"github.com/akutz/one", "github.com/akutz/three/a"})),
			data: rawDetail{
				Projects: []rawDetailProject{
					rawDetailProject{
						Locked: rawDetailVersion{
							Branch:   "master",
							Revision: "b78744579491c1ceeaaa3b40205e56b0591b93a3",
						},
						Packages:    []string{"."},
						ProjectRoot: "github.com/akutz/one",
						PruneOpts:   "UT",
						Digest:      "1:cbcdef1234",
					},
					rawDetailProject{
						Locked: rawDetailVersion{
							Revision: "12bd96e66386c1960ab0f74ced1362f66f552f7b",
							Version:  "v1.0.0",
						},
						ProjectRoot: "github.com/akutz/two",
						Packages: []string{
							".",
							"helloworld",
						},
						PruneOpts: "NUT",
						Digest:    "1:dbcdef1234",
					},
					rawDetailProject{
						Locked: rawDetailVersion{
							Branch:   "feature/morning",
							Revision: "890a5c3458b43e6104ff5da8dfa139d013d77544",
						},
						ProjectRoot: "github.com/akutz/three",
						Packages: []string{
							"a",
							"b",
							"c",
						},
						Source:    "https://github.com/mandy/three",
						PruneOpts: "NUT",
						Digest:    "1:abcdef1234",
					},
				},
				Metadata: expectWithInputs,
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			w := &bytes.Buffer{}
			if err := execStatusTemplate(w, tc.tpl, tc.data); err != nil {
				t.Error(err)
			}
			act := w.String()
			if act != tc.exp {
				t.Errorf(
					"unexpected error: \n"+
						"(GOT):\n=== BEGIN ===\n%v\n=== END ===\n"+
						"(WNT):\n=== BEGIN ===\n%v\n=== END ===\n",
					act,
					tc.exp)
			}
		})
	}
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/prune.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"flag"
"io/ioutil"
"log"
"os"
"path/filepath"
"sort"
"strings"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/pkgtree"
"github.com/golang/dep/internal/fs"
"github.com/pkg/errors"
)
// pruneShortHelp is the one-line description shown in the command listing.
const pruneShortHelp = `Pruning is now performed automatically by dep ensure.`

// pruneLongHelp explains that prune was folded into ensure and is deprecated.
const pruneLongHelp = `
Prune was merged into the ensure command.
Set prune options in the manifest and it will be applied after every ensure.
dep prune will be removed in a future version of dep, causing this command to exit non-0.
`
// pruneCommand implements the deprecated `dep prune` subcommand. It carries
// no flags or state of its own.
type pruneCommand struct {
}

func (cmd *pruneCommand) Name() string      { return "prune" }
func (cmd *pruneCommand) Args() string      { return "" }
func (cmd *pruneCommand) ShortHelp() string { return pruneShortHelp }
func (cmd *pruneCommand) LongHelp() string  { return pruneLongHelp }

// Hidden reports true so the deprecated command is omitted from help output.
func (cmd *pruneCommand) Hidden() bool { return true }

// Register is a no-op; prune accepts no flags.
func (cmd *pruneCommand) Register(fs *flag.FlagSet) {
}
// Run warns that prune is deprecated, then performs the legacy prune behavior
// one more time to ease the transition: it validates the project's package
// tree, requires a lock file, and delegates the actual work to pruneProject.
func (cmd *pruneCommand) Run(ctx *dep.Ctx, args []string) error {
	ctx.Err.Printf("Pruning is now performed automatically by dep ensure.\n")
	ctx.Err.Printf("Set prune settings in %s and it will be applied when running ensure.\n", dep.ManifestName)
	ctx.Err.Printf("\nThis command currently still prunes as it always has, to ease the transition.\n")
	ctx.Err.Printf("However, it will be removed in a future version of dep.\n")
	ctx.Err.Printf("\nNow is the time to update your Gopkg.toml and remove `dep prune` from any scripts.\n")
	ctx.Err.Printf("\nFor more information, see: https://golang.github.io/dep/docs/Gopkg.toml.html#prune\n")

	p, err := ctx.LoadProject()
	if err != nil {
		return err
	}

	sm, err := ctx.SourceManager()
	if err != nil {
		return err
	}
	sm.UseDefaultSignalHandling()
	defer sm.Release()

	// Statically analyze the current project's code so a broken package tree
	// aborts the prune before any files are touched. The tree itself is not
	// needed: no solve is performed here, so the previously-built (and never
	// used) SolveParameters have been removed.
	// The old wrap message ended in ": %v", but errors.Wrap performs no
	// formatting, so the literal "%v" leaked into the user-facing error.
	if _, err := pkgtree.ListPackages(p.ResolvedAbsRoot, string(p.ImportRoot)); err != nil {
		return errors.Wrap(err, "analysis of local packages failed")
	}

	if p.Lock == nil {
		return errors.Errorf("Gopkg.lock must exist for prune to know what files are safe to remove.")
	}

	// Only surface prune progress when the user asked for verbosity.
	pruneLogger := ctx.Err
	if !ctx.Verbose {
		pruneLogger = log.New(ioutil.Discard, "", 0)
	}
	return pruneProject(p, sm, pruneLogger)
}
// pruneProject removes unused packages from a project.
//
// It re-materializes a pruned vendor tree in a temp directory, deletes any
// directories there that no locked package needs, and then swaps the result
// into place, keeping the old vendor dir as a backup until the swap succeeds.
func pruneProject(p *dep.Project, sm gps.SourceManager, logger *log.Logger) error {
	td, err := ioutil.TempDir(os.TempDir(), "dep")
	if err != nil {
		return errors.Wrap(err, "error while creating temp dir for writing manifest/lock/vendor")
	}
	defer os.RemoveAll(td)

	onWrite := func(progress gps.WriteProgress) {
		logger.Println(progress)
	}
	// Write the full dependency tree from the lock into the temp dir.
	if err := gps.WriteDepTree(td, p.Lock, sm, gps.CascadingPruneOptions{DefaultOptions: gps.PruneNestedVendorDirs}, onWrite); err != nil {
		return err
	}

	// Every package recorded in the lock must survive the prune.
	var toKeep []string
	for _, project := range p.Lock.Projects() {
		projectRoot := string(project.Ident().ProjectRoot)
		for _, pkg := range project.Packages() {
			toKeep = append(toKeep, filepath.Join(projectRoot, pkg))
		}
	}

	toDelete, err := calculatePrune(td, toKeep, logger)
	if err != nil {
		return err
	}

	if len(toDelete) > 0 {
		logger.Println("Calculated the following directories to prune:")
		for _, d := range toDelete {
			logger.Printf("  %s\n", d)
		}
	} else {
		logger.Println("No directories found to prune")
	}

	if err := deleteDirs(toDelete); err != nil {
		return err
	}

	vpath := filepath.Join(p.AbsRoot, "vendor")
	vendorbak := vpath + ".orig"
	var failerr error
	if _, err := os.Stat(vpath); err == nil {
		// Move out the old vendor dir. just do it into an adjacent dir, to
		// try to mitigate the possibility of a pointless cross-filesystem
		// move with a temp directory.
		if _, err := os.Stat(vendorbak); err == nil {
			// If the adjacent dir already exists, bite the bullet and move
			// to a proper tempdir.
			vendorbak = filepath.Join(td, "vendor.orig")
		}
		failerr = fs.RenameWithFallback(vpath, vendorbak)
		if failerr != nil {
			goto fail
		}
	}

	// Move in the new one.
	failerr = fs.RenameWithFallback(td, vpath)
	if failerr != nil {
		goto fail
	}

	// Success: the backup of the old vendor dir is no longer needed.
	os.RemoveAll(vendorbak)

	return nil

fail:
	// Restore the original vendor dir before reporting the failure.
	fs.RenameWithFallback(vendorbak, vpath)
	return failerr
}
// calculatePrune walks vendorDir and returns the directories that are not
// needed by any entry in keep. keep holds vendor-relative package paths
// (e.g. "github.com/foo/bar/baz") that must survive the prune.
func calculatePrune(vendorDir string, keep []string, logger *log.Logger) ([]string, error) {
	logger.Println("Calculating prune. Checking the following packages:")
	// keep must be sorted for the binary search (sort.Search) below.
	sort.Strings(keep)
	var toDelete []string
	err := filepath.Walk(vendorDir, func(path string, info os.FileInfo, err error) error {
		// Skip entries that vanished mid-walk.
		if _, err := os.Lstat(path); err != nil {
			return nil
		}
		if !info.IsDir() {
			return nil
		}
		if path == vendorDir {
			return nil
		}

		// Express the directory relative to the vendor root so it can be
		// compared against the entries in keep.
		name := strings.TrimPrefix(path, vendorDir+string(filepath.Separator))
		logger.Printf("  %s", name)
		// Find the first kept package sorting at or after this directory;
		// the directory is needed iff that package lives at or beneath it.
		i := sort.Search(len(keep), func(i int) bool {
			return name <= keep[i]
		})
		// NOTE(review): HasPrefix is a plain string prefix test, so a kept
		// "foo/barbaz" would also preserve directory "foo/bar" even though it
		// is not an ancestor — presumably acceptable here; confirm if exact
		// path-segment matching is required.
		if i >= len(keep) || !strings.HasPrefix(keep[i], name) {
			toDelete = append(toDelete, path)
		}
		return nil
	})
	return toDelete, err
}
// deleteDirs removes every path in toDelete, deepest (longest) paths first so
// that subdirectories are gone before their parents are attempted.
func deleteDirs(toDelete []string) error {
	// Sort by descending path length so children precede parents.
	// sort.Slice replaces the hand-rolled sort.Interface implementation
	// (byLen) with the idiomatic stdlib form.
	sort.Slice(toDelete, func(i, j int) bool {
		return len(toDelete[i]) > len(toDelete[j])
	})

	for _, path := range toDelete {
		if err := os.RemoveAll(path); err != nil {
			return err
		}
	}
	return nil
}
// byLen sorts strings by descending length. It is used to order directory
// paths so that deeper (longer) paths are deleted before their parents.
type byLen []string

func (a byLen) Len() int      { return len(a) }
func (a byLen) Swap(i, j int) { a[i], a[j] = a[j], a[i] }

// Less reports longer strings first, yielding descending-length order.
func (a byLen) Less(i, j int) bool { return len(a[i]) > len(a[j]) }
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/failures.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"context"
"github.com/golang/dep/gps"
"github.com/pkg/errors"
)
// TODO solve failures can be really creative - we need to be similarly creative
// in handling them and informing the user appropriately

// handleAllTheFailuresOfTheWorld translates solver failures into user-facing
// errors. Cancellation-style causes are swallowed; anything else is wrapped.
func handleAllTheFailuresOfTheWorld(err error) error {
	cause := errors.Cause(err)
	if cause == context.Canceled || cause == context.DeadlineExceeded || cause == gps.ErrSourceManagerIsReleased {
		return nil
	}
	return errors.Wrap(err, "Solving failure")
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/gopath_scanner_test.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"io/ioutil"
"log"
"reflect"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/internal/test"
)
// testProject1 and testProject2 are the fixture import paths used by the
// gopath scanner tests below.
const testProject1 = "github.com/sdboyer/deptest"
const testProject2 = "github.com/sdboyer/deptestdos"
// NewTestContext creates a unique context with its own GOPATH for a single test.
func NewTestContext(h *test.Helper) *dep.Ctx {
	h.TempDir("src")
	root := h.Path(".")
	quiet := log.New(ioutil.Discard, "", 0)
	return &dep.Ctx{
		GOPATH: root,
		Out:    quiet,
		Err:    quiet,
	}
}
// TestGopathScanner_OverlayManifestConstraints verifies that overlay merges
// GOPATH-derived constraints into the root manifest: constraints already in
// the root manifest win, while missing ones are adopted from GOPATH.
func TestGopathScanner_OverlayManifestConstraints(t *testing.T) {
	h := test.NewHelper(t)
	h.Parallel()
	defer h.Cleanup()

	ctx := NewTestContext(h)

	pi1 := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(testProject1)}
	pi2 := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(testProject2)}
	v1 := gps.NewVersion("v1.0.0")
	v2 := gps.NewVersion("v2.0.0")
	v3 := gps.NewVersion("v3.0.0")

	// Root manifest pins project 1 to v1; the GOPATH-derived manifest pins it
	// to v2 and additionally pins project 2 to v3.
	rootM := dep.NewManifest()
	rootM.Constraints[pi1.ProjectRoot] = gps.ProjectProperties{Constraint: v1}
	rootL := &dep.Lock{}

	origM := dep.NewManifest()
	origM.Constraints[pi1.ProjectRoot] = gps.ProjectProperties{Constraint: v2}
	origM.Constraints[pi2.ProjectRoot] = gps.ProjectProperties{Constraint: v3}

	gs := gopathScanner{
		origM: origM,
		origL: &dep.Lock{},
		ctx:   ctx,
		pd: projectData{
			ondisk: map[gps.ProjectRoot]gps.Version{
				pi1.ProjectRoot: v2,
				pi2.ProjectRoot: v3,
			},
		},
	}

	gs.overlay(rootM, rootL)

	// Was previously named "dep", shadowing the imported github.com/golang/dep
	// package; renamed to props to remove the shadow.
	props, has := rootM.Constraints[pi1.ProjectRoot]
	if !has {
		t.Fatalf("Expected the root manifest to contain %s", pi1.ProjectRoot)
	}
	wantC := v1.String()
	gotC := props.Constraint.String()
	if wantC != gotC {
		t.Fatalf("Expected %s to be constrained to '%s', got '%s'", pi1.ProjectRoot, wantC, gotC)
	}

	props, has = rootM.Constraints[pi2.ProjectRoot]
	if !has {
		t.Fatalf("Expected the root manifest to contain %s", pi2.ProjectRoot)
	}
	wantC = v3.String()
	gotC = props.Constraint.String()
	if wantC != gotC {
		t.Fatalf("Expected %s to be constrained to '%s', got '%s'", pi2.ProjectRoot, wantC, gotC)
	}
}
// TestGopathScanner_OverlayLockProjects checks that overlay leaves existing
// lock entries alone and appends lock entries for newly discovered projects.
func TestGopathScanner_OverlayLockProjects(t *testing.T) {
	h := test.NewHelper(t)
	h.Parallel()
	defer h.Cleanup()

	ctx := NewTestContext(h)
	rootM := dep.NewManifest()

	pi1 := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(testProject1)}
	pi2 := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(testProject2)}
	v1 := gps.NewVersion("v1.0.0")
	v2 := gps.NewVersion("v2.0.0")
	v3 := gps.NewVersion("v3.0.0")
	rootL := &dep.Lock{
		P: []gps.LockedProject{gps.NewLockedProject(pi1, v1, []string{})},
	}

	gs := gopathScanner{
		origM: dep.NewManifest(),
		origL: &dep.Lock{
			P: []gps.LockedProject{
				gps.NewLockedProject(pi1, v2, []string{}), // ignored, already exists in lock
				gps.NewLockedProject(pi2, v3, []string{}), // should be added to the lock
			},
		},
		ctx: ctx,
		pd: projectData{
			ondisk: map[gps.ProjectRoot]gps.Version{
				pi1.ProjectRoot: v2,
				pi2.ProjectRoot: v3,
			},
		},
	}

	gs.overlay(rootM, rootL)

	// Failure message fixed: the assertions below inspect the root lock's
	// projects, not the root manifest's packages.
	if len(rootL.P) != 2 {
		t.Fatalf("Expected the root lock to contain 2 projects, got %d", len(rootL.P))
	}
	if rootL.P[0].Version() != v1 {
		t.Fatalf("Expected %s to be locked to '%s', got '%s'", rootL.P[0].Ident().ProjectRoot, v1, rootL.P[0].Version())
	}
	if rootL.P[1].Version() != v3 {
		t.Fatalf("Expected %s to be locked to '%s', got '%s'", rootL.P[1].Ident().ProjectRoot, v3, rootL.P[1].Version())
	}
}
// TestContains exercises the contains helper with a present and an absent key.
func TestContains(t *testing.T) {
	t.Parallel()

	haystack := []string{"a", "b", "abcd"}

	if found := contains(haystack, "a"); !found {
		t.Fatal("expected array to contain 'a'")
	}
	if found := contains(haystack, "d"); found {
		t.Fatal("expected array to not contain 'd'")
	}
}
// TestGetProjectPropertiesFromVersion table-tests the constraint derived from
// each flavor of gps.Version (branch, plain version, semver, revision, and
// their revision-paired forms).
func TestGetProjectPropertiesFromVersion(t *testing.T) {
	t.Parallel()

	wantSemver, _ := gps.NewSemverConstraintIC("v1.0.0")
	cases := []struct {
		version, want gps.Constraint
	}{
		{gps.NewBranch("foo-branch"), gps.NewBranch("foo-branch")},
		{gps.NewVersion("foo-version"), gps.NewVersion("foo-version")},
		{gps.NewVersion("v1.0.0"), wantSemver},
		{gps.NewBranch("foo-branch").Pair("some-revision"), gps.NewBranch("foo-branch")},
		{gps.NewVersion("foo-version").Pair("some-revision"), gps.NewVersion("foo-version")},
		{gps.Revision("some-revision"), nil},
		{gps.NewVersion("v1.0.0").Pair("some-revision"), wantSemver},
	}

	for _, c := range cases {
		got := getProjectPropertiesFromVersion(c.version.(gps.Version)).Constraint
		if !reflect.DeepEqual(c.want, got) {
			t.Fatalf("Constraints are not as expected: \n\t(GOT) %v\n\t(WNT) %v", got, c.want)
		}
	}
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/main.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate ./mkdoc.sh
package main
import (
"bytes"
"flag"
"fmt"
"io"
"log"
"os"
"path/filepath"
"runtime"
"runtime/pprof"
"strings"
"text/tabwriter"
"time"
"github.com/golang/dep"
"github.com/golang/dep/internal/fs"
)
// Process exit codes returned to the OS by main via Config.Run.
var (
	successExitCode = 0
	errorExitCode   = 1
)
// command is the interface every dep subcommand implements; Config.Run
// dispatches to the entry in commandList whose Name matches the CLI argument.
type command interface {
	Name() string           // "foobar"
	Args() string           // "<baz> [quux...]"
	ShortHelp() string      // "Foo the first bar"
	LongHelp() string       // "Foo the first bar meeting the following conditions..."
	Register(*flag.FlagSet) // command-specific flags
	Hidden() bool           // indicates whether the command should be hidden from help output
	Run(*dep.Ctx, []string) error
}
// silentfail is a helper type so that commands can fail without generating
// any additional output; Config.Run skips logging when it sees this type.
type silentfail struct{}

// Error implements error with an intentionally empty message.
func (silentfail) Error() string {
	return ""
}
// main parses the global (profiling) flags, packages process-level state into
// a Config, and runs it, exiting with the code Config.Run returns.
func main() {
	p := &profile{}

	// Redefining Usage() customizes the output of `dep -h`
	flag.CommandLine.Usage = func() {
		fprintUsage(os.Stderr)
	}
	flag.StringVar(&p.cpuProfile, "cpuprofile", "", "Writes a CPU profile to the specified file before exiting.")
	flag.StringVar(&p.memProfile, "memprofile", "", "Writes a memory profile to the specified file before exiting.")
	flag.IntVar(&p.memProfileRate, "memprofilerate", 0, "Enable more precise memory profiles by setting runtime.MemProfileRate.")
	flag.StringVar(&p.mutexProfile, "mutexprofile", "", "Writes a mutex profile to the specified file before exiting.")
	flag.IntVar(&p.mutexProfileFraction, "mutexprofilefraction", 0, "Enable more precise mutex profiles by runtime.SetMutexProfileFraction.")
	flag.Parse()

	wd, err := os.Getwd()
	if err != nil {
		fmt.Fprintln(os.Stderr, "failed to get working directory", err)
		// Use the named exit code instead of a bare 1, consistent with
		// Config.Run and the declarations above.
		os.Exit(errorExitCode)
	}

	// Rebuild an argv for Config: the program name plus whatever remains
	// after the global profiling flags were consumed.
	args := append([]string{os.Args[0]}, flag.Args()...)
	c := &Config{
		Args:       args,
		Stdout:     os.Stdout,
		Stderr:     os.Stderr,
		WorkingDir: wd,
		Env:        os.Environ(),
	}

	if err := p.start(); err != nil {
		fmt.Fprintf(os.Stderr, "failed to profile: %v\n", err)
		os.Exit(errorExitCode)
	}

	exit := c.Run()

	// Flush/close any active profile before exiting so the output file is
	// complete.
	if err := p.finish(); err != nil {
		fmt.Fprintf(os.Stderr, "failed to finish the profile: %v\n", err)
		os.Exit(errorExitCode)
	}
	os.Exit(exit)
}
// A Config specifies a full configuration for a dep execution.
//
// It decouples Run from process-global state (os.Args, os.Environ, the
// standard streams), which lets tests drive dep in-process (see runMain).
type Config struct {
	WorkingDir     string    // Where to execute
	Args           []string  // Command-line arguments, starting with the program name.
	Env            []string  // Environment variables
	Stdout, Stderr io.Writer // Log output
}
// Run executes a configuration and returns an exit code.
func (c *Config) Run() int {
	commands := commandList()

	// Determine the requested subcommand and whether this is a help request.
	cmdName, printCommandHelp, exit := parseArgs(c.Args)
	if exit {
		fprintUsage(c.Stderr)
		return errorExitCode
	}

	// 'dep help documentation' generates doc.go.
	if printCommandHelp && cmdName == "documentation" {
		fmt.Println("// Copyright 2017 The Go Authors. All rights reserved.")
		fmt.Println("// Use of this source code is governed by a BSD-style")
		fmt.Println("// license that can be found in the LICENSE file.")
		fmt.Println()
		fmt.Println("// DO NOT EDIT THIS FILE. GENERATED BY mkdoc.sh.")
		fmt.Println("// Edit the documentation in other files and rerun mkdoc.sh to generate this one.")
		fmt.Println()
		// Everything written through cw comes out as a "//" comment block,
		// forming the package comment of the generated doc.go.
		var cw io.Writer = &commentWriter{W: c.Stdout}
		fprintUsage(cw)
		for _, cmd := range commands {
			if !cmd.Hidden() {
				fmt.Fprintln(cw)
				short := cmd.ShortHelp()
				fmt.Fprintln(cw, short)
				fmt.Fprintln(cw)
				fmt.Fprintln(cw, "Usage:")
				fmt.Fprintln(cw)
				fmt.Fprintln(cw, "", cmd.Name(), cmd.Args())
				// Skip the long help when it adds nothing over the short help.
				if long := cmd.LongHelp(); long != short {
					fmt.Fprintln(cw, long)
				}
			}
		}
		fmt.Println("//")
		fmt.Println("package main")
		return successExitCode
	}

	outLogger := log.New(c.Stdout, "", 0)
	errLogger := log.New(c.Stderr, "", 0)

	for _, cmd := range commands {
		if cmd.Name() == cmdName {
			// Build flag set with global flags in there.
			flags := flag.NewFlagSet(cmdName, flag.ContinueOnError)
			flags.SetOutput(c.Stderr)
			var verbose bool
			// No verbose for verify
			if cmdName != "check" {
				flags.BoolVar(&verbose, "v", false, "enable verbose logging")
			}

			// Register the subcommand flags in there, too.
			cmd.Register(flags)

			// Override the usage text to something nicer.
			resetUsage(errLogger, flags, cmdName, cmd.Args(), cmd.LongHelp())

			if printCommandHelp {
				flags.Usage()
				return errorExitCode
			}

			// Parse the flags the user gave us.
			// flag package automatically prints usage and error message in err != nil
			// or if '-h' flag provided
			if err := flags.Parse(c.Args[2:]); err != nil {
				return errorExitCode
			}

			// Cachedir is loaded from env if present. `$GOPATH/pkg/dep` is used as the
			// default cache location.
			cachedir := getEnv(c.Env, "DEPCACHEDIR")
			if cachedir != "" {
				if err := fs.EnsureDir(cachedir, 0777); err != nil {
					errLogger.Printf(
						"dep: $DEPCACHEDIR set to an invalid or inaccessible path: %q\n", cachedir,
					)
					errLogger.Printf("dep: failed to ensure cache directory: %v\n", err)
					return errorExitCode
				}
			}

			// DEPCACHEAGE, when set, is parsed as a Go duration and passed
			// through to the dep context.
			var cacheAge time.Duration
			if env := getEnv(c.Env, "DEPCACHEAGE"); env != "" {
				var err error
				cacheAge, err = time.ParseDuration(env)
				if err != nil {
					errLogger.Printf("dep: failed to parse $DEPCACHEAGE duration %q: %v\n", env, err)
					return errorExitCode
				}
			}

			// Set up dep context.
			ctx := &dep.Ctx{
				Out:            outLogger,
				Err:            errLogger,
				Verbose:        verbose,
				DisableLocking: getEnv(c.Env, "DEPNOLOCK") != "",
				Cachedir:       cachedir,
				CacheAge:       cacheAge,
			}

			GOPATHS := filepath.SplitList(getEnv(c.Env, "GOPATH"))
			ctx.SetPaths(c.WorkingDir, GOPATHS...)

			// Run the command with the post-flag-processing args.
			if err := cmd.Run(ctx, flags.Args()); err != nil {
				// A silentfail error means the command already reported its
				// own failure; don't log the empty message.
				if _, ok := err.(silentfail); !ok {
					errLogger.Printf("%v\n", err)
				}
				return errorExitCode
			}

			// Easy peasy livin' breezy.
			return successExitCode
		}
	}

	errLogger.Printf("dep: %s: no such command\n", cmdName)
	fprintUsage(c.Stderr)
	return errorExitCode
}
// commandList builds the list of available commands.
//
// Note that these commands are mutable, but parts of this file
// use them for their immutable characteristics (help strings, etc).
func commandList() []command {
	cmds := make([]command, 0, 6)
	cmds = append(cmds,
		&initCommand{},
		&statusCommand{},
		&ensureCommand{},
		&pruneCommand{},
		&versionCommand{},
		&checkCommand{},
	)
	return cmds
}
// examples pairs a sample invocation with a one-line description; fprintUsage
// renders them at the bottom of the top-level help output.
var examples = [...][2]string{
	{
		"dep init",
		"set up a new project",
	},
	{
		"dep ensure",
		"install the project's dependencies",
	},
	{
		"dep ensure -update",
		"update the locked versions of all dependencies",
	},
	{
		"dep ensure -add github.com/pkg/errors",
		"add a dependency to the project",
	},
}
// fprintUsage writes the top-level dep help text (commands table and
// examples) to w, tab-aligning both tables with a shared tabwriter.
func fprintUsage(w io.Writer) {
	fmt.Fprintln(w, "Dep is a tool for managing dependencies for Go projects")
	fmt.Fprintln(w)
	fmt.Fprintln(w, "Usage: \"dep [command]\"")
	fmt.Fprintln(w)
	fmt.Fprintln(w, "Commands:")
	fmt.Fprintln(w)

	tw := tabwriter.NewWriter(w, 0, 0, 2, ' ', 0)
	for _, cmd := range commandList() {
		if cmd.Hidden() {
			continue
		}
		fmt.Fprintf(tw, "\t%s\t%s\n", cmd.Name(), cmd.ShortHelp())
	}
	tw.Flush()

	fmt.Fprintln(w)
	fmt.Fprintln(w, "Examples:")
	for _, ex := range examples {
		fmt.Fprintf(tw, "\t%s\t%s\n", ex[0], ex[1])
	}
	tw.Flush()

	fmt.Fprintln(w)
	fmt.Fprintln(w, "Use \"dep help [command]\" for more information about a command.")
}
func resetUsage(logger *log.Logger, fs *flag.FlagSet, name, args, longHelp string) {
var (
hasFlags bool
flagBlock bytes.Buffer
flagWriter = tabwriter.NewWriter(&flagBlock, 0, 4, 2, ' ', 0)
)
fs.VisitAll(func(f *flag.Flag) {
hasFlags = true
// Default-empty string vars should read "(default: <none>)"
// rather than the comparatively ugly "(default: )".
defValue := f.DefValue
if defValue == "" {
defValue = "<none>"
}
fmt.Fprintf(flagWriter, "\t-%s\t%s (default: %s)\n", f.Name, f.Usage, defValue)
})
flagWriter.Flush()
fs.Usage = func() {
logger.Printf("Usage: dep %s %s\n", name, args)
logger.Println()
logger.Println(strings.TrimSpace(longHelp))
logger.Println()
if hasFlags {
logger.Println("Flags:")
logger.Println()
logger.Println(flagBlock.String())
}
}
}
// parseArgs determines the name of the dep command and whether the user asked for
// help to be printed.
func parseArgs(args []string) (cmdName string, printCmdUsage bool, exit bool) {
	wantsHelp := func() bool {
		arg := strings.ToLower(args[1])
		return strings.Contains(arg, "help") || arg == "-h"
	}

	switch {
	case len(args) < 2:
		// No subcommand at all: caller prints top-level usage and exits.
		exit = true
	case len(args) == 2:
		if wantsHelp() {
			exit = true
		} else {
			cmdName = args[1]
		}
	case wantsHelp():
		// "dep help <cmd>": show <cmd>'s usage.
		cmdName = args[2]
		printCmdUsage = true
	default:
		cmdName = args[1]
	}
	return cmdName, printCmdUsage, exit
}
// getEnv returns the last instance of an environment variable.
func getEnv(env []string, key string) string {
	// Scan from the end so that later duplicates win.
	for i := len(env) - 1; i >= 0; i-- {
		kv := strings.SplitN(env[i], "=", 2)
		if kv[0] != key {
			continue
		}
		// An entry with no "=" yields an empty value.
		if len(kv) < 2 {
			return ""
		}
		return kv[1]
	}
	return ""
}
// commentWriter writes a Go comment to the underlying io.Writer,
// using line comment form (//).
//
// Copied from cmd/go/internal/help/help.go.
type commentWriter struct {
W io.Writer
wroteSlashes bool // Wrote "//" at the beginning of the current line.
}
func (c *commentWriter) Write(p []byte) (int, error) {
var n int
for i, b := range p {
if !c.wroteSlashes {
s := "//"
if b != '\n' {
s = "// "
}
if _, err := io.WriteString(c.W, s); err != nil {
return n, err
}
c.wroteSlashes = true
}
n0, err := c.W.Write(p[i : i+1])
n += n0
if err != nil {
return n, err
}
if b == '\n' {
c.wroteSlashes = false
}
}
return len(p), nil
}
type profile struct {
cpuProfile string
memProfile string
memProfileRate int
mutexProfile string
mutexProfileFraction int
// TODO(jbd): Add block profile and -trace.
f *os.File // file to write the profiling output to
}
func (p *profile) start() error {
switch {
case p.cpuProfile != "":
if err := p.createOutput(p.cpuProfile); err != nil {
return err
}
return pprof.StartCPUProfile(p.f)
case p.memProfile != "":
if p.memProfileRate > 0 {
runtime.MemProfileRate = p.memProfileRate
}
return p.createOutput(p.memProfile)
case p.mutexProfile != "":
if p.mutexProfileFraction > 0 {
runtime.SetMutexProfileFraction(p.mutexProfileFraction)
}
return p.createOutput(p.mutexProfile)
}
return nil
}
func (p *profile) finish() error {
if p.f == nil {
return nil
}
switch {
case p.cpuProfile != "":
pprof.StopCPUProfile()
case p.memProfile != "":
if err := pprof.WriteHeapProfile(p.f); err != nil {
return err
}
case p.mutexProfile != "":
if err := pprof.Lookup("mutex").WriteTo(p.f, 2); err != nil {
return err
}
}
return p.f.Close()
}
func (p *profile) createOutput(name string) error {
f, err := os.Create(name)
if err != nil {
return err
}
p.f = f
return nil
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/mkdoc.sh
|
#!/bin/bash
# Copyright 2017 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
set -e

# Build the current dep binary and use its own 'help documentation' output
# to regenerate doc.go.
go build -o dep.latest
./dep.latest help documentation >doc.go
# Normalize formatting of the generated file, then drop the temporary binary.
gofmt -w doc.go
rm dep.latest
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/integration_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"fmt"
"io"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/internal/test"
"github.com/golang/dep/internal/test/integration"
)
// TestIntegration discovers every testcase.json under testdata/harness_tests
// and runs each harness case both externally (subprocess) and internally
// (in-process) as parallel subtests.
func TestIntegration(t *testing.T) {
	t.Parallel()
	test.NeedsExternalNetwork(t)
	test.NeedsGit(t)

	wd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}

	relPath := filepath.Join("testdata", "harness_tests")
	// The walk error and filepath.Walk's return value were previously
	// discarded; both are now surfaced.
	err = filepath.Walk(relPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			t.Fatalf("error walking filepath: %v", err)
		}

		if filepath.Base(path) != "testcase.json" {
			return nil
		}

		// Derive the subtest name from the path segments below testdata/.
		parse := strings.Split(path, string(filepath.Separator))
		testName := strings.Join(parse[2:len(parse)-1], "/")
		t.Run(testName, func(t *testing.T) {
			t.Parallel()

			t.Run("external", testIntegration(testName, relPath, wd, execCmd))
			t.Run("internal", testIntegration(testName, relPath, wd, runMain))
		})
		return nil
	})
	if err != nil {
		t.Fatalf("walking %s: %v", relPath, err)
	}
}
// TestDepCachedir verifies that `dep ensure` fails with a clear message when
// $DEPCACHEDIR points at an unusable location.
func TestDepCachedir(t *testing.T) {
	if runtime.GOOS == "windows" {
		// This test is unreliable on Windows and fails at random which makes it very
		// difficult to debug. It might have something to do with parallel execution.
		// Since the test doesn't test any specific behavior of Windows, it should be okay
		// to skip.
		t.Skip("skipping on windows")
	}
	t.Parallel()

	test.NeedsExternalNetwork(t)
	test.NeedsGit(t)

	wd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	initPath := filepath.Join("testdata", "cachedir")

	t.Run("env-invalid-cachedir", func(t *testing.T) {
		t.Parallel()

		testProj := integration.NewTestProject(t, initPath, wd, runMain)
		defer testProj.Cleanup()

		// Create an empty regular file; it doubles as an invalid cache dir
		// and as an invalid parent directory in the cases below.
		var d []byte
		tmpFp := testProj.Path("tmp-file")
		// This error was previously ignored; a failed write would make the
		// cases below fail for the wrong reason.
		if err := ioutil.WriteFile(tmpFp, d, 0644); err != nil {
			t.Fatal(err)
		}

		cases := []string{
			// invalid path
			"\000",
			// parent directory does not exist
			testProj.Path("non-existent-fldr", "cachedir"),
			// path is a regular file
			tmpFp,
			// invalid path, tmp-file is a regular file
			testProj.Path("tmp-file", "cachedir"),
		}
		wantErr := "dep: $DEPCACHEDIR set to an invalid or inaccessible path"
		for _, c := range cases {
			testProj.Setenv("DEPCACHEDIR", c)

			err = testProj.DoRun([]string{"ensure"})
			if err == nil {
				// Log the output from running `dep ensure`, could be useful.
				t.Logf("test run output: \n%s\n%s", testProj.GetStdout(), testProj.GetStderr())
				t.Error("unexpected result: \n\t(GOT) nil\n\t(WNT) exit status 1")
			} else if stderr := testProj.GetStderr(); !strings.Contains(stderr, wantErr) {
				t.Errorf(
					"unexpected error output: \n\t(GOT) %s\n\t(WNT) %s",
					strings.TrimSpace(stderr), wantErr,
				)
			}
		}
	})
}
// execCmd is a test.RunFunc which runs the program in another process.
func execCmd(prog string, args []string, stdout, stderr io.Writer, dir string, env []string) error {
cmd := exec.Command(prog, args...)
cmd.Stdout = stdout
cmd.Stderr = stderr
cmd.Env = env
cmd.Dir = dir
return cmd.Run()
}
// runMain is a test.RunFunc which runs the program in-process.
func runMain(prog string, args []string, stdout, stderr io.Writer, dir string, env []string) (err error) {
	// Convert a panic from the in-process run into a returned error, the
	// way a crash in the external binary would surface as a failure.
	defer func() {
		r := recover()
		if r == nil {
			return
		}
		if e, ok := r.(error); ok {
			err = e
			return
		}
		err = fmt.Errorf("%v", r)
	}()

	cfg := &Config{
		Args:       append([]string{prog}, args...),
		Stdout:     stdout,
		Stderr:     stderr,
		WorkingDir: dir,
		Env:        env,
	}
	if exitCode := cfg.Run(); exitCode != 0 {
		err = fmt.Errorf("exit status %d", exitCode)
	}
	return
}
// testIntegration runs the test specified by <wd>/<relPath>/<name>/testcase.json
func testIntegration(name, relPath, wd string, run integration.RunFunc) func(t *testing.T) {
	return func(t *testing.T) {
		t.Parallel()

		// Load the harness description for this case.
		testCase := integration.NewTestCase(t, filepath.Join(wd, relPath), name)

		// Skip tests for disabled features
		if testCase.RequiredFeatureFlag != "" {
			featureEnabled, err := readFeatureFlag(testCase.RequiredFeatureFlag)
			if err != nil {
				t.Fatal(err)
			}

			if !featureEnabled {
				t.Skipf("skipping %s, %s feature flag not enabled", name, testCase.RequiredFeatureFlag)
			}
		}

		// Set up environment
		testProj := integration.NewTestProject(t, testCase.InitialPath(), wd, run)
		defer testProj.Cleanup()

		// Create and checkout the vendor revisions
		for ip, rev := range testCase.VendorInitial {
			testProj.GetVendorGit(ip)
			testProj.RunGit(testProj.VendorPath(ip), "checkout", rev)
		}

		// Create and checkout the import revisions
		for ip, rev := range testCase.GopathInitial {
			testProj.RunGo("get", ip)
			testProj.RunGit(testProj.Path("src", ip), "checkout", rev)
		}

		// Run commands
		testProj.RecordImportPaths()

		var err error
		for i, args := range testCase.Commands {
			err = testProj.DoRun(args)
			// Only the final command may fail; an earlier failure aborts
			// the case immediately.
			if err != nil && i < len(testCase.Commands)-1 {
				t.Fatalf("cmd %s raised an unexpected error: %s", args[0], err.Error())
			}
		}
		if err != nil {
			t.Log(err)
		}

		// Check error raised in final command
		testCase.CompareCmdFailure(err != nil)
		testCase.CompareError(err, testProj.GetStderr())

		// In golden-update mode, record the observed output instead of
		// comparing against the stored golden files.
		if *test.UpdateGolden {
			testCase.UpdateOutput(testProj.GetStdout())
		} else {
			// Check output
			testCase.CompareOutput(testProj.GetStdout())
		}

		// Check vendor paths
		testProj.CompareImportPaths()
		testCase.CompareVendorPaths(testProj.GetVendorPaths())

		if *test.UpdateGolden {
			// Update manifest and lock
			testCase.UpdateFile(dep.ManifestName, testProj.ProjPath(dep.ManifestName))
			testCase.UpdateFile(dep.LockName, testProj.ProjPath(dep.LockName))
		} else {
			// Check final manifest and lock
			testCase.CompareFile(dep.ManifestName, testProj.ProjPath(dep.ManifestName))
			testCase.CompareFile(dep.LockName, testProj.ProjPath(dep.LockName))
		}
	}
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/dep_test.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"testing"
"github.com/golang/dep/internal/test"
)
// The TestMain function creates a dep command for testing purposes and
// deletes it after the tests have been run.
// Most of this is taken from https://github.com/golang/go/blob/master/src/cmd/go/go_test.go and reused here.
func TestMain(m *testing.M) {
	// Build the dep binary that the "external" integration tests invoke.
	args := []string{"build", "-o", "testdep" + test.ExeSuffix}
	out, err := exec.Command("go", args...).CombinedOutput()
	if err != nil {
		fmt.Fprintf(os.Stderr, "building testdep failed: %v\n%s", err, out)
		os.Exit(2)
	}

	// Don't let these environment variables confuse the test.
	os.Unsetenv("GOPATH")
	os.Unsetenv("GIT_ALLOW_PROTOCOL")
	if home, ccacheDir := os.Getenv("HOME"), os.Getenv("CCACHE_DIR"); home != "" && ccacheDir == "" {
		// On some systems the default C compiler is ccache.
		// Setting HOME to a non-existent directory will break
		// those systems. Set CCACHE_DIR to cope. Issue 17668.
		os.Setenv("CCACHE_DIR", filepath.Join(home, ".ccache"))
	}
	// Point HOME at a path that cannot exist so tests never read the
	// developer's real configuration.
	os.Setenv("HOME", "/test-dep-home-does-not-exist")

	// Run the suite, remove the built binary, and propagate the exit code.
	r := m.Run()
	os.Remove("testdep" + test.ExeSuffix)
	os.Exit(r)
}
|
dep
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/ensure_test.go
|
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"errors"
"go/build"
"io/ioutil"
"log"
"strings"
"testing"
"github.com/golang/dep"
"github.com/golang/dep/gps"
"github.com/golang/dep/gps/pkgtree"
"github.com/golang/dep/internal/test"
)
// TestInvalidEnsureFlagCombinations walks one ensureCommand through a series
// of mutually-exclusive flag combinations. Each step mutates only the flags
// it needs, so the ordering of the assignments below matters.
func TestInvalidEnsureFlagCombinations(t *testing.T) {
	// State: update=true, add=true.
	ec := &ensureCommand{
		update: true,
		add:    true,
	}

	if err := ec.validateFlags(); err == nil {
		t.Error("-add and -update together should fail validation")
	}

	// State: update=true, vendorOnly=true, add=false.
	ec.vendorOnly, ec.add = true, false
	if err := ec.validateFlags(); err == nil {
		t.Error("-vendor-only with -update should fail validation")
	}

	// State: add=true, update=false; vendorOnly is still true.
	ec.add, ec.update = true, false
	if err := ec.validateFlags(); err == nil {
		t.Error("-vendor-only with -add should fail validation")
	}

	// State: noVendor=true, add=false; vendorOnly is still true.
	ec.noVendor, ec.add = true, false
	if err := ec.validateFlags(); err == nil {
		t.Error("-vendor-only with -no-vendor should fail validation")
	}
	ec.noVendor = false

	// Also verify that the plain ensure path takes no args. This is a shady
	// test, as lots of other things COULD return errors, and we don't check
	// anything other than the error being non-nil. For now, it works well
	// because a panic will quickly result if the initial arg length validation
	// checks are incorrectly handled.
	if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}); err == nil {
		t.Errorf("no args to plain ensure with -vendor-only")
	}
	ec.vendorOnly = false
	if err := ec.runDefault(nil, []string{"foo"}, nil, nil, gps.SolveParameters{}); err == nil {
		t.Errorf("no args to plain ensure")
	}
}
// TestCheckErrors table-tests checkErrors' fatality classification. Per the
// fixtures, the result is fatal only when every package in the map carries an
// error; a single healthy package makes the error set non-fatal.
func TestCheckErrors(t *testing.T) {
	tt := []struct {
		name        string
		fatal       bool
		pkgOrErrMap map[string]pkgtree.PackageOrErr
	}{
		{
			name:  "noErrors",
			fatal: false,
			pkgOrErrMap: map[string]pkgtree.PackageOrErr{
				"mypkg": {
					P: pkgtree.Package{},
				},
			},
		},
		{
			name:  "hasErrors",
			fatal: true,
			pkgOrErrMap: map[string]pkgtree.PackageOrErr{
				"github.com/me/pkg": {
					Err: &build.NoGoError{},
				},
				"github.com/someone/pkg": {
					Err: errors.New("code is busted"),
				},
			},
		},
		{
			name:  "onlyGoErrors",
			fatal: false,
			pkgOrErrMap: map[string]pkgtree.PackageOrErr{
				"github.com/me/pkg": {
					Err: &build.NoGoError{},
				},
				"github.com/someone/pkg": {
					P: pkgtree.Package{},
				},
			},
		},
		{
			// NOTE(review): this fixture is byte-identical to "onlyGoErrors"
			// above; presumably it was meant to use a different error type —
			// confirm the original intent.
			name:  "onlyBuildErrors",
			fatal: false,
			pkgOrErrMap: map[string]pkgtree.PackageOrErr{
				"github.com/me/pkg": {
					Err: &build.NoGoError{},
				},
				"github.com/someone/pkg": {
					P: pkgtree.Package{},
				},
			},
		},
		{
			name:  "allGoErrors",
			fatal: true,
			pkgOrErrMap: map[string]pkgtree.PackageOrErr{
				"github.com/me/pkg": {
					Err: &build.NoGoError{},
				},
			},
		},
		{
			name:  "allMixedErrors",
			fatal: true,
			pkgOrErrMap: map[string]pkgtree.PackageOrErr{
				"github.com/me/pkg": {
					Err: &build.NoGoError{},
				},
				"github.com/someone/pkg": {
					Err: errors.New("code is busted"),
				},
			},
		},
	}

	for _, tc := range tt {
		t.Run(tc.name, func(t *testing.T) {
			fatal, err := checkErrors(tc.pkgOrErrMap, nil)
			// %t (boolean value) replaces the original %T, which printed
			// the type "bool" for both operands and made failures unreadable.
			if tc.fatal != fatal {
				t.Fatalf("expected fatal flag to be %t, got %t", tc.fatal, fatal)
			}
			if err == nil && fatal {
				t.Fatal("unexpected fatal flag value while err is nil")
			}
		})
	}
}
// TestValidateUpdateArgs exercises validateUpdateArgs against malformed
// `-update` argument shapes, checking both the returned error and the
// warnings written to the context's Err logger.
func TestValidateUpdateArgs(t *testing.T) {
	cases := []struct {
		name           string
		args           []string
		wantError      error
		wantWarn       []string
		lockedProjects []string
	}{
		{
			name:      "empty args",
			args:      []string{},
			wantError: nil,
		},
		{
			name:      "not project root",
			args:      []string{"github.com/golang/dep/cmd"},
			wantError: errUpdateArgsValidation,
			wantWarn: []string{
				"github.com/golang/dep/cmd is not a project root, try github.com/golang/dep instead",
			},
		},
		{
			name:      "not present in lock",
			args:      []string{"github.com/golang/dep"},
			wantError: errUpdateArgsValidation,
			wantWarn: []string{
				"github.com/golang/dep is not present in Gopkg.lock, cannot -update it",
			},
		},
		{
			name:      "cannot specify alternate sources",
			args:      []string{"github.com/golang/dep:github.com/example/dep"},
			wantError: errUpdateArgsValidation,
			wantWarn: []string{
				"cannot specify alternate sources on -update (github.com/example/dep)",
			},
			lockedProjects: []string{"github.com/golang/dep"},
		},
		{
			name:      "version constraint passed",
			args:      []string{"github.com/golang/dep@master"},
			wantError: errUpdateArgsValidation,
			wantWarn: []string{
				"version constraint master passed for github.com/golang/dep, but -update follows constraints declared in Gopkg.toml, not CLI arguments",
			},
			lockedProjects: []string{"github.com/golang/dep"},
		},
		{
			name:      "flags after spec",
			args:      []string{"github.com/golang/dep@master", "-v"},
			wantError: errUpdateArgsValidation,
			wantWarn: []string{
				"could not infer project root from dependency path",
			},
			lockedProjects: []string{"github.com/golang/dep"},
		},
	}

	h := test.NewHelper(t)
	defer h.Cleanup()

	h.TempDir("src")
	pwd := h.Path(".")

	// Warnings go through the context's Err logger; capture them here.
	stderrOutput := &bytes.Buffer{}
	errLogger := log.New(stderrOutput, "", 0)
	ctx := &dep.Ctx{
		GOPATH: pwd,
		Out:    log.New(ioutil.Discard, "", 0),
		Err:    errLogger,
	}

	sm, err := ctx.SourceManager()
	h.Must(err)
	defer sm.Release()

	// Project and params are shared across cases; only the lock varies.
	p := new(dep.Project)
	params := p.MakeParams()

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			// Empty the buffer for every case
			stderrOutput.Reset()

			// Fill up the locked projects
			lockedProjects := make([]gps.LockedProject, 0, len(c.lockedProjects))
			for _, lp := range c.lockedProjects {
				pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(lp)}
				lockedProjects = append(lockedProjects, gps.NewLockedProject(pi, gps.NewVersion("v1.0.0"), []string{}))
			}

			// Add lock to project
			p.Lock = &dep.Lock{P: lockedProjects}

			err := validateUpdateArgs(ctx, c.args, p, sm, &params)
			if err != c.wantError {
				t.Fatalf("Unexpected error while validating update args:\n\t(GOT): %v\n\t(WNT): %v", err, c.wantError)
			}

			warnings := stderrOutput.String()
			for _, warn := range c.wantWarn {
				if !strings.Contains(warnings, warn) {
					t.Fatalf("Expected validateUpdateArgs errors to contain: %q", warn)
				}
			}
		})
	}
}
|
cachedir
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/testdata/cachedir/Gopkg.toml
|
[[constraint]]
name = "github.com/sdboyer/deptest"
version = "1.0.0"
|
cachedir
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/testdata/cachedir/Gopkg.lock
|
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "github.com/sdboyer/deptest"
packages = ["."]
revision = "ff2948a2ac8f538c4ecd55962e919d1e13e74baf"
version = "v1.0.0"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "14b07b05e0f01051b03887ab2bf80b516bc5510ea92f75f76c894b1745d8850c"
solver-name = "gps-cdcl"
solver-version = 1
|
cachedir
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/testdata/cachedir/main.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
_ "github.com/sdboyer/deptest"
)
func main() {
}
|
graphviz
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/testdata/graphviz/case2.dot
|
digraph {
node [shape=box];
4106060478 [label="project"];
}
|
graphviz
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/testdata/graphviz/subgraph3.dot
|
digraph {
node [shape=box];
compound=true;
edge [minlen=2];
1459457741 [label="ProjectA"];
2062426895 [label="ProjectB/pkgX"];
2045649276 [label="ProjectB/pkgY"];
2095982133 [label="ProjectB/pkgZ"];
990902230 [label="ProjectC/pkgX"];
1007679849 [label="ProjectC/pkgY"];
957346992 [label="ProjectC/pkgZ"];
subgraph cluster_0 {
label = "ProjectB";
2062426895 2045649276 2095982133;
}
subgraph cluster_1 {
label = "ProjectC";
990902230 1007679849 957346992;
}
1459457741 -> 990902230;
2045649276 -> 957346992;
2095982133 -> 990902230 [lhead=cluster_1];
2062426895 -> 990902230 [ltail=cluster_0];
2062426895 -> 990902230 [ltail=cluster_0 lhead=cluster_1];
}
|
graphviz
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/testdata/graphviz/subgraph4.dot
|
digraph {
node [shape=box];
compound=true;
edge [minlen=2];
2062426895 [label="ProjectB/pkgX"];
2045649276 [label="ProjectB/pkgY"];
2095982133 [label="ProjectB/pkgZ"];
1459457741 [label="ProjectA"];
subgraph cluster_0 {
label = "ProjectB";
2062426895 2045649276 2095982133;
}
2045649276 -> 1459457741;
2062426895 -> 1459457741 [ltail=cluster_0];
}
|
graphviz
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/testdata/graphviz/subgraph1.dot
|
digraph {
node [shape=box];
compound=true;
edge [minlen=2];
552838292 [label="ProjectA/pkgX"];
569615911 [label="ProjectA/pkgY"];
2062426895 [label="ProjectB/pkgX"];
2045649276 [label="ProjectB/pkgY"];
990902230 [label="ProjectC/pkgX"];
1007679849 [label="ProjectC/pkgY"];
957346992 [label="ProjectC/pkgZ"];
subgraph cluster_0 {
label = "ProjectA";
552838292 569615911;
}
subgraph cluster_1 {
label = "ProjectB";
2062426895 2045649276;
}
subgraph cluster_2 {
label = "ProjectC";
990902230 1007679849 957346992;
}
552838292 -> 957346992;
569615911 -> 990902230;
2045649276 -> 957346992;
}
|
graphviz
|
/home/linuxreitt/Michinereitt/Tuning/Workshop_Scripts/hf-codegen/data/golang_public_repos/dep/cmd/dep/testdata/graphviz/empty.dot
|
digraph {
node [shape=box];
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.