_id
stringlengths
2
7
title
stringlengths
1
118
partition
stringclasses
3 values
text
stringlengths
52
85.5k
language
stringclasses
1 value
meta_information
dict
q12200
AddFlag
train
func (f VerifyFlag) AddFlag(fset *pflag.FlagSet) (interface{}, error) { switch f.Type { case StringFlag: return fset.String(f.Name, "", f.Description), nil case BoolFlag: return fset.Bool(f.Name, false, f.Description), nil default: return nil, errors.Errorf("unrecognized flag type: %v", f.Type) } }
go
{ "resource": "" }
q12201
ConfigVersion
train
func ConfigVersion(in []byte) (string, error) { var cfgWithVersion ConfigWithVersion if err := yaml.Unmarshal(in, &cfgWithVersion); err != nil { return "", errors.Wrapf(err, "failed to unmarshal YAML") } return cfgWithVersion.Version, nil }
go
{ "resource": "" }
q12202
Move
train
func Move(src, dst string) error { if err := verifyDstPathSafe(dst); err != nil { return errors.Wrapf(err, "cannot move directory to path %s", dst) } if err := os.Rename(src, dst); err != nil { return errors.Wrapf(err, "failed to rename %s to %s", src, dst) } return nil }
go
{ "resource": "" }
q12203
CopyDir
train
func CopyDir(src, dst string) error { if err := verifyDstPathSafe(dst); err != nil { return errors.Wrapf(err, "cannot copy directory to path %s", dst) } srcInfo, err := os.Stat(src) if err != nil { return errors.Wrapf(err, "failed to stat source directory %s", src) } if err := os.Mkdir(dst, srcInfo.Mode()); err != nil { return errors.Wrapf(err, "failed to create destination directory %s", dst) } files, err := ioutil.ReadDir(src) if err != nil { return errors.Wrapf(err, "failed to read directory %s", src) } for _, f := range files { srcPath := path.Join(src, f.Name()) dstPath := path.Join(dst, f.Name()) if f.IsDir() { err = CopyDir(srcPath, dstPath) } else { err = CopyFile(srcPath, dstPath) } if err != nil { return errors.Wrapf(err, "failed to copy %s to %s", srcPath, dstPath) } } return nil }
go
{ "resource": "" }
q12204
SyncDirAdditive
train
func SyncDirAdditive(src, dst string) error { srcInfos, err := ioutil.ReadDir(src) if err != nil { return errors.Wrapf(err, "failed to open %s", src) } for _, srcInfo := range srcInfos { srcPath := path.Join(src, srcInfo.Name()) dstPath := path.Join(dst, srcInfo.Name()) if dstInfo, err := os.Stat(dstPath); os.IsNotExist(err) { // safe to copy if srcInfo.IsDir() { err = CopyDir(srcPath, dstPath) } else { err = CopyFile(srcPath, dstPath) } if err != nil { return errors.Wrapf(err, "failed to copy %s to %s", srcPath, dstPath) } } else if err != nil { return errors.Wrapf(err, "failed to stat %s", dstPath) } else if srcInfo.IsDir() && dstInfo.IsDir() { // if source and destination are both directories, sync recursively if err = SyncDirAdditive(srcPath, dstPath); err != nil { return errors.Wrapf(err, "failed to sync %s to %s", srcPath, dstPath) } } } return nil }
go
{ "resource": "" }
q12205
verifyDstPathSafe
train
func verifyDstPathSafe(dst string) error { if _, err := os.Stat(dst); !os.IsNotExist(err) { return errors.Wrapf(err, "destination path %s already exists", dst) } if _, err := os.Stat(path.Dir(dst)); os.IsNotExist(err) { return errors.Wrapf(err, "parent directory of destination path %s does not exist", dst) } return nil }
go
{ "resource": "" }
q12206
Download
train
func Download(pkgSrc PkgSrc, dstFilePath string, w io.Writer) (rErr error) { wantChecksum := pkgSrc.Checksum() if info, err := os.Stat(dstFilePath); err == nil { if info.IsDir() { return errors.Errorf("destination path %s already exists and is a directory", dstFilePath) } if wantChecksum != "" { // if file already exists at destination and checksum is known, verify checksum of existing file. // If it matches, use existing file. checksum, err := computeSHA256Checksum(dstFilePath) if err != nil { return errors.Wrapf(err, "failed to compute checksum of %s", dstFilePath) } if checksum == wantChecksum { return nil } } } // open reader from source r, size, err := pkgSrc.Reader() if err != nil { return err } defer func() { if err := r.Close(); err != nil && rErr == nil { rErr = errors.Wrapf(err, "failed to close reader for %s in defer", pkgSrc.Path()) } }() // create new file for package (overwrite any existing file) dstFile, err := os.Create(dstFilePath) if err != nil { return errors.Wrapf(err, "failed to create file %s", dstFilePath) } defer func() { if err := dstFile.Close(); err != nil && rErr == nil { rErr = errors.Wrapf(err, "failed to close file %s in defer", dstFilePath) } }() h := sha256.New() mw := io.MultiWriter(h, dstFile) _, _ = fmt.Fprintf(w, "Getting package from %v...\n", pkgSrc.Path()) if err := copyWithProgress(mw, r, size, w); err != nil { return errors.Wrapf(err, "failed to copy package %s to %s", pkgSrc.Path(), dstFilePath) } // verify checksum if provided if wantChecksum != "" { actualChecksum := hex.EncodeToString(h.Sum(nil)) if wantChecksum != actualChecksum { return errors.Errorf("SHA-256 checksum of downloaded package did not match expected checksum: expected %s, was %s", wantChecksum, actualChecksum) } } return nil }
go
{ "resource": "" }
q12207
ListProjectPaths
train
func ListProjectPaths(projectDir string, include, exclude matcher.Matcher) ([]string, error) { wd, err := os.Getwd() if err != nil { return nil, errors.Wrapf(err, "failed to determine working directory") } if !filepath.IsAbs(projectDir) { projectDir = path.Join(wd, projectDir) } relPathPrefix, err := filepath.Rel(wd, projectDir) if err != nil { return nil, errors.Wrapf(err, "failed to determine relative path") } files, err := matcher.ListFiles(projectDir, include, exclude) if err != nil { return nil, err } if relPathPrefix != "" { for i, file := range files { files[i] = path.Join(relPathPrefix, file) } } return files, nil }
go
{ "resource": "" }
q12208
AllPaths
train
func AllPaths(dir string) (map[string]bool, error) { m := make(map[string]bool) return m, allPaths(m, nil, dir) }
go
{ "resource": "" }
q12209
pluginTGZContentHash
train
func pluginTGZContentHash(tgzContentReader io.Reader) (string, error) { hasher := sha256.New() if err := CopySingleFileTGZContent(hasher, tgzContentReader); err != nil { return "", err } return hex.EncodeToString(hasher.Sum(nil)), nil }
go
{ "resource": "" }
q12210
ProjectDir
train
func (g GlobalConfig) ProjectDir() (string, error) { if g.Wrapper == "" { return "", errors.Errorf("wrapper must be specified to determine project directory") } return path.Dir(g.Wrapper), nil }
go
{ "resource": "" }
q12211
getIndex
train
func (h *HyperLogLogPlus) getIndex(k uint32) uint32 { if k&1 == 1 { return eb32(k, 32, 32-h.p) } return eb32(k, pPrime+1, pPrime-h.p+1) }
go
{ "resource": "" }
q12212
mergeSparse
train
func (h *HyperLogLogPlus) mergeSparse() { keys := make(sortableSlice, 0, len(h.tmpSet)) for k := range h.tmpSet { keys = append(keys, k) } sort.Sort(keys) newList := newCompressedList(int(h.m)) for iter, i := h.sparseList.Iter(), 0; iter.HasNext() || i < len(keys); { if !iter.HasNext() { newList.Append(keys[i]) i++ continue } if i >= len(keys) { newList.Append(iter.Next()) continue } x1, x2 := iter.Peek(), keys[i] if x1 == x2 { newList.Append(iter.Next()) i++ } else if x1 > x2 { newList.Append(x2) i++ } else { newList.Append(iter.Next()) } } h.sparseList = newList h.tmpSet = set{} if uint32(h.sparseList.Len()) > h.m { h.toNormal() } }
go
{ "resource": "" }
q12213
NewPlus
train
func NewPlus(precision uint8) (*HyperLogLogPlus, error) { if precision > 18 || precision < 4 { return nil, errors.New("precision must be between 4 and 18") } h := &HyperLogLogPlus{} h.p = precision h.m = 1 << precision h.sparse = true h.tmpSet = set{} h.sparseList = newCompressedList(int(h.m)) return h, nil }
go
{ "resource": "" }
q12214
Clear
train
func (h *HyperLogLogPlus) Clear() { h.sparse = true h.tmpSet = set{} h.sparseList = newCompressedList(int(h.m)) h.reg = nil }
go
{ "resource": "" }
q12215
toNormal
train
func (h *HyperLogLogPlus) toNormal() { h.reg = make([]uint8, h.m) for iter := h.sparseList.Iter(); iter.HasNext(); { i, r := h.decodeHash(iter.Next()) if h.reg[i] < r { h.reg[i] = r } } h.sparse = false h.tmpSet = nil h.sparseList = nil }
go
{ "resource": "" }
q12216
Add
train
func (h *HyperLogLogPlus) Add(item Hash64) { x := item.Sum64() if h.sparse { h.tmpSet.Add(h.encodeHash(x)) h.maybeMerge() } else { i := eb64(x, 64, 64-h.p) // {x63,...,x64-p} w := x<<h.p | 1<<(h.p-1) // {x63-p,...,x0} zeroBits := clz64(w) + 1 if zeroBits > h.reg[i] { h.reg[i] = zeroBits } } }
go
{ "resource": "" }
q12217
Merge
train
func (h *HyperLogLogPlus) Merge(other *HyperLogLogPlus) error { if h.p != other.p { return errors.New("precisions must be equal") } if h.sparse && other.sparse { for k := range other.tmpSet { h.tmpSet.Add(k) } for iter := other.sparseList.Iter(); iter.HasNext(); { h.tmpSet.Add(iter.Next()) } h.maybeMerge() return nil } if h.sparse { h.mergeSparseAndToNormal() } if other.sparse { for k := range other.tmpSet { i, r := other.decodeHash(k) if r > h.reg[i] { h.reg[i] = r } } for iter := other.sparseList.Iter(); iter.HasNext(); { i, r := other.decodeHash(iter.Next()) if r > h.reg[i] { h.reg[i] = r } } } else { for i, v := range other.reg { if v > h.reg[i] { h.reg[i] = v } } } return nil }
go
{ "resource": "" }
q12218
maybeMerge
train
func (h *HyperLogLogPlus) maybeMerge() { if uint32(len(h.tmpSet))*100 > h.m { h.mergeSparse() } }
go
{ "resource": "" }
q12219
estimateBias
train
func (h *HyperLogLogPlus) estimateBias(est float64) float64 { estTable, biasTable := rawEstimateData[h.p-4], biasData[h.p-4] if estTable[0] > est { return biasTable[0] } lastEstimate := estTable[len(estTable)-1] if lastEstimate < est { return biasTable[len(biasTable)-1] } var i int for i = 0; i < len(estTable) && estTable[i] < est; i++ { } e1, b1 := estTable[i-1], biasTable[i-1] e2, b2 := estTable[i], biasTable[i] c := (est - e1) / (e2 - e1) return b1*(1-c) + b2*c }
go
{ "resource": "" }
q12220
GobEncode
train
func (h *HyperLogLogPlus) GobEncode() ([]byte, error) { buf := bytes.Buffer{} enc := gob.NewEncoder(&buf) if err := enc.Encode(h.reg); err != nil { return nil, err } if err := enc.Encode(h.m); err != nil { return nil, err } if err := enc.Encode(h.p); err != nil { return nil, err } if err := enc.Encode(h.sparse); err != nil { return nil, err } if h.sparse { if err := enc.Encode(h.tmpSet); err != nil { return nil, err } if err := enc.Encode(h.sparseList.Count); err != nil { return nil, err } if err := enc.Encode(h.sparseList.b); err != nil { return nil, err } if err := enc.Encode(h.sparseList.last); err != nil { return nil, err } } return buf.Bytes(), nil }
go
{ "resource": "" }
q12221
GobDecode
train
func (h *HyperLogLogPlus) GobDecode(b []byte) error { dec := gob.NewDecoder(bytes.NewBuffer(b)) if err := dec.Decode(&h.reg); err != nil { return err } if err := dec.Decode(&h.m); err != nil { return err } if err := dec.Decode(&h.p); err != nil { return err } if err := dec.Decode(&h.sparse); err != nil { return err } if h.sparse { if err := dec.Decode(&h.tmpSet); err != nil { return err } h.sparseList = newCompressedList(int(h.m)) if err := dec.Decode(&h.sparseList.Count); err != nil { return err } if err := dec.Decode(&h.sparseList.b); err != nil { return err } if err := dec.Decode(&h.sparseList.last); err != nil { return err } } return nil }
go
{ "resource": "" }
q12222
eb32
train
// eb32 extracts the bit field [lo, hi) from a 32-bit value and returns it
// right-aligned (shifted down by lo).
func eb32(bits uint32, hi uint8, lo uint8) uint32 {
	// shift first, then mask off everything above the field width
	return (bits >> lo) & (uint32(1)<<(hi-lo) - 1)
}
go
{ "resource": "" }
q12223
eb64
train
func eb64(bits uint64, hi uint8, lo uint8) uint64 { m := uint64(((1 << (hi - lo)) - 1) << lo) return (bits & m) >> lo }
go
{ "resource": "" }
q12224
New
train
func New(precision uint8) (*HyperLogLog, error) { if precision > 16 || precision < 4 { return nil, errors.New("precision must be between 4 and 16") } h := &HyperLogLog{} h.p = precision h.m = 1 << precision h.reg = make([]uint8, h.m) return h, nil }
go
{ "resource": "" }
q12225
Add
train
func (h *HyperLogLog) Add(item Hash32) { x := item.Sum32() i := eb32(x, 32, 32-h.p) // {x31,...,x32-p} w := x<<h.p | 1<<(h.p-1) // {x32-p,...,x0} zeroBits := clz32(w) + 1 if zeroBits > h.reg[i] { h.reg[i] = zeroBits } }
go
{ "resource": "" }
q12226
Merge
train
func (h *HyperLogLog) Merge(other *HyperLogLog) error { if h.p != other.p { return errors.New("precisions must be equal") } for i, v := range other.reg { if v > h.reg[i] { h.reg[i] = v } } return nil }
go
{ "resource": "" }
q12227
GobEncode
train
func (h *HyperLogLog) GobEncode() ([]byte, error) { buf := bytes.Buffer{} enc := gob.NewEncoder(&buf) if err := enc.Encode(h.reg); err != nil { return nil, err } if err := enc.Encode(h.m); err != nil { return nil, err } if err := enc.Encode(h.p); err != nil { return nil, err } return buf.Bytes(), nil }
go
{ "resource": "" }
q12228
GobDecode
train
func (h *HyperLogLog) GobDecode(b []byte) error { dec := gob.NewDecoder(bytes.NewBuffer(b)) if err := dec.Decode(&h.reg); err != nil { return err } if err := dec.Decode(&h.m); err != nil { return err } if err := dec.Decode(&h.p); err != nil { return err } return nil }
go
{ "resource": "" }
q12229
String
train
func (i Hash) String() string { start := uint32(i >> 8) n := uint32(i & 0xff) if start+n > uint32(len(_Hash_text)) { return "" } return _Hash_text[start : start+n] }
go
{ "resource": "" }
q12230
ToHash
train
func ToHash(s []byte) Hash { if len(s) == 0 || len(s) > _Hash_maxLen { return 0 } h := uint32(_Hash_hash0) for i := 0; i < len(s); i++ { h ^= uint32(s[i]) h *= 16777619 } if i := _Hash_table[h&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) { t := _Hash_text[i>>8 : i>>8+i&0xff] for i := 0; i < len(s); i++ { if t[i] != s[i] { goto NEXT } } return i } NEXT: if i := _Hash_table[(h>>16)&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) { t := _Hash_text[i>>8 : i>>8+i&0xff] for i := 0; i < len(s); i++ { if t[i] != s[i] { return 0 } } return i } return 0 }
go
{ "resource": "" }
q12231
NewStreamLexerSize
train
func NewStreamLexerSize(r io.Reader, size int) *StreamLexer { // if reader has the bytes in memory already, use that instead if buffer, ok := r.(interface { Bytes() []byte }); ok { return &StreamLexer{ err: io.EOF, buf: buffer.Bytes(), } } return &StreamLexer{ r: r, buf: make([]byte, 0, size), } }
go
{ "resource": "" }
q12232
Err
train
func (z *StreamLexer) Err() error { if z.err == io.EOF && z.pos < len(z.buf) { return nil } return z.err }
go
{ "resource": "" }
q12233
Shift
train
func (z *StreamLexer) Shift() []byte { if z.pos > len(z.buf) { // make sure we peeked at least as much as we shift z.read(z.pos - 1) } b := z.buf[z.start:z.pos] z.start = z.pos return b }
go
{ "resource": "" }
q12234
ShiftLen
train
func (z *StreamLexer) ShiftLen() int { n := z.start - z.prevStart z.prevStart = z.start return n }
go
{ "resource": "" }
q12235
NewLexer
train
func NewLexer(r io.Reader) *Lexer { var b []byte if r != nil { if buffer, ok := r.(interface { Bytes() []byte }); ok { b = buffer.Bytes() } else { var err error b, err = ioutil.ReadAll(r) if err != nil { return &Lexer{ buf: []byte{0}, err: err, } } } } return NewLexerBytes(b) }
go
{ "resource": "" }
q12236
NewLexerBytes
train
func NewLexerBytes(b []byte) *Lexer { z := &Lexer{ buf: b, } n := len(b) if n == 0 { z.buf = nullBuffer } else if b[n-1] != 0 { // Append NULL to buffer, but try to avoid reallocation if cap(b) > n { // Overwrite next byte but restore when done b = b[:n+1] c := b[n] b[n] = 0 z.buf = b z.restore = func() { b[n] = c } } else { z.buf = append(b, 0) } } return z }
go
{ "resource": "" }
q12237
Restore
train
func (z *Lexer) Restore() { if z.restore != nil { z.restore() z.restore = nil } }
go
{ "resource": "" }
q12238
Peek
train
func (z *Lexer) Peek(pos int) byte { pos += z.pos return z.buf[pos] }
go
{ "resource": "" }
q12239
Shift
train
func (z *Lexer) Shift() []byte { b := z.buf[z.start:z.pos] z.start = z.pos return b }
go
{ "resource": "" }
q12240
Dimension
train
func Dimension(b []byte) (int, int) { num := Number(b) if num == 0 || num == len(b) { return num, 0 } else if b[num] == '%' { return num, 1 } else if b[num] >= 'a' && b[num] <= 'z' || b[num] >= 'A' && b[num] <= 'Z' { i := num + 1 for i < len(b) && (b[i] >= 'a' && b[i] <= 'z' || b[i] >= 'A' && b[i] <= 'Z') { i++ } return num, i - num } return num, 0 }
go
{ "resource": "" }
q12241
Mediatype
train
func Mediatype(b []byte) ([]byte, map[string]string) { i := 0 for i < len(b) && b[i] == ' ' { i++ } b = b[i:] n := len(b) mimetype := b var params map[string]string for i := 3; i < n; i++ { // mimetype is at least three characters long if b[i] == ';' || b[i] == ' ' { mimetype = b[:i] if b[i] == ' ' { i++ for i < n && b[i] == ' ' { i++ } if i < n && b[i] != ';' { break } } params = map[string]string{} s := string(b) PARAM: i++ for i < n && s[i] == ' ' { i++ } start := i for i < n && s[i] != '=' && s[i] != ';' && s[i] != ' ' { i++ } key := s[start:i] for i < n && s[i] == ' ' { i++ } if i < n && s[i] == '=' { i++ for i < n && s[i] == ' ' { i++ } start = i for i < n && s[i] != ';' && s[i] != ' ' { i++ } } else { start = i } params[key] = s[start:i] for i < n && s[i] == ' ' { i++ } if i < n && s[i] == ';' { goto PARAM } break } } return mimetype, params }
go
{ "resource": "" }
q12242
DataURI
train
func DataURI(dataURI []byte) ([]byte, []byte, error) { if len(dataURI) > 5 && bytes.Equal(dataURI[:5], []byte("data:")) { dataURI = dataURI[5:] inBase64 := false var mediatype []byte i := 0 for j := 0; j < len(dataURI); j++ { c := dataURI[j] if c == '=' || c == ';' || c == ',' { if c != '=' && bytes.Equal(TrimWhitespace(dataURI[i:j]), []byte("base64")) { if len(mediatype) > 0 { mediatype = mediatype[:len(mediatype)-1] } inBase64 = true i = j } else if c != ',' { mediatype = append(append(mediatype, TrimWhitespace(dataURI[i:j])...), c) i = j + 1 } else { mediatype = append(mediatype, TrimWhitespace(dataURI[i:j])...) } if c == ',' { if len(mediatype) == 0 || mediatype[0] == ';' { mediatype = []byte("text/plain") } data := dataURI[j+1:] if inBase64 { decoded := make([]byte, base64.StdEncoding.DecodedLen(len(data))) n, err := base64.StdEncoding.Decode(decoded, data) if err != nil { return nil, nil, err } data = decoded[:n] } else if unescaped, err := url.QueryUnescape(string(data)); err == nil { data = []byte(unescaped) } return mediatype, data, nil } } } } return nil, nil, ErrBadDataURI }
go
{ "resource": "" }
q12243
NewParser
train
func NewParser(r io.Reader, isInline bool) *Parser { l := NewLexer(r) p := &Parser{ l: l, state: make([]State, 0, 4), } if isInline { p.state = append(p.state, (*Parser).parseDeclarationList) } else { p.state = append(p.state, (*Parser).parseStylesheet) } return p }
go
{ "resource": "" }
q12244
Err
train
func (p *Parser) Err() error { if p.err != nil { return p.err } return p.l.Err() }
go
{ "resource": "" }
q12245
Write
train
func (w *Writer) Write(b []byte) (int, error) { n := len(b) end := len(w.buf) if end+n > cap(w.buf) { buf := make([]byte, end, 2*cap(w.buf)+n) copy(buf, w.buf) w.buf = buf } w.buf = w.buf[:end+n] return copy(w.buf[end:], b), nil }
go
{ "resource": "" }
q12246
IsIdent
train
func IsIdent(b []byte) bool { l := NewLexer(buffer.NewReader(b)) l.consumeIdentToken() l.r.Restore() return l.r.Pos() == len(b) }
go
{ "resource": "" }
q12247
IsURLUnquoted
train
func IsURLUnquoted(b []byte) bool { l := NewLexer(buffer.NewReader(b)) l.consumeUnquotedURL() l.r.Restore() return l.r.Pos() == len(b) }
go
{ "resource": "" }
q12248
EscapeAttrVal
train
func EscapeAttrVal(buf *[]byte, b []byte) []byte { singles := 0 doubles := 0 for i, c := range b { if c == '&' { if quote, n := parse.QuoteEntity(b[i:]); n > 0 { if quote == '"' { doubles++ } else { singles++ } } } else if c == '"' { doubles++ } else if c == '\'' { singles++ } } n := len(b) + 2 var quote byte var escapedQuote []byte if doubles > singles { n += singles * 4 quote = '\'' escapedQuote = singleQuoteEntityBytes } else { n += doubles * 4 quote = '"' escapedQuote = doubleQuoteEntityBytes } if n > cap(*buf) { *buf = make([]byte, 0, n) // maximum size, not actual size } t := (*buf)[:n] // maximum size, not actual size t[0] = quote j := 1 start := 0 for i, c := range b { if c == '&' { if entityQuote, n := parse.QuoteEntity(b[i:]); n > 0 { j += copy(t[j:], b[start:i]) if entityQuote != quote { t[j] = entityQuote j++ } else { j += copy(t[j:], escapedQuote) } start = i + n } } else if c == quote { j += copy(t[j:], b[start:i]) j += copy(t[j:], escapedQuote) start = i + 1 } } j += copy(t[j:], b[start:]) t[j] = quote return t[:j+1] }
go
{ "resource": "" }
q12249
EscapeCDATAVal
train
func EscapeCDATAVal(buf *[]byte, b []byte) ([]byte, bool) { n := 0 for _, c := range b { if c == '<' || c == '&' { if c == '<' { n += 3 // &lt; } else { n += 4 // &amp; } if n > len("<![CDATA[]]>") { return b, false } } } if len(b)+n > cap(*buf) { *buf = make([]byte, 0, len(b)+n) } t := (*buf)[:len(b)+n] j := 0 start := 0 for i, c := range b { if c == '<' { j += copy(t[j:], b[start:i]) j += copy(t[j:], ltEntityBytes) start = i + 1 } else if c == '&' { j += copy(t[j:], b[start:i]) j += copy(t[j:], ampEntityBytes) start = i + 1 } } j += copy(t[j:], b[start:]) return t[:j], true }
go
{ "resource": "" }
q12250
Copy
train
// Copy returns an independent copy of src; later writes to src do not affect
// the returned slice. The result is non-nil even for empty input.
func Copy(src []byte) (dst []byte) {
	return append(make([]byte, 0, len(src)), src...)
}
go
{ "resource": "" }
q12251
ToLower
train
// ToLower lowercases ASCII letters 'A'-'Z' in src IN PLACE and returns src
// for call-chaining convenience. Non-ASCII bytes are left untouched.
func ToLower(src []byte) []byte {
	for i := 0; i < len(src); i++ {
		if 'A' <= src[i] && src[i] <= 'Z' {
			src[i] += 'a' - 'A'
		}
	}
	return src
}
go
{ "resource": "" }
q12252
IsAllWhitespace
train
func IsAllWhitespace(b []byte) bool { for _, c := range b { if !IsWhitespace(c) { return false } } return true }
go
{ "resource": "" }
q12253
TrimWhitespace
train
func TrimWhitespace(b []byte) []byte { n := len(b) start := n for i := 0; i < n; i++ { if !IsWhitespace(b[i]) { start = i break } } end := n for i := n - 1; i >= start; i-- { if !IsWhitespace(b[i]) { end = i + 1 break } } return b[start:end] }
go
{ "resource": "" }
q12254
AppendPrice
train
func AppendPrice(b []byte, price int64, dec bool, milSeparator byte, decSeparator byte) []byte { if price < 0 { if price == -9223372036854775808 { x := []byte("92 233 720 368 547 758 08") x[2] = milSeparator x[6] = milSeparator x[10] = milSeparator x[14] = milSeparator x[18] = milSeparator x[22] = decSeparator return append(b, x...) } price = -price } // rounding if !dec { firstDec := (price / 10) % 10 if firstDec >= 5 { price += 100 } } // calculate size n := LenInt(price) - 2 if n > 0 { n += (n - 1) / 3 // mil separator } else { n = 1 } if dec { n += 2 + 1 // decimals + dec separator } // resize byte slice i := len(b) if i+n > cap(b) { b = append(b, make([]byte, n)...) } else { b = b[:i+n] } // print fractional-part i += n - 1 if dec { for j := 0; j < 2; j++ { c := byte(price%10) + '0' price /= 10 b[i] = c i-- } b[i] = decSeparator i-- } else { price /= 100 } if price == 0 { b[i] = '0' return b } // print integer-part j := 0 for price > 0 { if j == 3 { b[i] = milSeparator i-- j = 0 } c := byte(price%10) + '0' price /= 10 b[i] = c i-- j++ } return b }
go
{ "resource": "" }
q12255
Err
train
func (l *Lexer) Err() error { if l.err != nil { return l.err } return l.r.Err() }
go
{ "resource": "" }
q12256
shiftXml
train
func (l *Lexer) shiftXml(rawTag Hash) []byte { inQuote := false for { c := l.r.Peek(0) if c == '"' { inQuote = !inQuote l.r.Move(1) } else if c == '<' && !inQuote && l.r.Peek(1) == '/' { mark := l.r.Pos() l.r.Move(2) for { if c = l.r.Peek(0); !('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') { break } l.r.Move(1) } if h := ToHash(parse.ToLower(parse.Copy(l.r.Lexeme()[mark+2:]))); h == rawTag { // copy so that ToLower doesn't change the case of the underlying slice break } } else if c == 0 { if l.r.Err() == nil { l.err = parse.NewErrorLexer("unexpected null character", l.r) } return l.r.Shift() } else { l.r.Move(1) } } for { c := l.r.Peek(0) if c == '>' { l.r.Move(1) break } else if c == 0 { if l.r.Err() == nil { l.err = parse.NewErrorLexer("unexpected null character", l.r) } return l.r.Shift() } l.r.Move(1) } return l.r.Shift() }
go
{ "resource": "" }
q12257
Position
train
func Position(r io.Reader, offset int) (line, col int, context string) { l := buffer.NewLexer(r) line = 1 for { c := l.Peek(0) if c == 0 && l.Err() != nil || offset == l.Pos() { col = l.Pos() + 1 context = positionContext(l, line, col) return } nNewline := 0 if c == '\n' { nNewline = 1 } else if c == '\r' { if l.Peek(1) == '\n' { nNewline = 2 } else { nNewline = 1 } } else if c >= 0xC0 { if r, n := l.PeekRune(0); r == '\u2028' || r == '\u2029' { nNewline = n } } else { l.Move(1) } if nNewline > 0 { if offset < l.Pos()+nNewline { // move onto offset position, let next iteration handle it l.Move(offset - l.Pos()) continue } l.Move(nNewline) line++ offset -= l.Pos() l.Skip() } } }
go
{ "resource": "" }
q12258
Read
train
func (r *Reader) Read(b []byte) (n int, err error) { if len(b) == 0 { return 0, nil } if r.pos >= len(r.buf) { return 0, io.EOF } n = copy(b, r.buf[r.pos:]) r.pos += n return }
go
{ "resource": "" }
q12259
ParseInt
train
// ParseInt parses a signed base-10 integer at the start of b. It returns the
// parsed value and the number of bytes consumed; (0, 0) signals nothing was
// parsed or the value overflows int64.
func ParseInt(b []byte) (int64, int) {
	i := 0
	neg := false
	if len(b) > 0 && (b[0] == '+' || b[0] == '-') {
		neg = b[0] == '-'
		i++
	}
	n := uint64(0)
	for i < len(b) {
		c := b[i]
		if c < '0' || c > '9' {
			break
		}
		digit := uint64(c - '0')
		// BUG FIX: the old guard (n > MaxUint64/10, checked on the NEXT
		// iteration) let n*10+digit wrap silently when n == MaxUint64/10
		// and digit > MaxUint64%10, yielding a wrong small value.
		if n > math.MaxUint64/10 || (n == math.MaxUint64/10 && digit > math.MaxUint64%10) {
			return 0, 0
		}
		n = n*10 + digit
		i++
	}
	// reject magnitudes that do not fit int64 (MaxInt64+1 is valid only negated)
	if !neg && n > uint64(math.MaxInt64) || n > uint64(math.MaxInt64)+1 {
		return 0, 0
	} else if neg {
		// for n == MaxInt64+1 this wraps exactly to MinInt64, as intended
		return -int64(n), i
	}
	return int64(n), i
}
go
{ "resource": "" }
q12260
ParseFloat
train
func ParseFloat(b []byte) (float64, int) { i := 0 neg := false if i < len(b) && (b[i] == '+' || b[i] == '-') { neg = b[i] == '-' i++ } dot := -1 trunk := -1 n := uint64(0) for ; i < len(b); i++ { c := b[i] if c >= '0' && c <= '9' { if trunk == -1 { if n > math.MaxUint64/10 { trunk = i } else { n *= 10 n += uint64(c - '0') } } } else if dot == -1 && c == '.' { dot = i } else { break } } f := float64(n) if neg { f = -f } mantExp := int64(0) if dot != -1 { if trunk == -1 { trunk = i } mantExp = int64(trunk - dot - 1) } else if trunk != -1 { mantExp = int64(trunk - i) } expExp := int64(0) if i < len(b) && (b[i] == 'e' || b[i] == 'E') { i++ if e, expLen := ParseInt(b[i:]); expLen > 0 { expExp = e i += expLen } } exp := expExp - mantExp // copied from strconv/atof.go if exp == 0 { return f, i } else if exp > 0 && exp <= 15+22 { // int * 10^k // If exponent is big but number of digits is not, // can move a few zeros into the integer part. if exp > 22 { f *= float64pow10[exp-22] exp = 22 } if f <= 1e15 && f >= -1e15 { return f * float64pow10[exp], i } } else if exp < 0 && exp >= -22 { // int / 10^k return f / float64pow10[-exp], i } f *= math.Pow10(int(-mantExp)) return f * math.Pow10(int(expExp)), i }
go
{ "resource": "" }
q12261
NewError
train
// NewError constructs a parse Error for the given message at the given byte
// offset within r. Line/column are computed lazily by Position.
func NewError(msg string, r io.Reader, offset int) *Error {
	e := &Error{}
	e.Message = msg
	e.r = r
	e.Offset = offset
	return e
}
go
{ "resource": "" }
q12262
NewErrorLexer
train
func NewErrorLexer(msg string, l *buffer.Lexer) *Error { r := buffer.NewReader(l.Bytes()) offset := l.Offset() return NewError(msg, r, offset) }
go
{ "resource": "" }
q12263
Position
train
func (e *Error) Position() (int, int, string) { if e.line == 0 { e.line, e.column, e.context = Position(e.r, e.Offset) } return e.line, e.column, e.context }
go
{ "resource": "" }
q12264
Error
train
func (e *Error) Error() string { line, column, context := e.Position() return fmt.Sprintf("parse error:%d:%d: %s\n%s", line, column, e.Message, context) }
go
{ "resource": "" }
q12265
consumeBracket
train
func (l *Lexer) consumeBracket() TokenType { switch l.r.Peek(0) { case '(': l.r.Move(1) return LeftParenthesisToken case ')': l.r.Move(1) return RightParenthesisToken case '[': l.r.Move(1) return LeftBracketToken case ']': l.r.Move(1) return RightBracketToken case '{': l.r.Move(1) return LeftBraceToken case '}': l.r.Move(1) return RightBraceToken } return ErrorToken }
go
{ "resource": "" }
q12266
consumeNumeric
train
func (l *Lexer) consumeNumeric() TokenType { if l.consumeNumberToken() { if l.consumeByte('%') { return PercentageToken } else if l.consumeIdentToken() { return DimensionToken } return NumberToken } return ErrorToken }
go
{ "resource": "" }
q12267
consumeString
train
func (l *Lexer) consumeString() TokenType { // assume to be on " or ' delim := l.r.Peek(0) l.r.Move(1) for { c := l.r.Peek(0) if c == 0 && l.r.Err() != nil { break } else if c == '\n' || c == '\r' || c == '\f' { l.r.Move(1) return BadStringToken } else if c == delim { l.r.Move(1) break } else if c == '\\' { if !l.consumeEscape() { l.r.Move(1) l.consumeNewline() } } else { l.r.Move(1) } } return StringToken }
go
{ "resource": "" }
q12268
consumeRemnantsBadURL
train
func (l *Lexer) consumeRemnantsBadURL() { for { if l.consumeByte(')') || l.r.Err() != nil { break } else if !l.consumeEscape() { l.r.Move(1) } } }
go
{ "resource": "" }
q12269
consumeIdentlike
train
func (l *Lexer) consumeIdentlike() TokenType { if l.consumeIdentToken() { if l.r.Peek(0) != '(' { return IdentToken } else if !parse.EqualFold(bytes.Replace(l.r.Lexeme(), []byte{'\\'}, nil, -1), []byte{'u', 'r', 'l'}) { l.r.Move(1) return FunctionToken } l.r.Move(1) // consume url for l.consumeWhitespace() { } if c := l.r.Peek(0); c == '"' || c == '\'' { if l.consumeString() == BadStringToken { l.consumeRemnantsBadURL() return BadURLToken } } else if !l.consumeUnquotedURL() && !l.consumeWhitespace() { // if unquoted URL fails due to encountering whitespace, continue l.consumeRemnantsBadURL() return BadURLToken } for l.consumeWhitespace() { } if !l.consumeByte(')') && l.r.Err() != io.EOF { l.consumeRemnantsBadURL() return BadURLToken } return URLToken } return ErrorToken }
go
{ "resource": "" }
q12270
String
train
func (state State) String() string { switch state { case ValueState: return "Value" case ObjectKeyState: return "ObjectKey" case ObjectValueState: return "ObjectValue" case ArrayState: return "Array" } return "Invalid(" + strconv.Itoa(int(state)) + ")" }
go
{ "resource": "" }
q12271
NewParser
train
func NewParser(r io.Reader) *Parser { return &Parser{ r: buffer.NewLexer(r), state: []State{ValueState}, } }
go
{ "resource": "" }
q12272
Err
train
func (p *Parser) Err() error { if p.err != nil { return p.err } return p.r.Err() }
go
{ "resource": "" }
q12273
EscapeAttrVal
train
func EscapeAttrVal(buf *[]byte, orig, b []byte, isXML bool) []byte { singles := 0 doubles := 0 unquoted := true entities := false for i, c := range b { if charTable[c] { if c == '&' { entities = true if quote, n := parse.QuoteEntity(b[i:]); n > 0 { if quote == '"' { unquoted = false doubles++ } else { unquoted = false singles++ } } } else { unquoted = false if c == '"' { doubles++ } else if c == '\'' { singles++ } } } } if unquoted && !isXML { return b } else if !entities && len(orig) == len(b)+2 && (singles == 0 && orig[0] == '\'' || doubles == 0 && orig[0] == '"') { return orig } n := len(b) + 2 var quote byte var escapedQuote []byte if singles >= doubles || isXML { n += doubles * 4 quote = '"' escapedQuote = doubleQuoteEntityBytes } else { n += singles * 4 quote = '\'' escapedQuote = singleQuoteEntityBytes } if n > cap(*buf) { *buf = make([]byte, 0, n) // maximum size, not actual size } t := (*buf)[:n] // maximum size, not actual size t[0] = quote j := 1 start := 0 for i, c := range b { if c == '&' { if entityQuote, n := parse.QuoteEntity(b[i:]); n > 0 { j += copy(t[j:], b[start:i]) if entityQuote != quote { t[j] = entityQuote j++ } else { j += copy(t[j:], escapedQuote) } start = i + n } } else if c == quote { j += copy(t[j:], b[start:i]) j += copy(t[j:], escapedQuote) start = i + 1 } } j += copy(t[j:], b[start:]) t[j] = quote return t[:j+1] }
go
{ "resource": "" }
q12274
NewKeymap
train
func NewKeymap(config map[string]string, actions map[string][]string) Keymap { return Keymap{ Config: config, Action: actions, seq: keyseq.New(), } }
go
{ "resource": "" }
q12275
LookupAction
train
// LookupAction resolves a termbox event to the Action bound to it via
// the keyseq matcher. Fully matched sequences return their action;
// a valid prefix of a longer sequence returns a no-op that remembers
// the pending sequence; anything else falls back to inserting the
// typed character.
func (km Keymap) LookupAction(ev termbox.Event) Action {
	// Translate termbox's Alt flag into keyseq's modifier enum.
	modifier := keyseq.ModNone
	if (ev.Mod & termbox.ModAlt) != 0 {
		modifier = keyseq.ModAlt
	}

	key := keyseq.Key{
		Modifier: modifier,
		Key:      ev.Key,
		Ch:       ev.Ch,
	}

	action, err := km.seq.AcceptKey(key)

	switch err {
	case nil:
		// Found an action!
		if pdebug.Enabled {
			pdebug.Printf("Keymap.Handler: Fetched action")
		}
		return wrapClearSequence(action.(Action))
	case keyseq.ErrInSequence:
		// Key is a prefix of a longer bound sequence: wait for more input.
		if pdebug.Enabled {
			pdebug.Printf("Keymap.Handler: Waiting for more commands...")
		}
		return wrapRememberSequence(ActionFunc(doNothing))
	default:
		// Not bound to anything: treat the key as plain character input.
		if pdebug.Enabled {
			pdebug.Printf("Keymap.Handler: Defaulting to doAcceptChar")
		}
		return wrapClearSequence(ActionFunc(doAcceptChar))
	}
}
go
{ "resource": "" }
q12276
ApplyKeybinding
train
func (km *Keymap) ApplyKeybinding() error { k := km.seq k.Clear() // Copy the map kb := map[string]Action{} for s, a := range defaultKeyBinding { kb[s] = a } // munge the map using config for s, as := range km.Config { if as == "-" { delete(kb, s) continue } v, err := km.resolveActionName(as, 0) if err != nil { return errors.Wrapf(err, "failed to resolve action name %s", as) } kb[s] = v } // now compile using kb // there's no need to do this, but we sort keys here just to make // debugging easier keys := make([]string, 0, len(kb)) for s := range kb { keys = append(keys, s) } sort.Strings(keys) for _, s := range keys { a := kb[s] list, err := keyseq.ToKeyList(s) if err != nil { return errors.Wrapf(err, "urnknown key %s: %s", s, err) } k.Add(list, a) } return errors.Wrap(k.Compile(), "failed to compile key binding patterns") }
go
{ "resource": "" }
q12277
LineAt
train
func (flb FilteredBuffer) LineAt(i int) (line.Line, error) { if i >= len(flb.selection) { return nil, errors.Errorf("specified index %d is out of range", len(flb.selection)) } return flb.src.LineAt(flb.selection[i]) }
go
{ "resource": "" }
q12278
EventToString
train
func EventToString(ev termbox.Event) (string, error) { s := "" if ev.Key == 0 { s = string([]rune{ev.Ch}) } else { var ok bool s, ok = keyToString[ev.Key] if !ok { return "", errors.Errorf("no such key %#v", ev) } // Special case for ArrowUp/Down/Left/Right switch s { case "ArrowUp": s = "^" case "ArrowDown": s = "v" case "ArrowLeft": s = "<" case "ArrowRight": s = ">" } } if ev.Mod&termbox.ModAlt == 1 { return "M-" + s, nil } return s, nil }
go
{ "resource": "" }
q12279
NewExternalCmd
train
func NewExternalCmd(name string, cmd string, args []string, threshold int, idgen line.IDGenerator, enableSep bool) *ExternalCmd { if len(args) == 0 { args = []string{"$QUERY"} } if threshold <= 0 { threshold = DefaultCustomFilterBufferThreshold } return &ExternalCmd{ args: args, cmd: cmd, enableSep: enableSep, idgen: idgen, name: name, outCh: pipeline.ChanOutput(make(chan interface{})), thresholdBufsiz: threshold, } }
go
{ "resource": "" }
q12280
New
train
// New creates a Hub whose payload channels are each buffered to
// bufsiz. The hub starts out asynchronous (isSync is false).
func New(bufsiz int) *Hub {
	h := &Hub{isSync: false}
	h.queryCh = make(chan Payload, bufsiz)
	h.drawCh = make(chan Payload, bufsiz)
	h.statusMsgCh = make(chan Payload, bufsiz)
	h.pagingCh = make(chan Payload, bufsiz)
	return h
}
go
{ "resource": "" }
q12281
send
train
// send delivers r over ch. When needReply is true it attaches a done
// channel to the payload and, via the deferred receive, blocks until
// the consumer closes it — making the call synchronous.
func send(ch chan Payload, r *payload, needReply bool) {
	if needReply {
		r.done = make(chan struct{})
		// Wait (after the send below) for the receiver to close r.done.
		defer func() { <-r.done }()
	}

	ch <- r
}
go
{ "resource": "" }
q12282
SendQuery
train
// SendQuery sends the query string q over the hub's query channel,
// honoring the hub's synchronous mode.
func (h *Hub) SendQuery(q string) {
	payload := NewPayload(q)
	send(h.QueryCh(), payload, h.isSync)
}
go
{ "resource": "" }
q12283
SendDraw
train
func (h *Hub) SendDraw(options interface{}) { pdebug.Printf("START Hub.SendDraw %v", options) defer pdebug.Printf("END Hub.SendDraw %v", options) send(h.DrawCh(), NewPayload(options), h.isSync) }
go
{ "resource": "" }
q12284
SendStatusMsgAndClear
train
func (h *Hub) SendStatusMsgAndClear(q string, clearDelay time.Duration) { msg := newStatusMsgReq(q, clearDelay) send(h.StatusMsgCh(), NewPayload(msg), h.isSync) }
go
{ "resource": "" }
q12285
SendPaging
train
// SendPaging sends a paging request x over the hub's paging channel.
func (h *Hub) SendPaging(x interface{}) {
	payload := NewPayload(x)
	send(h.PagingCh(), payload, h.isSync)
}
go
{ "resource": "" }
q12286
Flush
train
func (t *Termbox) Flush() error { t.mutex.Lock() defer t.mutex.Unlock() return errors.Wrap(termbox.Flush(), "failed to flush termbox") }
go
{ "resource": "" }
q12287
PollEvent
train
// PollEvent returns a channel delivering termbox events. Polling runs
// in its own goroutine so the caller's loop can select {} without
// being blocked by termbox; a second goroutine watches t.suspendCh
// and closes the terminal on suspend. An interrupt event parks the
// poller until a resume request arrives on t.resumeCh, at which point
// the terminal is re-initialized. The event channel is closed when
// the polling goroutine exits.
func (t *Termbox) PollEvent(ctx context.Context) chan termbox.Event {
	// XXX termbox.PollEvent() can get stuck on unexpected signal
	// handling cases. We still would like to wait until the user
	// (termbox) has some event for us to process, but we don't
	// want to allow termbox to control/block our input loop.
	//
	// Solution: put termbox polling in a separate goroutine,
	// and we just watch for a channel. The loop can now
	// safely be implemented in terms of select {} which is
	// safe from being stuck.
	evCh := make(chan termbox.Event)
	go func() {
		// keep listening to suspend requests here
		for {
			select {
			case <-ctx.Done():
				return
			case <-t.suspendCh:
				if pdebug.Enabled {
					pdebug.Printf("poll event suspended!")
				}
				t.Close()
			}
		}
	}()
	go func() {
		// PollEvent may panic after the terminal has been closed;
		// recover so the process keeps running.
		defer func() { recover() }()
		defer func() { close(evCh) }()
		for {
			ev := termbox.PollEvent()
			if ev.Type != termbox.EventInterrupt {
				evCh <- ev
				continue
			}
			// Interrupt event: wait for cancellation or a resume
			// request, then bring the terminal back up.
			select {
			case <-ctx.Done():
				return
			case replyCh := <-t.resumeCh:
				t.Init()
				close(replyCh)
			}
		}
	}()

	return evCh
}
go
{ "resource": "" }
q12288
SetCell
train
// SetCell writes a single cell to termbox's back buffer while holding
// the Termbox mutex, serializing with other terminal operations.
func (t *Termbox) SetCell(x, y int, ch rune, fg, bg termbox.Attribute) {
	t.mutex.Lock()
	defer t.mutex.Unlock()
	termbox.SetCell(x, y, ch, fg, bg)
}
go
{ "resource": "" }
q12289
Size
train
// Size returns the terminal's width and height while holding the
// Termbox mutex, serializing with other terminal operations.
func (t *Termbox) Size() (int, int) {
	t.mutex.Lock()
	defer t.mutex.Unlock()
	return termbox.Size()
}
go
{ "resource": "" }
q12290
Runes
train
// Runes returns a channel yielding each rune of the current query.
// The channel is buffered to the query length at call time, so the
// producing goroutine normally completes without waiting on the
// consumer; the channel is closed once all runes are sent.
func (q *Query) Runes() <-chan rune {
	q.mutex.Lock()
	defer q.mutex.Unlock()

	c := make(chan rune, len(q.query))
	go func() {
		defer close(c)
		// Re-acquire the lock: this goroutine only makes progress
		// after Runes has returned and released q.mutex.
		q.mutex.Lock()
		defer q.mutex.Unlock()
		for _, r := range q.query {
			c <- r
		}
	}()
	return c
}
go
{ "resource": "" }
q12291
Execute
train
// Execute invokes the wrapped function, letting a plain func satisfy
// the Action interface.
func (a ActionFunc) Execute(ctx context.Context, state *Peco, e termbox.Event) {
	a(ctx, state, e)
}
go
{ "resource": "" }
q12292
Register
train
func (a ActionFunc) Register(name string, defaultKeys ...termbox.Key) { nameToActions["peco."+name] = a for _, k := range defaultKeys { a.registerKeySequence(keyseq.KeyList{keyseq.NewKeyFromKey(k)}) } }
go
{ "resource": "" }
q12293
RegisterKeySequence
train
// RegisterKeySequence stores the action under the "peco."-prefixed
// name and binds it to the given key sequence.
func (a ActionFunc) RegisterKeySequence(name string, k keyseq.KeyList) {
	nameToActions["peco."+name] = a
	a.registerKeySequence(k)
}
go
{ "resource": "" }
q12294
doAcceptChar
train
// doAcceptChar inserts the typed character into the query at the
// caret position and re-runs the query, or delegates to single-key
// jump handling when that mode is active.
func doAcceptChar(ctx context.Context, state *Peco, e termbox.Event) {
	// termbox reports space as a Key, not a Ch; normalize it.
	if e.Key == termbox.KeySpace {
		e.Ch = ' '
	}

	ch := e.Ch
	if ch <= 0 {
		return
	}

	if state.SingleKeyJumpMode() {
		doSingleKeyJump(ctx, state, e)
		return
	}

	q := state.Query()
	c := state.Caret()

	q.InsertAt(ch, c.Pos())
	c.Move(1)

	h := state.Hub()
	h.SendDrawPrompt() // Update prompt before running query

	state.ExecQuery()
}
go
{ "resource": "" }
q12295
IsEndMark
train
func IsEndMark(err error) bool { if em, ok := errors.Cause(err).(EndMarker); ok { return em.EndMark() } return false }
go
{ "resource": "" }
q12296
Send
train
func (oc ChanOutput) Send(v interface{}) (err error) { if oc == nil { return errors.New("nil channel") } // We allow ourselves a timeout of 1 second. t := time.NewTimer(time.Second) defer t.Stop() select { case oc <- v: case <-t.C: return errors.New("failed to send (not listening)") } return nil }
go
{ "resource": "" }
q12297
SendEndMark
train
// SendEndMark sends an EndMark annotated with s, wrapping any send
// failure with context.
func (oc ChanOutput) SendEndMark(s string) error {
	err := oc.Send(errors.Wrap(EndMark{}, s))
	return errors.Wrap(err, "failed to send end mark")
}
go
{ "resource": "" }
q12298
SetSource
train
func (p *Pipeline) SetSource(s Source) { p.mutex.Lock() defer p.mutex.Unlock() p.src = s }
go
{ "resource": "" }
q12299
Add
train
func (p *Pipeline) Add(n Acceptor) { p.mutex.Lock() defer p.mutex.Unlock() p.nodes = append(p.nodes, n) }
go
{ "resource": "" }