_id
stringlengths
2
7
title
stringlengths
1
118
partition
stringclasses
3 values
text
stringlengths
52
85.5k
language
stringclasses
1 value
meta_information
dict
q180600
printPlainResults
test
// printPlainResults writes every result to stdout in a plain
// "key = value" layout. Source, String, and Error are printed first;
// when a result carries an error the remaining fields are skipped.
// All other exported fields are then emitted generically via reflection.
// Always returns nil; the error return mirrors the sibling printers.
func printPlainResults(results Results) error {
	for _, res := range results {
		// Explicitly start with the string and error output
		fmt.Printf("Source = %s\n", res.origString)
		fmt.Printf(" String = %s\n", res.String)
		if res.Error != "" {
			fmt.Printf(" Error = %s\n", res.Error)
			continue
		}
		// Dynamically loop over the rest of the fields
		// (res is a pointer; dereference to reflect on the struct)
		typ := reflect.TypeOf(*res)
		val := reflect.ValueOf(*res)
		for i := 0; i < typ.NumField(); i++ {
			field := typ.Field(i)
			if field.Name == "Error" || field.Name == "String" {
				continue
			}
			if field.PkgPath != "" {
				// ignore unexported fields (non-empty PkgPath)
				continue
			}
			fmt.Printf(" %s = %v\n", field.Name, val.Field(i).Interface())
		}
		fmt.Print("\n")
	}
	return nil
}
go
{ "resource": "" }
q180601
printJsonResults
test
func printJsonResults(results Results) error { data, err := json.MarshalIndent(results, "", " ") if err != nil { return fmt.Errorf("Failed to convert results to JSON: %s", err.Error()) } if _, err = io.Copy(os.Stdout, bytes.NewReader(data)); err != nil { return fmt.Errorf("Failed to write json output: %s", err.Error()) } fmt.Print("\n") return nil }
go
{ "resource": "" }
q180602
NewXor64Source
test
func NewXor64Source(seed int64) *Xor64Source { var s Xor64Source s.Seed(seed) return &s }
go
{ "resource": "" }
q180603
xor64
test
// xor64 advances an xorshift64 state by one step and returns the new
// state, using the 13/7/17 shift triple. The zero state is a fixed
// point and maps to zero.
func xor64(x uint64) uint64 {
	a := x ^ (x << 13)
	b := a ^ (a >> 7)
	return b ^ (b << 17)
}
go
{ "resource": "" }
q180604
next
test
func (s *Xor64Source) next() uint64 { x := xor64(uint64(*s)) *s = Xor64Source(x) return x }
go
{ "resource": "" }
q180605
Seed
test
func (s *Xor64Source) Seed(seed int64) { if seed == 0 { seed = seed0 } *s = Xor64Source(seed) }
go
{ "resource": "" }
q180606
NewFrameSet
test
// NewFrameSet parses a frame-range string (comma-separated components,
// optionally with step modifiers) into a FrameSet. An error is returned
// when any component fails to match the known patterns or contains an
// invalid value.
func NewFrameSet(frange string) (*FrameSet, error) {
	// Process the frame range and get a slice of match slices
	matches, err := frameRangeMatches(frange)
	if err != nil {
		return nil, err
	}

	frameSet := &FrameSet{frange, &ranges.InclusiveRanges{}}

	// Process each slice match and add it to the frame set
	for _, match := range matches {
		if err = frameSet.handleMatch(match); err != nil {
			return nil, err
		}
	}

	return frameSet, nil
}
go
{ "resource": "" }
q180607
handleMatch
test
// handleMatch adds one parsed frame-range component to the set. The
// match slice length selects the form: 1 capture = single frame,
// 2 captures = simple start-end range (direction inferred), 4 captures
// = start-end plus a modifier ("x" step, "y" inverse-of-step, ":" fill)
// and a non-zero chunk value. Any other length is an error.
func (s *FrameSet) handleMatch(match []string) error {
	switch len(match) {

	// Single frame match
	case 1:
		f, err := parseInt(match[0])
		if err != nil {
			return err
		}
		s.rangePtr.AppendUnique(f, f, 1)

	// Simple frame range
	case 2:
		start, err := parseInt(match[0])
		if err != nil {
			return err
		}
		end, err := parseInt(match[1])
		if err != nil {
			return err
		}

		// Handle descending frame ranges, like 10-1
		var inc int
		if start > end {
			inc = -1
		} else {
			inc = 1
		}

		s.rangePtr.AppendUnique(start, end, inc)

	// Complex frame range
	case 4:
		var (
			err               error
			mod               string
			start, end, chunk int
		)
		chunk, err = parseInt(match[3])
		if err != nil {
			return err
		}
		// A zero chunk would make every modifier meaningless
		if chunk == 0 {
			return fmt.Errorf("Failed to parse part of range %v. "+
				"Encountered invalid 0 value", match[3])
		}
		if start, err = parseInt(match[0]); err != nil {
			return err
		}
		if end, err = parseInt(match[1]); err != nil {
			return err
		}
		if mod = match[2]; !isModifier(mod) {
			return fmt.Errorf("%q is not one of the valid modifier 'xy:'", mod)
		}

		switch mod {
		case `x`:
			// Plain stepped range
			s.rangePtr.AppendUnique(start, end, chunk)

		case `y`:
			// Inverse of the stepped range: every value NOT hit by
			// the stepping.
			// TODO: Add proper support for adding inverse of range.
			// This approach will add excessive amounts of single
			// range elements. They could be compressed into chunks
			skip := start
			aRange := ranges.NewInclusiveRange(start, end, 1)
			var val int
			for it := aRange.IterValues(); !it.IsDone(); {
				val = it.Next()
				if val == skip {
					skip += chunk
					continue
				}
				s.rangePtr.AppendUnique(val, val, 1)
			}

		case `:`:
			// Fill: append the range once per chunk size, counting
			// the chunk down to 1
			for ; chunk > 0; chunk-- {
				s.rangePtr.AppendUnique(start, end, chunk)
			}
		}

	default:
		return fmt.Errorf("Unexpected match []string size: %v", match)
	}

	return nil
}
go
{ "resource": "" }
q180608
Index
test
// Index returns the position of frame within the set's ordered values,
// or -1 when the frame is not part of the set.
func (s *FrameSet) Index(frame int) int {
	return s.rangePtr.Index(frame)
}
go
{ "resource": "" }
q180609
Frame
test
// Frame returns the frame value stored at the given index in the set,
// or an error from the underlying range lookup (e.g. index out of
// bounds).
func (s *FrameSet) Frame(index int) (int, error) {
	return s.rangePtr.Value(index)
}
go
{ "resource": "" }
q180610
HasFrame
test
// HasFrame reports whether the set contains the given frame value.
func (s *FrameSet) HasFrame(frame int) bool {
	return s.rangePtr.Contains(frame)
}
go
{ "resource": "" }
q180611
FrameRangePadded
test
// FrameRangePadded returns the set's original frame-range string with
// every frame number zero-padded to at least pad digits.
func (s *FrameSet) FrameRangePadded(pad int) string {
	return PadFrameRange(s.frange, pad)
}
go
{ "resource": "" }
q180612
Normalize
test
// Normalize returns a new FrameSet backed by a normalized copy of the
// underlying ranges, with its frange string regenerated from that
// normalized form. The receiver is not modified.
func (s *FrameSet) Normalize() *FrameSet {
	ptr := s.rangePtr.Normalized()
	return &FrameSet{ptr.String(), ptr}
}
go
{ "resource": "" }
q180613
FramesToFrameRange
test
// FramesToFrameRange converts an explicit list of frame numbers into a
// compact frame-range string (e.g. "1-10x2,12"), zero-padding every
// number to zfill digits. When sorted is true the input slice is sorted
// first — in place, so callers should note the mutation. Runs of equal
// stepping are greedily collapsed into "start-end" / "start-endxstep"
// groups.
func FramesToFrameRange(frames []int, sorted bool, zfill int) string {
	count := len(frames)
	if count == 0 {
		return ""
	}
	if count == 1 {
		return zfillInt(frames[0], zfill)
	}
	if sorted {
		sort.Ints(frames)
	}

	var i, frame, step int
	var start, end string
	var buf strings.Builder

	// Keep looping until all frames are consumed
	for len(frames) > 0 {
		count = len(frames)
		// If we get to the last element, just write it
		// and end
		if count <= 2 {
			for _, frame = range frames {
				if buf.Len() > 0 {
					buf.WriteString(",")
				}
				buf.WriteString(zfillInt(frame, zfill))
			}
			break
		}
		// At this point, we have 3 or more frames to check.
		// Scan the current window of the slice to see how
		// many frames we can consume into a group
		step = frames[1] - frames[0]
		for i = 0; i < len(frames)-1; i++ {
			// We have scanned as many frames as we can
			// for this group. Now write them and stop
			// looping on this window
			if (frames[i+1] - frames[i]) != step {
				break
			}
		}

		// Subsequent groups are comma-separated
		if buf.Len() > 0 {
			buf.WriteString(",")
		}

		// We only have a single frame to write for this group
		if i == 0 {
			buf.WriteString(zfillInt(frames[0], zfill))
			frames = frames[1:]
			continue
		}

		// First do a check to see if we could have gotten a larger range
		// out of subsequent values with a different step size
		if i == 1 && count > 3 {
			// Check if the next two pairwise frames have the same step.
			// If so, then it is better than our current grouping.
			if (frames[2] - frames[1]) == (frames[3] - frames[2]) {
				// Just consume the first frame, and allow the next
				// loop to scan the new stepping
				buf.WriteString(zfillInt(frames[0], zfill))
				frames = frames[1:]
				continue
			}
		}

		// Otherwise write out this step range
		start = zfillInt(frames[0], zfill)
		end = zfillInt(frames[i], zfill)
		buf.WriteString(fmt.Sprintf("%s-%s", start, end))
		// Step suffix is only written for steps above 1
		if step > 1 {
			buf.WriteString(fmt.Sprintf("x%d", step))
		}
		frames = frames[i+1:]
	}

	return buf.String()
}
go
{ "resource": "" }
q180614
frameRangeMatches
test
// frameRangeMatches splits a frame-range string on commas and matches
// each component against the known range patterns, returning the
// capture groups for every component. Padding characters and spaces are
// stripped before matching. An error is returned if any component
// matches no pattern.
func frameRangeMatches(frange string) ([][]string, error) {
	// Remove all padding characters before parsing
	for _, k := range defaultPadding.AllChars() {
		frange = strings.Replace(frange, k, "", -1)
	}

	var (
		matched bool
		match   []string
		rx      *regexp.Regexp
	)

	frange = strings.Replace(frange, " ", "", -1)

	// For each comma-sep component, we will parse a frame range
	parts := strings.Split(frange, ",")
	size := len(parts)
	matches := make([][]string, size, size)

	for i, part := range parts {
		matched = false

		// Build up frames for all comma-sep components
		// NOTE(review): the loop keeps testing later patterns after a
		// match, so the LAST matching pattern wins — confirm intended.
		for _, rx = range rangePatterns {
			if match = rx.FindStringSubmatch(part); match == nil {
				continue
			}
			matched = true
			matches[i] = match[1:]
		}

		// If any component of the comma-sep frame range fails to
		// parse, we bail out
		if !matched {
			err := fmt.Errorf("Failed to parse frame range: %s on part %q", frange, part)
			return nil, err
		}
	}

	return matches, nil
}
go
{ "resource": "" }
q180615
toRange
test
// toRange expands an inclusive range into the explicit slice of values
// it covers, walking from start toward end in increments of step. A
// step below 1 is treated as 1. Ascending and descending ranges are
// both supported; end is included only when the stepping lands on it.
func toRange(start, end, step int) []int {
	if step < 1 {
		step = 1
	}
	nums := []int{}
	if start <= end {
		for v := start; v <= end; v += step {
			nums = append(nums, v)
		}
		return nums
	}
	for v := start; v >= end; v -= step {
		nums = append(nums, v)
	}
	return nums
}
go
{ "resource": "" }
q180616
NewWorkManager
test
// NewWorkManager builds a workManager wired with unbuffered channels
// for directory paths, candidate sequences, and result batches.
// File-listing options come from the global Options: AllFiles includes
// hidden files, and unless SeqsOnly is set, single (non-sequence) files
// are included too.
func NewWorkManager() *workManager {
	var fileopts []fileseq.FileOption
	if Options.AllFiles {
		fileopts = append(fileopts, fileseq.HiddenFiles)
	}
	if !Options.SeqsOnly {
		fileopts = append(fileopts, fileseq.SingleFiles)
	}

	s := &workManager{
		inDirs:   make(chan string),
		inSeqs:   make(chan *fileseq.FileSequence),
		outSeqs:  make(chan fileseq.FileSequences),
		fileOpts: fileopts,
	}
	return s
}
go
{ "resource": "" }
q180617
processSources
test
// processSources consumes the manager's two input channels until both
// are closed: directory paths are scanned on disk for sequences, and
// candidate sequences are re-resolved against the disk via their
// formatted pattern. Matches are forwarded on outSeqs; failures are
// reported to errOut. Local copies of the channels are set to nil when
// closed so the select stops polling them.
func (w *workManager) processSources() {
	var (
		ok   bool
		path string
		seq  *fileseq.FileSequence
	)

	fileopts := w.fileOpts
	inDirs := w.inDirs
	inSeqs := w.inSeqs
	outSeqs := w.outSeqs

	// Done once both local channel copies have been nil-ed out
	isDone := func() bool {
		return (inDirs == nil && inSeqs == nil)
	}

	for !isDone() {
		select {

		// Directory paths will be scanned for contents
		case path, ok = <-inDirs:
			if !ok {
				inDirs = nil
				continue
			}
			seqs, err := fileseq.FindSequencesOnDisk(path, fileopts...)
			if err != nil {
				fmt.Fprintf(errOut, "%s %q: %s\n", ErrorPath, path, err)
				continue
			}
			outSeqs <- seqs

		// Sequence paths will be scanned for a direct match
		// against the sequence pattern
		case seq, ok = <-inSeqs:
			if !ok {
				inSeqs = nil
				continue
			}
			// Rebuild the on-disk search pattern from the sequence parts
			path, err := seq.Format("{{dir}}{{base}}{{pad}}{{ext}}")
			if err != nil {
				fmt.Fprintf(errOut, "%s %q: Not a valid path\n", ErrorPattern, path)
				continue
			}
			// Shadows the outer seq within this case
			seq, err := fileseq.FindSequenceOnDisk(path)
			if err != nil {
				// A missing path is not an error worth reporting
				if !os.IsNotExist(err) {
					fmt.Fprintf(errOut, "%s %q: %s\n", ErrorPattern, path, err)
				}
				continue
			}
			if seq != nil {
				outSeqs <- fileseq.FileSequences{seq}
			}
		}
	}
}
go
{ "resource": "" }
q180618
isInputDone
test
func (w *workManager) isInputDone() bool { if w.inDirs != nil { return false } if w.inSeqs != nil { return false } return true }
go
{ "resource": "" }
q180619
closeInputs
test
func (w *workManager) closeInputs() { if w.inDirs != nil { close(w.inDirs) } if w.inSeqs != nil { close(w.inSeqs) } }
go
{ "resource": "" }
q180620
load
test
func (w *workManager) load(paths []string) { dirs, seqs := preparePaths(paths) for _, s := range seqs { w.inSeqs <- s } for _, r := range dirs { w.inDirs <- r } }
go
{ "resource": "" }
q180621
loadRecursive
test
// loadRecursive feeds the given paths into the manager like load, but
// walks each directory tree recursively, queueing every subdirectory
// for sequence scanning. Hidden directories are skipped unless
// Options.AllFiles is set.
func (w *workManager) loadRecursive(paths []string) {
	walkFn := func(path string, info os.FileInfo, err error) error {
		if err != nil {
			// Unreadable entries are skipped silently
			return nil
		}

		var isDir bool

		if info.IsDir() {
			isDir = true
		} else if info, err = os.Stat(path); err == nil && info.IsDir() {
			// Re-stat the path — presumably to treat directory
			// symlinks as directories; confirm against walk's Lstat
			// behavior
			isDir = true
		}

		if isDir {
			if !Options.AllFiles {
				// Skip traversing into hidden dirs (the length check
				// exempts the bare "." path component)
				if len(info.Name()) > 1 && strings.HasPrefix(info.Name(), ".") {
					return walk.SkipDir
				}
			}
			// Add the path to the input channel for sequence scanning
			w.inDirs <- path
		}
		return nil
	}

	dirs, seqs := preparePaths(paths)
	for _, s := range seqs {
		w.inSeqs <- s
	}
	for _, r := range dirs {
		r := r
		if err := walk.Walk(r, walkFn); err != nil {
			// SkipDir bubbling out of the walk is not an error
			if err != walk.SkipDir {
				fmt.Fprintf(errOut, "%s %q: %s\n", ErrorPath, r, err)
			}
		}
	}
}
go
{ "resource": "" }
q180622
preparePaths
test
// preparePaths cleans and de-duplicates the input paths, splitting them
// into existing directories and fileseq sequence patterns. Paths that
// do not exist on disk are tried as sequence patterns; anything that is
// neither a directory nor a valid pattern is reported to errOut and
// dropped. Plain existing files are silently ignored.
func preparePaths(paths []string) ([]string, fileseq.FileSequences) {
	var (
		fi  os.FileInfo
		err error
	)

	dirs := make([]string, 0)
	seqs := make(fileseq.FileSequences, 0)

	// Tracks already-seen cleaned paths for de-duplication
	previous := make(map[string]struct{})

	for _, p := range paths {
		p := strings.TrimSpace(filepath.Clean(p))
		if p == "" {
			continue
		}

		if _, seen := previous[p]; seen {
			continue
		}
		previous[p] = struct{}{}

		if fi, err = os.Stat(p); err != nil {
			// If the path doesn't exist, test it for
			// a valid fileseq pattern
			if seq, err := fileseq.NewFileSequence(p); err == nil {
				seqs = append(seqs, seq)
				continue
			}
			// Reports the outer (os.Stat) error; the parse error above
			// is scoped to its if statement
			fmt.Fprintf(errOut, "%s %q: %s\n", ErrorPath, p, err)
			continue
		}

		if !fi.IsDir() {
			continue
		}

		dirs = append(dirs, p)
	}
	return dirs, seqs
}
go
{ "resource": "" }
q180623
PadFrameRange
test
func PadFrameRange(frange string, pad int) string { // We don't need to do anything if they gave us // an invalid pad number if pad < 2 { return frange } size := strings.Count(frange, ",") + 1 parts := make([]string, size, size) for i, part := range strings.Split(frange, ",") { didMatch := false for _, rx := range rangePatterns { matched := rx.FindStringSubmatch(part) if len(matched) == 0 { continue } matched = matched[1:] size = len(matched) switch size { case 1: parts[i] = zfillString(matched[0], pad) case 2: parts[i] = fmt.Sprintf("%s-%s", zfillString(matched[0], pad), zfillString(matched[1], pad)) case 4: parts[i] = fmt.Sprintf("%s-%s%s%s", zfillString(matched[0], pad), zfillString(matched[1], pad), matched[2], matched[3]) default: // No match. Try the next pattern continue } // If we got here, we matched a case and can stop // checking the rest of the patterns didMatch = true break } // If we didn't match one of our expected patterns // then just take the original part and add it unmodified if !didMatch { parts = append(parts, part) } } return strings.Join(parts, ",") }
go
{ "resource": "" }
q180624
zfillString
test
// zfillString left-pads the numeric string src with zeros until it is
// at least z characters wide. A leading minus sign stays at the front,
// with the zeros inserted after it. Strings already z or more
// characters long are returned unchanged.
func zfillString(src string, z int) string {
	if len(src) >= z {
		return src
	}
	pad := strings.Repeat("0", z-len(src))
	if strings.HasPrefix(src, "-") {
		return "-" + pad + src[1:]
	}
	return pad + src
}
go
{ "resource": "" }
q180625
zfillInt
test
// zfillInt formats src in decimal, zero-padded to at least z digits
// (a minus sign counts toward the width). Widths below 2 produce the
// plain unpadded decimal form.
func zfillInt(src int, z int) string {
	if z < 2 {
		return strconv.Itoa(src)
	}
	// "*" takes the width from the argument list
	return fmt.Sprintf("%0*d", z, src)
}
go
{ "resource": "" }
q180626
NewInclusiveRange
test
func NewInclusiveRange(start, end, step int) *InclusiveRange { if step == 0 { if start <= end { step = 1 } else { step = -1 } } r := &InclusiveRange{ start: start, end: end, step: step, } return r }
go
{ "resource": "" }
q180627
String
test
// String renders the range in frame-range notation: a lone value for a
// single-value range, "start-end" for a unit-step range, and
// "start-endxstep" when the step's magnitude exceeds 1.
func (r *InclusiveRange) String() string {
	var buf strings.Builder

	// Always write the start value
	buf.WriteString(strconv.Itoa(r.Start()))

	// If we have a range, express the end value
	if r.End() != r.Start() {
		buf.WriteString(`-`)
		buf.WriteString(strconv.Itoa(r.End()))

		// Express the stepping, if its magnitude is not 1
		step := r.Step()
		if step > 1 || step < -1 {
			buf.WriteString(`x`)
			buf.WriteString(strconv.Itoa(r.Step()))
		}
	}

	return buf.String()
}
go
{ "resource": "" }
q180628
End
test
// End returns the effective last value of the range: the raw end value
// snapped to the nearest value actually reachable from start with the
// configured step. The result is cached after the first call.
// NOTE(review): the cache write is unsynchronized — confirm the type is
// only used from a single goroutine.
func (r *InclusiveRange) End() int {
	if r.isEndCached {
		return r.cachedEnd
	}

	r.isEndCached = true

	// If we aren't stepping, or we don't have
	// a full range, then just use the end value
	if r.step == 1 || r.step == -1 || r.start == r.end {
		r.cachedEnd = r.end
		return r.cachedEnd
	}

	// If the step is in the wrong direction,
	// compared to the range direction, then
	// just use the start as the end.
	if (r.end < r.start) && r.step < (r.end-r.start) {
		r.cachedEnd = r.start
		return r.cachedEnd

	} else if (r.end > r.start) && r.step > (r.end-r.start) {
		r.cachedEnd = r.start
		return r.cachedEnd
	}

	// Calculate the end, taking into account the stepping
	r.cachedEnd = r.closestInRange(r.end, r.start, r.end, r.step)
	return r.cachedEnd
}
go
{ "resource": "" }
q180629
Len
test
// Len returns the number of values the range produces: the inclusive
// span size divided by the step magnitude, rounded up. The result is
// cached after the first call (unsynchronized, like End's cache).
func (r *InclusiveRange) Len() int {
	if r.isLenCached {
		return r.cachedLen
	}
	// Offset by one to include the end value
	diff := math.Abs(float64(r.end-r.start)) + 1
	r.cachedLen = int(math.Ceil(diff / math.Abs(float64(r.step))))
	r.isLenCached = true
	return r.cachedLen
}
go
{ "resource": "" }
q180630
Min
test
func (r *InclusiveRange) Min() int { start := r.Start() end := r.End() if start < end { return start } return end }
go
{ "resource": "" }
q180631
Max
test
func (r *InclusiveRange) Max() int { start := r.Start() end := r.End() if start > end { return start } return end }
go
{ "resource": "" }
q180632
Contains
test
// Contains reports whether value is one of the values produced by the
// range, by snapping it onto the range's stepping grid and checking it
// survived unchanged.
func (r *InclusiveRange) Contains(value int) bool {
	// If we attempt to find the closest value, given
	// the start of the range and the step, we can check
	// if it is still the same number. If it hasn't changed,
	// then it is in the range.
	closest := r.closestInRange(value, r.start, r.End(), r.step)
	return closest == value
}
go
{ "resource": "" }
q180633
closestInRange
test
// closestInRange clamps value into [start, end] (in either direction)
// and then snaps it onto the stepping grid anchored at start. With a
// unit step the clamped value is returned as-is.
// NOTE(review): the snap uses Go's truncating integer division, which
// rounds toward start — verify this matches callers' expectations for
// values between grid points.
func (*InclusiveRange) closestInRange(value, start, end, step int) int {
	// Possibly clamp the value if it is outside the range
	if end >= start {
		if value < start {
			return start
		} else if value > end {
			return end
		}
	} else {
		if value > start {
			return start
		} else if value < end {
			return end
		}
	}

	// No calculation needed if there is no stepping
	if step == 1 || step == -1 {
		return value
	}

	// Modified the value so that it is a properly stepped
	// increment within the range
	return (((value - start) / step) * step) + start
}
go
{ "resource": "" }
q180634
Index
test
func (f *InclusiveRange) Index(value int) int { closest := f.closestInRange(value, f.start, f.End(), f.step) if closest != value { return -1 } idx := (value - f.start) / f.step if idx < 0 { idx *= -1 } return idx }
go
{ "resource": "" }
q180635
String
test
func (l *InclusiveRanges) String() string { var buf strings.Builder for i, b := range l.blocks { if i > 0 { buf.WriteString(`,`) } buf.WriteString(b.String()) } return buf.String() }
go
{ "resource": "" }
q180636
Len
test
func (l *InclusiveRanges) Len() int { var totalLen int for _, b := range l.blocks { totalLen += b.Len() } return totalLen }
go
{ "resource": "" }
q180637
Start
test
func (l *InclusiveRanges) Start() int { for _, b := range l.blocks { return b.Start() } return 0 }
go
{ "resource": "" }
q180638
End
test
func (l *InclusiveRanges) End() int { if l.blocks == nil { return 0 } return l.blocks[len(l.blocks)-1].End() }
go
{ "resource": "" }
q180639
Min
test
func (l *InclusiveRanges) Min() int { val := l.Start() for _, aRange := range l.blocks { next := aRange.Min() if next < val { val = next } } return val }
go
{ "resource": "" }
q180640
Max
test
func (l *InclusiveRanges) Max() int { val := l.End() for _, aRange := range l.blocks { next := aRange.Max() if next > val { val = next } } return val }
go
{ "resource": "" }
q180641
numRanges
test
func (l *InclusiveRanges) numRanges() int { if l.blocks == nil { return 0 } return len(l.blocks) }
go
{ "resource": "" }
q180642
rangeAt
test
func (l *InclusiveRanges) rangeAt(idx int) *InclusiveRange { if idx < 0 || idx >= l.numRanges() { return nil } return l.blocks[idx] }
go
{ "resource": "" }
q180643
Append
test
func (l *InclusiveRanges) Append(start, end, step int) { block := NewInclusiveRange(start, end, step) l.blocks = append(l.blocks, block) }
go
{ "resource": "" }
q180644
AppendUnique
test
// AppendUnique appends only the values of start-end (step) that are not
// already covered by the existing blocks, splitting the input into
// maximal sub-ranges of novel values. A zero step is ignored, and the
// scan step's sign is normalized to match the direction of start->end.
// The whole range is appended directly when the list is empty.
// NOTE(review): subStep keeps the caller's ORIGINAL step sign while the
// scan uses the normalized sign — for a mismatched sign (e.g. start<end
// with step<0) the appended blocks carry the negated step; confirm
// whether that is intended.
func (l *InclusiveRanges) AppendUnique(start, end, step int) {
	if step == 0 {
		return
	}
	subStart := start
	subEnd := start
	subStep := step
	last := start
	pending := 0 // Track unique value count

	// Handle loop test for both increasing
	// and decreasing ranges
	var pred func() bool
	if start <= end {
		if step < 0 {
			step *= -1
		}
		pred = func() bool {
			return subEnd <= end
		}
	} else {
		if step > 0 {
			step *= -1
		}
		pred = func() bool {
			return subEnd >= end
		}
	}

	// Short-circuit if this is the first range being added
	if len(l.blocks) == 0 {
		l.Append(start, end, step)
		return
	}

	// TODO: More intelligent fast-paths for easy-to-identify
	// overlapping ranges. Such as when the existing range is:
	// 1-100x1 and we are appending 50-150x1. Should be easy
	// enough to just know we can Append(101,150,1)

	for ; pred(); subEnd += step {
		if !l.Contains(subEnd) {
			// Is a unique value in the range
			last = subEnd
			if pending == 0 {
				subStart = last
			}
			pending++
			continue
		}
		if pending == 0 {
			// Nothing to add yet
			continue
		}
		// Current value is already in range.
		// Add previous values
		l.Append(subStart, last, subStep)
		subStart = subEnd + step
		pending = 0
	}

	// Flush the remaining values
	if pending > 0 {
		l.Append(subStart, last, subStep)
	}
}
go
{ "resource": "" }
q180645
Contains
test
func (l *InclusiveRanges) Contains(value int) bool { for _, b := range l.blocks { if b.Contains(value) { return true } } return false }
go
{ "resource": "" }
q180646
Index
test
func (l *InclusiveRanges) Index(value int) int { var idx, n int for _, b := range l.blocks { // If the value is within the current block // then return the local index, offset by the // number of previous values we have tracked if idx = b.Index(value); idx >= 0 { return idx + n } // Update the offset for the values we have seen n += b.Len() } // The previous loop ended in error return -1 }
go
{ "resource": "" }
q180647
FrameRange
test
// FrameRange returns the sequence's frame-range string, or "" when the
// sequence has no frame set (a single file).
func (s *FileSequence) FrameRange() string {
	if s.frameSet == nil {
		return ""
	}
	return s.frameSet.FrameRange()
}
go
{ "resource": "" }
q180648
FrameRangePadded
test
// FrameRangePadded returns the sequence's frame-range string with each
// frame number zero-padded to the sequence's zfill width, or "" when
// there is no frame set.
func (s *FileSequence) FrameRangePadded() string {
	if s.frameSet == nil {
		return ""
	}
	return s.frameSet.FrameRangePadded(s.zfill)
}
go
{ "resource": "" }
q180649
Index
test
// Index returns the path of the file at the given index within the
// sequence. Without a frame set the sequence's own string form is
// returned; an out-of-range index or a formatting failure yields "".
func (s *FileSequence) Index(idx int) string {
	if s.frameSet == nil {
		return s.String()
	}
	// Map the index to its frame number, then the frame to a path
	frame, err := s.frameSet.Frame(idx)
	if err != nil {
		return ""
	}
	path, err := s.Frame(frame)
	if err != nil {
		return ""
	}
	return path
}
go
{ "resource": "" }
q180650
SetDirname
test
func (s *FileSequence) SetDirname(dir string) { if !strings.HasSuffix(dir, string(filepath.Separator)) { dir = dir + string(filepath.Separator) } s.dir = dir }
go
{ "resource": "" }
q180651
SetPadding
test
// SetPadding sets the sequence's padding character string and
// recomputes the zero-fill width from it via the active pad mapper.
func (s *FileSequence) SetPadding(padChars string) {
	s.padChar = padChars
	s.zfill = s.padMapper.PaddingCharsSize(padChars)
}
go
{ "resource": "" }
q180652
SetPaddingStyle
test
// SetPaddingStyle switches the sequence to a different padding style,
// re-deriving the padding characters from the current zero-fill width
// so the width is preserved under the new style.
func (s *FileSequence) SetPaddingStyle(style PadStyle) {
	s.padMapper = padders[style]
	s.SetPadding(s.padMapper.PaddingChars(s.ZFill()))
}
go
{ "resource": "" }
q180653
SetExt
test
func (s *FileSequence) SetExt(ext string) { if !strings.HasPrefix(ext, ".") { ext = "." + ext } s.ext = ext }
go
{ "resource": "" }
q180654
SetFrameRange
test
func (s *FileSequence) SetFrameRange(frameRange string) error { frameSet, err := NewFrameSet(frameRange) if err != nil { return err } s.frameSet = frameSet return nil }
go
{ "resource": "" }
q180655
Len
test
func (s *FileSequence) Len() int { if s.frameSet == nil { return 1 } return s.frameSet.Len() }
go
{ "resource": "" }
q180656
String
test
func (s *FileSequence) String() string { var fs string if s.frameSet != nil { fs = s.frameSet.String() } buf := bytes.NewBufferString(s.dir) buf.WriteString(s.basename) buf.WriteString(fs) buf.WriteString(s.padChar) buf.WriteString(s.ext) return buf.String() }
go
{ "resource": "" }
q180657
Copy
test
// Copy returns a new FileSequence re-parsed from this one's string
// form.
// NOTE(review): the parse error is discarded — a String() that fails to
// re-parse would yield a nil copy; confirm callers handle nil.
func (s *FileSequence) Copy() *FileSequence {
	seq, _ := NewFileSequence(s.String())
	return seq
}
go
{ "resource": "" }
q180658
NewClient
test
// NewClient constructs a SOAP client for the given endpoint URL. The
// tls flag is passed through as InsecureSkipVerify on outgoing
// requests, and header, when non-nil, is sent as the SOAP header on
// each Call.
func NewClient(url string, tls bool, header interface{}) *Client {
	c := Client{
		url:    url,
		tls:    tls,
		header: header,
	}
	return &c
}
go
{ "resource": "" }
q180659
UnmarshalXML
test
// UnmarshalXML decodes the SOAP Header element: each child element is
// decoded into h.Content until the header's closing tag is reached.
// NOTE(review): every child start element is decoded into the same
// Content value, so with multiple children the last one wins.
func (h *Header) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
	var (
		token xml.Token
		err   error
	)
Loop:
	for {
		if token, err = d.Token(); err != nil {
			return err
		}
		// Defensive; Token reports end-of-stream via err, not nil token
		if token == nil {
			break
		}
		switch se := token.(type) {
		case xml.StartElement:
			if err = d.DecodeElement(h.Content, &se); err != nil {
				return err
			}
		case xml.EndElement:
			// Closing tag of the header itself
			break Loop
		}
	}
	return nil
}
go
{ "resource": "" }
q180660
UnmarshalXML
test
// UnmarshalXML decodes the SOAP Body element. A soap-envelope Fault
// child is decoded into b.Fault (clearing Content); any other single
// child element is decoded into b.Content. More than one child is
// rejected as non-WS-I-compliant, and a nil Content is rejected up
// front.
func (b *Body) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
	if b.Content == nil {
		return xml.UnmarshalError("Content must be a pointer to a struct")
	}
	var (
		token    xml.Token
		err      error
		consumed bool // set once the single allowed child is decoded
	)
Loop:
	for {
		if token, err = d.Token(); err != nil {
			return err
		}
		// Defensive; Token reports end-of-stream via err, not nil token
		if token == nil {
			break
		}
		envelopeNameSpace := "http://schemas.xmlsoap.org/soap/envelope/"
		switch se := token.(type) {
		case xml.StartElement:
			if consumed {
				return xml.UnmarshalError(
					"Found multiple elements inside SOAP body; not wrapped-document/literal WS-I compliant")
			} else if se.Name.Space == envelopeNameSpace && se.Name.Local == "Fault" {
				// A Fault replaces the expected content entirely
				b.Fault = &Fault{}
				b.Content = nil
				err = d.DecodeElement(b.Fault, &se)
				if err != nil {
					return err
				}
				consumed = true
			} else {
				if err = d.DecodeElement(b.Content, &se); err != nil {
					return err
				}
				consumed = true
			}
		case xml.EndElement:
			// Closing tag of the body itself
			break Loop
		}
	}
	return nil
}
go
{ "resource": "" }
q180661
Call
test
// Call performs a SOAP request against the client's endpoint. The
// request value is wrapped in a SOAP Envelope (with the client's header
// when one was configured), POSTed with the given SOAPAction, and the
// response body is decoded into response — and, when header is non-nil,
// the response SOAP header into header. A non-200 status or transport
// failure yields an error; an empty response body is treated as
// success.
// NOTE(review): a fresh http.Transport/Client is built per call, which
// defeats connection reuse; and s.tls feeds InsecureSkipVerify directly
// (true DISABLES certificate verification) — confirm both are intended.
func (s *Client) Call(soapAction string, request, response, header interface{}) error {
	// Build the outgoing envelope, attaching the configured header if any
	var envelope Envelope
	if s.header != nil {
		envelope = Envelope{
			Header: &Header{
				Content: s.header,
			},
			Body: Body{
				Content: request,
			},
		}
	} else {
		envelope = Envelope{
			Body: Body{
				Content: request,
			},
		}
	}
	buffer := new(bytes.Buffer)
	encoder := xml.NewEncoder(buffer)
	encoder.Indent("  ", "    ")
	if err := encoder.Encode(envelope); err != nil {
		return errors.Wrap(err, "failed to encode envelope")
	}
	if err := encoder.Flush(); err != nil {
		return errors.Wrap(err, "failed to flush encoder")
	}
	req, err := http.NewRequest("POST", s.url, buffer)
	if err != nil {
		return errors.Wrap(err, "failed to create POST request")
	}
	req.Header.Add("Content-Type", "text/xml; charset=\"utf-8\"")
	req.Header.Set("SOAPAction", soapAction)
	req.Header.Set("User-Agent", s.userAgent)
	req.Close = true
	tr := &http.Transport{
		TLSClientConfig: &tls.Config{
			InsecureSkipVerify: s.tls,
		},
		Dial: dialTimeout,
	}
	client := &http.Client{Transport: tr}
	res, err := client.Do(req)
	if err != nil {
		return errors.Wrap(err, "failed to send SOAP request")
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		// Non-200 bodies are surfaced verbatim as the SOAP fault text
		soapFault, err := ioutil.ReadAll(res.Body)
		if err != nil {
			return errors.Wrap(err, "failed to read SOAP fault response body")
		}
		msg := fmt.Sprintf("HTTP Status Code: %d, SOAP Fault: \n%s", res.StatusCode, string(soapFault))
		return errors.New(msg)
	}
	rawbody, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return errors.Wrap(err, "failed to read SOAP body")
	}
	// Some services reply with an empty body on success
	if len(rawbody) == 0 {
		return nil
	}
	respEnvelope := Envelope{}
	respEnvelope.Body = Body{Content: response}
	if header != nil {
		respEnvelope.Header = &Header{Content: header}
	}
	if err = xml.Unmarshal(rawbody, &respEnvelope); err != nil {
		return errors.Wrap(err, "failed to unmarshal response SOAP Envelope")
	}
	return nil
}
go
{ "resource": "" }
q180662
JSONDoc
test
// JSONDoc loads a document from a local file path or HTTP URL and
// returns its bytes as a json.RawMessage.
func JSONDoc(path string) (json.RawMessage, error) {
	data, err := swag.LoadFromFileOrHTTP(path)
	if err != nil {
		return nil, err
	}
	return json.RawMessage(data), nil
}
go
{ "resource": "" }
q180663
AddLoader
test
// AddLoader registers a new document loader at the head of the loader
// chain, paired with the predicate that selects it by path, and points
// spec.PathLoader at the new loader's function so reference resolution
// uses it too.
func AddLoader(predicate DocMatcher, load DocLoader) {
	prev := loaders
	loaders = &loader{
		Match: predicate,
		Fn:    load,
		Next:  prev,
	}
	spec.PathLoader = loaders.Fn
}
go
{ "resource": "" }
q180664
JSONSpec
test
// JSONSpec loads a JSON swagger document from path (file or HTTP) and
// analyzes it as a swagger 2.0 spec.
func JSONSpec(path string) (*Document, error) {
	data, err := JSONDoc(path)
	if err != nil {
		return nil, err
	}
	// convert to json
	return Analyzed(data, "")
}
go
{ "resource": "" }
q180665
Embedded
test
func Embedded(orig, flat json.RawMessage) (*Document, error) { var origSpec, flatSpec spec.Swagger if err := json.Unmarshal(orig, &origSpec); err != nil { return nil, err } if err := json.Unmarshal(flat, &flatSpec); err != nil { return nil, err } return &Document{ raw: orig, origSpec: &origSpec, spec: &flatSpec, }, nil }
go
{ "resource": "" }
q180666
Spec
test
func Spec(path string) (*Document, error) { specURL, err := url.Parse(path) if err != nil { return nil, err } var lastErr error for l := loaders.Next; l != nil; l = l.Next { if loaders.Match(specURL.Path) { b, err2 := loaders.Fn(path) if err2 != nil { lastErr = err2 continue } doc, err3 := Analyzed(b, "") if err3 != nil { return nil, err3 } if doc != nil { doc.specFilePath = path } return doc, nil } } if lastErr != nil { return nil, lastErr } b, err := defaultLoader.Fn(path) if err != nil { return nil, err } document, err := Analyzed(b, "") if document != nil { document.specFilePath = path } return document, err }
go
{ "resource": "" }
q180667
Analyzed
test
// Analyzed parses raw spec bytes into a Document. Only swagger version
// "2.0" is supported; an empty version defaults to it. Data that does
// not look like JSON (no leading '{' or '[') is converted from YAML
// first. The parsed spec is deep-cloned into origSpec so later
// mutations can be rolled back (see ResetDefinitions).
func Analyzed(data json.RawMessage, version string) (*Document, error) {
	if version == "" {
		version = "2.0"
	}
	if version != "2.0" {
		return nil, fmt.Errorf("spec version %q is not supported", version)
	}

	raw := data
	trimmed := bytes.TrimSpace(data)
	if len(trimmed) > 0 {
		if trimmed[0] != '{' && trimmed[0] != '[' {
			// Not JSON: treat as YAML and convert
			yml, err := swag.BytesToYAMLDoc(trimmed)
			if err != nil {
				return nil, fmt.Errorf("analyzed: %v", err)
			}
			d, err := swag.YAMLToJSON(yml)
			if err != nil {
				return nil, fmt.Errorf("analyzed: %v", err)
			}
			raw = d
		}
	}

	swspec := new(spec.Swagger)
	if err := json.Unmarshal(raw, swspec); err != nil {
		return nil, err
	}

	// Keep an independent copy of the original spec
	origsqspec, err := cloneSpec(swspec)
	if err != nil {
		return nil, err
	}

	d := &Document{
		Analyzer: analysis.New(swspec),
		schema:   spec.MustLoadSwagger20Schema(),
		spec:     swspec,
		raw:      raw,
		origSpec: origsqspec,
	}
	return d, nil
}
go
{ "resource": "" }
q180668
Expanded
test
// Expanded returns a copy of the document with all $ref references
// expanded in place. An optional first ExpandOptions overrides the
// default, which resolves relative references against the document's
// file path. The receiver is not modified; expansion works on a fresh
// unmarshal of the raw bytes.
func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) {
	swspec := new(spec.Swagger)
	if err := json.Unmarshal(d.raw, swspec); err != nil {
		return nil, err
	}

	var expandOptions *spec.ExpandOptions
	if len(options) > 0 {
		expandOptions = options[0]
	} else {
		expandOptions = &spec.ExpandOptions{
			RelativeBase: d.specFilePath,
		}
	}

	if err := spec.ExpandSpec(swspec, expandOptions); err != nil {
		return nil, err
	}

	dd := &Document{
		Analyzer:     analysis.New(swspec),
		spec:         swspec,
		specFilePath: d.specFilePath,
		schema:       spec.MustLoadSwagger20Schema(),
		raw:          d.raw,
		origSpec:     d.origSpec,
	}
	return dd, nil
}
go
{ "resource": "" }
q180669
ResetDefinitions
test
func (d *Document) ResetDefinitions() *Document { defs := make(map[string]spec.Schema, len(d.origSpec.Definitions)) for k, v := range d.origSpec.Definitions { defs[k] = v } d.spec.Definitions = defs return d }
go
{ "resource": "" }
q180670
Pristine
test
// Pristine returns a fresh Document re-analyzed from this one's raw
// bytes and version, dropping any expansion or mutation applied since
// loading.
// NOTE(review): the Analyzed error is discarded — the raw/version pair
// came from a successful prior Analyzed, but a failure here would
// silently return nil.
func (d *Document) Pristine() *Document {
	dd, _ := Analyzed(d.Raw(), d.Version())
	return dd
}
go
{ "resource": "" }
q180671
OpenDb
test
// OpenDb opens the first usable GeoIP database from files, falling back
// to a list of well-known install locations when files is empty. The
// returned handle carries a finalizer that frees the underlying C
// resources and is set to UTF-8 output.
// NOTE(review): the default list contains "/usr/share/GeoIP/GeoIP.dat"
// twice; the C.free calls are deferred inside the loop so they all run
// at function return rather than per iteration; and the break condition
// `g.db != nil && err != nil` looks inverted — `err == nil` may have
// been intended. Confirm against libgeoip's errno behavior.
func OpenDb(files []string, flag int) (*GeoIP, error) {
	if len(files) == 0 {
		files = []string{
			"/usr/share/GeoIP/GeoIP.dat",       // Linux default
			"/usr/share/local/GeoIP/GeoIP.dat", // source install?
			"/usr/local/share/GeoIP/GeoIP.dat", // FreeBSD
			"/opt/local/share/GeoIP/GeoIP.dat", // MacPorts
			"/usr/share/GeoIP/GeoIP.dat",       // ArchLinux
		}
	}
	g := &GeoIP{}
	runtime.SetFinalizer(g, (*GeoIP).free)
	var err error
	for _, file := range files {
		// libgeoip prints errors if it can't open the file, so check first
		if _, err := os.Stat(file); err != nil {
			if os.IsExist(err) {
				log.Println(err)
			}
			continue
		}
		cbase := C.CString(file)
		defer C.free(unsafe.Pointer(cbase))
		g.db, err = C.GeoIP_open(cbase, C.int(flag))
		if g.db != nil && err != nil {
			break
		}
	}
	if err != nil {
		return nil, fmt.Errorf("Error opening GeoIP database (%s): %s", files, err)
	}
	if g.db == nil {
		return nil, fmt.Errorf("Didn't open GeoIP database (%s)", files)
	}
	C.GeoIP_set_charset(g.db, C.GEOIP_CHARSET_UTF8)
	return g, nil
}
go
{ "resource": "" }
q180672
GetOrg
test
// GetOrg returns the name record for ip (presumably the
// organization/ISP name — see GetName), discarding the netmask that
// GetName also reports.
func (gi *GeoIP) GetOrg(ip string) string {
	name, _ := gi.GetName(ip)
	return name
}
go
{ "resource": "" }
q180673
GetRegion
test
// GetRegion looks up the country code and region code for ip. Both
// results are "" when the database is not open or the address has no
// region record. The C lookup is serialized with the handle mutex.
// NOTE(review): the GeoIPRegion struct is released with plain C.free;
// libgeoip provides GeoIPRegion_delete — confirm free() is sufficient.
func (gi *GeoIP) GetRegion(ip string) (string, string) {
	if gi.db == nil {
		return "", ""
	}

	cip := C.CString(ip)
	defer C.free(unsafe.Pointer(cip))

	gi.mu.Lock()
	region := C.GeoIP_region_by_addr(gi.db, cip)
	gi.mu.Unlock()

	if region == nil {
		return "", ""
	}

	countryCode := C.GoString(&region.country_code[0])
	regionCode := C.GoString(&region.region[0])
	defer C.free(unsafe.Pointer(region))

	return countryCode, regionCode
}
go
{ "resource": "" }
q180674
GetRegionName
test
// GetRegionName resolves a country code and region code pair to the
// human-readable region name, or "" when libgeoip has no entry.
func GetRegionName(countryCode, regionCode string) string {
	cc := C.CString(countryCode)
	defer C.free(unsafe.Pointer(cc))

	rc := C.CString(regionCode)
	defer C.free(unsafe.Pointer(rc))

	region := C.GeoIP_region_name_by_code(cc, rc)
	if region == nil {
		return ""
	}

	// it's a static string constant, don't free this
	regionName := C.GoString(region)

	return regionName
}
go
{ "resource": "" }
q180675
GetCountry
test
// GetCountry looks up the two-letter country code for ip and the
// netmask of the matched network. Both return zero values when the
// database is not open or the address is unknown. The whole lookup —
// including reading the last netmask — is serialized with the handle
// mutex.
func (gi *GeoIP) GetCountry(ip string) (cc string, netmask int) {
	if gi.db == nil {
		return
	}

	gi.mu.Lock()
	defer gi.mu.Unlock()

	cip := C.CString(ip)
	defer C.free(unsafe.Pointer(cip))
	ccountry := C.GeoIP_country_code_by_addr(gi.db, cip)

	if ccountry != nil {
		cc = C.GoString(ccountry)
		netmask = int(C.GeoIP_last_netmask(gi.db))
		return
	}
	return
}
go
{ "resource": "" }
q180676
NewRotatingFileHandler
test
func NewRotatingFileHandler(fileName string, maxBytes int, backupCount int) (*RotatingFileHandler, error) { dir := path.Dir(fileName) os.MkdirAll(dir, 0777) h := new(RotatingFileHandler) if maxBytes <= 0 { return nil, fmt.Errorf("invalid max bytes") } h.fileName = fileName h.maxBytes = maxBytes h.backupCount = backupCount var err error h.fd, err = os.OpenFile(fileName, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666) if err != nil { return nil, err } f, err := h.fd.Stat() if err != nil { return nil, err } h.curBytes = int(f.Size()) return h, nil }
go
{ "resource": "" }
q180677
Close
test
func (h *RotatingFileHandler) Close() error { if h.fd != nil { return h.fd.Close() } return nil }
go
{ "resource": "" }
q180678
String
test
func (l Level) String() string { switch l { case LevelTrace: return "trace" case LevelDebug: return "debug" case LevelInfo: return "info" case LevelWarn: return "warn" case LevelError: return "error" case LevelFatal: return "fatal" } // return default info return "info" }
go
{ "resource": "" }
q180679
New
test
func New(handler Handler, flag int) *Logger { var l = new(Logger) l.level = LevelInfo l.handler = handler l.flag = flag l.bufs = sync.Pool{ New: func() interface{} { return make([]byte, 0, 1024) }, } return l }
go
{ "resource": "" }
q180680
Close
test
func (l *Logger) Close() { l.hLock.Lock() defer l.hLock.Unlock() l.handler.Close() }
go
{ "resource": "" }
q180681
SetLevelByName
test
func (l *Logger) SetLevelByName(name string) { level := LevelInfo switch strings.ToLower(name) { case "trace": level = LevelTrace case "debug": level = LevelDebug case "warn", "warning": level = LevelWarn case "error": level = LevelError case "fatal": level = LevelFatal default: level = LevelInfo } l.SetLevel(level) }
go
{ "resource": "" }
q180682
Output
test
func (l *Logger) Output(callDepth int, level Level, msg string) { if l.level > level { return } buf := l.bufs.Get().([]byte) buf = buf[0:0] defer l.bufs.Put(buf) if l.flag&Ltime > 0 { now := time.Now().Format(timeFormat) buf = append(buf, '[') buf = append(buf, now...) buf = append(buf, "] "...) } if l.flag&Llevel > 0 { buf = append(buf, '[') buf = append(buf, level.String()...) buf = append(buf, "] "...) } if l.flag&Lfile > 0 { _, file, line, ok := runtime.Caller(callDepth) if !ok { file = "???" line = 0 } else { for i := len(file) - 1; i > 0; i-- { if file[i] == '/' { file = file[i+1:] break } } } buf = append(buf, file...) buf = append(buf, ':') buf = strconv.AppendInt(buf, int64(line), 10) buf = append(buf, ' ') } buf = append(buf, msg...) if len(msg) == 0 || msg[len(msg)-1] != '\n' { buf = append(buf, '\n') } l.hLock.Lock() l.handler.Write(buf) l.hLock.Unlock() }
go
{ "resource": "" }
q180683
OutputJson
test
func (l *Logger) OutputJson(callDepth int, level Level, body interface{}) { if l.level > level { return } buf := l.bufs.Get().([]byte) buf = buf[0:0] defer l.bufs.Put(buf) type JsonLog struct { Time string `json:"log_time"` Level string `json:"log_level"` File string `json:"log_file"` Line string `json:"log_line"` Body interface{} `json:"log_body"` } var jsonlog JsonLog if l.flag&Ltime > 0 { now := time.Now().Format(timeFormat) jsonlog.Time = now } if l.flag&Llevel > 0 { jsonlog.Level = level.String() } if l.flag&Lfile > 0 { _, file, line, ok := runtime.Caller(callDepth) if !ok { file = "???" line = 0 } else { for i := len(file) - 1; i > 0; i-- { if file[i] == '/' { file = file[i+1:] break } } } jsonlog.File = file jsonlog.Line = string(strconv.AppendInt(buf, int64(line), 10)) } jsonlog.Body = body msg, _ := json.Marshal(jsonlog) msg = append(msg, '\n') l.hLock.Lock() l.handler.Write(msg) l.hLock.Unlock() }
go
{ "resource": "" }
q180684
Print
test
// Print logs args at trace level, formatted as by fmt.Sprint.
func (l *Logger) Print(args ...interface{}) {
	msg := fmt.Sprint(args...)
	l.Output(2, LevelTrace, msg)
}
go
{ "resource": "" }
q180685
Println
test
// Println logs args at trace level, formatted as by fmt.Sprintln.
func (l *Logger) Println(args ...interface{}) {
	msg := fmt.Sprintln(args...)
	l.Output(2, LevelTrace, msg)
}
go
{ "resource": "" }
q180686
Debug
test
// Debug logs args at debug level, formatted as by fmt.Sprint.
func (l *Logger) Debug(args ...interface{}) {
	msg := fmt.Sprint(args...)
	l.Output(2, LevelDebug, msg)
}
go
{ "resource": "" }
q180687
Debugln
test
// Debugln logs args at debug level, formatted as by fmt.Sprintln.
func (l *Logger) Debugln(args ...interface{}) {
	msg := fmt.Sprintln(args...)
	l.Output(2, LevelDebug, msg)
}
go
{ "resource": "" }
q180688
Error
test
// Error logs args at error level, formatted as by fmt.Sprint.
func (l *Logger) Error(args ...interface{}) {
	msg := fmt.Sprint(args...)
	l.Output(2, LevelError, msg)
}
go
{ "resource": "" }
q180689
Errorln
test
// Errorln logs args at error level, formatted as by fmt.Sprintln.
func (l *Logger) Errorln(args ...interface{}) {
	msg := fmt.Sprintln(args...)
	l.Output(2, LevelError, msg)
}
go
{ "resource": "" }
q180690
Info
test
// Info logs args at info level, formatted as by fmt.Sprint.
func (l *Logger) Info(args ...interface{}) {
	msg := fmt.Sprint(args...)
	l.Output(2, LevelInfo, msg)
}
go
{ "resource": "" }
q180691
Infoln
test
// Infoln logs args at info level, formatted as by fmt.Sprintln.
func (l *Logger) Infoln(args ...interface{}) {
	msg := fmt.Sprintln(args...)
	l.Output(2, LevelInfo, msg)
}
go
{ "resource": "" }
q180692
Warn
test
// Warn logs args at warn level, formatted as by fmt.Sprint.
func (l *Logger) Warn(args ...interface{}) {
	msg := fmt.Sprint(args...)
	l.Output(2, LevelWarn, msg)
}
go
{ "resource": "" }
q180693
Warnln
test
// Warnln logs args at warn level, formatted as by fmt.Sprintln.
func (l *Logger) Warnln(args ...interface{}) {
	msg := fmt.Sprintln(args...)
	l.Output(2, LevelWarn, msg)
}
go
{ "resource": "" }
q180694
NewStreamHandler
test
func NewStreamHandler(w io.Writer) (*StreamHandler, error) { h := new(StreamHandler) h.w = w return h, nil }
go
{ "resource": "" }
q180695
Right
test
// Right pads str on the right with repetitions of pad until the result
// reaches length (as measured by len, i.e. in bytes).
func Right(str string, length int, pad string) string {
	padding := times(pad, length-len(str))
	return str + padding
}
go
{ "resource": "" }
q180696
New
test
func New(h string, a rsapi.Authenticator) *API { api := rsapi.New(h, a) api.Metadata = GenMetadata return &API{API: api} }
go
{ "resource": "" }
q180697
setupMetadata
test
func setupMetadata() (result map[string]*metadata.Resource) { result = make(map[string]*metadata.Resource) for n, r := range ssd.GenMetadata { result[n] = r for _, a := range r.Actions { for _, p := range a.PathPatterns { // remove "/api/designer" prefix p.Regexp = removePrefixes(p.Regexp, 2) } } } for n, r := range ssc.GenMetadata { result[n] = r for _, a := range r.Actions { for _, p := range a.PathPatterns { // remove "/api/catalog" prefix p.Regexp = removePrefixes(p.Regexp, 2) } } } for n, r := range ssm.GenMetadata { result[n] = r for _, a := range r.Actions { for _, p := range a.PathPatterns { // remove "/api/manager" prefix p.Regexp = removePrefixes(p.Regexp, 2) } } } return }
go
{ "resource": "" }
q180698
recordTypes
test
func (p *ParamAnalyzer) recordTypes(root gen.DataType) { if o, ok := root.(*gen.ObjectDataType); ok { if _, found := p.ParamTypes[o.TypeName]; !found { p.ParamTypes[o.TypeName] = o for _, f := range o.Fields { p.recordTypes(f.Type) } } } else if a, ok := root.(*gen.ArrayDataType); ok { p.recordTypes(a.ElemType.Type) } }
go
{ "resource": "" }
q180699
appendSorted
test
func appendSorted(params []*gen.ActionParam, param *gen.ActionParam) []*gen.ActionParam { params = append(params, param) sort.Sort(gen.ByName(params)) return params }
go
{ "resource": "" }