_id stringlengths 2 7 | title stringlengths 1 118 | partition stringclasses 3 values | text stringlengths 52 85.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q12500 | havingExpressionBuilder | train | func havingExpressionBuilder() ClauseHook {
var f ClauseHook
f = func(s *Statement, _ Symbol) (ClauseHook, error) {
s.havingExpressionEvaluator = &AlwaysReturn{V: true}
if len(s.havingExpression) > 0 {
eval, err := NewEvaluator(s.havingExpression)
if err != nil {
return nil, err
}
s.havingExpressionEvaluator = eval
}
return f, nil
}
return f
} | go | {
"resource": ""
} |
// limitCollection returns an ElementHook that extracts the value of a LIMIT
// clause. Symbols and the LIMIT keyword itself are skipped; the next token
// must be an int64 literal, whose value is stored in st.limit with
// st.limitSet marking its presence.
func limitCollection() ElementHook {
	var f func(st *Statement, ce ConsumedElement) (ElementHook, error)
	f = func(st *Statement, ce ConsumedElement) (ElementHook, error) {
		// Ignore grammar symbols and the LIMIT keyword token.
		if ce.IsSymbol() || ce.token.Type == lexer.ItemLimit {
			return f, nil
		}
		if ce.token.Type != lexer.ItemLiteral {
			return nil, fmt.Errorf("limit clause required an int64 literal; found %v instead", ce.token)
		}
		l, err := literal.DefaultBuilder().Parse(ce.token.Text)
		if err != nil {
			return nil, fmt.Errorf("failed to parse limit literal %q with error %v", ce.token.Text, err)
		}
		// Only int64 literals are valid limits.
		if l.Type() != literal.Int64 {
			return nil, fmt.Errorf("limit required an int64 value; found %s instead", l)
		}
		lv, err := l.Int64()
		if err != nil {
			return nil, fmt.Errorf("failed to retrieve the int64 value for literal %v with error %v", l, err)
		}
		st.limitSet, st.limit = true, lv
		return f, nil
	}
	return f
}
"resource": ""
} |
// collectGlobalBounds returns an ElementHook that collects global time bound
// modifiers (BEFORE, AFTER, BETWEEN) and stores the parsed time anchors into
// the statement's lookup options.
func collectGlobalBounds() ElementHook {
	var (
		f func(st *Statement, ce ConsumedElement) (ElementHook, error)
		// opToken remembers which modifier opened the current bound;
		// lastToken is the most recently accepted token of that bound.
		opToken   *lexer.Token
		lastToken *lexer.Token
	)
	f = func(st *Statement, ce ConsumedElement) (ElementHook, error) {
		if ce.IsSymbol() {
			return f, nil
		}
		tkn := ce.token
		switch tkn.Type {
		case lexer.ItemBefore, lexer.ItemAfter, lexer.ItemBetween:
			// A new modifier may only start once the previous bound is closed.
			if lastToken != nil {
				return nil, fmt.Errorf("invalid token %v after already valid token %v", tkn, lastToken)
			}
			opToken, lastToken = tkn, tkn
		case lexer.ItemComma:
			// The comma separates the two anchors of a BETWEEN clause.
			if lastToken == nil || opToken.Type != lexer.ItemBetween {
				return nil, fmt.Errorf("token %v can only be used in a between clause; previous token %v instead", tkn, lastToken)
			}
			lastToken = tkn
		case lexer.ItemPredicate:
			// The predicate token carries the actual time anchor.
			if lastToken == nil {
				return nil, fmt.Errorf("invalid token %v without a global time modifier", tkn)
			}
			p, err := predicate.Parse(tkn.Text)
			if err != nil {
				return nil, err
			}
			// Global bounds are pure time anchors; predicate IDs are rejected.
			if p.ID() != "" {
				return nil, fmt.Errorf("global time bounds do not accept individual predicate IDs; found %s instead", p)
			}
			ta, err := p.TimeAnchor()
			if err != nil {
				return nil, err
			}
			if lastToken.Type == lexer.ItemComma || lastToken.Type == lexer.ItemBefore {
				// BEFORE x, or the second anchor of BETWEEN, fixes the upper bound.
				st.lookupOptions.UpperAnchor = ta
				opToken, lastToken = nil, nil
			} else {
				// AFTER x, or the first anchor of BETWEEN, fixes the lower bound.
				st.lookupOptions.LowerAnchor = ta
				// A BETWEEN clause stays open until its second anchor arrives.
				if opToken.Type != lexer.ItemBetween {
					opToken, lastToken = nil, nil
				}
			}
		default:
			return nil, fmt.Errorf("global bound found unexpected token %v", tkn)
		}
		return f, nil
	}
	return f
}
"resource": ""
} |
q12503 | InitWorkingConstructClause | train | func InitWorkingConstructClause() ClauseHook {
var f ClauseHook
f = func(s *Statement, _ Symbol) (ClauseHook, error) {
s.ResetWorkingConstructClause()
return f, nil
}
return f
} | go | {
"resource": ""
} |
q12504 | NextWorkingConstructClause | train | func NextWorkingConstructClause() ClauseHook {
var f ClauseHook
f = func(s *Statement, _ Symbol) (ClauseHook, error) {
s.AddWorkingConstructClause()
return f, nil
}
return f
} | go | {
"resource": ""
} |
// constructSubject returns an ElementHook that assigns the subject of the
// working construct clause, either as a parsed node or as a binding name.
func constructSubject() ElementHook {
	var f ElementHook
	f = func(st *Statement, ce ConsumedElement) (ElementHook, error) {
		if ce.IsSymbol() {
			return f, nil
		}
		tkn := ce.Token()
		c := st.WorkingConstructClause()
		// A clause carries exactly one subject, set either directly or via binding.
		if c.S != nil {
			return nil, fmt.Errorf("invalid subject %v in construct clause, subject already set to %v", tkn.Type, c.S)
		}
		if c.SBinding != "" {
			return nil, fmt.Errorf("invalid subject %v in construct clause, subject already set to %v", tkn.Type, c.SBinding)
		}
		switch tkn.Type {
		case lexer.ItemNode, lexer.ItemBlankNode:
			n, err := ToNode(ce)
			if err != nil {
				return nil, err
			}
			c.S = n
		case lexer.ItemBinding:
			c.SBinding = tkn.Text
			// NOTE(review): other token types fall through silently — confirm
			// the grammar only routes the cases above to this hook.
		}
		return f, nil
	}
	return f
}
"resource": ""
} |
// constructPredicate returns an ElementHook that assigns the predicate of the
// working predicate-object pair, either as a parsed predicate, a predicate ID
// with optional anchor binding, or a plain binding name.
func constructPredicate() ElementHook {
	var f ElementHook
	f = func(st *Statement, ce ConsumedElement) (ElementHook, error) {
		if ce.IsSymbol() {
			return f, nil
		}
		tkn := ce.Token()
		p := st.WorkingConstructClause().WorkingPredicateObjectPair()
		// A pair carries exactly one predicate, in one of three mutually
		// exclusive forms.
		if p.P != nil {
			return nil, fmt.Errorf("invalid predicate %v in construct clause, predicate already set to %v", tkn.Type, p.P)
		}
		if p.PID != "" {
			return nil, fmt.Errorf("invalid predicate %v in construct clause, predicate already set to %v", tkn.Type, p.PID)
		}
		if p.PBinding != "" {
			return nil, fmt.Errorf("invalid predicate %v in construct clause, predicate already set to %v", tkn.Type, p.PBinding)
		}
		switch tkn.Type {
		case lexer.ItemPredicate:
			pred, pID, pAnchorBinding, pTemporal, err := processPredicate(ce)
			if err != nil {
				return nil, err
			}
			p.P, p.PID, p.PAnchorBinding, p.PTemporal = pred, pID, pAnchorBinding, pTemporal
		case lexer.ItemBinding:
			p.PBinding = tkn.Text
		}
		return f, nil
	}
	return f
}
"resource": ""
} |
// constructObject returns an ElementHook that assigns the object of the
// working predicate-object pair: a node/blank-node/literal object, a
// predicate object (possibly with ID and anchor binding), or a binding name.
func constructObject() ElementHook {
	var f ElementHook
	f = func(st *Statement, ce ConsumedElement) (ElementHook, error) {
		if ce.IsSymbol() {
			return f, nil
		}
		tkn := ce.Token()
		p := st.WorkingConstructClause().WorkingPredicateObjectPair()
		// A pair carries exactly one object, in one of three mutually
		// exclusive forms.
		if p.O != nil {
			return nil, fmt.Errorf("invalid object %v in construct clause, object already set to %v", tkn.Text, p.O)
		}
		if p.OID != "" {
			return nil, fmt.Errorf("invalid object %v in construct clause, object already set to %v", tkn.Type, p.OID)
		}
		if p.OBinding != "" {
			return nil, fmt.Errorf("invalid object %v in construct clause, object already set to %v", tkn.Type, p.OBinding)
		}
		switch tkn.Type {
		case lexer.ItemNode, lexer.ItemBlankNode, lexer.ItemLiteral:
			obj, err := triple.ParseObject(tkn.Text, literal.DefaultBuilder())
			if err != nil {
				return nil, err
			}
			p.O = obj
		case lexer.ItemPredicate:
			var (
				pred *predicate.Predicate
				err  error
			)
			pred, p.OID, p.OAnchorBinding, p.OTemporal, err = processPredicate(ce)
			if err != nil {
				return nil, err
			}
			// Only fully resolved predicates become concrete objects; partial
			// forms are kept as OID/OAnchorBinding for later resolution.
			if pred != nil {
				p.O = triple.NewPredicateObject(pred)
			}
		case lexer.ItemBinding:
			p.OBinding = tkn.Text
		}
		return f, nil
	}
	return f
}
"resource": ""
} |
q12508 | NextWorkingConstructPredicateObjectPair | train | func NextWorkingConstructPredicateObjectPair() ClauseHook {
var f ClauseHook
f = func(s *Statement, _ Symbol) (ClauseHook, error) {
s.WorkingConstructClause().AddWorkingPredicateObjectPair()
return f, nil
}
return f
} | go | {
"resource": ""
} |
q12509 | ShowClauseHook | train | func ShowClauseHook() ClauseHook {
var f ClauseHook
f = func(s *Statement, _ Symbol) (ClauseHook, error) {
s.sType = Show
return f, nil
}
return f
} | go | {
"resource": ""
} |
q12510 | getTreeGenerators | train | func getTreeGenerators(bFactors []int) ([]generator.Generator, error) {
var gens []generator.Generator
for _, b := range bFactors {
t, err := tree.New(b)
if err != nil {
return nil, err
}
gens = append(gens, t)
}
return gens, nil
} | go | {
"resource": ""
} |
q12511 | getGraphGenerators | train | func getGraphGenerators(nodes []int) ([]generator.Generator, error) {
var gens []generator.Generator
for _, b := range nodes {
t, err := graph.NewRandomGraph(b)
if err != nil {
return nil, err
}
gens = append(gens, t)
}
return gens, nil
} | go | {
"resource": ""
} |
q12512 | assertCommand | train | func assertCommand(ctx context.Context, cmd *command.Command, args []string, store storage.Store, builder literal.Builder, chanSize, bulkSize int) int {
if len(args) < 2 {
log.Printf("Missing required folder path. ")
cmd.Usage()
return 2
}
// Open the folder.
folder := strings.TrimSpace(args[len(args)-1])
f, err := os.Open(folder)
if err != nil {
log.Printf("[ERROR] Failed to open folder %s\n\n\t%v\n\n", folder, err)
return 2
}
fis, err := f.Readdir(0)
if err != nil {
log.Printf("[ERROR] Failed to read folder %s\n\n\t%v\n\n", folder, err)
return 2
}
fmt.Println("-------------------------------------------------------------")
fmt.Printf("Processing folder %q...\n", folder)
var stories []*compliance.Story
empty := true
for _, fi := range fis {
if !strings.Contains(fi.Name(), "json") {
continue
}
fmt.Printf("\tProcessing file %q... ", fi.Name())
lns, err := io.ReadLines(path.Join(folder, fi.Name()))
if err != nil {
log.Printf("\n\n\tFailed to read file content with error %v\n\n", err)
return 2
}
rawStory := strings.Join(lns, "\n")
s := &compliance.Story{}
if err := s.Unmarshal(rawStory); err != nil {
log.Printf("\n\n\tFailed to unmarshal story with error %v\n\n", err)
return 2
}
empty = false
stories = append(stories, s)
fmt.Println("done.")
}
if empty {
fmt.Println("No stories found!")
fmt.Println("-------------------------------------------------------------")
return 2
}
fmt.Println("-------------------------------------------------------------")
fmt.Printf("Evaluating %d stories... ", len(stories))
results := compliance.RunStories(ctx, store, builder, stories, chanSize, bulkSize)
fmt.Println("done.")
fmt.Println("-------------------------------------------------------------")
for i, entry := range results.Entries {
fmt.Printf("(%d/%d) Story %q...\n", i+1, len(stories), entry.Story.Name)
if entry.Err != nil {
log.Printf("\tFailed to run story %q with error %v\n\n", entry.Story.Name, entry.Err)
return 2
}
for aid, aido := range entry.Outcome {
if aido.Equal {
fmt.Printf("\t%s [Assertion=TRUE]\n", aid)
} else {
fmt.Printf("\t%s [Assertion=FALSE]\n\nGot:\n\n%s\nWant:\n\n%s\n", aid, aido.Got, aido.Want)
}
}
fmt.Println()
}
fmt.Println("-------------------------------------------------------------")
fmt.Println("\ndone")
return 0
} | go | {
"resource": ""
} |
// runCommand reads BQL statements from the file given as the last argument
// and executes them one by one against the store, printing each result.
// It returns 0 on completion and 2 on usage or read errors; individual
// statement failures are reported but do not abort the run.
func runCommand(ctx context.Context, cmd *command.Command, args []string, store storage.Store, chanSize, bulkSize int) int {
	if len(args) < 2 {
		log.Printf("[ERROR] Missing required file path. ")
		cmd.Usage()
		return 2
	}
	file := strings.TrimSpace(args[len(args)-1])
	lines, err := io.GetStatementsFromFile(file)
	if err != nil {
		log.Printf("[ERROR] Failed to read file %s\n\n\t%v\n\n", file, err)
		return 2
	}
	fmt.Printf("Processing file %s\n\n", args[len(args)-1])
	for idx, stm := range lines {
		fmt.Printf("Processing statement (%d/%d):\n%s\n\n", idx+1, len(lines), stm)
		tbl, err := BQL(ctx, stm, store, chanSize, bulkSize)
		if err != nil {
			// A failing statement is reported and the run continues.
			fmt.Printf("[FAIL] %v\n\n", err)
			continue
		}
		fmt.Println("Result:")
		if tbl.NumRows() > 0 {
			fmt.Println(tbl)
		}
		fmt.Printf("OK\n\n")
	}
	return 0
}
"resource": ""
} |
q12514 | BQL | train | func BQL(ctx context.Context, bql string, s storage.Store, chanSize, bulkSize int) (*table.Table, error) {
p, err := grammar.NewParser(grammar.SemanticBQL())
if err != nil {
return nil, fmt.Errorf("[ERROR] Failed to initilize a valid BQL parser")
}
stm := &semantic.Statement{}
if err := p.Parse(grammar.NewLLk(bql, 1), stm); err != nil {
return nil, fmt.Errorf("[ERROR] Failed to parse BQL statement with error %v", err)
}
pln, err := planner.New(ctx, s, stm, chanSize, bulkSize, nil)
if err != nil {
return nil, fmt.Errorf("[ERROR] Should have not failed to create a plan using memory.DefaultStorage for statement %v with error %v", stm, err)
}
res, err := pln.Execute(ctx)
if err != nil {
return nil, fmt.Errorf("[ERROR] Failed to execute BQL statement with error %v", err)
}
return res, nil
} | go | {
"resource": ""
} |
q12515 | NewConsumedToken | train | func NewConsumedToken(tkn *lexer.Token) ConsumedElement {
return ConsumedElement{
isSymbol: false,
token: tkn,
}
} | go | {
"resource": ""
} |
q12516 | ToNode | train | func ToNode(ce ConsumedElement) (*node.Node, error) {
if ce.IsSymbol() {
return nil, fmt.Errorf("semantic.ToNode cannot convert symbol %v to a node", ce)
}
tkn := ce.Token()
if tkn.Type != lexer.ItemNode && tkn.Type != lexer.ItemBlankNode {
return nil, fmt.Errorf("semantic.ToNode cannot convert token type %s to a node", tkn.Type)
}
return node.Parse(tkn.Text)
} | go | {
"resource": ""
} |
q12517 | ToPredicate | train | func ToPredicate(ce ConsumedElement) (*predicate.Predicate, error) {
if ce.IsSymbol() {
return nil, fmt.Errorf("semantic.ToPredicate cannot convert symbol %v to a predicate", ce)
}
tkn := ce.Token()
if tkn.Type != lexer.ItemPredicate {
return nil, fmt.Errorf("semantic.ToPredicate cannot convert token type %s to a predicate", tkn.Type)
}
return predicate.Parse(tkn.Text)
} | go | {
"resource": ""
} |
q12518 | ToLiteral | train | func ToLiteral(ce ConsumedElement) (*literal.Literal, error) {
if ce.IsSymbol() {
return nil, fmt.Errorf("semantic.ToLiteral cannot convert symbol %v to a literal", ce)
}
tkn := ce.Token()
if tkn.Type != lexer.ItemLiteral {
return nil, fmt.Errorf("semantic.ToLiteral cannot convert token type %s to a literal", tkn.Type)
}
return literal.DefaultBuilder().Parse(tkn.Text)
} | go | {
"resource": ""
} |
q12519 | Covariant | train | func (t *Type) Covariant(ot *Type) bool {
if !strings.HasPrefix(t.String(), ot.String()) {
return false
}
// /type/foo is covariant of /type, but /typefoo is not covariant of /type.
return len(t.String()) == len(ot.String()) || t.String()[len(ot.String())] == '/'
} | go | {
"resource": ""
} |
q12520 | String | train | func (n *Node) String() string {
return fmt.Sprintf("%s<%s>", n.t.String(), n.id.String())
} | go | {
"resource": ""
} |
q12521 | Parse | train | func Parse(s string) (*Node, error) {
raw := strings.TrimSpace(s)
switch raw[0] {
case slash:
idx := strings.Index(raw, "<")
if idx < 0 {
return nil, fmt.Errorf("node.Parse: invalid format, could not find ID in %v", raw)
}
t, err := NewType(raw[:idx])
if err != nil {
return nil, fmt.Errorf("node.Parse: invalid type %q, %v", raw[:idx], err)
}
if raw[len(raw)-1] != '>' {
return nil, fmt.Errorf("node.Parse: pretty printing should finish with '>' in %q", raw)
}
id, err := NewID(raw[idx+1 : len(raw)-1])
if err != nil {
return nil, fmt.Errorf("node.Parse: invalid ID in %q, %v", raw, err)
}
return NewNode(t, id), nil
case underscore:
id, err := NewID(raw[2:len(raw)])
if err != nil {
return nil, fmt.Errorf("node.Parse: invalid ID in %q, %v", raw, err)
}
t, _ := NewType("/_")
return NewNode(t, id), nil
default:
return nil, fmt.Errorf("node.Parse: node representation should start with '/' or '_' in %v", raw)
}
} | go | {
"resource": ""
} |
// Covariant reports whether the type of node n is covariant with the type
// of node on.
func (n *Node) Covariant(on *Node) bool {
	return n.t.Covariant(on.t)
}
"resource": ""
} |
q12523 | NewType | train | func NewType(t string) (*Type, error) {
if strings.ContainsAny(t, " \t\n\r") {
return nil, fmt.Errorf("node.NewType(%q) does not allow spaces", t)
}
if !strings.HasPrefix(t, "/") || strings.HasSuffix(t, "/") {
return nil, fmt.Errorf("node.NewType(%q) should start with a '/' and do not end with '/'", t)
}
if t == "" {
return nil, fmt.Errorf("node.NewType(%q) cannot create empty types", t)
}
nt := Type(t)
return &nt, nil
} | go | {
"resource": ""
} |
q12524 | NewID | train | func NewID(id string) (*ID, error) {
if strings.ContainsAny(id, "<>") {
return nil, fmt.Errorf("node.NewID(%q) does not allow '<' or '>'", id)
}
if id == "" {
return nil, fmt.Errorf("node.NewID(%q) cannot create empty ID", id)
}
nID := ID(id)
return &nID, nil
} | go | {
"resource": ""
} |
q12525 | NewNode | train | func NewNode(t *Type, id *ID) *Node {
return &Node{
t: t,
id: id,
}
} | go | {
"resource": ""
} |
q12526 | NewNodeFromStrings | train | func NewNodeFromStrings(sT, sID string) (*Node, error) {
t, err := NewType(sT)
if err != nil {
return nil, err
}
n, err := NewID(sID)
if err != nil {
return nil, err
}
return NewNode(t, n), nil
} | go | {
"resource": ""
} |
// NewBlankNode creates a new node of the reserved blank type. Its ID is a
// UUID received from the nextVal channel, which supplies fresh values.
func NewBlankNode() *Node {
	uuid := <-nextVal
	id := ID(uuid.String())
	return &Node{
		t:  &tBlank,
		id: &id,
	}
}
"resource": ""
} |
q12528 | UUID | train | func (n *Node) UUID() uuid.UUID {
var buffer bytes.Buffer
buffer.WriteString(string(*n.t))
buffer.WriteString(string(*n.id))
return uuid.NewSHA1(uuid.NIL, buffer.Bytes())
} | go | {
"resource": ""
} |
// String returns the readable name of the statement type; unknown values
// map to "UNKNOWN".
func (t StatementType) String() string {
	switch t {
	case Query:
		return "QUERY"
	case Insert:
		return "INSERT"
	case Delete:
		return "DELETE"
	case Create:
		return "CREATE"
	case Drop:
		return "DROP"
	case Construct:
		return "CONSTRUCT"
	case Deconstruct:
		return "DECONSTRUCT"
	case Show:
		return "SHOW"
	default:
		return "UNKNOWN"
	}
}
"resource": ""
} |
// BindingsMap returns a map keyed by every binding or alias name that
// appears in the clause; values are maintained by addToBindings (presumably
// occurrence counts — confirm against addToBindings).
func (c *GraphClause) BindingsMap() map[string]int {
	bm := make(map[string]int)
	// Subject bindings and aliases.
	addToBindings(bm, c.SBinding)
	addToBindings(bm, c.SAlias)
	addToBindings(bm, c.STypeAlias)
	addToBindings(bm, c.SIDAlias)
	// Predicate bindings and aliases.
	addToBindings(bm, c.PAlias)
	addToBindings(bm, c.PAnchorBinding)
	addToBindings(bm, c.PBinding)
	addToBindings(bm, c.PLowerBoundAlias)
	addToBindings(bm, c.PUpperBoundAlias)
	addToBindings(bm, c.PIDAlias)
	addToBindings(bm, c.PAnchorAlias)
	// Object bindings and aliases.
	addToBindings(bm, c.OBinding)
	addToBindings(bm, c.OAlias)
	addToBindings(bm, c.OTypeAlias)
	addToBindings(bm, c.OIDAlias)
	addToBindings(bm, c.OAnchorAlias)
	addToBindings(bm, c.OAnchorBinding)
	addToBindings(bm, c.OLowerBoundAlias)
	addToBindings(bm, c.OUpperBoundAlias)
	return bm
}
"resource": ""
} |
q12531 | Bindings | train | func (c *GraphClause) Bindings() []string {
var bs []string
for k := range c.BindingsMap() {
bs = append(bs, k)
}
return bs
} | go | {
"resource": ""
} |
// String returns a readable representation of the construct clause in the
// form "{ subject pred obj; pred obj }".
func (c *ConstructClause) String() string {
	b := bytes.NewBufferString("{ ")
	// Subject section.
	if c.S != nil {
		b.WriteString(c.S.String())
	} else {
		b.WriteString(c.SBinding)
	}
	// Predicate-object pairs section.
	for _, pop := range c.PredicateObjectPairs() {
		b.WriteString(fmt.Sprintf("%v;", pop))
	}
	// Drop the trailing ';' left by the loop above.
	// NOTE(review): if the clause has no predicate-object pairs this removes
	// the last character of the subject instead — confirm clauses always
	// carry at least one pair before reaching String.
	b.Truncate(b.Len() - 1)
	b.WriteString(" }")
	return b.String()
}
"resource": ""
} |
q12533 | String | train | func (c *ConstructPredicateObjectPair) String() string {
b := bytes.NewBufferString("")
// Predicate section.
predicate := false
if c.P != nil {
b.WriteString(" ")
b.WriteString(c.P.String())
predicate = true
}
if c.PBinding != "" {
b.WriteString(" ")
b.WriteString(c.PBinding)
}
if c.PID != "" {
b.WriteString(" \"")
b.WriteString(c.PID)
b.WriteString("\"")
}
if !predicate {
if !c.PTemporal {
b.WriteString("@[]")
} else {
b.WriteString("@[")
if c.PAnchorBinding != "" {
b.WriteString(c.PAnchorBinding)
}
}
b.WriteString("]")
}
// Object section.
// Node portion.
object := false
if c.O != nil {
b.WriteString(" ")
b.WriteString(c.O.String())
object = true
} else {
b.WriteString(" ")
b.WriteString(c.OBinding)
object = true
}
// Predicate portion.
if !object {
if c.OBinding != "" {
b.WriteString(" ")
b.WriteString(c.OBinding)
}
if c.OID != "" {
b.WriteString(" \"")
b.WriteString(c.OID)
b.WriteString("\"")
}
if !c.OTemporal {
b.WriteString("[]")
} else {
b.WriteString("[")
if c.OAnchorBinding != "" {
b.WriteString(c.OAnchorBinding)
}
b.WriteString("]")
}
}
return b.String()
} | go | {
"resource": ""
} |
// AddGraph appends a graph name to the statement's list of target graphs.
func (s *Statement) AddGraph(g string) {
	s.graphNames = append(s.graphNames, g)
}
"resource": ""
} |
// AddInputGraph appends a graph name to the statement's list of input graphs.
func (s *Statement) AddInputGraph(g string) {
	s.inputGraphNames = append(s.inputGraphNames, g)
}
"resource": ""
} |
// AddOutputGraph appends a graph name to the statement's list of output
// graphs.
func (s *Statement) AddOutputGraph(g string) {
	s.outputGraphNames = append(s.outputGraphNames, g)
}
"resource": ""
} |
// Init resolves every previously registered graph name (target, input, and
// output) against the given store and caches the resulting graph handles on
// the statement. It fails on the first name the store cannot resolve.
func (s *Statement) Init(ctx context.Context, st storage.Store) error {
	// Target graphs.
	for _, gn := range s.graphNames {
		g, err := st.Graph(ctx, gn)
		if err != nil {
			return err
		}
		s.graphs = append(s.graphs, g)
	}
	// Input graphs.
	for _, ign := range s.inputGraphNames {
		ig, err := st.Graph(ctx, ign)
		if err != nil {
			return err
		}
		s.inputGraphs = append(s.inputGraphs, ig)
	}
	// Output graphs.
	for _, ogn := range s.outputGraphNames {
		og, err := st.Graph(ctx, ogn)
		if err != nil {
			return err
		}
		s.outputGraphs = append(s.outputGraphs, og)
	}
	return nil
}
"resource": ""
} |
// AddData appends a triple to the data carried by the statement.
func (s *Statement) AddData(d *triple.Triple) {
	s.data = append(s.data, d)
}
"resource": ""
} |
q12539 | AddWorkingGraphClause | train | func (s *Statement) AddWorkingGraphClause() {
if s.workingClause != nil && !s.workingClause.IsEmpty() {
s.pattern = append(s.pattern, s.workingClause)
}
s.ResetWorkingGraphClause()
} | go | {
"resource": ""
} |
// BindingsMap returns a map keyed by every binding or alias name used across
// all the statement's graph pattern clauses; values are maintained by
// addToBindings (presumably occurrence counts — confirm against
// addToBindings).
func (s *Statement) BindingsMap() map[string]int {
	bm := make(map[string]int)
	for _, cls := range s.pattern {
		if cls != nil {
			// Subject bindings and aliases.
			addToBindings(bm, cls.SBinding)
			addToBindings(bm, cls.SAlias)
			addToBindings(bm, cls.STypeAlias)
			addToBindings(bm, cls.SIDAlias)
			// Predicate bindings and aliases.
			addToBindings(bm, cls.PAlias)
			addToBindings(bm, cls.PAnchorBinding)
			addToBindings(bm, cls.PBinding)
			addToBindings(bm, cls.PLowerBoundAlias)
			addToBindings(bm, cls.PUpperBoundAlias)
			addToBindings(bm, cls.PIDAlias)
			addToBindings(bm, cls.PAnchorAlias)
			// Object bindings and aliases.
			addToBindings(bm, cls.OBinding)
			addToBindings(bm, cls.OAlias)
			addToBindings(bm, cls.OTypeAlias)
			addToBindings(bm, cls.OIDAlias)
			addToBindings(bm, cls.OAnchorAlias)
			addToBindings(bm, cls.OAnchorBinding)
			addToBindings(bm, cls.OLowerBoundAlias)
			addToBindings(bm, cls.OUpperBoundAlias)
		}
	}
	return bm
}
"resource": ""
} |
q12541 | Bindings | train | func (s *Statement) Bindings() []string {
var bs []string
for k := range s.BindingsMap() {
bs = append(bs, k)
}
return bs
} | go | {
"resource": ""
} |
// Swap exchanges the clauses at positions i and j.
func (s bySpecificity) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}
"resource": ""
} |
q12543 | Less | train | func (s bySpecificity) Less(i, j int) bool {
return s[i].Specificity() >= s[j].Specificity()
} | go | {
"resource": ""
} |
q12544 | SortedGraphPatternClauses | train | func (s *Statement) SortedGraphPatternClauses() []*GraphClause {
var ptrns []*GraphClause
// Filter empty clauses.
for _, cls := range s.pattern {
if cls != nil && !cls.IsEmpty() {
ptrns = append(ptrns, cls)
}
}
sort.Sort(bySpecificity(ptrns))
return ptrns
} | go | {
"resource": ""
} |
// String returns a readable representation of the projection in the form
// "binding as binding [via OP [modifier]]".
func (p *Projection) String() string {
	b := bytes.NewBufferString(p.Binding)
	b.WriteString(" as ")
	// NOTE(review): this writes p.Binding a second time, producing "x as x"
	// even when an alias is set — confirm whether p.Alias was intended here.
	b.WriteString(p.Binding)
	if p.OP != lexer.ItemError {
		b.WriteString(" via ")
		b.WriteString(p.OP.String())
		if p.Modifier != lexer.ItemError {
			b.WriteString(" ")
			b.WriteString(p.Modifier.String())
		}
	}
	return b.String()
}
"resource": ""
} |
q12546 | IsEmpty | train | func (p *Projection) IsEmpty() bool {
return p.Binding == "" && p.Alias == "" && p.OP == lexer.ItemError && p.Modifier == lexer.ItemError
} | go | {
"resource": ""
} |
// WorkingProjection returns the current working projection, lazily creating
// a fresh one via ResetProjection if none exists yet.
func (s *Statement) WorkingProjection() *Projection {
	if s.workingProjection == nil {
		s.ResetProjection()
	}
	return s.workingProjection
}
"resource": ""
} |
q12548 | AddWorkingProjection | train | func (s *Statement) AddWorkingProjection() {
if s.workingProjection != nil && !s.workingProjection.IsEmpty() {
s.projection = append(s.projection, s.workingProjection)
}
s.ResetProjection()
} | go | {
"resource": ""
} |
// InputBindings returns every binding name the statement consumes: the
// bindings of its projections plus all subject, predicate, object, and
// anchor bindings of its construct clauses. The result is not deduplicated.
func (s *Statement) InputBindings() []string {
	var res []string
	// Projection bindings.
	for _, p := range s.projection {
		if p.Binding != "" {
			res = append(res, p.Binding)
		}
	}
	// Construct clause bindings.
	for _, c := range s.constructClauses {
		if c.SBinding != "" {
			res = append(res, c.SBinding)
		}
		for _, p := range c.predicateObjectPairs {
			if p.PBinding != "" {
				res = append(res, p.PBinding)
			}
			if p.PAnchorBinding != "" {
				res = append(res, p.PAnchorBinding)
			}
			if p.OBinding != "" {
				res = append(res, p.OBinding)
			}
			if p.OAnchorBinding != "" {
				res = append(res, p.OAnchorBinding)
			}
		}
	}
	return res
}
"resource": ""
} |
// OutputBindings returns the binding names the statement produces. For each
// projection the alias wins over the raw binding; construct clause bindings
// are added at most once each (the set is seeded with "" so unset bindings
// are skipped).
func (s *Statement) OutputBindings() []string {
	var res []string
	// Projection outputs: alias takes precedence over the binding name.
	for _, p := range s.projection {
		if p.Alias != "" {
			res = append(res, p.Alias)
			continue
		}
		if p.Binding != "" {
			res = append(res, p.Binding)
		}
	}
	// Construct clause outputs, deduplicated via set.
	set := make(map[string]bool)
	set[""] = true
	for _, c := range s.constructClauses {
		if _, ok := set[c.SBinding]; !ok {
			res = append(res, c.SBinding)
			set[c.SBinding] = true
		}
		for _, p := range c.predicateObjectPairs {
			if _, ok := set[p.PBinding]; !ok {
				res = append(res, p.PBinding)
				set[p.PBinding] = true
			}
			if _, ok := set[p.PAnchorBinding]; !ok {
				res = append(res, p.PAnchorBinding)
				set[p.PAnchorBinding] = true
			}
			if _, ok := set[p.OBinding]; !ok {
				res = append(res, p.OBinding)
				set[p.OBinding] = true
			}
			if _, ok := set[p.OAnchorBinding]; !ok {
				res = append(res, p.OAnchorBinding)
				set[p.OAnchorBinding] = true
			}
		}
	}
	return res
}
"resource": ""
} |
q12551 | AddWorkingConstructClause | train | func (s *Statement) AddWorkingConstructClause() {
if s.workingConstructClause != nil && !s.workingConstructClause.IsEmpty() {
s.constructClauses = append(s.constructClauses, s.workingConstructClause)
}
s.ResetWorkingConstructClause()
} | go | {
"resource": ""
} |
q12552 | AddWorkingPredicateObjectPair | train | func (c *ConstructClause) AddWorkingPredicateObjectPair() {
if c.workingPredicateObjectPair != nil && !c.workingPredicateObjectPair.IsEmpty() {
c.predicateObjectPairs = append(c.predicateObjectPairs, c.workingPredicateObjectPair)
}
c.ResetWorkingPredicateObjectPair()
} | go | {
"resource": ""
} |
q12553 | NewSymbol | train | func NewSymbol(s semantic.Symbol) Element {
return Element{
isSymbol: true,
symbol: s,
}
} | go | {
"resource": ""
} |
q12554 | NewTokenType | train | func NewTokenType(t lexer.TokenType) Element {
return Element{
isSymbol: false,
tokenType: t,
}
} | go | {
"resource": ""
} |
// NewParser creates a recursive-descent parser for the given grammar. The
// grammar must be left factored: at most one clause per production may be
// the empty derivation, and no clause may start with a non-terminal symbol.
func NewParser(grammar *Grammar) (*Parser, error) {
	// Check that the grammar is left factorized.
	for _, clauses := range *grammar {
		idx := 0
		for _, cls := range clauses {
			if len(cls.Elements) == 0 {
				// Only one empty derivation is allowed per production.
				if idx == 0 {
					idx++
					continue
				}
				return nil, fmt.Errorf("grammar.NewParser: invalid extra empty clause derivation %v", clauses)
			}
			// Clauses must start with a terminal token for LL(1) dispatch.
			if cls.Elements[0].isSymbol {
				return nil, fmt.Errorf("grammar.NewParser: not left factored grammar in %v", clauses)
			}
		}
	}
	return &Parser{
		grammar: grammar,
	}, nil
}
"resource": ""
} |
q12556 | Parse | train | func (p *Parser) Parse(llk *LLk, st *semantic.Statement) error {
b, err := p.consume(llk, st, "START")
if err != nil {
return err
}
if !b {
return fmt.Errorf("Parser.Parse: inconsitent parser, no error found, and no tokens were consumed")
}
return nil
} | go | {
"resource": ""
} |
// consume attempts to consume the current token for production s by trying
// its clauses in order: an empty clause accepts unconditionally, otherwise
// the first clause whose leading terminal the lexer can accept is expanded
// via expect. If no clause matches, an error is returned.
func (p *Parser) consume(llk *LLk, st *semantic.Statement, s semantic.Symbol) (bool, error) {
	for _, clause := range (*p.grammar)[s] {
		// The empty derivation always succeeds.
		if len(clause.Elements) == 0 {
			return true, nil
		}
		elem := clause.Elements[0]
		// Guard against non-left-factored grammars slipping past NewParser.
		if elem.isSymbol {
			return false, fmt.Errorf("Parser.consume: not left factored grammar in %v", clause)
		}
		if llk.CanAccept(elem.Token()) {
			return p.expect(llk, st, s, clause)
		}
	}
	return false, fmt.Errorf("Parser.consume: could not consume token %s in production %s", llk.Current(), s)
}
"resource": ""
} |
// expect consumes every element of the given clause in order, recursing into
// non-terminal symbols and consuming terminal tokens from the lexer. The
// clause's semantic hooks fire around the work: ProcessStart before any
// element, ProcessedElement after each element, and ProcessEnd at the end.
func (p *Parser) expect(llk *LLk, st *semantic.Statement, s semantic.Symbol, cls *Clause) (bool, error) {
	if cls.ProcessStart != nil {
		if _, err := cls.ProcessStart(st, s); err != nil {
			return false, err
		}
	}
	for _, elem := range cls.Elements {
		// Capture the current token before it is consumed so the hook can
		// see what was matched.
		tkn := llk.Current()
		if elem.isSymbol {
			if b, err := p.consume(llk, st, elem.Symbol()); !b || err != nil {
				return false, fmt.Errorf("Parser.parse: Failed to consume symbol %v, with error %v", elem.Symbol(), err)
			}
		} else {
			if !llk.Consume(elem.Token()) {
				return false, fmt.Errorf("Parser.parse: Failed to consume %s, got %s instead", elem.Token(), llk.Current().Type)
			}
		}
		if cls.ProcessedElement != nil {
			var ce semantic.ConsumedElement
			if elem.isSymbol {
				ce = semantic.NewConsumedSymbol(elem.Symbol())
			} else {
				ce = semantic.NewConsumedToken(tkn)
			}
			if _, err := cls.ProcessedElement(st, ce); err != nil {
				return false, err
			}
		}
	}
	if cls.ProcessEnd != nil {
		if _, err := cls.ProcessEnd(st, s); err != nil {
			return false, err
		}
	}
	return true, nil
}
"resource": ""
} |
q12559 | getGraphFromStore | train | func getGraphFromStore(ctx context.Context, st storage.Store, id string) (storage.Graph, error) {
g, err := st.Graph(ctx, id)
if err == nil {
return g, nil
}
return st.NewGraph(ctx, id)
} | go | {
"resource": ""
} |
// populateSources creates (or reuses) one graph per story source and loads
// the source's facts, parsed as triples, into it.
func (s *Story) populateSources(ctx context.Context, st storage.Store, b literal.Builder) error {
	for _, src := range s.Sources {
		g, err := getGraphFromStore(ctx, st, src.ID)
		if err != nil {
			return err
		}
		// Parse all facts before inserting them in one bulk call.
		var trps []*triple.Triple
		for _, trp := range src.Facts {
			t, err := triple.Parse(trp, b)
			if err != nil {
				return err
			}
			trps = append(trps, t)
		}
		if err := g.AddTriples(ctx, trps); err != nil {
			return err
		}
	}
	return nil
}
"resource": ""
} |
q12561 | cleanSources | train | func (s *Story) cleanSources(ctx context.Context, st storage.Store) error {
for _, src := range s.Sources {
if err := st.DeleteGraph(ctx, src.ID); err != nil {
return err
}
}
return nil
} | go | {
"resource": ""
} |
q12562 | runAssertion | train | func (a *Assertion) runAssertion(ctx context.Context, st storage.Store, chanSize, bulkSize int) (bool, *table.Table, *table.Table, error) {
errorizer := func(e error) (bool, *table.Table, *table.Table, error) {
if a.WillFail && e != nil {
return true, nil, nil, nil
}
return false, nil, nil, e
}
// Run the query.
p, err := grammar.NewParser(grammar.SemanticBQL())
if err != nil {
return errorizer(fmt.Errorf("Failed to initilize a valid BQL parser"))
}
stm := &semantic.Statement{}
if err := p.Parse(grammar.NewLLk(a.Statement, 1), stm); err != nil {
return errorizer(fmt.Errorf("Failed to parse BQL statement with error %v", err))
}
pln, err := planner.New(ctx, st, stm, chanSize, bulkSize, nil)
if err != nil {
return errorizer(fmt.Errorf("Should have not failed to create a plan using memory.DefaultStorage for statement %v with error %v", stm, err))
}
tbl, err := pln.Execute(ctx)
if err != nil {
return errorizer(fmt.Errorf("planner.Execute: failed to execute assertion %q with error %v", a.Requires, err))
}
// Check the output.
want, err := a.OutputTable(stm.OutputBindings())
if err != nil {
return errorizer(err)
}
// Cannot use reflect.DeepEqual, since projections only remove bindings from
// the table but not the actual data. However, the serialized text version
// of the tables will be equal regardless of the internal representation.
return tbl.String() == want.String(), tbl, want, nil
} | go | {
"resource": ""
} |
// Run populates the story's source graphs, executes every assertion against
// the store, and finally removes the source graphs. It returns a map from
// assertion name ("requires <statement>") to the assertion's outcome.
func (s *Story) Run(ctx context.Context, st storage.Store, b literal.Builder, chanSize, bulkSize int) (map[string]*AssertionOutcome, error) {
	// Populate the sources.
	if err := s.populateSources(ctx, st, b); err != nil {
		return nil, err
	}
	// Run assertions.
	m := make(map[string]*AssertionOutcome)
	for _, a := range s.Assertions {
		b, got, want, err := a.runAssertion(ctx, st, chanSize, bulkSize)
		if err != nil {
			return nil, err
		}
		aName := fmt.Sprintf("requires %s", strings.TrimSpace(a.Requires))
		m[aName] = &AssertionOutcome{
			Equal: b,
			Got:   got,
			Want:  want,
		}
	}
	// Clean the sources.
	if err := s.cleanSources(ctx, st); err != nil {
		return nil, err
	}
	return m, nil
}
"resource": ""
} |
q12564 | RunStories | train | func RunStories(ctx context.Context, st storage.Store, b literal.Builder, stories []*Story, chanSize, bulkSize int) *AssertionBattery {
results := &AssertionBattery{}
for _, s := range stories {
o, err := s.Run(ctx, st, b, chanSize, bulkSize)
results.Entries = append(results.Entries, &AssertionBatteryEntry{
Story: s,
Outcome: o,
Err: err,
})
}
return results
} | go | {
"resource": ""
} |
// runServer starts the BQL HTTP endpoint on the port given as the last
// argument. It returns 2 on usage, validation, or listen errors and blocks
// serving requests otherwise.
func runServer(ctx context.Context, cmd *command.Command, args []string, store storage.Store, chanSize, bulkSize int) int {
	// Check parameters.
	if len(args) < 2 {
		log.Printf("[%v] Missing required port number. ", time.Now())
		cmd.Usage()
		return 2
	}
	// Validate port number. The parsed int is only used for validation and
	// logging; the string form is reused for ListenAndServe.
	p := strings.TrimSpace(args[len(args)-1])
	port, err := strconv.Atoi(p)
	if err != nil {
		log.Printf("[%v] Invalid port number %q; %v\n", time.Now(), p, err)
		return 2
	}
	// Start the server.
	log.Printf("[%v] Starting server at port %d\n", time.Now(), port)
	s := &serverConfig{
		store:    store,
		chanSize: chanSize,
		bulkSize: bulkSize,
	}
	http.HandleFunc("/bql", s.bqlHandler)
	http.HandleFunc("/", defaultHandler)
	if err := http.ListenAndServe(":"+p, nil); err != nil {
		log.Printf("[%v] Failed to start server on port %s; %v", time.Now(), p, err)
		return 2
	}
	return 0
}
"resource": ""
} |
// bqlHandler answers POST requests on the /bql endpoint. It runs every
// semicolon-separated statement found in the "bqlQuery" form values and
// streams a JSON array of {query, msg, table} objects back to the client.
// An optional "timeout" form value (Go duration syntax) bounds execution.
// NOTE(review): the JSON is assembled by hand and only escapes double
// quotes in query/msg text; other control characters would produce invalid
// JSON — confirm inputs cannot contain them, or switch to encoding/json.
q12566 | bqlHandler | train | func (s *serverConfig) bqlHandler(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		// NOTE(review): reportError also calls WriteHeader(500) after this
		// 405, which triggers a "superfluous WriteHeader" warning.
		w.WriteHeader(http.StatusMethodNotAllowed)
		reportError(w, r, err)
		return
	}
	if r.Method != http.MethodPost {
		reportError(w, r, fmt.Errorf("invalid %s request on %q endpoint. Only POST request are accepted", r.Method, r.URL.Path))
		log.Printf("[%s] Invalid request: %#v\n", time.Now(), r)
		return
	}
	// Run the query.
	var (
		ctx context.Context
		cancel context.CancelFunc
	)
	timeout, err := time.ParseDuration(r.FormValue("timeout"))
	if err == nil {
		// The request has a timeout, so create a context that is
		// canceled automatically when the timeout expires.
		ctx, cancel = context.WithTimeout(context.Background(), timeout)
	} else {
		ctx, cancel = context.WithCancel(context.Background())
	}
	defer cancel() // Cancel ctx as soon as handleSearch returns.
	var res []*result
	for _, q := range getQueries(r.PostForm["bqlQuery"]) {
		// Unescape the statement if possible and flatten newlines so the
		// query reads as a single line in logs and responses.
		if nq, err := url.QueryUnescape(q); err == nil {
			q = strings.Replace(strings.Replace(nq, "\n", " ", -1), "\r", " ", -1)
		}
		t, err := BQL(ctx, q, s.store, s.chanSize, s.bulkSize)
		r := &result{
			Q: q,
			T: t,
		}
		if err != nil {
			log.Printf("[%s] %q failed; %v", time.Now(), q, err.Error())
			r.Msg = err.Error()
		} else {
			r.Msg = "[OK]"
		}
		res = append(res, r)
	}
	w.Header().Set("Content-Type", "application/json")
	w.Write([]byte(`[`))
	// cnt tracks remaining elements so a comma is emitted after every
	// element except the last.
	cnt := len(res)
	for _, r := range res {
		w.Write([]byte(`{ "query": "`))
		w.Write([]byte(strings.Replace(r.Q, `"`, `\"`, -1)))
		w.Write([]byte(`", "msg": "`))
		w.Write([]byte(strings.Replace(r.Msg, `"`, `\"`, -1)))
		w.Write([]byte(`", "table": `))
		if r.T == nil {
			w.Write([]byte(`{}`))
		} else {
			r.T.ToJSON(w)
		}
		w.Write([]byte(` }`))
		if cnt > 1 {
			w.Write([]byte(`, `))
		}
		cnt--
	}
	w.Write([]byte(`]`))
} | go | {
"resource": ""
} |
// getQueries normalizes raw form values into individual BQL statements.
// Each raw entry may contain several semicolon-separated statements; every
// non-empty statement is trimmed and returned with its terminating ";".
func getQueries(raw []string) []string {
	var queries []string
	for _, entry := range raw {
		for _, part := range strings.Split(entry, ";") {
			stmt := strings.TrimSpace(part)
			if stmt == "" {
				continue
			}
			queries = append(queries, stmt+";")
		}
	}
	return queries
}
"resource": ""
} |
// defaultHandler renders the static landing page for any path other than
// /bql. Template failures are forwarded to reportError.
q12568 | defaultHandler | train | func defaultHandler(w http.ResponseWriter, r *http.Request) {
	if err := defaultEntryTemplate.Execute(w, nil); err != nil {
		reportError(w, r, err)
	}
} | go | {
"resource": ""
} |
// reportError logs err, answers the request with a 500 status, and renders
// the error page as a best effort (the template Execute error is
// deliberately ignored).
// NOTE(review): callers that already wrote a status (e.g. bqlHandler's
// ParseForm path) will trigger a "superfluous WriteHeader" warning here.
q12569 | reportError | train | func reportError(w http.ResponseWriter, r *http.Request, err error) {
	w.WriteHeader(http.StatusInternalServerError)
	log.Printf("[%s] %v\n", time.Now(), err)
	errorTemplate.Execute(w, err)
} | go | {
"resource": ""
} |
// REPL runs the interactive BadWolf console loop. It reads commands from
// the ReadLiner channel, dispatches console commands (help, quit, tracing,
// memoization toggles, export/load/desc/run) by prefix, and treats any
// other line as a BQL statement to execute. After handling each line it
// signals the reader via the done channel (true only for quit). It returns
// a shell-style exit code (always 0).
q12570 | REPL | train | func REPL(od storage.Store, input *os.File, rl ReadLiner, chanSize, bulkSize, builderSize int, done chan bool) int {
	var tracer io.Writer
	ctx, isTracingToFile, sessionStart := context.Background(), false, time.Now()
	// Two store factories: the raw driver, and the driver wrapped with
	// partial-result memoization (the default).
	driverPlain := func() storage.Store {
		return od
	}
	driverWithMemoization := func() storage.Store {
		return memoization.New(od)
	}
	driver := driverWithMemoization
	// stopTracing closes the current trace sink (closing the file when
	// tracing to one) and resets the tracing state.
	stopTracing := func() {
		if tracer != nil {
			if isTracingToFile {
				fmt.Println("Closing tracing file.")
				tracer.(*os.File).Close()
			}
			tracer, isTracingToFile = nil, false
		}
	}
	defer stopTracing()
	fmt.Printf("Welcome to BadWolf vCli (%d.%d.%d-%s)\n", version.Major, version.Minor, version.Patch, version.Release)
	fmt.Printf("Using driver %s/%s. Type quit; to exit.\n", driver().Name(ctx), driver().Version(ctx))
	fmt.Printf("Session started at %v.\n", sessionStart.Format("2006-01-02T15:04:05.999999-07:00"))
	fmt.Println("Memoization enabled. Type help; to print help.")
	fmt.Println()
	defer func() {
		fmt.Printf("\n\nThanks for all those BQL queries!\nSession duration: %v\n\n", time.Now().Sub(sessionStart))
	}()
	for l := range rl(done) {
		if strings.HasPrefix(l, "quit") {
			done <- true
			break
		}
		if strings.HasPrefix(l, "help") {
			printHelp()
			done <- false
			continue
		}
		if strings.HasPrefix(l, "enable memoization") {
			driver = driverWithMemoization
			fmt.Println("[OK] Partial query memoization is on.")
			done <- false
			continue
		}
		if strings.HasPrefix(l, "disable memoization") {
			driver = driverPlain
			fmt.Println("[OK] Partial query memoization is off.")
			done <- false
			continue
		}
		if strings.HasPrefix(l, "start tracing") {
			// NOTE(review): slicing the trimmed string with len(l)-1 assumes
			// l has no trailing whitespace beyond the final ';' — confirm,
			// otherwise this can mis-slice or panic.
			args := strings.Split(strings.TrimSpace(l)[:len(l)-1], " ")
			switch len(args) {
			case 2:
				// Start tracing to the console.
				stopTracing()
				tracer, isTracingToFile = os.Stdout, false
				fmt.Println("[WARNING] Tracing is on. This may slow your BQL queries.")
			case 3:
				// Start tracing to file.
				stopTracing()
				f, err := os.Create(args[2])
				if err != nil {
					fmt.Println(err)
				} else {
					tracer, isTracingToFile = f, true
					fmt.Println("[WARNING] Tracing is on. This may slow your BQL queries.")
				}
			default:
				fmt.Println("Invalid syntax\n\tstart tracing [trace_file]")
			}
			done <- false
			continue
		}
		if strings.HasPrefix(l, "stop tracing") {
			stopTracing()
			fmt.Println("Tracing is off.")
			done <- false
			continue
		}
		if strings.HasPrefix(l, "export") {
			now := time.Now()
			// The "bw " prefix rebuilds an argv-like slice for export.Eval.
			args := strings.Split("bw "+strings.TrimSpace(l)[:len(l)-1], " ")
			usage := "Wrong syntax\n\n\tload <graph_names_separated_by_commas> <file_path>\n"
			export.Eval(ctx, usage, args, driver(), bulkSize)
			fmt.Println("[OK] Time spent: ", time.Now().Sub(now))
			done <- false
			continue
		}
		if strings.HasPrefix(l, "load") {
			now := time.Now()
			args := strings.Split("bw "+strings.TrimSpace(l[:len(l)-1]), " ")
			usage := "Wrong syntax\n\n\tload <file_path> <graph_names_separated_by_commas>\n"
			load.Eval(ctx, usage, args, driver(), bulkSize, builderSize)
			fmt.Println("[OK] Time spent: ", time.Now().Sub(now))
			done <- false
			continue
		}
		if strings.HasPrefix(l, "desc") {
			// Build the plan without executing it and print it.
			pln, err := planBQL(ctx, l[4:], driver(), chanSize, bulkSize, nil)
			if err != nil {
				fmt.Printf("[ERROR] %s\n\n", err)
			} else {
				if pln != nil {
					fmt.Println(pln.String(ctx))
				}
				fmt.Println("[OK]")
			}
			done <- false
			continue
		}
		if strings.HasPrefix(l, "run") {
			now := time.Now()
			path, cmds, err := runBQLFromFile(ctx, driver(), chanSize, bulkSize, strings.TrimSpace(l[:len(l)-1]), tracer)
			if err != nil {
				fmt.Printf("[ERROR] %s\n\n", err)
			} else {
				fmt.Printf("Loaded %q and run %d BQL commands successfully\n\n", path, cmds)
			}
			fmt.Println("Time spent: ", time.Now().Sub(now))
			done <- false
			continue
		}
		// Anything else is treated as a BQL statement.
		now := time.Now()
		table, err := runBQL(ctx, l, driver(), chanSize, bulkSize, tracer)
		bqlDiff := time.Now().Sub(now)
		if err != nil {
			fmt.Printf("[ERROR] %s\n", err)
			fmt.Println("Time spent: ", time.Now().Sub(now))
			fmt.Println()
		} else {
			if table == nil {
				fmt.Printf("[OK] 0 rows retrieved. BQL time: %v. Display time: %v\n",
					bqlDiff, time.Now().Sub(now)-bqlDiff)
			} else {
				if len(table.Bindings()) > 0 {
					fmt.Println(table.String())
				}
				fmt.Printf("[OK] %d rows retrieved. BQL time: %v. Display time: %v\n",
					table.NumRows(), bqlDiff, time.Now().Sub(now)-bqlDiff)
			}
		}
		done <- false
	}
	return 0
} | go | {
"resource": ""
} |
// printHelp writes the list of console commands supported by the REPL to
// standard output, one per line, framed by blank lines.
func printHelp() {
	lines := []string{
		"",
		"help - prints help for the bw console.",
		"disable memoization - disables partial result memoization on query resolution.",
		"enable memoization - enables partial result memoization of partial query results.",
		"export <graph_names_separated_by_commas> <file_path> - dumps triples from graphs into a file path.",
		"desc <BQL> - prints the execution plan for a BQL statement.",
		"load <file_path> <graph_names_separated_by_commas> - load triples into the specified graphs.",
		"run <file_with_bql_statements> - runs all the BQL statements in the file.",
		"start tracing [trace_file] - starts tracing queries.",
		"stop tracing - stops tracing queries.",
		"quit - quits the console.",
		"",
	}
	for _, l := range lines {
		fmt.Println(l)
	}
}
"resource": ""
} |
// runBQLFromFile handles the console "run <file>" command. line is the full
// console line (trailing ';' already stripped by the caller); its second
// whitespace-separated token is the file path. Every statement in the file
// is executed in order, stopping at the first failure. It returns the path
// and the number of statements in the file on success. Progress is traced
// to w when tracing is enabled.
q12572 | runBQLFromFile | train | func runBQLFromFile(ctx context.Context, driver storage.Store, chanSize, bulkSize int, line string, w io.Writer) (string, int, error) {
	ss := strings.Split(strings.TrimSpace(line), " ")
	if len(ss) != 2 {
		return "", 0, fmt.Errorf("wrong syntax: run <file_with_bql_statements>")
	}
	path := ss[1]
	tracer.Trace(w, func() []string {
		return []string{fmt.Sprintf("Attempting to read file %q", path)}
	})
	lines, err := bio.GetStatementsFromFile(path)
	if err != nil {
		msg := fmt.Errorf("failed to read file %q; error %v", path, err)
		tracer.Trace(w, func() []string {
			return []string{msg.Error()}
		})
		return "", 0, msg
	}
	for idx, stm := range lines {
		fmt.Printf("Processing statement (%d/%d)\n", idx+1, len(lines))
		_, err := runBQL(ctx, stm, driver, chanSize, bulkSize, w)
		if err != nil {
			// Wrap the failure with the offending statement for context.
			msg := fmt.Errorf("%q; %v", stm, err)
			tracer.Trace(w, func() []string {
				return []string{msg.Error()}
			})
			return "", 0, msg
		}
	}
	fmt.Println()
	return path, len(lines), nil
} | go | {
"resource": ""
} |
// runBQL plans and executes a single BQL statement against the given store
// and returns the resulting table. An empty statement yields (nil, nil).
// Progress and failures are traced to w when tracing is enabled.
q12573 | runBQL | train | func runBQL(ctx context.Context, bql string, s storage.Store, chanSize, bulkSize int, w io.Writer) (*table.Table, error) {
	tracer.Trace(w, func() []string {
		return []string{fmt.Sprintf("Executing query: %s", bql)}
	})
	pln, err := planBQL(ctx, bql, s, chanSize, bulkSize, w)
	if err != nil {
		return nil, err
	}
	// planBQL returns a nil plan (and nil error) for an empty statement.
	if pln == nil {
		return nil, nil
	}
	res, err := pln.Execute(ctx)
	if err != nil {
		msg := fmt.Errorf("planner.Execute: failed to execute; %v", err)
		tracer.Trace(w, func() []string {
			return []string{msg.Error()}
		})
		return nil, msg
	}
	tracer.Trace(w, func() []string {
		return []string{fmt.Sprintf("planner execute returned %d rows", res.NumRows())}
	})
	return res, nil
} | go | {
"resource": ""
} |
q12574 | planBQL | train | func planBQL(ctx context.Context, bql string, s storage.Store, chanSize, bulkSize int, w io.Writer) (planner.Executor, error) {
bql = strings.TrimSpace(bql)
if bql == ";" {
tracer.Trace(w, func() []string {
return []string{"Empty statement found"}
})
return nil, nil
}
p, err := grammar.NewParser(grammar.SemanticBQL())
if err != nil {
msg := fmt.Errorf("NewParser failed; %v", err)
tracer.Trace(w, func() []string {
return []string{msg.Error()}
})
return nil, msg
}
stm := &semantic.Statement{}
if err := p.Parse(grammar.NewLLk(bql, 1), stm); err != nil {
msg := fmt.Errorf("NewLLk parser failed; %v", err)
tracer.Trace(w, func() []string {
return []string{msg.Error()}
})
return nil, msg
}
pln, err := planner.New(ctx, s, stm, chanSize, bulkSize, w)
if err != nil {
msg := fmt.Errorf("planer.New failed failed; %v", err)
tracer.Trace(w, func() []string {
return []string{msg.Error()}
})
return nil, msg
}
tracer.Trace(w, func() []string {
return []string{"Plan successfuly created"}
})
return pln, nil
} | go | {
"resource": ""
} |
q12575 | Marshal | train | func (s *Story) Marshal() (string, error) {
b, err := json.MarshalIndent(s, "", " ")
if err != nil {
return "", err
}
return string(b), nil
} | go | {
"resource": ""
} |
// Unmarshal deserializes the JSON-encoded story in ss into the receiver,
// overwriting any fields present in the input.
q12576 | Unmarshal | train | func (s *Story) Unmarshal(ss string) error {
	return json.Unmarshal([]byte(ss), s)
} | go | {
"resource": ""
} |
// inferCell guesses the richest type a string value can represent and wraps
// it in a table cell. It tries, in order: node, predicate, literal, and
// RFC3339Nano timestamp; if nothing parses, the raw string is kept as a
// plain string cell.
q12577 | inferCell | train | func inferCell(s string) *table.Cell {
	if n, err := node.Parse(s); err == nil {
		return &table.Cell{N: n}
	}
	if p, err := predicate.Parse(s); err == nil {
		return &table.Cell{P: p}
	}
	if l, err := literal.DefaultBuilder().Parse(s); err == nil {
		return &table.Cell{L: l}
	}
	t, err := time.Parse(time.RFC3339Nano, s)
	if err == nil {
		return &table.Cell{T: &t}
	}
	// Fallback: keep the value as an uninterpreted string.
	return &table.Cell{S: table.CellString(s)}
} | go | {
"resource": ""
} |
q12578 | OutputTable | train | func (a *Assertion) OutputTable(bo []string) (*table.Table, error) {
// Return the already computed output table.
if a.table != nil {
return a.table, nil
}
// Compute the output table.
var (
first bool
mBdngs map[string]bool
data []table.Row
bs []string
)
mBdngs, first = make(map[string]bool), true
for _, row := range a.MustReturn {
nr := table.Row{}
for k, v := range row {
_, ok := mBdngs[k]
if first && !ok {
bs = append(bs, k)
}
if !first && !ok {
return nil, fmt.Errorf("unknow binding %q; available ones are %v", k, mBdngs)
}
mBdngs[k], nr[k] = true, inferCell(v)
}
data = append(data, nr)
first = false
}
if first {
// No data was provided. This will create the empty table with the right
// bindings.
bs = bo
}
// Build the table.
if len(bo) != len(bs) {
return nil, fmt.Errorf("incompatible bindings; got %v, want %v", bs, bo)
}
for _, b := range bo {
if _, ok := mBdngs[b]; !first && !ok {
return nil, fmt.Errorf("missing binding %q; want bining in %v", b, bo)
}
}
t, err := table.New(bo)
if err != nil {
return nil, err
}
for _, r := range data {
t.AddRow(r)
}
return t, nil
} | go | {
"resource": ""
} |
q12579 | updateTimeBounds | train | func updateTimeBounds(lo *storage.LookupOptions, cls *semantic.GraphClause) *storage.LookupOptions {
nlo := &storage.LookupOptions{
MaxElements: lo.MaxElements,
LowerAnchor: lo.LowerAnchor,
UpperAnchor: lo.UpperAnchor,
}
if cls.PLowerBound != nil {
if lo.LowerAnchor == nil || (lo.LowerAnchor != nil && cls.PLowerBound.After(*lo.LowerAnchor)) {
nlo.LowerAnchor = cls.PLowerBound
}
}
if cls.PUpperBound != nil {
if lo.UpperAnchor == nil || (lo.UpperAnchor != nil && cls.PUpperBound.Before(*lo.UpperAnchor)) {
nlo.UpperAnchor = cls.PUpperBound
}
}
return nlo
} | go | {
"resource": ""
} |
q12580 | updateTimeBoundsForRow | train | func updateTimeBoundsForRow(lo *storage.LookupOptions, cls *semantic.GraphClause, r table.Row) (*storage.LookupOptions, error) {
lo = updateTimeBounds(lo, cls)
if cls.PLowerBoundAlias != "" {
v, ok := r[cls.PLowerBoundAlias]
if ok && v.T == nil {
return nil, fmt.Errorf("invalid time anchor value %v for bound %s", v, cls.PLowerBoundAlias)
}
if lo.LowerAnchor == nil || (lo.LowerAnchor != nil && v.T.After(*lo.LowerAnchor)) {
lo.LowerAnchor = v.T
}
}
if cls.PUpperBoundAlias != "" {
v, ok := r[cls.PUpperBoundAlias]
if ok && v.T == nil {
return nil, fmt.Errorf("invalid time anchor value %v for bound %s", v, cls.PUpperBoundAlias)
}
if lo.UpperAnchor == nil || (lo.UpperAnchor != nil && v.T.After(*lo.UpperAnchor)) {
lo.UpperAnchor = v.T
}
}
nlo := updateTimeBounds(lo, cls)
return nlo, nil
} | go | {
"resource": ""
} |
// simpleExist checks whether the fully specified triple t exists in any of
// the given graphs. It returns unfeasible=true when the triple is found in
// none of them, together with a table holding the rows produced by the
// clause bindings for every graph where the triple does exist.
q12581 | simpleExist | train | func simpleExist(ctx context.Context, gs []storage.Graph, cls *semantic.GraphClause, t *triple.Triple) (bool, *table.Table, error) {
	unfeasible := true
	tbl, err := table.New(cls.Bindings())
	if err != nil {
		return true, nil, err
	}
	for _, g := range gs {
		b, err := g.Exist(ctx, t)
		if err != nil {
			return true, nil, err
		}
		if b {
			unfeasible = false
			// Feed the single triple through addTriples so the clause
			// bindings are materialized into table rows.
			ts := make(chan *triple.Triple, 1)
			ts <- t
			close(ts)
			if err := addTriples(ts, cls, tbl); err != nil {
				return true, nil, err
			}
		}
	}
	return unfeasible, tbl, nil
} | go | {
"resource": ""
} |
// addTriples drains the triple channel, filters each triple against the
// predicate and object constraints of the graph clause (ID match, temporal
// type, and time-anchor bounds), and appends the surviving triples to tbl
// as rows via tripleToRow. It stops and returns on the first error.
q12582 | addTriples | train | func addTriples(ts <-chan *triple.Triple, cls *semantic.GraphClause, tbl *table.Table) error {
	for t := range ts {
		if cls.PID != "" {
			// The triples need to be filtered.
			if string(t.Predicate().ID()) != cls.PID {
				continue
			}
			if cls.PTemporal {
				if t.Predicate().Type() != predicate.Temporal {
					continue
				}
				ta, err := t.Predicate().TimeAnchor()
				if err != nil {
					return fmt.Errorf("failed to retrieve time anchor from time predicate in triple %s with error %v", t, err)
				}
				// Need to check the bounds of the triple.
				if cls.PLowerBound != nil && cls.PLowerBound.After(*ta) {
					continue
				}
				if cls.PUpperBound != nil && cls.PUpperBound.Before(*ta) {
					continue
				}
			}
		}
		if cls.OID != "" {
			// Object constraints only apply when the object is a predicate.
			if p, err := t.Object().Predicate(); err == nil {
				// The triples need to be filtered.
				if string(p.ID()) != cls.OID {
					continue
				}
				if cls.OTemporal {
					if p.Type() != predicate.Temporal {
						continue
					}
					ta, err := p.TimeAnchor()
					if err != nil {
						return fmt.Errorf("failed to retrieve time anchor from time predicate in triple %s with error %v", t, err)
					}
					// Need to check the bounds of the triple.
					if cls.OLowerBound != nil && cls.OLowerBound.After(*ta) {
						continue
					}
					if cls.OUpperBound != nil && cls.OUpperBound.Before(*ta) {
						continue
					}
				}
			}
		}
		r, err := tripleToRow(t, cls)
		if err != nil {
			return err
		}
		// tripleToRow may return a nil row for triples that bind nothing.
		if r != nil {
			tbl.AddRow(r)
		}
	}
	return nil
} | go | {
"resource": ""
} |
// objectToCell converts a triple object into a table cell, mapping it to a
// node, predicate, or literal cell depending on the object's underlying
// type. It fails if the object is none of those.
q12583 | objectToCell | train | func objectToCell(o *triple.Object) (*table.Cell, error) {
	c := &table.Cell{}
	if n, err := o.Node(); err == nil {
		c.N = n
		return c, nil
	}
	if p, err := o.Predicate(); err == nil {
		c.P = p
		return c, nil
	}
	if l, err := o.Literal(); err == nil {
		c.L = l
		return c, nil
	}
	return nil, fmt.Errorf("unknown object type in object %q", o)
} | go | {
"resource": ""
} |
// runAll executes every benchmark battery (add/remove triples on tree and
// random graphs, plus BQL graph walking) against the given store. It
// returns the sum of the batteries' exit codes, so 0 means every battery
// succeeded.
q12584 | runAll | train | func runAll(ctx context.Context, st storage.Store, chanSize, bulkSize int) int {
	// - Add non existing triples. (done)
	// - Add triples that already exist. (done)
	// - Remove non existing triples. (done)
	// - Remove existing triples. (done)
	// - BQL tree walking from root. (done)
	// - BQL random graph hopping. (done)
	// - BQL sorting. (done)
	// - BQL grouping. (done)
	// - BQL counting. (bounded by sort and grouping)
	// - BQL filter existent (bounded by sort and grouping)
	// - BQL filter non existent (bounded by sort and grouping)
	fmt.Printf("DISCLAIMER: Running this benchmarks is expensive. Consider using a machine with at least 3G of RAM.\n\n")
	var out int
	// Add non existing triples.
	out += runBattery(ctx, st, "adding non existing tree triples", chanSize, bulkSize, batteries.AddTreeTriplesBenchmark)
	out += runBattery(ctx, st, "adding non existing graph triples", chanSize, bulkSize, batteries.AddGraphTriplesBenchmark)
	// Add existing triples.
	out += runBattery(ctx, st, "adding existing tree triples", chanSize, bulkSize, batteries.AddExistingTreeTriplesBenchmark)
	out += runBattery(ctx, st, "adding existing graph triples", chanSize, bulkSize, batteries.AddExistingGraphTriplesBenchmark)
	// Remove non existing triples.
	out += runBattery(ctx, st, "removing non existing tree triples", chanSize, bulkSize, batteries.RemoveTreeTriplesBenchmark)
	out += runBattery(ctx, st, "removing non existing graph triples", chanSize, bulkSize, batteries.RemoveGraphTriplesBenchmark)
	// Remove existing triples.
	out += runBattery(ctx, st, "removing existing tree triples", chanSize, bulkSize, batteries.RemoveExistingTreeTriplesBenchmark)
	out += runBattery(ctx, st, "removing existing graph triples", chanSize, bulkSize, batteries.RemoveExistingGraphTriplesBenchmark)
	// BQL graph walking.
	out += runBattery(ctx, st, "walking the tree graph with BQL", chanSize, bulkSize, batteries.BQLTreeGraphWalking)
	out += runBattery(ctx, st, "walking the random graph with BQL", chanSize, bulkSize, batteries.BQLRandomGraphWalking)
	return out
} | go | {
"resource": ""
} |
// runBattery creates the benchmark entries via f, runs them first
// sequentially and then concurrently, and prints sorted per-entry stats
// (triples/sec, mean, stddev) for both runs. It returns 0 on success and 2
// when the entries could not be created.
q12585 | runBattery | train | func runBattery(ctx context.Context, st storage.Store, name string, chanSize, bulkSize int, f func(context.Context, storage.Store, int, int) ([]*runtime.BenchEntry, error)) int {
	// Add triples.
	fmt.Printf("Creating %s triples benchmark... ", name)
	bes, err := f(ctx, st, chanSize, bulkSize)
	if err != nil {
		log.Printf("[ERROR] %v\n", err)
		return 2
	}
	fmt.Printf("%d entries created\n", len(bes))
	fmt.Printf("Run %s benchmark sequentially... ", name)
	ts := time.Now()
	brs := runtime.RunBenchmarkBatterySequentially(bes)
	ds := time.Now().Sub(ts)
	fmt.Printf("(%v) done\n", ds)
	fmt.Printf("Run %s benchmark concurrently... ", name)
	tc := time.Now()
	brc := runtime.RunBenchmarkBatteryConcurrently(bes)
	dc := time.Now().Sub(tc)
	fmt.Printf("(%v) done\n\n", dc)
	// format renders one benchmark result as a fixed-width summary line.
	format := func(br *runtime.BenchResult) string {
		if br.Err != nil {
			return fmt.Sprintf("%20s - %20s -[ERROR] %v", br.BatteryID, br.ID, br.Err)
		}
		tps := float64(br.Triples) / (float64(br.Mean) / float64(time.Second))
		return fmt.Sprintf("%20s - %20s - %05.2f triples/sec - %v/%v", br.BatteryID, br.ID, tps, br.Mean, br.StdDev)
	}
	sortAndPrint := func(ss []string) {
		sort.Strings(ss)
		for _, s := range ss {
			fmt.Println(s)
		}
	}
	fmt.Printf("Stats for sequentially run %s benchmark\n", name)
	var ress []string
	for _, br := range brs {
		ress = append(ress, format(br))
	}
	sortAndPrint(ress)
	fmt.Println()
	fmt.Printf("Stats for concurrently run %s benchmark\n", name)
	var resc []string
	for _, br := range brc {
		resc = append(resc, format(br))
	}
	sortAndPrint(resc)
	fmt.Println()
	return 0
} | go | {
"resource": ""
} |
q12586 | GetStatementsFromFile | train | func GetStatementsFromFile(path string) ([]string, error) {
stms, err := ReadLines(path)
if err != nil {
return nil, err
}
return stms, nil
} | go | {
"resource": ""
} |
// ReadLines reads the statements contained in the file at path and returns
// them as a list of strings. Statements may span multiple lines and are
// terminated by a line ending in ";". Blank lines and lines starting with
// "#" are skipped. A trailing statement without a terminating ";" is still
// returned. The only code changes versus the original are idiomatic
// strings.HasPrefix/HasSuffix calls replacing Index/manual slicing.
func ReadLines(path string) ([]string, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	var lines []string
	scanner := bufio.NewScanner(f)
	line := ""
	for scanner.Scan() {
		l := strings.TrimSpace(scanner.Text())
		// Skip blank lines and comments.
		if l == "" || strings.HasPrefix(l, "#") {
			continue
		}
		line += " " + l
		if strings.HasSuffix(l, ";") {
			lines = append(lines, strings.TrimSpace(line))
			line = ""
		}
	}
	// Keep a final statement that is missing its terminating ";".
	if line != "" {
		lines = append(lines, strings.TrimSpace(line))
	}
	return lines, scanner.Err()
}
"resource": ""
} |
// ProcessLines runs fp on every non-empty, non-comment ("#"-prefixed) line
// of the file at path. It returns the number of lines scanned so far —
// including skipped ones — together with the first error from fp or the
// scanner, if any.
func ProcessLines(path string, fp func(line string) error) (int, error) {
	file, err := os.Open(path)
	if err != nil {
		return 0, err
	}
	defer file.Close()
	count := 0
	sc := bufio.NewScanner(file)
	for sc.Scan() {
		count++
		line := strings.TrimSpace(sc.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			// Blank lines and comments are counted but not processed.
			continue
		}
		if err := fp(line); err != nil {
			return count, err
		}
	}
	return count, sc.Err()
}
"resource": ""
} |
q12589 | ReadIntoGraph | train | func ReadIntoGraph(ctx context.Context, g storage.Graph, r io.Reader, b literal.Builder) (int, error) {
cnt, scanner := 0, bufio.NewScanner(r)
scanner.Split(bufio.ScanLines)
for scanner.Scan() {
text := strings.TrimSpace(scanner.Text())
if text == "" {
continue
}
t, err := triple.Parse(text, b)
if err != nil {
return cnt, err
}
cnt++
g.AddTriples(ctx, []*triple.Triple{t})
}
return cnt, nil
} | go | {
"resource": ""
} |
// WriteGraph serializes every triple of the graph to w, one per line, and
// returns the number of triples written. The triple producer runs on its
// own goroutine; on a write error the channel is still drained so the
// producer does not block, and producer errors take precedence over write
// errors in the returned value.
q12590 | WriteGraph | train | func WriteGraph(ctx context.Context, w io.Writer, g storage.Graph) (int, error) {
	var (
		wg sync.WaitGroup
		tErr error
		wErr error
	)
	cnt, ts := 0, make(chan *triple.Triple)
	wg.Add(1)
	go func() {
		defer wg.Done()
		tErr = g.Triples(ctx, storage.DefaultLookup, ts)
	}()
	for t := range ts {
		if wErr != nil {
			// Keep draining so the producing goroutine can finish.
			continue
		}
		if _, err := io.WriteString(w, fmt.Sprintf("%s\n", t.String())); err != nil {
			wErr = err
			continue
		}
		cnt++
	}
	wg.Wait()
	if tErr != nil {
		return 0, tErr
	}
	if wErr != nil {
		return 0, wErr
	}
	return cnt, nil
} | go | {
"resource": ""
} |
// AddGraphTriplesBenchmark builds the benchmark entries that measure adding
// non-existing triples from randomly generated graphs. It generates triple
// sets for every (node count, size) combination and wraps each in a
// BenchEntry whose Setup creates a fresh graph, F bulk-adds the triples,
// and TearDown deletes the graph again.
q12591 | AddGraphTriplesBenchmark | train | func AddGraphTriplesBenchmark(ctx context.Context, st storage.Store, chanSize, bulkSize int) ([]*runtime.BenchEntry, error) {
	nodes := []int{317, 1000}
	sizes := []int{10, 1000, 100000}
	var trplSets [][]*triple.Triple
	var ids []string
	var gids []string
	var gSizes []int
	gs, err := getGraphGenerators(nodes)
	if err != nil {
		return nil, err
	}
	// One triple set (and matching id/size bookkeeping) per generator/size.
	for idx, g := range gs {
		for _, s := range sizes {
			ts, err := g.Generate(s)
			if err != nil {
				return nil, err
			}
			trplSets = append(trplSets, ts)
			ids = append(ids, fmt.Sprintf("rg nodes=%04d, size=%07d", nodes[idx], s))
			gids = append(gids, fmt.Sprintf("n%d_s%d", nodes[idx], s))
			gSizes = append(gSizes, s)
		}
	}
	var bes []*runtime.BenchEntry
	reps := []int{10}
	for i, max := 0, len(ids); i < max; i++ {
		for idxReps, r := range reps {
			var g storage.Graph
			// gID and data are declared inside the loop so each entry's
			// closures capture their own copies.
			gID := fmt.Sprintf("add_graph_%s_r%d_i%d", gids[i], i, idxReps)
			data := trplSets[i]
			bes = append(bes, &runtime.BenchEntry{
				BatteryID: "Add non existing triples",
				ID: fmt.Sprintf("%s, reps=%02d", ids[i], r),
				Triples: gSizes[i],
				Reps: r,
				Setup: func() error {
					var err error
					g, err = st.NewGraph(ctx, gID)
					return err
				},
				F: func() error {
					return g.AddTriples(ctx, data)
				},
				TearDown: func() error {
					return st.DeleteGraph(ctx, gID)
				},
			})
		}
	}
	return bes, nil
} | go | {
"resource": ""
} |
// registerDrivers populates the global registry of available storage
// drivers. Currently only the in-memory volatile driver is registered.
q12592 | registerDrivers | train | func registerDrivers() {
	registeredDrivers = map[string]common.StoreGenerator{
		// Memory only storage driver.
		"VOLATILE": func() (storage.Store, error) {
			return memory.NewStore(), nil
		},
	}
} | go | {
"resource": ""
} |
// lex creates a lexer for the input and starts its state machine on a
// separate goroutine. It returns the lexer together with the channel
// (buffered to capacity) on which tokens are delivered.
q12593 | lex | train | func lex(input string, capacity int) (*lexer, <-chan Token) {
	l := &lexer{
		input: input,
		tokens: make(chan Token, capacity),
	}
	go l.run() // Concurrently run state machine.
	return l, l.tokens
} | go | {
"resource": ""
} |
q12594 | New | train | func New(input string, capacity int) <-chan Token {
if capacity < 0 {
capacity = 0
}
_, c := lex(input, capacity)
return c
} | go | {
"resource": ""
} |
// lexToken is the main dispatch state of the lexer. It inspects the next
// rune and hands control to the specialized state for bindings, nodes,
// blank nodes, predicates/literals, or keywords; otherwise it tries each
// single-symbol token, skips whitespace, and finally emits ItemEOF when
// the input is exhausted.
q12595 | lexToken | train | func lexToken(l *lexer) stateFn {
	for {
		{
			r := l.peek()
			switch r {
			case binding:
				l.next()
				return lexBinding
			case slash:
				return lexNode
			case underscore:
				l.next()
				return lexBlankNode
			case quote:
				return lexPredicateOrLiteral
			}
			if unicode.IsLetter(r) {
				return lexKeyword
			}
		}
		if state := isSingleSymbolToken(l, ItemLBracket, leftBracket); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemRBracket, rightBracket); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemLPar, leftPar); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemRPar, rightPar); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemSemicolon, semicolon); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemDot, dot); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemComma, comma); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemLT, lt); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemGT, gt); state != nil {
			return state
		}
		if state := isSingleSymbolToken(l, ItemEQ, eq); state != nil {
			return state
		}
		{
			// NOTE(review): l.next() is called twice here, so an unknown
			// non-space rune causes a second rune to be consumed as well —
			// confirm whether this double advance is intentional.
			r := l.next()
			if unicode.IsSpace(r) {
				l.ignore()
				continue
			}
			if l.next() == eof {
				break
			}
		}
	}
	l.emit(ItemEOF) // Useful to make EOF a token.
	return nil // Stop the run loop.
} | go | {
"resource": ""
} |
q12596 | isSingleSymbolToken | train | func isSingleSymbolToken(l *lexer, tt TokenType, symbol rune) stateFn {
if r := l.peek(); r == symbol {
l.next()
l.emit(tt)
return lexSpace // Next state.
}
return nil
} | go | {
"resource": ""
} |
// lexBinding scans the remainder of a ?binding token — letters, digits, and
// underscores — and emits it. The leading '?' was already consumed by the
// caller (lexToken).
q12597 | lexBinding | train | func lexBinding(l *lexer) stateFn {
	for {
		// Precedence note: this reads as
		// (!letter && !digit && r != '_') || r == eof.
		if r := l.next(); !unicode.IsLetter(r) && !unicode.IsDigit(r) && r != rune('_') || r == eof {
			l.backup()
			l.emit(ItemBinding)
			break
		}
	}
	return lexSpace
} | go | {
"resource": ""
} |
q12598 | lexSpace | train | func lexSpace(l *lexer) stateFn {
for {
if r := l.next(); !unicode.IsSpace(r) || r == eof {
break
}
}
l.backup()
l.ignore()
return lexToken
} | go | {
"resource": ""
} |
// lexBlankNode scans a blank node of the form _:label, where the label must
// start with a letter and may continue with letters, digits, or
// underscores. The leading '_' was already consumed by the caller; invalid
// syntax terminates the lexer with an error token.
q12599 | lexBlankNode | train | func lexBlankNode(l *lexer) stateFn {
	if r := l.next(); r != colon {
		l.emitError("blank node should start with _:")
		return nil
	}
	if r := l.next(); !unicode.IsLetter(r) {
		l.emitError("blank node label should begin with a letter")
		return nil
	}
	for {
		// Same precedence as lexBinding: stop at the first rune that is not
		// part of the label, or at end of input.
		if r := l.next(); !unicode.IsLetter(r) && !unicode.IsDigit(r) && r != rune('_') || r == eof {
			l.backup()
			l.emit(ItemBlankNode)
			break
		}
	}
	return lexSpace
} | go | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.