
Commit

WIP
dennis-tra committed Oct 5, 2023
1 parent 5c6e8a6 commit b55f2db
Showing 2 changed files with 69 additions and 19 deletions.
36 changes: 18 additions & 18 deletions internal/coord/query/crawl.go → internal/coord/routing/crawl.go
@@ -1,4 +1,4 @@
package query
package routing

import (
"context"
@@ -42,15 +42,6 @@ func DefaultCrawlConfig() *CrawlConfig {
	}
}

type crawlJob[K kad.Key[K], N kad.NodeID[K]] struct {
	node   N
	target K
}

func (c *crawlJob[K, N]) mapKey() string {
	return c.node.String() + key.HexString(c.target)
}

type Crawl[K kad.Key[K], N kad.NodeID[K], M coordt.Message] struct {
	self N
	id   coordt.QueryID
@@ -74,10 +65,6 @@ func NewCrawl[K kad.Key[K], N kad.NodeID[K], M coordt.Message](self N, id coordt
		return nil, err
	}

	if len(seed) == 0 {
		return nil, fmt.Errorf("empty seed")
	}

	c := &Crawl[K, N, M]{
		self: self,
		id:   id,
@@ -113,6 +100,10 @@ func NewCrawl[K kad.Key[K], N kad.NodeID[K], M coordt.Message](self N, id coordt
		}
	}

	if len(seed) == 0 {
		return nil, fmt.Errorf("empty seed")
	}

	return c, nil
}
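
The two hunks above move the empty-seed check from the top of NewCrawl to just before the final return, after the config validation and the struct setup. A minimal, self-contained sketch of that constructor ordering follows; the crawl/newCrawl names and the string seed are stand-ins for the generic types, not the zikade implementation.

package main

import (
	"errors"
	"fmt"
)

// crawl and newCrawl are simplified stand-ins for the generic Crawl type and
// the NewCrawl constructor. They only illustrate the ordering shown in the
// hunks above: set up the value first, validate the seed last.
type crawl struct {
	seed []string
}

func newCrawl(seed []string) (*crawl, error) {
	c := &crawl{seed: seed}

	if len(seed) == 0 {
		return nil, errors.New("empty seed")
	}

	return c, nil
}

func main() {
	if _, err := newCrawl(nil); err != nil {
		fmt.Println(err) // empty seed
	}
}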

@@ -148,18 +139,18 @@ func (c *Crawl[K, N, M]) Advance(ctx context.Context, ev CrawlEvent) (out CrawlS
continue
}

job := crawlJob[K, N]{
newJob := crawlJob[K, N]{
node: node,
target: target.Key(),
}

mapKey := job.mapKey()
newMapKey := newJob.mapKey()

if _, found := c.cpls[mapKey]; found {
if _, found := c.cpls[newMapKey]; found {
continue
}

c.cpls[mapKey] = i
c.cpls[newMapKey] = i
c.todo = append(c.todo, job)
c.todo = append(c.todo, newJob)
}
}
@@ -216,6 +207,15 @@ func (c *Crawl[K, N, M]) Advance(ctx context.Context, ev CrawlEvent) (out CrawlS
return &StateCrawlFinished{}
}

type crawlJob[K kad.Key[K], N kad.NodeID[K]] struct {
	node   N
	target K
}

func (c *crawlJob[K, N]) mapKey() string {
	return c.node.String() + key.HexString(c.target)
}
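
The relocated crawlJob type keys its deduplication map on the node's string form concatenated with the hex form of the target key, so each (node, target) pair is scheduled at most once. The sketch below reproduces that dedup pattern with plain strings; the job type and the sample values are illustrative stand-ins, not the real generic code.

package main

import "fmt"

// job mirrors the shape of crawlJob with plain strings instead of generic
// node and key types; the values below are made up for illustration.
type job struct {
	node   string
	target string // hex-encoded target key in the real code
}

func (j job) mapKey() string { return j.node + j.target }

func main() {
	cpls := map[string]int{} // CPL per (node, target) pair, like c.cpls
	var todo []job

	jobs := []job{
		{node: "nodeA", target: "00ff"},
		{node: "nodeB", target: "00ff"},
		{node: "nodeA", target: "00ff"}, // duplicate of the first entry
	}
	for i, j := range jobs {
		if _, found := cpls[j.mapKey()]; found {
			continue // this (node, target) pair is already scheduled
		}
		cpls[j.mapKey()] = i
		todo = append(todo, j)
	}
	fmt.Println(len(todo)) // 2: the duplicate job was skipped
}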

type CrawlState interface {
	crawlState()
}
@@ -1,4 +1,4 @@
package query
package routing

import (
"context"
@@ -14,6 +14,40 @@ import (

var _ coordt.StateMachine[CrawlEvent, CrawlState] = (*Crawl[tiny.Key, tiny.Node, tiny.Message])(nil)
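
The var _ coordt.StateMachine[...] = (*Crawl[...])(nil) line is Go's compile-time interface assertion idiom: the build fails if Crawl ever stops satisfying the interface. A tiny self-contained example of the same idiom, with hypothetical stateMachine/pinger names standing in for the coordt types:

package main

// stateMachine is a hypothetical stand-in for coordt.StateMachine: a generic
// interface over an event type E and a state type S.
type stateMachine[E any, S any] interface {
	Advance(ev E) S
}

type pingEvent struct{}
type pingState struct{}

// pinger is a trivial machine used only to demonstrate the assertion idiom.
type pinger struct{}

func (p *pinger) Advance(ev pingEvent) pingState { return pingState{} }

// Compile-time check, the same idiom as the line above: the build breaks if
// *pinger ever stops implementing stateMachine[pingEvent, pingState].
var _ stateMachine[pingEvent, pingState] = (*pinger)(nil)

func main() {}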

func TestNewCrawl(t *testing.T) {
	self := tiny.NewNode(0)
	a := tiny.NewNode(0b10000100)
	b := tiny.NewNode(0b11000000)

	t.Run("initializes maps", func(t *testing.T) {
		cfg := DefaultCrawlConfig()
		cfg.MaxCPL = 4
		seed := []tiny.Node{a}
		qry, err := NewCrawl[tiny.Key, tiny.Node, tiny.Message](self, coordt.QueryID("test"), tiny.NodeWithCpl, seed, cfg)
		require.NoError(t, err)
		require.NotNil(t, qry)
		require.Len(t, qry.todo, 4)
		require.NotNil(t, qry.waiting)
		require.NotNil(t, qry.success)
		require.NotNil(t, qry.failed)
		require.NotNil(t, qry.errors)
	})

	t.Run("removes self from seed", func(t *testing.T) {
		cfg := DefaultCrawlConfig()
		cfg.MaxCPL = 4
		seed := []tiny.Node{self, a, b}
		qry, err := NewCrawl[tiny.Key, tiny.Node, tiny.Message](self, coordt.QueryID("test"), tiny.NodeWithCpl, seed, cfg)
		require.NoError(t, err)
		require.NotNil(t, qry)
		require.Len(t, qry.todo, cfg.MaxCPL*2) // self is not included
		require.NotNil(t, qry.waiting)
		require.NotNil(t, qry.success)
		require.NotNil(t, qry.failed)
		require.NotNil(t, qry.errors)
	})
}
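
TestNewCrawl pins down the size of the initial work list: one crawlJob per combination of configured CPL and seed node other than self, i.e. MaxCPL multiplied by the number of non-self seed nodes. A small sketch of that arithmetic, with hypothetical helper and variable names:

package main

import "fmt"

// expectedTodo captures the invariant both subtests assert: NewCrawl creates
// one crawl job per (non-self seed node, CPL) combination. The names and the
// string node type are hypothetical; the real code uses generic node IDs.
func expectedTodo(maxCPL int, seed []string, self string) int {
	others := 0
	for _, node := range seed {
		if node != self {
			others++
		}
	}
	return maxCPL * others
}

func main() {
	fmt.Println(expectedTodo(4, []string{"a"}, "self"))              // 4, as in "initializes maps"
	fmt.Println(expectedTodo(4, []string{"self", "a", "b"}, "self")) // 8, i.e. cfg.MaxCPL*2
}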

func TestCrawl_Advance(t *testing.T) {
	ctx := context.Background()

@@ -87,13 +121,29 @@ func TestCrawl_Advance(t *testing.T) {
	assert.Len(t, qry.success, 2)
	assert.Len(t, qry.failed, 0)

	moreReqs := make([]*StateCrawlFindCloser[tiny.Key, tiny.Node], cfg.MaxCPL)
	moreReqs[0] = tstate
	for i := 1; i < cfg.MaxCPL; i++ {
		state = qry.Advance(ctx, &EventCrawlPoll{})
		tstate, ok = state.(*StateCrawlFindCloser[tiny.Key, tiny.Node])
		require.True(t, ok, "type is %T", state)
		moreReqs[i] = tstate
	}

	for i := 2; i < len(reqs); i++ {
		state = qry.Advance(ctx, &EventCrawlNodeResponse[tiny.Key, tiny.Node]{
			NodeID:      reqs[i].NodeID,
			Target:      reqs[i].Target,
			CloserNodes: []tiny.Node{},
		})
	}
	for i := 0; i < len(moreReqs); i++ {
		state = qry.Advance(ctx, &EventCrawlNodeResponse[tiny.Key, tiny.Node]{
			NodeID:      moreReqs[i].NodeID,
			Target:      moreReqs[i].Target,
			CloserNodes: []tiny.Node{},
		})
	}

	require.IsType(t, &StateCrawlIdle{}, state)
}
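
The added test code drives the crawl state machine in two phases: it polls with EventCrawlPoll until it has collected a full set of outstanding StateCrawlFindCloser requests, then answers every request with an empty EventCrawlNodeResponse until the machine settles into StateCrawlIdle. The sketch below models only that poll/respond hand-off with a much-simplified, hypothetical state machine; it is not the zikade implementation.

package main

import "fmt"

// Simplified stand-ins for the crawl events and states exercised by the test
// (EventCrawlPoll, EventCrawlNodeResponse, StateCrawlFindCloser,
// StateCrawlIdle). Only the poll/respond hand-off is modelled.
type event interface{ isEvent() }

type poll struct{}
type response struct{ node string }

func (poll) isEvent()     {}
func (response) isEvent() {}

type state interface{ isState() }

type findCloser struct{ node string } // "send a find-closer request to this node"
type waitingState struct{}            // requests still in flight, nothing new to hand out
type idle struct{}                    // no todo entries and no outstanding requests

func (findCloser) isState()   {}
func (waitingState) isState() {}
func (idle) isState()         {}

type crawl struct {
	todo    []string
	waiting map[string]bool
}

// advance mirrors the pattern the test relies on: a poll hands out the next
// todo node as a findCloser request, a response removes that node from the
// waiting set.
func (c *crawl) advance(ev event) state {
	switch tev := ev.(type) {
	case response:
		delete(c.waiting, tev.node)
	case poll:
		if len(c.todo) > 0 {
			next := c.todo[0]
			c.todo = c.todo[1:]
			c.waiting[next] = true
			return findCloser{node: next}
		}
	}
	if len(c.waiting) > 0 {
		return waitingState{}
	}
	return idle{}
}

func main() {
	c := &crawl{todo: []string{"a", "b"}, waiting: map[string]bool{}}

	// Phase 1: poll until every pending request has been handed out.
	var reqs []findCloser
	for {
		st, ok := c.advance(poll{}).(findCloser)
		if !ok {
			break
		}
		reqs = append(reqs, st)
	}

	// Phase 2: answer each request, then confirm the machine is idle.
	for _, r := range reqs {
		_ = c.advance(response{node: r.node})
	}
	fmt.Printf("%T\n", c.advance(poll{})) // main.idle
}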
