package pto3_test

import (
	"bufio"
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"os"
	"testing"
	"time"

	pto3 "github.com/mami-project/pto3-go"
)
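
// observationsInFile reads newline-delimited JSON observations from r and
// returns a set keyed by "start|end|path|condition", so two files can be
// compared without regard to line order. The key covers only the time range,
// path, and condition of each observation; lines that do not start with '['
// (such as metadata records) are skipped.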
func observationsInFile(r io.Reader) (map[string]struct{}, error) {
	m := make(map[string]struct{})

	s := bufio.NewScanner(r)
	for s.Scan() {
		var o pto3.Observation
		line := s.Text()
		if len(line) > 0 && line[0] == '[' {
			if err := json.Unmarshal([]byte(line), &o); err != nil {
				return nil, err
			}
			m[fmt.Sprintf("%s|%s|%s|%s",
				o.TimeStart.Format(time.RFC3339),
				o.TimeEnd.Format(time.RFC3339),
				o.Path.String,
				o.Condition.Name)] = struct{}{}
		}
	}

	return m, nil
}

// TestNormalization simulates the operation of a normalizer. It exercises
// reads from the raw data store, writes to the observation store, and reads
// back from the observation store to verify the stored data. It uses the
// initial raw data in testdata/test_raw_init.
func TestNormalization(t *testing.T) {
	// grab a file from the raw data store
	cam, err := TestRDS.CampaignForName("test0")
	if err != nil {
		t.Fatal(err)
	}

	obsdata, err := cam.ReadFileData("test0-0-obs.ndjson")
	if err != nil {
		t.Fatal(err)
	}
	defer obsdata.Close()

	b, err := ioutil.ReadAll(obsdata)
	if err != nil {
		t.Fatal(err)
	}

	// build a set containing observations for later comparison
	rawobsset, err := observationsInFile(bytes.NewBuffer(b))
	if err != nil {
		t.Fatal(err)
	}

	// now dump the observations into a temporary file for loading
	tf, err := ioutil.TempFile("", "pto3-test-obs")
	if err != nil {
		t.Fatal(err)
	}
	defer tf.Close()
	defer os.Remove(tf.Name())

	if _, err := tf.Write(b); err != nil {
		t.Fatal(err)
	}

	// append some observation set metadata to the temporary file
	mdf, err := os.Open("testdata/test_obset_metadata.json")
	if err != nil {
		t.Fatal(err)
	}
	defer mdf.Close()

	if _, err := io.Copy(tf, mdf); err != nil {
		t.Fatal(err)
	}

	if err := tf.Sync(); err != nil {
		t.Fatal(err)
	}

	// record the current time so we can check the set's creation timestamp
	nowish := time.Now()

	// create an observation set from this normalized file; the condition
	// cache is preloaded from the database and the path cache starts empty
	cidCache, err := pto3.LoadConditionCache(TestDB)
	if err != nil {
		t.Fatal(err)
	}
	pidCache := make(pto3.PathCache)

	set, err := pto3.CopySetFromObsFile(tf.Name(), TestDB, cidCache, pidCache)
	if err != nil {
		t.Fatal(err)
	}
	log.Printf("created observation set ID:%d", set.ID)

	creationDelay := set.Created.Sub(nowish)
	if creationDelay < -1*time.Minute || creationDelay > 1*time.Minute {
		t.Fatalf("nonsensical obset creation delay %v", creationDelay)
	}

	// compare the stored observation data with the data we uploaded
	dataout := new(bytes.Buffer)
	if err := set.CopyDataToStream(TestDB, dataout); err != nil {
		t.Fatal(err)
	}

	outobsset, err := observationsInFile(bytes.NewReader(dataout.Bytes()))
	if err != nil {
		t.Fatal(err)
	}

	// every uploaded observation must appear in the stored set...
	i := 0
	for k := range rawobsset {
		if _, ok := outobsset[k]; !ok {
			t.Fatalf("retrieved observation set missing observation %s at index %d", k, i)
		}
		i++
	}

	// ...and the stored set must not contain anything we did not upload
	i = 0
	for k := range outobsset {
		if _, ok := rawobsset[k]; !ok {
			t.Fatalf("retrieved observation set has spurious observation %s at index %d", k, i)
		}
		i++
	}
}
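
// To exercise this test on its own against the package's shared fixtures
// (TestRDS and TestDB, which are set up elsewhere in the package's test
// code), something like the following, run from the package directory,
// should suffice; the exact invocation depends on how the database and raw
// data store are configured for testing:
//
//	go test -run TestNormalization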