forked from netobserv/netobserv-cli
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path: flow_capture.go
More file actions
147 lines (127 loc) · 3.33 KB
/
flow_capture.go
File metadata and controls
147 lines (127 loc) · 3.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
package cmd
import (
"encoding/json"
"os"
"strings"
"time"
"github.com/jpillora/sizestr"
"github.com/netobserv/flowlogs-pipeline/pkg/config"
"github.com/netobserv/flowlogs-pipeline/pkg/pipeline/utils"
"github.com/netobserv/flowlogs-pipeline/pkg/pipeline/write/grpc"
"github.com/netobserv/flowlogs-pipeline/pkg/pipeline/write/grpc/genericmap"
"github.com/spf13/cobra"
)
// flowCmd is the cobra command registered as "get-flows". It delegates to
// runFlowCapture, which starts the gRPC flow collector and the terminal
// display. Short/Long help texts are intentionally left empty here.
var flowCmd = &cobra.Command{
	Use:   "get-flows",
	Short: "",
	Long:  "",
	Run:   runFlowCapture,
}
// runFlowCapture is the entry point bound to the "get-flows" command: it
// marks the session as a flow capture, launches the collector concurrently,
// and then blocks in the interactive display until the user exits.
func runFlowCapture(_ *cobra.Command, _ []string) {
	captureType = "Flow"
	// The collector feeds data in the background; the display owns the
	// foreground until the capture ends.
	go startFlowCollector()
	createDisplay()
}
// startFlowCollector receives flow records over gRPC and fans each one out to
// three sinks: the interactive display (asynchronously), a sqlite database,
// and a plain-text file under ./output/flow/. It returns when the packet
// channel is closed, a stop is requested, or a size/time limit is reached.
func startFlowCollector() {
	if len(filename) > 0 {
		log.Infof("Starting Flow Capture for %s...", filename)
	} else {
		log.Infof("Starting Flow Capture...")
		// Default the output name to the capture start time in UTC; colons
		// are stripped since they are not valid in filenames everywhere.
		filename = strings.ReplaceAll(
			currentTime().UTC().Format(time.RFC3339),
			":", "") // get rid of offensive colons
	}
	var f *os.File
	err := os.MkdirAll("./output/flow/", 0700)
	if err != nil {
		log.Errorf("Create directory failed: %v", err.Error())
		log.Fatal(err)
	}
	log.Debug("Created flow folder")
	f, err = os.Create("./output/flow/" + filename + ".txt")
	if err != nil {
		log.Errorf("Create file %s failed: %v", filename, err.Error())
		log.Fatal(err)
	}
	defer f.Close()
	log.Debug("Created flow logs txt file")

	// Initialize sqlite DB
	db := initFlowDB(filename)
	log.Debug("Initialized database")

	flowPackets := make(chan *genericmap.Flow, 100)
	collector, err := grpc.StartCollector(port, flowPackets)
	if err != nil {
		log.Errorf("StartCollector failed: %v", err.Error())
		return
	}
	log.Debug("Started collector")
	collectorStarted = true

	// Tear everything down once the process is asked to exit; closing
	// flowPackets ends the receive loop below.
	go func() {
		<-utils.ExitChannel()
		log.Debug("Ending collector")
		close(flowPackets)
		collector.Close()
		db.Close()
		log.Debug("Done")
	}()

	log.Debug("Ready ! Waiting for flows...")
	go hearbeat()
	for fp := range flowPackets {
		if !captureStarted {
			log.Debugf("Received first %d flows", len(flowPackets))
		}
		if stopReceived {
			log.Debug("Stop received")
			return
		}
		// parse and display flow async
		go parseGenericMapAndAppendFlow(fp.GenericMap.Value)
		// Write flows to sqlite DB
		err = queryFlowDB(fp.GenericMap.Value, db)
		if err != nil {
			log.Error("Error while writing to DB:", err.Error())
		}
		if !captureStarted {
			log.Debug("Wrote flows to DB")
		}
		// Write the record and a ",\n" separator so the file stays easy to
		// read. FIX: the previous code did
		//   f.Write(append(fp.GenericMap.Value, []byte(",\n")...))
		// which can mutate the slice's backing array while the goroutine
		// launched above is still reading it; writing the two parts
		// separately avoids touching the shared buffer.
		written, err := f.Write(fp.GenericMap.Value)
		if err != nil {
			log.Error(err)
			return
		}
		sep, err := f.WriteString(",\n")
		if err != nil {
			log.Error(err)
			return
		}
		if !captureStarted {
			log.Debug("Wrote flows to json")
		}
		// terminate capture if max bytes reached
		totalBytes += int64(written + sep)
		if totalBytes > maxBytes {
			if exit := onLimitReached(); exit {
				log.Infof("Capture reached %s, exiting now...", sizestr.ToString(maxBytes))
				return
			}
		}
		// terminate capture if max time reached
		duration := currentTime().Sub(startupTime)
		if int(duration) > int(maxTime) {
			if exit := onLimitReached(); exit {
				log.Infof("Capture reached %s, exiting now...", maxTime)
				return
			}
		}
		captureStarted = true
	}
}
// parseGenericMapAndAppendFlow decodes one raw JSON flow record into a
// config.GenericMap and appends it to the display buffer. Records that fail
// to parse are logged and dropped.
func parseGenericMapAndAppendFlow(raw []byte) {
	var gm config.GenericMap
	if err := json.Unmarshal(raw, &gm); err != nil {
		log.Error("Error while parsing json", err)
		return
	}
	if !captureStarted {
		log.Debugf("Parsed genericMap %v", gm)
	}
	AppendFlow(gm)
}