diff --git a/cmd/gor/gor.go b/cmd/gor/gor.go index e1a30a65c..61292a7c6 100644 --- a/cmd/gor/gor.go +++ b/cmd/gor/gor.go @@ -6,8 +6,6 @@ import ( "expvar" "flag" "fmt" - "github.com/buger/goreplay" - "log" "net/http" "net/http/httputil" httppptof "net/http/pprof" @@ -15,10 +13,109 @@ import ( "os/signal" "runtime" "runtime/pprof" + "strings" "syscall" "time" + + "github.com/buger/goreplay/pkg/binary" + "github.com/buger/goreplay/pkg/dummy" + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/file" + gor_http "github.com/buger/goreplay/pkg/http" + "github.com/buger/goreplay/pkg/kafka" + "github.com/buger/goreplay/pkg/null" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/raw" + "github.com/buger/goreplay/pkg/settings" + "github.com/buger/goreplay/pkg/tcp" + "github.com/buger/goreplay/pkg/ws" + + "github.com/rs/zerolog/log" ) +// Settings used for quick access to CLI flags +var Settings = settings.Settings + +// NewPlugins specify and initialize all available plugins +func NewPlugins() *plugin.InOutPlugins { + plugins := new(plugin.InOutPlugins) + + for _, options := range Settings.InputDummy { + plugins.RegisterPlugin(dummy.NewDummyInput, options) + } + + for range Settings.OutputDummy { + plugins.RegisterPlugin(dummy.NewDummyOutput) + } + + if Settings.OutputStdout { + plugins.RegisterPlugin(dummy.NewDummyOutput) + } + + if Settings.OutputNull { + plugins.RegisterPlugin(null.NewNullOutput) + } + + for _, options := range Settings.InputRAW { + plugins.RegisterPlugin(raw.NewRAWInput, options, Settings.InputRAWConfig) + } + + for _, options := range Settings.InputTCP { + plugins.RegisterPlugin(tcp.NewTCPInput, options, &Settings.InputTCPConfig) + } + + for _, options := range Settings.OutputTCP { + plugins.RegisterPlugin(tcp.NewTCPOutput, options, &Settings.OutputTCPConfig) + } + + for _, options := range Settings.OutputWebSocket { + plugins.RegisterPlugin(ws.NewWebSocketOutput, options, &Settings.OutputWebSocketConfig) + 
} + + for _, options := range Settings.InputFile { + plugins.RegisterPlugin(file.NewFileInput, options, Settings.InputFileLoop, Settings.InputFileReadDepth, Settings.InputFileMaxWait, Settings.InputFileDryRun) + } + + for _, path := range Settings.OutputFile { + if strings.HasPrefix(path, "s3://") { + plugins.RegisterPlugin(file.NewS3Output, path, &Settings.OutputFileConfig) + } else { + plugins.RegisterPlugin(file.NewFileOutput, path, &Settings.OutputFileConfig) + } + } + + for _, options := range Settings.InputHTTP { + plugins.RegisterPlugin(gor_http.NewHTTPInput, options) + } + + // If we explicitly set Host header http output should not rewrite it + // Fix: https://github.com/buger/gor/issues/174 + for _, header := range Settings.ModifierConfig.Headers { + if header.Name == "Host" { + Settings.OutputHTTPConfig.OriginalHost = true + break + } + } + + for _, options := range Settings.OutputHTTP { + plugins.RegisterPlugin(gor_http.NewHTTPOutput, options, &Settings.OutputHTTPConfig) + } + + for _, options := range Settings.OutputBinary { + plugins.RegisterPlugin(binary.NewBinaryOutput, options, &Settings.OutputBinaryConfig) + } + + if Settings.OutputKafkaConfig.Host != "" && Settings.OutputKafkaConfig.Topic != "" { + plugins.RegisterPlugin(kafka.NewKafkaOutput, "", &Settings.OutputKafkaConfig, &Settings.KafkaTLSConfig) + } + + if Settings.InputKafkaConfig.Host != "" && Settings.InputKafkaConfig.Topic != "" { + plugins.RegisterPlugin(kafka.NewKafkaInput, "", &Settings.InputKafkaConfig, &Settings.KafkaTLSConfig) + } + + return plugins +} + var ( cpuprofile = flag.String("cpuprofile", "", "write cpu profile to file") memprofile = flag.String("memprofile", "", "write memory profile to this file") @@ -57,11 +154,14 @@ func loggingMiddleware(addr string, next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/loop" { _, err := http.Get("http://" + addr) - log.Println(err) + + if err != nil { + 
log.Error().Err(err).Msg("Error while calling loop endpoint") + } } rb, _ := httputil.DumpRequest(r, false) - log.Println(string(rb)) + log.Info().Msg(string(rb)) next.ServeHTTP(w, r) }) } @@ -72,26 +172,28 @@ func main() { } args := os.Args[1:] - var plugins *goreplay.InOutPlugins + var plugins *plugin.InOutPlugins if len(args) > 0 && args[0] == "file-server" { if len(args) != 2 { - log.Fatal("You should specify port and IP (optional) for the file server. Example: `gor file-server :80`") + log.Fatal().Msg("You should specify port and IP (optional) for the file server. Example: `gor file-server :80`") } dir, _ := os.Getwd() - goreplay.Debug(0, "Started example file server for current directory on address ", args[1]) + log.Info().Msgf("Started example file server for current directory on address %s", args[1]) - log.Fatal(http.ListenAndServe(args[1], loggingMiddleware(args[1], http.FileServer(http.Dir(dir))))) + if err := http.ListenAndServe(args[1], loggingMiddleware(args[1], http.FileServer(http.Dir(dir)))); err != nil { + log.Fatal().Err(err).Msg("Failed to start file server") + } } else { flag.Parse() - goreplay.CheckSettings() - plugins = goreplay.NewPlugins() + settings.CheckSettings() + plugins = NewPlugins() } - log.Printf("[PPID %d and PID %d] Version:%s\n", os.Getppid(), os.Getpid(), goreplay.VERSION) + log.Printf("[PPID %d and PID %d] Version:%s\n", os.Getppid(), os.Getpid(), settings.VERSION) if len(plugins.Inputs) == 0 || len(plugins.Outputs) == 0 { - log.Fatal("Required at least 1 input and 1 output") + log.Fatal().Msg("Required at least 1 input and 1 output") } if *memprofile != "" { @@ -102,20 +204,22 @@ func main() { profileCPU(*cpuprofile) } - if goreplay.Settings.Pprof != "" { + if settings.Settings.Pprof != "" { go func() { - log.Println(http.ListenAndServe(goreplay.Settings.Pprof, nil)) + if err := http.ListenAndServe(settings.Settings.Pprof, nil); err != nil { + log.Fatal().Err(err).Msg("Failed to start pprof server") + } }() } closeCh := 
make(chan int) - emitter := goreplay.NewEmitter() - go emitter.Start(plugins, goreplay.Settings.Middleware) - if goreplay.Settings.ExitAfter > 0 { - log.Printf("Running gor for a duration of %s\n", goreplay.Settings.ExitAfter) + emitter := emitter.New(&settings.Settings.EmitterConfig) + go emitter.Start(plugins) + if settings.Settings.ExitAfter > 0 { + log.Printf("Running gor for a duration of %s\n", settings.Settings.ExitAfter) - time.AfterFunc(goreplay.Settings.ExitAfter, func() { - log.Printf("gor run timeout %s\n", goreplay.Settings.ExitAfter) + time.AfterFunc(settings.Settings.ExitAfter, func() { + log.Printf("gor run timeout %s\n", settings.Settings.ExitAfter) close(closeCh) }) } @@ -136,7 +240,7 @@ func profileCPU(cpuprofile string) { if cpuprofile != "" { f, err := os.Create(cpuprofile) if err != nil { - log.Fatal(err) + log.Fatal().Err(err).Msg("Failed to create cpu profile file") } pprof.StartCPUProfile(f) @@ -151,7 +255,7 @@ func profileMEM(memprofile string) { if memprofile != "" { f, err := os.Create(memprofile) if err != nil { - log.Fatal(err) + log.Fatal().Err(err).Msg("Failed to create memory profile file") } time.AfterFunc(30*time.Second, func() { pprof.WriteHeapProfile(f) diff --git a/emitter.go b/emitter.go deleted file mode 100644 index 6680c896a..000000000 --- a/emitter.go +++ /dev/null @@ -1,159 +0,0 @@ -package goreplay - -import ( - "fmt" - "github.com/buger/goreplay/internal/byteutils" - "hash/fnv" - "io" - "log" - "sync" - - "github.com/coocood/freecache" -) - -// Emitter represents an abject to manage plugins communication -type Emitter struct { - sync.WaitGroup - plugins *InOutPlugins -} - -// NewEmitter creates and initializes new Emitter object. 
-func NewEmitter() *Emitter { - return &Emitter{} -} - -// Start initialize loop for sending data from inputs to outputs -func (e *Emitter) Start(plugins *InOutPlugins, middlewareCmd string) { - if Settings.CopyBufferSize < 1 { - Settings.CopyBufferSize = 5 << 20 - } - e.plugins = plugins - - if middlewareCmd != "" { - middleware := NewMiddleware(middlewareCmd) - - for _, in := range plugins.Inputs { - middleware.ReadFrom(in) - } - - e.plugins.Inputs = append(e.plugins.Inputs, middleware) - e.plugins.All = append(e.plugins.All, middleware) - e.Add(1) - go func() { - defer e.Done() - if err := CopyMulty(middleware, plugins.Outputs...); err != nil { - Debug(2, fmt.Sprintf("[EMITTER] error during copy: %q", err)) - } - }() - } else { - for _, in := range plugins.Inputs { - e.Add(1) - go func(in PluginReader) { - defer e.Done() - if err := CopyMulty(in, plugins.Outputs...); err != nil { - Debug(2, fmt.Sprintf("[EMITTER] error during copy: %q", err)) - } - }(in) - } - } -} - -// Close closes all the goroutine and waits for it to finish. 
-func (e *Emitter) Close() { - for _, p := range e.plugins.All { - if cp, ok := p.(io.Closer); ok { - cp.Close() - } - } - if len(e.plugins.All) > 0 { - // wait for everything to stop - e.Wait() - } - e.plugins.All = nil // avoid Close to make changes again -} - -// CopyMulty copies from 1 reader to multiple writers -func CopyMulty(src PluginReader, writers ...PluginWriter) error { - wIndex := 0 - modifier := NewHTTPModifier(&Settings.ModifierConfig) - filteredRequests := freecache.NewCache(200 * 1024 * 1024) // 200M - - for { - msg, err := src.PluginRead() - if err != nil { - if err == ErrorStopped || err == io.EOF { - return nil - } - return err - } - if msg != nil && len(msg.Data) > 0 { - if len(msg.Data) > int(Settings.CopyBufferSize) { - msg.Data = msg.Data[:Settings.CopyBufferSize] - } - meta := payloadMeta(msg.Meta) - if len(meta) < 3 { - Debug(2, fmt.Sprintf("[EMITTER] Found malformed record %q from %q", msg.Meta, src)) - continue - } - requestID := meta[1] - // start a subroutine only when necessary - if Settings.Verbose >= 3 { - Debug(3, "[EMITTER] input: ", byteutils.SliceToString(msg.Meta[:len(msg.Meta)-1]), " from: ", src) - } - if modifier != nil { - Debug(3, "[EMITTER] modifier:", requestID, "from:", src) - if isRequestPayload(msg.Meta) { - msg.Data = modifier.Rewrite(msg.Data) - // If modifier tells to skip request - if len(msg.Data) == 0 { - filteredRequests.Set(requestID, []byte{}, 60) // - continue - } - Debug(3, "[EMITTER] Rewritten input:", requestID, "from:", src) - - } else { - _, err := filteredRequests.Get(requestID) - if err == nil { - filteredRequests.Del(requestID) - continue - } - } - } - - if Settings.PrettifyHTTP { - msg.Data = prettifyHTTP(msg.Data) - if len(msg.Data) == 0 { - continue - } - } - - if Settings.SplitOutput { - if Settings.RecognizeTCPSessions { - if !PRO { - log.Fatal("Detailed TCP sessions work only with PRO license") - } - hasher := fnv.New32a() - hasher.Write(meta[1]) - - wIndex = int(hasher.Sum32()) % len(writers) 
- if _, err := writers[wIndex].PluginWrite(msg); err != nil { - return err - } - } else { - // Simple round robin - if _, err := writers[wIndex].PluginWrite(msg); err != nil { - return err - } - - wIndex = (wIndex + 1) % len(writers) - } - } else { - for _, dst := range writers { - if _, err := dst.PluginWrite(msg); err != nil && err != io.ErrClosedPipe { - return err - } - } - } - } - } -} diff --git a/emitter_test.go b/emitter_test.go deleted file mode 100644 index b95d701fc..000000000 --- a/emitter_test.go +++ /dev/null @@ -1,263 +0,0 @@ -package goreplay - -import ( - "fmt" - "os" - "sync" - "sync/atomic" - "testing" - "time" -) - -func TestMain(m *testing.M) { - PRO = true - code := m.Run() - os.Exit(code) -} - -func TestEmitter(t *testing.T) { - wg := new(sync.WaitGroup) - - input := NewTestInput() - output := NewTestOutput(func(*Message) { - wg.Done() - }) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, - } - plugins.All = append(plugins.All, input, output) - - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) - - for i := 0; i < 1000; i++ { - wg.Add(1) - input.EmitGET() - } - - wg.Wait() - emitter.Close() -} - -func TestEmitterFiltered(t *testing.T) { - wg := new(sync.WaitGroup) - - input := NewTestInput() - input.skipHeader = true - - output := NewTestOutput(func(*Message) { - wg.Done() - }) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, - } - plugins.All = append(plugins.All, input, output) - - methods := HTTPMethods{[]byte("GET")} - Settings.ModifierConfig = HTTPModifierConfig{Methods: methods} - - emitter := &Emitter{} - go emitter.Start(plugins, "") - - wg.Add(2) - - id := uuid() - reqh := payloadHeader(RequestPayload, id, time.Now().UnixNano(), -1) - reqb := append(reqh, []byte("POST / HTTP/1.1\r\nHost: www.w3.org\r\nUser-Agent: Go 1.1 package http\r\nAccept-Encoding: gzip\r\n\r\n")...) 
- - resh := payloadHeader(ResponsePayload, id, time.Now().UnixNano()+1, 1) - respb := append(resh, []byte("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")...) - - input.EmitBytes(reqb) - input.EmitBytes(respb) - - id = uuid() - reqh = payloadHeader(RequestPayload, id, time.Now().UnixNano(), -1) - reqb = append(reqh, []byte("GET / HTTP/1.1\r\nHost: www.w3.org\r\nUser-Agent: Go 1.1 package http\r\nAccept-Encoding: gzip\r\n\r\n")...) - - resh = payloadHeader(ResponsePayload, id, time.Now().UnixNano()+1, 1) - respb = append(resh, []byte("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")...) - - input.EmitBytes(reqb) - input.EmitBytes(respb) - - wg.Wait() - emitter.Close() - - Settings.ModifierConfig = HTTPModifierConfig{} -} - -func TestEmitterSplitRoundRobin(t *testing.T) { - wg := new(sync.WaitGroup) - - input := NewTestInput() - - var counter1, counter2 int32 - - output1 := NewTestOutput(func(*Message) { - atomic.AddInt32(&counter1, 1) - wg.Done() - }) - - output2 := NewTestOutput(func(*Message) { - atomic.AddInt32(&counter2, 1) - wg.Done() - }) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output1, output2}, - } - - Settings.SplitOutput = true - - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) - - for i := 0; i < 1000; i++ { - wg.Add(1) - input.EmitGET() - } - - wg.Wait() - - emitter.Close() - - if counter1 == 0 || counter2 == 0 || counter1 != counter2 { - t.Errorf("Round robin should split traffic equally: %d vs %d", counter1, counter2) - } - - Settings.SplitOutput = false -} - -func TestEmitterRoundRobin(t *testing.T) { - wg := new(sync.WaitGroup) - - input := NewTestInput() - - var counter1, counter2 int32 - - output1 := NewTestOutput(func(*Message) { - counter1++ - wg.Done() - }) - - output2 := NewTestOutput(func(*Message) { - counter2++ - wg.Done() - }) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output1, output2}, - } - plugins.All = 
append(plugins.All, input, output1, output2) - - Settings.SplitOutput = true - - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) - - for i := 0; i < 1000; i++ { - wg.Add(1) - input.EmitGET() - } - - wg.Wait() - emitter.Close() - - if counter1 == 0 || counter2 == 0 { - t.Errorf("Round robin should split traffic equally: %d vs %d", counter1, counter2) - } - - Settings.SplitOutput = false -} - -func TestEmitterSplitSession(t *testing.T) { - wg := new(sync.WaitGroup) - wg.Add(200) - - input := NewTestInput() - input.skipHeader = true - - var counter1, counter2 int32 - - output1 := NewTestOutput(func(msg *Message) { - if payloadID(msg.Meta)[0] == 'a' { - counter1++ - } - wg.Done() - }) - - output2 := NewTestOutput(func(msg *Message) { - if payloadID(msg.Meta)[0] == 'b' { - counter2++ - } - wg.Done() - }) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output1, output2}, - } - - Settings.SplitOutput = true - Settings.RecognizeTCPSessions = true - - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) - - for i := 0; i < 200; i++ { - // Keep session but randomize - id := make([]byte, 20) - if i&1 == 0 { // for recognizeTCPSessions one should be odd and other will be even number - id[0] = 'a' - } else { - id[0] = 'b' - } - input.EmitBytes([]byte(fmt.Sprintf("1 %s 1 1\nGET / HTTP/1.1\r\n\r\n", id[:20]))) - } - - wg.Wait() - - if counter1 != counter2 { - t.Errorf("Round robin should split traffic equally: %d vs %d", counter1, counter2) - } - - Settings.SplitOutput = false - Settings.RecognizeTCPSessions = false - emitter.Close() -} - -func BenchmarkEmitter(b *testing.B) { - wg := new(sync.WaitGroup) - - input := NewTestInput() - - output := NewTestOutput(func(*Message) { - wg.Done() - }) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, - } - plugins.All = append(plugins.All, input, output) - - emitter := NewEmitter() - go 
emitter.Start(plugins, Settings.Middleware) - - b.ResetTimer() - - for i := 0; i < b.N; i++ { - wg.Add(1) - input.EmitGET() - } - - wg.Wait() - emitter.Close() -} diff --git a/examples/middleware/token_modifier.go b/examples/middleware/token_modifier.go index b623483ea..8c34a05dc 100644 --- a/examples/middleware/token_modifier.go +++ b/examples/middleware/token_modifier.go @@ -24,8 +24,9 @@ import ( "bytes" "encoding/hex" "fmt" - "github.com/buger/goreplay/proto" "os" + + proto "github.com/buger/goreplay/pkg/http_proto" ) // requestID -> originalToken diff --git a/go.mod b/go.mod index 43eab40a9..a1906116a 100644 --- a/go.mod +++ b/go.mod @@ -10,10 +10,11 @@ require ( github.com/gorilla/websocket v1.4.2 github.com/klauspost/compress v1.11.13 // indirect github.com/mattbaird/elastigo v0.0.0-20170123220020-2fe47fd29e4b + github.com/rs/zerolog v1.28.0 github.com/stretchr/testify v1.7.0 github.com/xdg-go/scram v1.1.1 golang.org/x/net v0.0.0-20211209124913-491a49abca63 - golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e + golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6 k8s.io/apimachinery v0.23.3 k8s.io/client-go v0.23.3 ) @@ -38,6 +39,8 @@ require ( github.com/jcmturner/gofork v1.0.0 // indirect github.com/jmespath/go-jmespath v0.3.0 // indirect github.com/json-iterator/go v1.1.12 // indirect + github.com/mattn/go-colorable v0.1.12 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pierrec/lz4 v2.4.1+incompatible // indirect diff --git a/go.sum b/go.sum index 521c6a92f..ef9680375 100644 --- a/go.sum +++ b/go.sum @@ -75,6 +75,7 @@ github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnht github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/coocood/freecache v1.2.0 h1:p8RhjN6Y4DRBIMzdRlm1y+M7h7YJxye3lGW8/VvzCz0= 
github.com/coocood/freecache v1.2.0/go.mod h1:OKrEjkGVoxZhyWAJoeFi5BMLUJm2Tit0kpGkIr7NGYY= +github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -118,6 +119,7 @@ github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34 github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -235,6 +237,10 @@ github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mattbaird/elastigo v0.0.0-20170123220020-2fe47fd29e4b h1:v29yPGHhOqw7VHEnTeQFAth3SsBrmwc8JfuhNY0G34k= github.com/mattbaird/elastigo v0.0.0-20170123220020-2fe47fd29e4b/go.mod h1:5MWrJXKRQyhQdUCF+vu6U5c4nQpg70vW3eHaU0/AYbU= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod 
h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -268,6 +274,9 @@ github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1: github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563 h1:dY6ETXrvDG7Sa4vE8ZQG4yqWg6UnOcbqTAahkV813vQ= github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.28.0 h1:MirSo27VyNi7RJYP3078AA1+Cyzd2GB66qy3aUHvsWY= +github.com/rs/zerolog v1.28.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0= github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs= github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v1.7.2 h1:9RBaZCeXEQ3UselpuwUQHltGVXvdwm6cv1hgR6gDIPg= @@ -460,8 +469,10 @@ golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e h1:XMgFehsDnnLGtjvjOfqWSUzt0alpTR1RSEuznObga2c= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6 h1:foEbQz/B0Oz6YIqu/69kfXPYeFQAuuMYFkjaqXzl5Wo= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b h1:9zKuko04nR4gjZ4+DNjHqRlAJqbJETHwiNKDqTfOjfE= golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= diff --git a/gor_stat.go b/gor_stat.go deleted file mode 100644 index 46ce35806..000000000 --- a/gor_stat.go +++ /dev/null @@ -1,64 +0,0 @@ -package goreplay - -import ( - "runtime" - "strconv" - "time" -) - -type GorStat struct { - statName string - rateMs int - latest int - mean int - max int - count int -} - -func NewGorStat(statName string, rateMs int) (s *GorStat) { - s = new(GorStat) - s.statName = statName - s.rateMs = rateMs - s.latest = 0 - s.mean = 0 - s.max = 0 - s.count = 0 - - if Settings.Stats { - go s.reportStats() - } - return -} - -func (s *GorStat) Write(latest int) { - if Settings.Stats { - if latest > s.max { - s.max = latest - } - if latest != 0 { - s.mean = ((s.mean * s.count) + latest) / (s.count + 1) - } - s.latest = latest - s.count = s.count + 1 - } -} - -func (s *GorStat) Reset() { - s.latest = 0 - s.max = 0 - s.mean = 0 - s.count = 0 -} - -func (s *GorStat) String() string { - return s.statName + ":" + strconv.Itoa(s.latest) + "," + strconv.Itoa(s.mean) + "," + strconv.Itoa(s.max) + "," + strconv.Itoa(s.count) + "," + strconv.Itoa(s.count/(s.rateMs/1000.0)) + "," + strconv.Itoa(runtime.NumGoroutine()) -} - -func (s *GorStat) reportStats() { - Debug(0, "\n", s.statName+":latest,mean,max,count,count/second,gcount") - for { - Debug(0, "\n", s) - s.Reset() - time.Sleep(time.Duration(s.rateMs) * time.Millisecond) - } -} diff --git a/http_prettifier.go b/http_prettifier.go deleted 
file mode 100644 index 596c6b244..000000000 --- a/http_prettifier.go +++ /dev/null @@ -1,66 +0,0 @@ -package goreplay - -import ( - "bytes" - "compress/gzip" - "fmt" - "github.com/buger/goreplay/proto" - "io/ioutil" - "net/http/httputil" - "strconv" -) - -func prettifyHTTP(p []byte) []byte { - - tEnc := bytes.Equal(proto.Header(p, []byte("Transfer-Encoding")), []byte("chunked")) - cEnc := bytes.Equal(proto.Header(p, []byte("Content-Encoding")), []byte("gzip")) - - if !(tEnc || cEnc) { - return p - } - - headersPos := proto.MIMEHeadersEndPos(p) - - if headersPos < 5 || headersPos > len(p) { - return p - } - - headers := p[:headersPos] - content := p[headersPos:] - - if tEnc { - buf := bytes.NewReader(content) - r := httputil.NewChunkedReader(buf) - content, _ = ioutil.ReadAll(r) - - headers = proto.DeleteHeader(headers, []byte("Transfer-Encoding")) - - newLen := strconv.Itoa(len(content)) - headers = proto.SetHeader(headers, []byte("Content-Length"), []byte(newLen)) - } - - if cEnc { - buf := bytes.NewReader(content) - g, err := gzip.NewReader(buf) - - if err != nil { - Debug(1, "[Prettifier] GZIP encoding error:", err) - return []byte{} - } - - content, err = ioutil.ReadAll(g) - if err != nil { - Debug(1, fmt.Sprintf("[HTTP-PRETTIFIER] %q", err)) - return p - } - - headers = proto.DeleteHeader(headers, []byte("Content-Encoding")) - - newLen := strconv.Itoa(len(content)) - headers = proto.SetHeader(headers, []byte("Content-Length"), []byte(newLen)) - } - - newPayload := append(headers, content...) 
- - return newPayload -} diff --git a/internal/capture/capture.go b/internal/capture/capture.go index b2a31f1d8..fb1635087 100644 --- a/internal/capture/capture.go +++ b/internal/capture/capture.go @@ -5,9 +5,6 @@ import ( "errors" "expvar" "fmt" - "github.com/buger/goreplay/internal/size" - "github.com/buger/goreplay/internal/tcp" - "github.com/buger/goreplay/proto" "io" "log" "net" @@ -18,6 +15,10 @@ import ( "syscall" "time" + "github.com/buger/goreplay/internal/size" + "github.com/buger/goreplay/internal/tcp" + "github.com/buger/goreplay/pkg/http_proto" + "github.com/google/gopacket" "github.com/google/gopacket/layers" "github.com/google/gopacket/pcap" @@ -500,11 +501,11 @@ func (l *Listener) SocketHandle(ifi pcap.Interface) (handle Socket, err error) { } func http1StartHint(pckt *tcp.Packet) (isRequest, isResponse bool) { - if proto.HasRequestTitle(pckt.Payload) { + if http_proto.HasRequestTitle(pckt.Payload) { return true, false } - if proto.HasResponseTitle(pckt.Payload) { + if http_proto.HasResponseTitle(pckt.Payload) { return false, true } @@ -518,7 +519,7 @@ func http1EndHint(m *tcp.Message) bool { } req, res := http1StartHint(m.Packets()[0]) - return proto.HasFullPayload(m, m.PacketData()...) && (req || res) + return http_proto.HasFullPayload(m, m.PacketData()...) 
&& (req || res) } func (l *Listener) readHandle(key string, hndl packetHandle) { diff --git a/internal/tcp/tcp_message.go b/internal/tcp/tcp_message.go index e2a2188aa..1f9701e59 100644 --- a/internal/tcp/tcp_message.go +++ b/internal/tcp/tcp_message.go @@ -4,12 +4,13 @@ import ( "encoding/binary" "encoding/hex" "fmt" - "github.com/buger/goreplay/proto" "net" "reflect" "sort" "time" "unsafe" + + "github.com/buger/goreplay/pkg/http_proto" ) // TCPProtocol is a number to indicate type of protocol @@ -169,9 +170,9 @@ func (m *Message) Data() []byte { } // Remove Expect header, since its replay not fully supported - if state, ok := m.feedback.(*proto.HTTPState); ok { + if state, ok := m.feedback.(*http_proto.HTTPState); ok { if state.Continue100 { - tmp = proto.DeleteHeader(tmp, []byte("Expect")) + tmp = http_proto.DeleteHeader(tmp, []byte("Expect")) } } @@ -376,7 +377,7 @@ func (parser *MessageParser) addPacket(m *Message, pckt *Packet) bool { func (parser *MessageParser) Fix100Continue(m *Message) { // Only adjust a message once - if state, ok := m.feedback.(*proto.HTTPState); ok && state.Continue100 && !m.continueAdjusted { + if state, ok := m.feedback.(*http_proto.HTTPState); ok && state.Continue100 && !m.continueAdjusted { // Shift Ack by given offset // Size of "HTTP/1.1 100 Continue\r\n\r\n" message for _, p := range m.packets { diff --git a/internal/tcp/tcp_test.go b/internal/tcp/tcp_test.go index e820d9c28..3dfaaae36 100644 --- a/internal/tcp/tcp_test.go +++ b/internal/tcp/tcp_test.go @@ -3,16 +3,14 @@ package tcp import ( "bytes" "encoding/binary" - "github.com/buger/goreplay/proto" - - // "runtime" "testing" "time" - "github.com/stretchr/testify/assert" + proto "github.com/buger/goreplay/pkg/http_proto" "github.com/google/gopacket" "github.com/google/gopacket/layers" + "github.com/stretchr/testify/assert" ) func generateHeader(request bool, seq uint32, length uint16) []byte { diff --git a/limiter_test.go b/limiter_test.go deleted file mode 100644 index 
6408dd2d8..000000000 --- a/limiter_test.go +++ /dev/null @@ -1,111 +0,0 @@ -//go:build !race - -package goreplay - -import ( - "sync" - "testing" -) - -func TestOutputLimiter(t *testing.T) { - wg := new(sync.WaitGroup) - - input := NewTestInput() - output := NewLimiter(NewTestOutput(func(*Message) { - wg.Done() - }), "10") - wg.Add(10) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, - } - plugins.All = append(plugins.All, input, output) - - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) - - for i := 0; i < 100; i++ { - input.EmitGET() - } - - wg.Wait() - emitter.Close() -} - -func TestInputLimiter(t *testing.T) { - wg := new(sync.WaitGroup) - - input := NewLimiter(NewTestInput(), "10") - output := NewTestOutput(func(*Message) { - wg.Done() - }) - wg.Add(10) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, - } - plugins.All = append(plugins.All, input, output) - - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) - - for i := 0; i < 100; i++ { - input.(*Limiter).plugin.(*TestInput).EmitGET() - } - - wg.Wait() - emitter.Close() -} - -// Should limit all requests -func TestPercentLimiter1(t *testing.T) { - wg := new(sync.WaitGroup) - - input := NewTestInput() - output := NewLimiter(NewTestOutput(func(*Message) { - wg.Done() - }), "0%") - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, - } - plugins.All = append(plugins.All, input, output) - - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) - - for i := 0; i < 100; i++ { - input.EmitGET() - } - - wg.Wait() -} - -// Should not limit at all -func TestPercentLimiter2(t *testing.T) { - wg := new(sync.WaitGroup) - - input := NewTestInput() - output := NewLimiter(NewTestOutput(func(*Message) { - wg.Done() - }), "100%") - wg.Add(100) - - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: 
[]PluginWriter{output}, - } - plugins.All = append(plugins.All, input, output) - - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) - - for i := 0; i < 100; i++ { - input.EmitGET() - } - - wg.Wait() -} diff --git a/output_binary.go b/pkg/binary/output_binary.go similarity index 74% rename from output_binary.go rename to pkg/binary/output_binary.go index 7f7757340..9effcb2da 100644 --- a/output_binary.go +++ b/pkg/binary/output_binary.go @@ -1,11 +1,22 @@ -package goreplay +package binary import ( - "github.com/buger/goreplay/internal/size" "sync/atomic" "time" + + "github.com/buger/goreplay/internal/size" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + + "github.com/rs/zerolog/log" ) +const ( + initialDynamicWorkers = 10 +) + +var outputLogger = log.With().Str("component", "output_binary").Logger() + // BinaryOutputConfig struct for holding binary output configuration type BinaryOutputConfig struct { Workers int `json:"output-binary-workers"` @@ -24,24 +35,23 @@ type BinaryOutput struct { // aligned at 64bit. 
See https://github.com/golang/go/issues/599 activeWorkers int64 address string - queue chan *Message - responses chan response + queue chan *plugin.Message + responses chan plugin.Response needWorker chan int quit chan struct{} config *BinaryOutputConfig - queueStats *GorStat } // NewBinaryOutput constructor for BinaryOutput // Initialize workers -func NewBinaryOutput(address string, config *BinaryOutputConfig) PluginReadWriter { +func NewBinaryOutput(address string, config *BinaryOutputConfig) plugin.ReadWriter { o := new(BinaryOutput) o.address = address o.config = config - o.queue = make(chan *Message, 1000) - o.responses = make(chan response, 1000) + o.queue = make(chan *plugin.Message, 1000) + o.responses = make(chan plugin.Response, 1000) o.needWorker = make(chan int, 1) o.quit = make(chan struct{}) @@ -109,8 +119,8 @@ func (o *BinaryOutput) startWorker() { } // PluginWrite writes a message tothis plugin -func (o *BinaryOutput) PluginWrite(msg *Message) (n int, err error) { - if !isRequestPayload(msg.Meta) { +func (o *BinaryOutput) PluginWrite(msg *plugin.Message) (n int, err error) { + if !proto.IsRequestPayload(msg.Meta) { return len(msg.Data), nil } @@ -128,37 +138,37 @@ func (o *BinaryOutput) PluginWrite(msg *Message) (n int, err error) { } // PluginRead reads a message from this plugin -func (o *BinaryOutput) PluginRead() (*Message, error) { - var resp response - var msg Message +func (o *BinaryOutput) PluginRead() (*plugin.Message, error) { + var resp plugin.Response + var msg plugin.Message select { case <-o.quit: - return nil, ErrorStopped + return nil, plugin.ErrorStopped case resp = <-o.responses: } - msg.Data = resp.payload - msg.Meta = payloadHeader(ReplayedResponsePayload, resp.uuid, resp.startedAt, resp.roundTripTime) + msg.Data = resp.Payload + msg.Meta = proto.PayloadHeader(proto.ReplayedResponsePayload, resp.UUID, resp.StartedAt, resp.RoundTripTime) return &msg, nil } -func (o *BinaryOutput) sendRequest(client *TCPClient, msg *Message) { - if 
!isRequestPayload(msg.Meta) { +func (o *BinaryOutput) sendRequest(client *TCPClient, msg *plugin.Message) { + if !proto.IsRequestPayload(msg.Meta) { return } - uuid := payloadID(msg.Meta) + uuid := proto.PayloadID(msg.Meta) start := time.Now() resp, err := client.Send(msg.Data) stop := time.Now() if err != nil { - Debug(1, "Request error:", err) + outputLogger.Error().Err(err).Msg("Request error") } if o.config.TrackResponses { - o.responses <- response{resp, uuid, start.UnixNano(), stop.UnixNano() - start.UnixNano()} + o.responses <- plugin.Response{resp, uuid, start.UnixNano(), stop.UnixNano() - start.UnixNano()} } } diff --git a/tcp_client.go b/pkg/binary/tcp_client.go similarity index 77% rename from tcp_client.go rename to pkg/binary/tcp_client.go index 156ffcac4..adec39e6f 100644 --- a/tcp_client.go +++ b/pkg/binary/tcp_client.go @@ -1,14 +1,18 @@ -package goreplay +package binary import ( "crypto/tls" "io" "net" - "runtime/debug" "syscall" "time" ) +const ( + maxResponseSize = 1073741824 + readChunkSize = 64 * 1024 +) + // TCPClientConfig client configuration type TCPClientConfig struct { Debug bool @@ -70,7 +74,8 @@ func (c *TCPClient) Disconnect() { if c.conn != nil { c.conn.Close() c.conn = nil - Debug(1, "[TCPClient] Disconnected: ", c.baseURL) + + outputLogger.Warn().Msgf("Disconnected: %s", c.baseURL) } } @@ -84,10 +89,10 @@ func (c *TCPClient) isAlive() bool { if err == nil { return true } else if err == io.EOF { - Debug(1, "[TCPClient] connection closed, reconnecting") + outputLogger.Warn().Msg("connection closed, reconnecting") return false } else if err == syscall.EPIPE { - Debug(1, "Detected broken pipe.", err) + outputLogger.Warn().Msg("broken pipe, reconnecting") return false } @@ -99,19 +104,18 @@ func (c *TCPClient) Send(data []byte) (response []byte, err error) { // Don't exit on panic defer func() { if r := recover(); r != nil { - Debug(1, "[TCPClient]", r, string(data)) + outputLogger.Error().Msgf("PANIC: pkg: %v", r) if _, ok := r.(error); 
!ok { - Debug(1, "[TCPClient] Failed to send request: ", string(data)) - Debug(1, "PANIC: pkg:", r, debug.Stack()) + outputLogger.Error().Stack().Msgf("failed to send request: %s", string(data)) } } }() if c.conn == nil || !c.isAlive() { - Debug(1, "[TCPClient] Connecting:", c.baseURL) + outputLogger.Info().Msgf("Connecting: %s", c.baseURL) if err = c.Connect(); err != nil { - Debug(1, "[TCPClient] Connection error:", err) + outputLogger.Error().Err(err).Msgf("Connection error: %s", c.baseURL) return } } @@ -121,11 +125,11 @@ func (c *TCPClient) Send(data []byte) (response []byte, err error) { c.conn.SetWriteDeadline(timeout) if c.config.Debug { - Debug(1, "[TCPClient] Sending:", string(data)) + outputLogger.Debug().Msgf("Sending: %s", string(data)) } if _, err = c.conn.Write(data); err != nil { - Debug(1, "[TCPClient] Write error:", err, c.baseURL) + outputLogger.Error().Err(err).Msgf("Write error: %s", c.baseURL) return } @@ -156,7 +160,7 @@ func (c *TCPClient) Send(data []byte) (response []byte, err error) { if err == io.EOF { break } else if err != nil { - Debug(1, "[TCPClient] Read the whole body error:", err, c.baseURL) + outputLogger.Error().Err(err).Msgf("Read error: %s", c.baseURL) break } @@ -164,8 +168,7 @@ func (c *TCPClient) Send(data []byte) (response []byte, err error) { } if readBytes >= maxResponseSize { - Debug(1, "[TCPClient] Body is more than the max size", maxResponseSize, - c.baseURL) + outputLogger.Error().Msgf("Body is more than the max size: %d", maxResponseSize) break } @@ -174,7 +177,7 @@ func (c *TCPClient) Send(data []byte) (response []byte, err error) { if err != nil { - Debug(1, "[TCPClient] Response read error", err, c.conn, readBytes) + outputLogger.Error().Err(err).Msgf("Response read error") return } @@ -186,7 +189,7 @@ func (c *TCPClient) Send(data []byte) (response []byte, err error) { copy(payload, c.respBuf[:readBytes]) if c.config.Debug { -
outputLogger.Debug().Msgf("Received: %s", string(payload)) } return payload, err diff --git a/input_dummy.go b/pkg/dummy/input_dummy.go similarity index 67% rename from input_dummy.go rename to pkg/dummy/input_dummy.go index cca7fe34f..b20ad1e16 100644 --- a/input_dummy.go +++ b/pkg/dummy/input_dummy.go @@ -1,7 +1,10 @@ -package goreplay +package dummy import ( "time" + + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" ) // DummyInput used for debugging. It generate 1 "GET /"" request per second. @@ -22,13 +25,13 @@ func NewDummyInput(options string) (di *DummyInput) { } // PluginRead reads message from this plugin -func (i *DummyInput) PluginRead() (*Message, error) { - var msg Message +func (i *DummyInput) PluginRead() (*plugin.Message, error) { + var msg plugin.Message select { case <-i.quit: - return nil, ErrorStopped + return nil, plugin.ErrorStopped case buf := <-i.data: - msg.Meta, msg.Data = payloadMetaWithBody(buf) + msg.Meta, msg.Data = proto.PayloadMetaWithBody(buf) return &msg, nil } } @@ -37,11 +40,11 @@ func (i *DummyInput) emit() { ticker := time.NewTicker(time.Second) for range ticker.C { - uuid := uuid() - reqh := payloadHeader(RequestPayload, uuid, time.Now().UnixNano(), -1) + uuid := proto.UUID() + reqh := proto.PayloadHeader(proto.RequestPayload, uuid, time.Now().UnixNano(), -1) i.data <- append(reqh, []byte("GET / HTTP/1.1\r\nHost: www.w3.org\r\nUser-Agent: Go 1.1 package http\r\nAccept-Encoding: gzip\r\n\r\n")...) - resh := payloadHeader(ResponsePayload, uuid, time.Now().UnixNano()+1, 1) + resh := proto.PayloadHeader(proto.ResponsePayload, uuid, time.Now().UnixNano()+1, 1) i.data <- append(resh, []byte("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")...) 
} } diff --git a/output_dummy.go b/pkg/dummy/output_dummy.go similarity index 55% rename from output_dummy.go rename to pkg/dummy/output_dummy.go index 32ab794c4..73bc1b98f 100644 --- a/output_dummy.go +++ b/pkg/dummy/output_dummy.go @@ -1,7 +1,10 @@ -package goreplay +package dummy import ( "os" + + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" ) // DummyOutput used for debugging, prints all incoming requests @@ -16,16 +19,15 @@ func NewDummyOutput() (di *DummyOutput) { } // PluginWrite writes message to this plugin -func (i *DummyOutput) PluginWrite(msg *Message) (int, error) { +func (i *DummyOutput) PluginWrite(msg *plugin.Message) (int, error) { var n, nn int - var err error - n, err = os.Stdout.Write(msg.Meta) - nn, err = os.Stdout.Write(msg.Data) + n, _ = os.Stdout.Write(msg.Meta) + nn, _ = os.Stdout.Write(msg.Data) n += nn - nn, err = os.Stdout.Write(payloadSeparatorAsBytes) + nn, _ = os.Stdout.Write(proto.PayloadSeparatorAsBytes) n += nn - return n, err + return n, nil } func (i *DummyOutput) String() string { diff --git a/elasticsearch.go b/pkg/elasticsearch/elasticsearch.go similarity index 92% rename from elasticsearch.go rename to pkg/elasticsearch/elasticsearch.go index e979a40f1..e578d7b4b 100644 --- a/elasticsearch.go +++ b/pkg/elasticsearch/elasticsearch.go @@ -1,16 +1,20 @@ -package goreplay +package elasticsearch import ( "encoding/json" - "github.com/buger/goreplay/proto" - "log" "net/url" "strings" "time" + proto "github.com/buger/goreplay/pkg/http_proto" + elastigo "github.com/mattbaird/elastigo/lib" + + "github.com/rs/zerolog/log" ) +var logger = log.With().Str("component", "elasticsearch").Logger() + type ESUriErorr struct{} func (e *ESUriErorr) Error() string { @@ -87,7 +91,7 @@ func (p *ESPlugin) Init(URI string) { err, p.Index = parseURI(URI) if err != nil { - log.Fatal("Can't initialize ElasticSearch plugin.", err) + logger.Fatal().Err(err).Msg("Can't initialize ElasticSearch plugin.") } p.eConn = 
elastigo.NewConn() @@ -100,7 +104,7 @@ func (p *ESPlugin) Init(URI string) { go p.ErrorHandler() - Debug(1, "Initialized Elasticsearch Plugin") + logger.Info().Msg("Initialized Elasticsearch Plugin") return } @@ -112,7 +116,7 @@ func (p *ESPlugin) IndexerShutdown() { func (p *ESPlugin) ErrorHandler() { for { errBuf := <-p.indexor.ErrorChannel - Debug(1, "[ELASTICSEARCH]", errBuf.Err) + logger.Error().Err(errBuf.Err).Msg("Error indexing document") } } @@ -156,9 +160,10 @@ func (p *ESPlugin) ResponseAnalyze(req, resp []byte, start, stop time.Time) { Rtt: rtt, Timestamp: t, } + j, err := json.Marshal(&esResp) if err != nil { - Debug(0, "[ELASTIC-RESPONSE]", err) + logger.Error().Err(err).Msg("Error marshaling ESRequestResponse") } else { p.indexor.Index(p.Index, "RequestResponse", "", "", "", &t, j) } diff --git a/elasticsearch_test.go b/pkg/elasticsearch/elasticsearch_test.go similarity index 99% rename from elasticsearch_test.go rename to pkg/elasticsearch/elasticsearch_test.go index 77b8c0e7b..fab4ea78a 100644 --- a/elasticsearch_test.go +++ b/pkg/elasticsearch/elasticsearch_test.go @@ -1,4 +1,4 @@ -package goreplay +package elasticsearch import ( "testing" diff --git a/pkg/emitter/emitter.go b/pkg/emitter/emitter.go new file mode 100644 index 000000000..24f4de76a --- /dev/null +++ b/pkg/emitter/emitter.go @@ -0,0 +1,186 @@ +package emitter + +import ( + "hash/fnv" + "io" + "sync" + + "github.com/buger/goreplay/internal/byteutils" + "github.com/buger/goreplay/internal/size" + "github.com/buger/goreplay/pkg/http_modifier" + "github.com/buger/goreplay/pkg/middleware" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/prettify" + "github.com/buger/goreplay/pkg/pro" + "github.com/buger/goreplay/pkg/proto" + + "github.com/coocood/freecache" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" +) + +var logger = log.With().Str("component", "emitter").Logger() + +// Emitter represents an object to manage plugins communication +type Emitter struct
{ + sync.WaitGroup + plugins *plugin.InOutPlugins + config *Config +} + +// Config represents configuration for Emitter +type Config struct { + CopyBufferSize size.Size `json:"copy-buffer-size"` + Middleware string `json:"middleware"` + ModifierConfig http_modifier.HTTPModifierConfig + PrettifyHTTP bool `json:"prettify-http"` + SplitOutput bool `json:"split-output"` + RecognizeTCPSessions bool `json:"recognize-tcp-sessions"` +} + +// New creates and initializes new Emitter object. +func New(configs ...*Config) *Emitter { + config := &Config{} + if len(configs) > 0 { + config = configs[0] + } + + return &Emitter{ + config: config, + } +} + +// Start initialize loop for sending data from inputs to outputs +func (e *Emitter) Start(plugins *plugin.InOutPlugins) { + if e.config.CopyBufferSize < 1 { + e.config.CopyBufferSize = 5 << 20 + } + e.plugins = plugins + + if e.config.Middleware != "" { + middleware := middleware.NewMiddleware(e.config.Middleware, nil) + + for _, in := range plugins.Inputs { + middleware.ReadFrom(in) + } + + e.plugins.Inputs = append(e.plugins.Inputs, middleware) + e.plugins.All = append(e.plugins.All, middleware) + e.Add(1) + go func() { + defer e.Done() + if err := e.CopyMulty(middleware, plugins.Outputs...); err != nil { + logger.Error().Err(err).Msg("error during copy") + } + }() + } else { + for _, in := range plugins.Inputs { + e.Add(1) + go func(in plugin.Reader) { + defer e.Done() + if err := e.CopyMulty(in, plugins.Outputs...); err != nil { + logger.Error().Err(err).Msg("error during copy") + } + }(in) + } + } +} + +// Close closes all the goroutine and waits for it to finish. 
+func (e *Emitter) Close() { + for _, p := range e.plugins.All { + if cp, ok := p.(io.Closer); ok { + cp.Close() + } + } + if len(e.plugins.All) > 0 { + // wait for everything to stop + e.Wait() + } + e.plugins.All = nil // avoid Close to make changes again +} + +// CopyMulty copies from 1 reader to multiple writers +func (e *Emitter) CopyMulty(src plugin.Reader, writers ...plugin.Writer) error { + wIndex := 0 + modifier := http_modifier.NewHTTPModifier(&e.config.ModifierConfig) + filteredRequests := freecache.NewCache(200 * 1024 * 1024) // 200M + + for { + msg, err := src.PluginRead() + if err != nil { + if err == plugin.ErrorStopped || err == io.EOF { + return nil + } + return err + } + if msg != nil && len(msg.Data) > 0 { + if len(msg.Data) > int(e.config.CopyBufferSize) { + msg.Data = msg.Data[:e.config.CopyBufferSize] + } + meta := proto.PayloadMeta(msg.Meta) + if len(meta) < 3 { + logger.Warn().Msgf("Found malformed record %q from %q", msg.Meta, src) + continue + } + requestID := meta[1] + // start a subroutine only when necessary + if log.Logger.GetLevel() == zerolog.DebugLevel { + logger.Debug().Msgf("input: %s from: %s", byteutils.SliceToString(msg.Meta[:len(msg.Meta)-1]), src) + } + if modifier != nil { + logger.Debug().Msgf("modifier: %s from: %s", requestID, src) + if proto.IsRequestPayload(msg.Meta) { + msg.Data = modifier.Rewrite(msg.Data) + // If modifier tells to skip request + if len(msg.Data) == 0 { + filteredRequests.Set(requestID, []byte{}, 60) // + continue + } + logger.Debug().Msgf("Rewritten input: %s from: %s", requestID, src) + } else { + _, err := filteredRequests.Get(requestID) + if err == nil { + filteredRequests.Del(requestID) + continue + } + } + } + + if e.config.PrettifyHTTP { + msg.Data = prettify.PrettifyHTTP(msg.Data) + if len(msg.Data) == 0 { + continue + } + } + + if e.config.SplitOutput { + if e.config.RecognizeTCPSessions { + if !pro.PRO { + logger.Fatal().Msg("Detailed TCP sessions work only with PRO license") + } + hasher := 
fnv.New32a() + hasher.Write(meta[1]) + + wIndex = int(hasher.Sum32()) % len(writers) + if _, err := writers[wIndex].PluginWrite(msg); err != nil { + return err + } + } else { + // Simple round robin + if _, err := writers[wIndex].PluginWrite(msg); err != nil { + return err + } + + wIndex = (wIndex + 1) % len(writers) + } + } else { + for _, dst := range writers { + if _, err := dst.PluginWrite(msg); err != nil && err != io.ErrClosedPipe { + return err + } + } + } + } + } +} diff --git a/pkg/emitter/emitter_test.go b/pkg/emitter/emitter_test.go new file mode 100644 index 000000000..af40d92e4 --- /dev/null +++ b/pkg/emitter/emitter_test.go @@ -0,0 +1,344 @@ +package emitter + +import ( + "bytes" + "context" + "fmt" + "os" + "os/exec" + "strings" + "sync" + "sync/atomic" + "syscall" + "testing" + "time" + + "github.com/buger/goreplay/pkg/http_modifier" + "github.com/buger/goreplay/pkg/middleware" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/pro" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/test" +) + +func TestMain(m *testing.M) { + pro.Enable() + code := m.Run() + os.Exit(code) +} + +func TestEmitter(t *testing.T) { + wg := new(sync.WaitGroup) + + input := test.NewTestInput() + output := test.NewTestOutput(func(*plugin.Message) { + wg.Done() + }) + + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, + } + plugins.All = append(plugins.All, input, output) + + emitter := New() + go emitter.Start(plugins) + + for i := 0; i < 1000; i++ { + wg.Add(1) + input.EmitGET() + } + + wg.Wait() + emitter.Close() +} + +func TestEmitterFiltered(t *testing.T) { + wg := new(sync.WaitGroup) + + input := test.NewTestInput() + input.SkipHeader = true + + output := test.NewTestOutput(func(*plugin.Message) { + wg.Done() + }) + + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, + } + plugins.All = append(plugins.All, input, 
output) + + methods := http_modifier.HTTPMethods{[]byte("GET")} + emitter := New(&Config{ + ModifierConfig: http_modifier.HTTPModifierConfig{Methods: methods}, + }) + go emitter.Start(plugins) + + wg.Add(2) + + id := proto.UUID() + reqh := proto.PayloadHeader(proto.RequestPayload, id, time.Now().UnixNano(), -1) + reqb := append(reqh, []byte("POST / HTTP/1.1\r\nHost: www.w3.org\r\nUser-Agent: Go 1.1 package http\r\nAccept-Encoding: gzip\r\n\r\n")...) + + resh := proto.PayloadHeader(proto.ResponsePayload, id, time.Now().UnixNano()+1, 1) + respb := append(resh, []byte("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")...) + + input.EmitBytes(reqb) + input.EmitBytes(respb) + + id = proto.UUID() + reqh = proto.PayloadHeader(proto.RequestPayload, id, time.Now().UnixNano(), -1) + reqb = append(reqh, []byte("GET / HTTP/1.1\r\nHost: www.w3.org\r\nUser-Agent: Go 1.1 package http\r\nAccept-Encoding: gzip\r\n\r\n")...) + + resh = proto.PayloadHeader(proto.ResponsePayload, id, time.Now().UnixNano()+1, 1) + respb = append(resh, []byte("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")...) 
+ + input.EmitBytes(reqb) + input.EmitBytes(respb) + + wg.Wait() + emitter.Close() +} + +func TestEmitterSplitRoundRobin(t *testing.T) { + wg := new(sync.WaitGroup) + + input := test.NewTestInput() + + var counter1, counter2 int32 + + output1 := test.NewTestOutput(func(*plugin.Message) { + atomic.AddInt32(&counter1, 1) + wg.Done() + }) + + output2 := test.NewTestOutput(func(*plugin.Message) { + atomic.AddInt32(&counter2, 1) + wg.Done() + }) + + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output1, output2}, + } + + emitter := New(&Config{ + SplitOutput: true, + }) + go emitter.Start(plugins) + + for i := 0; i < 1000; i++ { + wg.Add(1) + input.EmitGET() + } + + wg.Wait() + + emitter.Close() + + if counter1 == 0 || counter2 == 0 || counter1 != counter2 { + t.Errorf("Round robin should split traffic equally: %d vs %d", counter1, counter2) + } +} + +func TestEmitterRoundRobin(t *testing.T) { + wg := new(sync.WaitGroup) + + input := test.NewTestInput() + + var counter1, counter2 int32 + + output1 := test.NewTestOutput(func(*plugin.Message) { + counter1++ + wg.Done() + }) + + output2 := test.NewTestOutput(func(*plugin.Message) { + counter2++ + wg.Done() + }) + + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output1, output2}, + } + plugins.All = append(plugins.All, input, output1, output2) + + emitter := New(&Config{ + SplitOutput: true, + }) + go emitter.Start(plugins) + + for i := 0; i < 1000; i++ { + wg.Add(1) + input.EmitGET() + } + + wg.Wait() + emitter.Close() + + if counter1 == 0 || counter2 == 0 { + t.Errorf("Round robin should split traffic equally: %d vs %d", counter1, counter2) + } +} + +func TestEmitterSplitSession(t *testing.T) { + wg := new(sync.WaitGroup) + wg.Add(200) + + input := test.NewTestInput() + input.SkipHeader = true + + var counter1, counter2 int32 + + output1 := test.NewTestOutput(func(msg *plugin.Message) { + if proto.PayloadID(msg.Meta)[0] == 'a' 
{ + counter1++ + } + wg.Done() + }) + + output2 := test.NewTestOutput(func(msg *plugin.Message) { + if proto.PayloadID(msg.Meta)[0] == 'b' { + counter2++ + } + wg.Done() + }) + + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output1, output2}, + } + + emitter := New(&Config{ + SplitOutput: true, + RecognizeTCPSessions: true, + }) + go emitter.Start(plugins) + + for i := 0; i < 200; i++ { + // Keep session but randomize + id := make([]byte, 20) + if i&1 == 0 { // for recognizeTCPSessions one should be odd and other will be even number + id[0] = 'a' + } else { + id[0] = 'b' + } + input.EmitBytes([]byte(fmt.Sprintf("1 %s 1 1\nGET / HTTP/1.1\r\n\r\n", id[:20]))) + } + + wg.Wait() + + if counter1 != counter2 { + t.Errorf("Round robin should split traffic equally: %d vs %d", counter1, counter2) + } + + emitter.Close() +} + +func BenchmarkEmitter(b *testing.B) { + wg := new(sync.WaitGroup) + + input := test.NewTestInput() + + output := test.NewTestOutput(func(*plugin.Message) { + wg.Done() + }) + + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, + } + plugins.All = append(plugins.All, input, output) + + emitter := New(&Config{}) + go emitter.Start(plugins) + + b.ResetTimer() + + for i := 0; i < b.N; i++ { + wg.Add(1) + input.EmitGET() + } + + wg.Wait() + emitter.Close() +} + +const echoSh = "./examples/middleware/echo.sh" +const tokenModifier = "go run ./examples/middleware/token_modifier.go" + +var withDebug = append(syscall.Environ(), "GOR_TEST=1") + +func initMiddleware(cmd *exec.Cmd, cancl context.CancelFunc, l plugin.Reader, c func(error)) *middleware.Middleware { + var m middleware.Middleware + m.Data = make(chan *plugin.Message, 1000) + m.Stop = make(chan bool) + m.CommandCancel = cancl + m.Stdout, _ = cmd.StdoutPipe() + m.Stdin, _ = cmd.StdinPipe() + cmd.Stderr = os.Stderr + go m.Read(m.Stdout) + go func() { + defer m.Close() + var err error + if err = 
cmd.Start(); err == nil { + err = cmd.Wait() + } + if err != nil { + c(err) + } + }() + m.ReadFrom(l) + return &m +} + +func initCmd(command string, env []string) (*exec.Cmd, context.CancelFunc) { + commands := strings.Split(command, " ") + ctx, cancl := context.WithCancel(context.Background()) + cmd := exec.CommandContext(ctx, commands[0], commands[1:]...) + cmd.Env = env + return cmd, cancl +} + +func TestMiddlewareEarlyClose(t *testing.T) { + t.Skip() + quit := make(chan struct{}) + in := test.NewTestInput() + cmd, cancl := initCmd(echoSh, withDebug) + midd := initMiddleware(cmd, cancl, in, func(err error) { + if err != nil { + if e, ok := err.(*exec.ExitError); ok { + status := e.Sys().(syscall.WaitStatus) + if status.Signal() != syscall.SIGKILL { + t.Errorf("expected error to be signal killed. got %s", status.Signal().String()) + } + } + } + quit <- struct{}{} + }) + var body = []byte("OPTIONS / HTTP/1.1\r\nHost: example.org\r\n\r\n") + count := uint32(0) + out := test.NewTestOutput(func(msg *plugin.Message) { + if !bytes.Equal(body, msg.Data) { + t.Errorf("expected %q to equal %q", body, msg.Data) + } + atomic.AddUint32(&count, 1) + if atomic.LoadUint32(&count) == 5 { + quit <- struct{}{} + } + }) + pl := &plugin.InOutPlugins{} + pl.Inputs = []plugin.Reader{midd, in} + pl.Outputs = []plugin.Writer{out} + pl.All = []interface{}{midd, out, in} + e := New() + go e.Start(pl) + for i := 0; i < 5; i++ { + in.EmitBytes(body) + } + <-quit + midd.Close() + <-quit +} diff --git a/input_file.go b/pkg/file/input_file.go similarity index 83% rename from input_file.go rename to pkg/file/input_file.go index feaf479d2..735224c6f 100644 --- a/input_file.go +++ b/pkg/file/input_file.go @@ -1,4 +1,4 @@ -package goreplay +package file import ( "bufio" @@ -21,8 +21,22 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + + 
"github.com/rs/zerolog/log" ) +var inputLogger = log.With().Str("component", "input_file").Logger() + +// InputFileConfig contains config of input file +type InputFileConfig struct { + InputFileLoop bool `json:"input-file-loop"` + InputFileReadDepth int `json:"input-file-read-depth"` + InputFileDryRun bool `json:"input-file-dry-run"` + InputFileMaxWait time.Duration `json:"input-file-max-wait"` +} + type filePayload struct { data []byte timestamp int64 @@ -68,7 +82,7 @@ type fileInputReader struct { } func (f *fileInputReader) parse(init chan struct{}) error { - payloadSeparatorAsBytes := []byte(payloadSeparator) + payloadSeparatorAsBytes := []byte(proto.PayloadSeparator) var buffer bytes.Buffer var initialized bool @@ -80,7 +94,7 @@ func (f *fileInputReader) parse(init chan struct{}) error { if err != nil { if err != io.EOF { - Debug(1, err) + inputLogger.Error().Err(err).Msg("Error reading file") } f.Close() @@ -95,10 +109,10 @@ func (f *fileInputReader) parse(init chan struct{}) error { if bytes.Equal(payloadSeparatorAsBytes[1:], line) { asBytes := buffer.Bytes() - meta := payloadMeta(asBytes) + meta := proto.PayloadMeta(asBytes) if len(meta) < 3 { - Debug(1, fmt.Sprintf("Found malformed record, file: %s, line %d", f.path, lineNum)) + inputLogger.Warn().Msgf("Found malformed record, file: %s, line %d", f.path, lineNum) buffer = bytes.Buffer{} continue } @@ -175,7 +189,7 @@ func newFileInputReader(path string, readDepth int, dryRun bool) *fileInputReade } if err != nil { - Debug(0, fmt.Sprintf("[INPUT-FILE] err: %q", err)) + inputLogger.Error().Err(err).Msg("Error opening file") return nil } @@ -183,7 +197,7 @@ func newFileInputReader(path string, readDepth int, dryRun bool) *fileInputReade if strings.HasSuffix(path, ".gz") { gzReader, err := gzip.NewReader(file) if err != nil { - Debug(0, fmt.Sprintf("[INPUT-FILE] err: %q", err)) + inputLogger.Error().Err(err).Msg("Error opening compressed file") return nil } r.reader = bufio.NewReader(gzReader) @@ -257,7 +271,7 
@@ func (i *FileInput) init() (err error) { resp, err := svc.ListObjects(params) if err != nil { - Debug(2, "[INPUT-FILE] Error while retrieving list of files from S3", i.path, err) + inputLogger.Error().Err(err).Msgf("Error while retrieving list of files from S3: %s", i.path) return err } @@ -265,12 +279,12 @@ func (i *FileInput) init() (err error) { matches = append(matches, "s3://"+bucket+"/"+(*c.Key)) } } else if matches, err = filepath.Glob(i.path); err != nil { - Debug(2, "[INPUT-FILE] Wrong file pattern", i.path, err) + inputLogger.Error().Err(err).Msgf("Error while retrieving list of files: %s", i.path) return } if len(matches) == 0 { - Debug(2, "[INPUT-FILE] No files match pattern: ", i.path) + inputLogger.Error().Msgf("No files match pattern: %s", i.path) return errors.New("no matching files") } @@ -285,15 +299,25 @@ func (i *FileInput) init() (err error) { return nil } +// Limited enabled custom logic for file input +func (i *FileInput) Limited() bool { + return false +} + +// SetLimit sets limit for this plugin +func (i *FileInput) SetLimit(limit float64) { + i.speedFactor = limit +} + // PluginRead reads message from this plugin -func (i *FileInput) PluginRead() (*Message, error) { - var msg Message +func (i *FileInput) PluginRead() (*plugin.Message, error) { + var msg plugin.Message select { case <-i.exit: - return nil, ErrorStopped + return nil, plugin.ErrorStopped case buf := <-i.data: i.stats.Add("read_from", 1) - msg.Meta, msg.Data = payloadMetaWithBody(buf) + msg.Meta, msg.Data = proto.PayloadMetaWithBody(buf) return &msg, nil } } @@ -410,7 +434,7 @@ func (i *FileInput) emit() { i.stats.Set("max_wait", time.Duration(maxWait)) i.stats.Set("min_wait", time.Duration(minWait)) - Debug(2, fmt.Sprintf("[INPUT-FILE] FileInput: end of file '%s'\n", i.path)) + inputLogger.Info().Msgf("FileInput: end of file '%s'", i.path) if i.dryRun { fmt.Printf("Records found: %v\nFiles processed: %v\nBytes processed: %v\nMax wait: %v\nMin wait: %v\nFirst wait: %v\nIt 
will take `%v` to replay at current speed.\nFound %v records with out of order timestamp\n", diff --git a/input_file_test.go b/pkg/file/input_file_test.go similarity index 75% rename from input_file_test.go rename to pkg/file/input_file_test.go index 59d057e1e..53f0fd77d 100644 --- a/input_file_test.go +++ b/pkg/file/input_file_test.go @@ -1,4 +1,4 @@ -package goreplay +package file import ( "bytes" @@ -10,19 +10,24 @@ import ( "sync" "testing" "time" + + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/test" ) func TestInputFileWithGET(t *testing.T) { - input := NewTestInput() - rg := NewRequestGenerator([]PluginReader{input}, func() { input.EmitGET() }, 1) - readPayloads := []*Message{} + input := test.NewTestInput() + rg := NewRequestGenerator([]plugin.Reader{input}, func() { input.EmitGET() }, 1) + readPayloads := []*plugin.Message{} // Given a capture file with a GET request expectedCaptureFile := CreateCaptureFile(rg) defer expectedCaptureFile.TearDown() // When the request is read from the capture file - err := ReadFromCaptureFile(expectedCaptureFile.file, 1, func(msg *Message) { + err := ReadFromCaptureFile(expectedCaptureFile.file, 1, func(msg *plugin.Message) { readPayloads = append(readPayloads, msg) }) @@ -36,16 +41,16 @@ func TestInputFileWithGET(t *testing.T) { } func TestInputFileWithPayloadLargerThan64Kb(t *testing.T) { - input := NewTestInput() - rg := NewRequestGenerator([]PluginReader{input}, func() { input.EmitSizedPOST(64 * 1024) }, 1) - readPayloads := []*Message{} + input := test.NewTestInput() + rg := NewRequestGenerator([]plugin.Reader{input}, func() { input.EmitSizedPOST(64 * 1024) }, 1) + readPayloads := []*plugin.Message{} // Given a capture file with a request over 64Kb expectedCaptureFile := CreateCaptureFile(rg) defer expectedCaptureFile.TearDown() // When the request is read from the capture file - err := 
ReadFromCaptureFile(expectedCaptureFile.file, 1, func(msg *Message) { + err := ReadFromCaptureFile(expectedCaptureFile.file, 1, func(msg *plugin.Message) { readPayloads = append(readPayloads, msg) }) @@ -61,19 +66,19 @@ func TestInputFileWithPayloadLargerThan64Kb(t *testing.T) { func TestInputFileWithGETAndPOST(t *testing.T) { - input := NewTestInput() - rg := NewRequestGenerator([]PluginReader{input}, func() { + input := test.NewTestInput() + rg := NewRequestGenerator([]plugin.Reader{input}, func() { input.EmitGET() input.EmitPOST() }, 2) - readPayloads := []*Message{} + readPayloads := []*plugin.Message{} // Given a capture file with a GET request expectedCaptureFile := CreateCaptureFile(rg) defer expectedCaptureFile.TearDown() // When the requests are read from the capture file - err := ReadFromCaptureFile(expectedCaptureFile.file, 2, func(msg *Message) { + err := ReadFromCaptureFile(expectedCaptureFile.file, 2, func(msg *plugin.Message) { readPayloads = append(readPayloads, msg) }) @@ -92,16 +97,16 @@ func TestInputFileMultipleFilesWithRequestsOnly(t *testing.T) { file1, _ := os.OpenFile(fmt.Sprintf("/tmp/%d_0", rnd), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0660) file1.Write([]byte("1 1 1\ntest1")) - file1.Write([]byte(payloadSeparator)) + file1.Write([]byte(proto.PayloadSeparator)) file1.Write([]byte("1 1 3\ntest2")) - file1.Write([]byte(payloadSeparator)) + file1.Write([]byte(proto.PayloadSeparator)) file1.Close() file2, _ := os.OpenFile(fmt.Sprintf("/tmp/%d_1", rnd), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0660) file2.Write([]byte("1 1 2\ntest3")) - file2.Write([]byte(payloadSeparator)) + file2.Write([]byte(proto.PayloadSeparator)) file2.Write([]byte("1 1 4\ntest4")) - file2.Write([]byte(payloadSeparator)) + file2.Write([]byte(proto.PayloadSeparator)) file2.Close() input := NewFileInput(fmt.Sprintf("/tmp/%d*", rnd), false, 100, 0, false) @@ -124,11 +129,11 @@ func TestInputFileRequestsWithLatency(t *testing.T) { defer file.Close() file.Write([]byte("1 1 
100000000\nrequest1")) - file.Write([]byte(payloadSeparator)) + file.Write([]byte(proto.PayloadSeparator)) file.Write([]byte("1 2 150000000\nrequest2")) - file.Write([]byte(payloadSeparator)) + file.Write([]byte(proto.PayloadSeparator)) file.Write([]byte("1 3 250000000\nrequest3")) - file.Write([]byte(payloadSeparator)) + file.Write([]byte(proto.PayloadSeparator)) input := NewFileInput(fmt.Sprintf("/tmp/%d", rnd), false, 100, 0, false) @@ -150,24 +155,24 @@ func TestInputFileMultipleFilesWithRequestsAndResponses(t *testing.T) { file1, _ := os.OpenFile(fmt.Sprintf("/tmp/%d_0", rnd), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0660) file1.Write([]byte("1 1 1\nrequest1")) - file1.Write([]byte(payloadSeparator)) + file1.Write([]byte(proto.PayloadSeparator)) file1.Write([]byte("2 1 1\nresponse1")) - file1.Write([]byte(payloadSeparator)) + file1.Write([]byte(proto.PayloadSeparator)) file1.Write([]byte("1 2 3\nrequest2")) - file1.Write([]byte(payloadSeparator)) + file1.Write([]byte(proto.PayloadSeparator)) file1.Write([]byte("2 2 3\nresponse2")) - file1.Write([]byte(payloadSeparator)) + file1.Write([]byte(proto.PayloadSeparator)) file1.Close() file2, _ := os.OpenFile(fmt.Sprintf("/tmp/%d_1", rnd), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0660) file2.Write([]byte("1 3 2\nrequest3")) - file2.Write([]byte(payloadSeparator)) + file2.Write([]byte(proto.PayloadSeparator)) file2.Write([]byte("2 3 2\nresponse3")) - file2.Write([]byte(payloadSeparator)) + file2.Write([]byte(proto.PayloadSeparator)) file2.Write([]byte("1 4 4\nrequest4")) - file2.Write([]byte(payloadSeparator)) + file2.Write([]byte(proto.PayloadSeparator)) file2.Write([]byte("2 4 4\nresponse4")) - file2.Write([]byte(payloadSeparator)) + file2.Write([]byte(proto.PayloadSeparator)) file2.Close() input := NewFileInput(fmt.Sprintf("/tmp/%d*", rnd), false, 100, 0, false) @@ -193,9 +198,9 @@ func TestInputFileLoop(t *testing.T) { file, _ := os.OpenFile(fmt.Sprintf("/tmp/%d", rnd), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0660) 
file.Write([]byte("1 1 1\ntest1")) - file.Write([]byte(payloadSeparator)) + file.Write([]byte(proto.PayloadSeparator)) file.Write([]byte("1 1 2\ntest2")) - file.Write([]byte(payloadSeparator)) + file.Write([]byte(proto.PayloadSeparator)) file.Close() input := NewFileInput(fmt.Sprintf("/tmp/%d", rnd), true, 100, 0, false) @@ -214,14 +219,14 @@ func TestInputFileCompressed(t *testing.T) { output := NewFileOutput(fmt.Sprintf("/tmp/%d_0.gz", rnd), &FileOutputConfig{FlushInterval: time.Minute, Append: true}) for i := 0; i < 1000; i++ { - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) } name1 := output.file.Name() output.Close() output2 := NewFileOutput(fmt.Sprintf("/tmp/%d_1.gz", rnd), &FileOutputConfig{FlushInterval: time.Minute, Append: true}) for i := 0; i < 1000; i++ { - output2.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output2.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) } name2 := output2.file.Name() output2.Close() @@ -236,11 +241,11 @@ func TestInputFileCompressed(t *testing.T) { } type CaptureFile struct { - msgs []*Message + msgs []*plugin.Message file *os.File } -func NewExpectedCaptureFile(msgs []*Message, file *os.File) *CaptureFile { +func NewExpectedCaptureFile(msgs []*plugin.Message, file *os.File) *CaptureFile { ecf := new(CaptureFile) ecf.file = file ecf.msgs = msgs @@ -254,12 +259,12 @@ func (expectedCaptureFile *CaptureFile) TearDown() { } type RequestGenerator struct { - inputs []PluginReader + inputs []plugin.Reader emit func() wg *sync.WaitGroup } -func NewRequestGenerator(inputs []PluginReader, emit func(), count int) (rg *RequestGenerator) { +func NewRequestGenerator(inputs []plugin.Reader, emit func(), count int) (rg *RequestGenerator) { rg = new(RequestGenerator) rg.inputs = inputs rg.emit = emit @@ -268,7 +273,7 @@ func NewRequestGenerator(inputs []PluginReader, 
emit func(), count int) (rg *Req return } -func (expectedCaptureFile *CaptureFile) PayloadsEqual(other []*Message) bool { +func (expectedCaptureFile *CaptureFile) PayloadsEqual(other []*plugin.Message) bool { if len(expectedCaptureFile.msgs) != len(other) { return false @@ -293,25 +298,25 @@ func CreateCaptureFile(requestGenerator *RequestGenerator) *CaptureFile { panic(err) } - readPayloads := []*Message{} - output := NewTestOutput(func(msg *Message) { + readPayloads := []*plugin.Message{} + output := test.NewTestOutput(func(msg *plugin.Message) { readPayloads = append(readPayloads, msg) requestGenerator.wg.Done() }) outputFile := NewFileOutput(f.Name(), &FileOutputConfig{FlushInterval: time.Second, Append: true}) - plugins := &InOutPlugins{ + plugins := &plugin.InOutPlugins{ Inputs: requestGenerator.inputs, - Outputs: []PluginWriter{output, outputFile}, + Outputs: []plugin.Writer{output, outputFile}, } for _, input := range requestGenerator.inputs { plugins.All = append(plugins.All, input) } plugins.All = append(plugins.All, output, outputFile) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) requestGenerator.emit() requestGenerator.wg.Wait() @@ -323,24 +328,24 @@ func CreateCaptureFile(requestGenerator *RequestGenerator) *CaptureFile { } -func ReadFromCaptureFile(captureFile *os.File, count int, callback writeCallback) (err error) { +func ReadFromCaptureFile(captureFile *os.File, count int, callback test.WriteCallback) (err error) { wg := new(sync.WaitGroup) input := NewFileInput(captureFile.Name(), false, 100, 0, false) - output := NewTestOutput(func(msg *Message) { + output := test.NewTestOutput(func(msg *plugin.Message) { callback(msg) wg.Done() }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = 
append(plugins.All, input, output) wg.Add(count) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) done := make(chan int, 1) go func() { diff --git a/output_file.go b/pkg/file/output_file.go similarity index 88% rename from output_file.go rename to pkg/file/output_file.go index 4b5a04017..3c433da86 100644 --- a/output_file.go +++ b/pkg/file/output_file.go @@ -1,24 +1,28 @@ -package goreplay +package file import ( "bufio" "compress/gzip" "errors" "fmt" - "github.com/buger/goreplay/internal/size" "io" - "log" "math/rand" "os" "path/filepath" - "runtime/debug" "sort" "strconv" "strings" "sync" "time" + + "github.com/buger/goreplay/internal/size" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + + "github.com/rs/zerolog/log" ) +var outputLogger = log.With().Str("component", "output_file").Logger() var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") var instanceID string @@ -103,7 +107,8 @@ func NewFileOutput(pathTemplate string, config *FileOutputConfig) *FileOutput { return o } -func getFileIndex(name string) int { +// GetFileIndex returns index of file +func GetFileIndex(name string) int { ext := filepath.Ext(name) withoutExt := strings.TrimSuffix(name, ext) @@ -150,7 +155,7 @@ func (s sortByFileIndex) Swap(i, j int) { func (s sortByFileIndex) Less(i, j int) bool { if withoutIndex(s[i]) == withoutIndex(s[j]) { - return getFileIndex(s[i]) < getFileIndex(s[j]) + return GetFileIndex(s[i]) < GetFileIndex(s[j]) } return s[i] < s[j] @@ -187,7 +192,7 @@ func (o *FileOutput) filename() string { last := matches[len(matches)-1] fileIndex := 0 - if idx := getFileIndex(last); idx != -1 { + if idx := GetFileIndex(last); idx != -1 { fileIndex = idx if nextChunk { @@ -210,10 +215,10 @@ func (o *FileOutput) updateName() { } // PluginWrite writes message to this plugin -func (o *FileOutput) PluginWrite(msg *Message) (n int, err error) { +func (o 
*FileOutput) PluginWrite(msg *plugin.Message) (n int, err error) { if o.requestPerFile { o.Lock() - meta := payloadMeta(msg.Meta) + meta := proto.PayloadMeta(msg.Meta) o.currentID = meta[1] o.payloadType = meta[0] o.Unlock() @@ -236,7 +241,7 @@ func (o *FileOutput) PluginWrite(msg *Message) (n int, err error) { } if err != nil { - log.Fatal(o, "Cannot open file %q. Error: %s", o.currentName, err) + outputLogger.Fatal().Err(err).Str("file", o.currentName).Msg("Cannot open file") } o.QueueLength = 0 @@ -246,14 +251,14 @@ func (o *FileOutput) PluginWrite(msg *Message) (n int, err error) { n, err = o.writer.Write(msg.Meta) nn, err = o.writer.Write(msg.Data) n += nn - nn, err = o.writer.Write(payloadSeparatorAsBytes) + nn, err = o.writer.Write(proto.PayloadSeparatorAsBytes) n += nn o.totalFileSize += size.Size(n) o.currentFileSize += n o.QueueLength++ - if Settings.OutputFileConfig.OutputFileMaxSize > 0 && o.totalFileSize >= Settings.OutputFileConfig.OutputFileMaxSize { + if o.config.OutputFileMaxSize > 0 && o.totalFileSize >= o.config.OutputFileMaxSize { return n, errors.New("File output reached size limit") } @@ -264,7 +269,7 @@ func (o *FileOutput) flush() { // Don't exit on panic defer func() { if r := recover(); r != nil { - Debug(0, "[OUTPUT-FILE] PANIC while file flush: ", r, o, string(debug.Stack())) + outputLogger.Error().Stack().Msgf("PANIC while file flush: %v", r) } }() @@ -281,7 +286,7 @@ func (o *FileOutput) flush() { if stat, err := o.file.Stat(); err == nil { o.currentFileSize = int(stat.Size()) } else { - Debug(0, "[OUTPUT-HTTP] error accessing file size", err) + outputLogger.Error().Err(err).Msgf("Error accessing file size") } } } diff --git a/output_file_test.go b/pkg/file/output_file_test.go similarity index 74% rename from output_file_test.go rename to pkg/file/output_file_test.go index 65d0cf7c3..9e7a0e445 100644 --- a/output_file_test.go +++ b/pkg/file/output_file_test.go @@ -1,8 +1,7 @@ -package goreplay +package file import ( "fmt" - 
"github.com/buger/goreplay/internal/size" "math/rand" "os" "reflect" @@ -11,22 +10,28 @@ import ( "sync/atomic" "testing" "time" + + "github.com/buger/goreplay/internal/size" + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/test" ) func TestFileOutput(t *testing.T) { wg := new(sync.WaitGroup) - input := NewTestInput() + input := test.NewTestInput() output := NewFileOutput("/tmp/test_requests.gor", &FileOutputConfig{FlushInterval: time.Minute, Append: true}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + em := emitter.New() + go em.Start(plugins) for i := 0; i < 100; i++ { wg.Add(2) @@ -35,26 +40,26 @@ func TestFileOutput(t *testing.T) { } time.Sleep(100 * time.Millisecond) output.flush() - emitter.Close() + em.Close() var counter int64 input2 := NewFileInput("/tmp/test_requests.gor", false, 100, 0, false) - output2 := NewTestOutput(func(*Message) { + output2 := test.NewTestOutput(func(*plugin.Message) { atomic.AddInt64(&counter, 1) wg.Done() }) - plugins2 := &InOutPlugins{ - Inputs: []PluginReader{input2}, - Outputs: []PluginWriter{output2}, + plugins2 := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input2}, + Outputs: []plugin.Writer{output2}, } plugins2.All = append(plugins2.All, input2, output2) - emitter2 := NewEmitter() - go emitter2.Start(plugins2, Settings.Middleware) + em2 := emitter.New() + go em2.Start(plugins2) wg.Wait() - emitter2.Close() + em2.Close() } func TestFileOutputWithNameCleaning(t *testing.T) { @@ -87,16 +92,16 @@ func TestFileOutputMultipleFiles(t *testing.T) { t.Error("Should not initialize file if no writes") } - output.PluginWrite(&Message{Meta: 
[]byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name1 := output.file.Name() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name2 := output.file.Name() time.Sleep(time.Second) output.updateName() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name3 := output.file.Name() if name2 != name1 { @@ -118,16 +123,16 @@ func TestFileOutputFilePerRequest(t *testing.T) { t.Error("Should not initialize file if no writes") } - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name1 := output.file.Name() - output.PluginWrite(&Message{Meta: []byte("1 2 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 2 1\r\n"), Data: []byte("test")}) name2 := output.file.Name() time.Sleep(time.Second) output.updateName() - output.PluginWrite(&Message{Meta: []byte("1 3 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 3 1\r\n"), Data: []byte("test")}) name3 := output.file.Name() if name3 == name2 || name2 == name1 || name3 == name1 { @@ -147,7 +152,7 @@ func TestFileOutputCompression(t *testing.T) { } for i := 0; i < 1000; i++ { - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) } name := output.file.Name() @@ -173,8 +178,8 @@ func TestGetFileIndex(t *testing.T) { } for _, c := range tests { - if getFileIndex(c.path) != c.index { - t.Error(c.path, "should be", c.index, "instead", getFileIndex(c.path)) + if GetFileIndex(c.path) != c.index { + t.Error(c.path, "should be", c.index, 
"instead", GetFileIndex(c.path)) } } } @@ -206,15 +211,15 @@ func TestFileOutputAppendQueueLimitOverflow(t *testing.T) { output := NewFileOutput(name, &FileOutputConfig{Append: false, FlushInterval: time.Minute, QueueLimit: 2}) - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name1 := output.file.Name() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name2 := output.file.Name() output.updateName() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name3 := output.file.Name() if name2 != name1 || name1 != fmt.Sprintf("/tmp/%d_0", rnd) { @@ -235,15 +240,15 @@ func TestFileOutputAppendQueueLimitNoOverflow(t *testing.T) { output := NewFileOutput(name, &FileOutputConfig{Append: false, FlushInterval: time.Minute, QueueLimit: 3}) - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name1 := output.file.Name() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name2 := output.file.Name() output.updateName() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name3 := output.file.Name() if name2 != name1 || name1 != fmt.Sprintf("/tmp/%d_0", rnd) { @@ -264,15 +269,15 @@ func TestFileOutputAppendQueueLimitGzips(t *testing.T) { output := NewFileOutput(name, &FileOutputConfig{Append: false, FlushInterval: time.Minute, QueueLimit: 2}) - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), 
Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name1 := output.file.Name() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name2 := output.file.Name() output.updateName() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name3 := output.file.Name() if name2 != name1 || name1 != fmt.Sprintf("/tmp/%d_0.gz", rnd) { @@ -303,19 +308,19 @@ func TestFileOutputAppendSizeLimitOverflow(t *testing.T) { message := []byte("1 1 1\r\ntest") - messageSize := len(message) + len(payloadSeparator) + messageSize := len(message) + len(proto.PayloadSeparator) output := NewFileOutput(name, &FileOutputConfig{Append: false, FlushInterval: time.Minute, SizeLimit: size.Size(2 * messageSize)}) - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name1 := output.file.Name() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name2 := output.file.Name() output.flush() - output.PluginWrite(&Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 1 1\r\n"), Data: []byte("test")}) name3 := output.file.Name() if name2 != name1 || name1 != fmt.Sprintf("/tmp/%d_0", rnd) { diff --git a/output_s3.go b/pkg/file/output_s3.go similarity index 79% rename from output_s3.go rename to pkg/file/output_s3.go index 2cd575414..8a0725d25 100644 --- a/output_s3.go +++ b/pkg/file/output_s3.go @@ -1,10 +1,9 @@ -package goreplay +package file import ( _ "bufio" "fmt" _ "io" - "log" "math/rand" "os" "path/filepath" @@ -14,8 +13,14 @@ import ( 
"github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" _ "github.com/aws/aws-sdk-go/service/s3/s3manager" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/pro" + + "github.com/rs/zerolog/log" ) +var s3Logger = log.With().Str("component", "s3").Logger() + // S3Output output plugin type S3Output struct { pathTemplate string @@ -28,8 +33,8 @@ type S3Output struct { // NewS3Output constructor for FileOutput, accepts path func NewS3Output(pathTemplate string, config *FileOutputConfig) *S3Output { - if !PRO { - log.Fatal("Using S3 output and input requires PRO license") + if !pro.PRO { + s3Logger.Fatal().Msg("Using S3 output and input requires PRO license") return nil } @@ -63,12 +68,12 @@ func NewS3Output(pathTemplate string, config *FileOutputConfig) *S3Output { func (o *S3Output) connect() { if o.session == nil { o.session = session.Must(session.NewSession(awsConfig())) - log.Println("[S3 Output] S3 connection successfully initialized") + s3Logger.Info().Msg("[S3 Output] S3 connection successfully initialized") } } // PluginWrite writes message to this plugin -func (o *S3Output) PluginWrite(msg *Message) (n int, err error) { +func (o *S3Output) PluginWrite(msg *plugin.Message) (n int, err error) { return o.buffer.PluginWrite(msg) } @@ -105,12 +110,12 @@ func (o *S3Output) keyPath(idx int) (bucket, key string) { func (o *S3Output) onBufferUpdate(path string) { svc := s3.New(o.session) - idx := getFileIndex(path) + idx := GetFileIndex(path) bucket, key := o.keyPath(idx) file, err := os.Open(path) if err != nil { - Debug(0, fmt.Sprintf("[S3 Output] Failed to open file %q. 
err: %q", path, err)) + s3Logger.Error().Err(err).Msgf("[S3 Output] Failed to open file %q", path) return } defer os.Remove(path) @@ -121,7 +126,7 @@ func (o *S3Output) onBufferUpdate(path string) { Key: aws.String(key), }) if err != nil { - Debug(0, fmt.Sprintf("[S3 Output] Failed to upload data to %q/%q, %q", bucket, key, err)) + s3Logger.Error().Err(err).Msgf("[S3 Output] Failed to upload data to %q/%q", bucket, key) return } diff --git a/s3_reader.go b/pkg/file/s3_reader.go similarity index 82% rename from s3_reader.go rename to pkg/file/s3_reader.go index e485cff52..8d28d42e1 100644 --- a/s3_reader.go +++ b/pkg/file/s3_reader.go @@ -1,8 +1,8 @@ -package goreplay +package file import ( "bytes" - "log" + "os" "strconv" "strings" @@ -10,6 +10,7 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" + "github.com/buger/goreplay/pkg/pro" ) // S3ReadCloser ... @@ -36,10 +37,10 @@ func awsConfig() *aws.Config { if endpoint := os.Getenv("AWS_ENDPOINT_URL"); endpoint != "" { config.Endpoint = aws.String(endpoint) - log.Println("Custom endpoint:", endpoint) + s3Logger.Debug().Msgf("Custom endpoint: %s", endpoint) } - log.Println("Connecting to S3. Region: " + region) + s3Logger.Debug().Msgf("Connecting to S3. 
Region: %s", region) config.CredentialsChainVerboseErrors = aws.Bool(true) @@ -52,15 +53,15 @@ func awsConfig() *aws.Config { // NewS3ReadCloser returns new instance of S3 read closer func NewS3ReadCloser(path string) *S3ReadCloser { - if !PRO { - log.Fatal("Using S3 input and output require PRO license") + if !pro.PRO { + s3Logger.Fatal().Msg("Using S3 input and output require PRO license") return nil } bucket, key := parseS3Url(path) sess := session.Must(session.NewSession(awsConfig())) - log.Println("[S3 Input] S3 connection successfully initialized", path) + s3Logger.Info().Msgf("S3 connection successfully initialized %v", path) return &S3ReadCloser{ bucket: bucket, @@ -87,7 +88,7 @@ func (s *S3ReadCloser) Read(b []byte) (n int, e error) { resp, err := svc.GetObject(params) if err != nil { - log.Println("[S3 Input] Error during getting file", s.bucket, s.key, err) + s3Logger.Error().Err(err).Msgf("Error during getting file %s %s", s.bucket, s.key) } else { s.totalSize, _ = strconv.Atoi(strings.Split(*resp.ContentRange, "/")[1]) s.buf.ReadFrom(resp.Body) diff --git a/s3_test.go b/pkg/file/s3_test.go similarity index 99% rename from s3_test.go rename to pkg/file/s3_test.go index 218ef91b5..edda0ffbb 100644 --- a/s3_test.go +++ b/pkg/file/s3_test.go @@ -1,6 +1,6 @@ //go:build pro -package goreplay +package file import ( "fmt" diff --git a/input_http.go b/pkg/http/input_http.go similarity index 72% rename from input_http.go rename to pkg/http/input_http.go index 7ead5c803..892ab7c85 100644 --- a/input_http.go +++ b/pkg/http/input_http.go @@ -1,13 +1,19 @@ -package goreplay +package http import ( - "log" "net" "net/http" "net/http/httputil" "time" + + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + + "github.com/rs/zerolog/log" ) +var inputLogger = log.With().Str("component", "input_http").Logger() + // HTTPInput used for sending requests to Gor via http type HTTPInput struct { data chan []byte @@ -28,14 +34,14 @@ func 
NewHTTPInput(address string) (i *HTTPInput) { } // PluginRead reads message from this plugin -func (i *HTTPInput) PluginRead() (*Message, error) { - var msg Message +func (i *HTTPInput) PluginRead() (*plugin.Message, error) { + var msg plugin.Message select { case <-i.stop: - return nil, ErrorStopped + return nil, plugin.ErrorStopped case buf := <-i.data: msg.Data = buf - msg.Meta = payloadHeader(RequestPayload, uuid(), time.Now().UnixNano(), -1) + msg.Meta = proto.PayloadHeader(proto.RequestPayload, proto.UUID(), time.Now().UnixNano(), -1) return &msg, nil } } @@ -64,14 +70,14 @@ func (i *HTTPInput) listen(address string) { i.listener, err = net.Listen("tcp", address) if err != nil { - log.Fatal("HTTP input listener failure:", err) + inputLogger.Fatal().Err(err).Msg("HTTP input listener failure") } i.address = i.listener.Addr().String() go func() { err = http.Serve(i.listener, mux) if err != nil && err != http.ErrServerClosed { - log.Fatal("HTTP input serve failure ", err) + inputLogger.Fatal().Err(err).Msg("HTTP input serve failure") } }() } diff --git a/input_http_test.go b/pkg/http/input_http_test.go similarity index 67% rename from input_http_test.go rename to pkg/http/input_http_test.go index 873e0965e..4f028db16 100644 --- a/input_http_test.go +++ b/pkg/http/input_http_test.go @@ -1,4 +1,4 @@ -package goreplay +package http import ( "bytes" @@ -7,6 +7,10 @@ import ( "sync" "testing" "time" + + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/test" ) func TestHTTPInput(t *testing.T) { @@ -14,18 +18,18 @@ func TestHTTPInput(t *testing.T) { input := NewHTTPInput("127.0.0.1:0") time.Sleep(time.Millisecond) - output := NewTestOutput(func(*Message) { + output := test.NewTestOutput(func(*plugin.Message) { wg.Done() }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: 
[]plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New(&emitter.Config{}) + go emitter.Start(plugins) address := strings.Replace(input.address, "[::]", "127.0.0.1", -1) @@ -45,22 +49,22 @@ func TestInputHTTPLargePayload(t *testing.T) { large[n-1] = '0' input := NewHTTPInput("127.0.0.1:0") - output := NewTestOutput(func(msg *Message) { + output := test.NewTestOutput(func(msg *plugin.Message) { _len := len(msg.Data) if _len >= n { // considering http body CRLF t.Errorf("expected body to be >= %d", n) } wg.Done() }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() + emitter := emitter.New() defer emitter.Close() - go emitter.Start(plugins, Settings.Middleware) + go emitter.Start(plugins) address := strings.Replace(input.address, "[::]", "127.0.0.1", -1) var req *http.Request diff --git a/output_http.go b/pkg/http/output_http.go similarity index 80% rename from output_http.go rename to pkg/http/output_http.go index 7ba57c808..0ec852a67 100644 --- a/output_http.go +++ b/pkg/http/output_http.go @@ -1,24 +1,30 @@ -package goreplay +package http import ( "bufio" "bytes" "crypto/tls" "fmt" - "github.com/buger/goreplay/internal/size" - "log" "math" "net/http" "net/http/httputil" "net/url" "sync/atomic" "time" + + "github.com/buger/goreplay/internal/size" + "github.com/buger/goreplay/pkg/elasticsearch" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/stats" + + "github.com/rs/zerolog/log" ) +var outputLogger = log.With().Str("component", "output_http").Logger() + const ( - initialDynamicWorkers = 10 - readChunkSize = 64 * 1024 - maxResponseSize = 1073741824 + 
readChunkSize = 64 * 1024 ) type response struct { @@ -71,24 +77,24 @@ func (hoc *HTTPOutputConfig) Copy() *HTTPOutputConfig { type HTTPOutput struct { activeWorkers int32 config *HTTPOutputConfig - queueStats *GorStat - elasticSearch *ESPlugin + queueStats *stats.Stats + elasticSearch *elasticsearch.ESPlugin client *HTTPClient stopWorker chan struct{} - queue chan *Message - responses chan *response + queue chan *plugin.Message + responses chan *plugin.Response stop chan bool // Channel used only to indicate goroutine should shutdown } // NewHTTPOutput constructor for HTTPOutput // Initialize workers -func NewHTTPOutput(address string, config *HTTPOutputConfig) PluginReadWriter { +func NewHTTPOutput(address string, config *HTTPOutputConfig) plugin.ReadWriter { o := new(HTTPOutput) var err error newConfig := config.Copy() newConfig.url, err = url.Parse(address) if err != nil { - log.Fatal(fmt.Sprintf("[OUTPUT-HTTP] parse HTTP output URL error[%q]", err)) + outputLogger.Fatal().Msg(fmt.Sprintf("[OUTPUT-HTTP] parse HTTP output URL error[%q]", err)) } if newConfig.url.Scheme == "" { newConfig.url.Scheme = "http" @@ -124,18 +130,18 @@ func NewHTTPOutput(address string, config *HTTPOutputConfig) PluginReadWriter { o.config = newConfig o.stop = make(chan bool) if o.config.Stats { - o.queueStats = NewGorStat("output_http", o.config.StatsMs) + o.queueStats = stats.New("output_http", o.config.StatsMs) } - o.queue = make(chan *Message, o.config.QueueLen) + o.queue = make(chan *plugin.Message, o.config.QueueLen) if o.config.TrackResponses { - o.responses = make(chan *response, o.config.QueueLen) + o.responses = make(chan *plugin.Response, o.config.QueueLen) } // it should not be buffered to avoid races o.stopWorker = make(chan struct{}) if o.config.ElasticSearch != "" { - o.elasticSearch = new(ESPlugin) + o.elasticSearch = new(elasticsearch.ESPlugin) o.elasticSearch.Init(o.config.ElasticSearch) } o.client = NewHTTPClient(o.config) @@ -186,14 +192,14 @@ func (o *HTTPOutput) 
startWorker() { } // PluginWrite writes message to this plugin -func (o *HTTPOutput) PluginWrite(msg *Message) (n int, err error) { - if !isRequestPayload(msg.Meta) { +func (o *HTTPOutput) PluginWrite(msg *plugin.Message) (n int, err error) { + if !proto.IsRequestPayload(msg.Meta) { return len(msg.Data), nil } select { case <-o.stop: - return 0, ErrorStopped + return 0, plugin.ErrorStopped case o.queue <- msg: } @@ -211,36 +217,36 @@ func (o *HTTPOutput) PluginWrite(msg *Message) (n int, err error) { } // PluginRead reads message from this plugin -func (o *HTTPOutput) PluginRead() (*Message, error) { +func (o *HTTPOutput) PluginRead() (*plugin.Message, error) { if !o.config.TrackResponses { - return nil, ErrorStopped + return nil, plugin.ErrorStopped } - var resp *response - var msg Message + var resp *plugin.Response + var msg plugin.Message select { case <-o.stop: - return nil, ErrorStopped + return nil, plugin.ErrorStopped case resp = <-o.responses: - msg.Data = resp.payload + msg.Data = resp.Payload } - msg.Meta = payloadHeader(ReplayedResponsePayload, resp.uuid, resp.startedAt, resp.roundTripTime) + msg.Meta = proto.PayloadHeader(proto.ReplayedResponsePayload, resp.UUID, resp.StartedAt, resp.RoundTripTime) return &msg, nil } -func (o *HTTPOutput) sendRequest(client *HTTPClient, msg *Message) { - if !isRequestPayload(msg.Meta) { +func (o *HTTPOutput) sendRequest(client *HTTPClient, msg *plugin.Message) { + if !proto.IsRequestPayload(msg.Meta) { return } - uuid := payloadID(msg.Meta) + uuid := proto.PayloadID(msg.Meta) start := time.Now() resp, err := client.Send(msg.Data) stop := time.Now() if err != nil { - Debug(1, fmt.Sprintf("[HTTP-OUTPUT] error when sending: %q", err)) + outputLogger.Error().Err(err).Msg("[HTTP-OUTPUT]") return } if resp == nil { @@ -248,7 +254,7 @@ func (o *HTTPOutput) sendRequest(client *HTTPClient, msg *Message) { } if o.config.TrackResponses { - o.responses <- &response{resp, uuid, start.UnixNano(), stop.UnixNano() - start.UnixNano()} 
+ o.responses <- &plugin.Response{resp, uuid, start.UnixNano(), stop.UnixNano() - start.UnixNano()} } if o.elasticSearch != nil { @@ -282,12 +288,12 @@ func NewHTTPClient(config *HTTPOutputConfig) *HTTPClient { Timeout: client.config.Timeout, CheckRedirect: func(req *http.Request, via []*http.Request) error { if len(via) >= client.config.RedirectLimit { - Debug(1, fmt.Sprintf("[HTTPCLIENT] maximum output-http-redirects[%d] reached!", client.config.RedirectLimit)) + outputLogger.Warn().Msgf("[HTTPCLIENT] maximum output-http-redirects[%d] reached!", client.config.RedirectLimit) return http.ErrUseLastResponse } lastReq := via[len(via)-1] resp := req.Response - Debug(2, fmt.Sprintf("[HTTPCLIENT] HTTP redirects from %q to %q with %q", lastReq.Host, req.Host, resp.Status)) + outputLogger.Info().Msgf("[HTTPCLIENT] HTTP redirects from %q to %q with %q", lastReq.Host, req.Host, resp.Status) return nil }, } diff --git a/output_http_test.go b/pkg/http/output_http_test.go similarity index 62% rename from output_http_test.go rename to pkg/http/output_http_test.go index d884609be..467bbb744 100644 --- a/output_http_test.go +++ b/pkg/http/output_http_test.go @@ -1,4 +1,4 @@ -package goreplay +package http import ( "io/ioutil" @@ -7,12 +7,19 @@ import ( _ "net/http/httputil" "sync" "testing" + + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/http_modifier" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/pro" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/test" ) func TestHTTPOutput(t *testing.T) { wg := new(sync.WaitGroup) - input := NewTestInput() + input := test.NewTestInput() server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { if req.Header.Get("User-Agent") != "Gor" { @@ -36,23 +43,25 @@ func TestHTTPOutput(t *testing.T) { })) defer server.Close() - headers := HTTPHeaders{httpHeader{"User-Agent", "Gor"}} - methods := HTTPMethods{[]byte("GET"), []byte("PUT"), 
[]byte("POST")} - Settings.ModifierConfig = HTTPModifierConfig{Headers: headers, Methods: methods} + headers := http_modifier.HTTPHeaders{http_modifier.HTTPHeader{"User-Agent", "Gor"}} + methods := http_modifier.HTTPMethods{[]byte("GET"), []byte("PUT"), []byte("POST")} + modifierConfig := http_modifier.HTTPModifierConfig{Headers: headers, Methods: methods} httpOutput := NewHTTPOutput(server.URL, &HTTPOutputConfig{TrackResponses: false}) - output := NewTestOutput(func(*Message) { + output := test.NewTestOutput(func(*plugin.Message) { wg.Done() }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{httpOutput, output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{httpOutput, output}, } plugins.All = append(plugins.All, input, output, httpOutput) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New(&emitter.Config{ + ModifierConfig: modifierConfig, + }) + go emitter.Start(plugins) for i := 0; i < 10; i++ { // 2 http-output, 2 - test output request @@ -64,14 +73,12 @@ func TestHTTPOutput(t *testing.T) { wg.Wait() emitter.Close() - - Settings.ModifierConfig = HTTPModifierConfig{} } func TestHTTPOutputKeepOriginalHost(t *testing.T) { wg := new(sync.WaitGroup) - input := NewTestInput() + input := test.NewTestInput() server := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { if req.Host != "custom-host.com" { @@ -82,26 +89,27 @@ func TestHTTPOutputKeepOriginalHost(t *testing.T) { })) defer server.Close() - headers := HTTPHeaders{httpHeader{"Host", "custom-host.com"}} - Settings.ModifierConfig = HTTPModifierConfig{Headers: headers} + headers := http_modifier.HTTPHeaders{http_modifier.HTTPHeader{"Host", "custom-host.com"}} + modifierConfig := http_modifier.HTTPModifierConfig{Headers: headers} output := NewHTTPOutput(server.URL, &HTTPOutputConfig{OriginalHost: true, SkipVerify: true}) - plugins := 
&InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New(&emitter.Config{ + ModifierConfig: modifierConfig, + }) + go emitter.Start(plugins) wg.Add(1) input.EmitGET() wg.Wait() emitter.Close() - Settings.ModifierConfig = HTTPModifierConfig{} } func TestHTTPOutputSSL(t *testing.T) { @@ -112,17 +120,17 @@ func TestHTTPOutputSSL(t *testing.T) { wg.Done() })) - input := NewTestInput() + input := test.NewTestInput() output := NewHTTPOutput(server.URL, &HTTPOutputConfig{SkipVerify: true}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) wg.Add(2) @@ -134,49 +142,49 @@ func TestHTTPOutputSSL(t *testing.T) { } func TestHTTPOutputSessions(t *testing.T) { + pro.Enable() + wg := new(sync.WaitGroup) - input := NewTestInput() - input.skipHeader = true + input := test.NewTestInput() + input.SkipHeader = true server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { wg.Done() })) defer server.Close() - Settings.RecognizeTCPSessions = true - Settings.SplitOutput = true output := NewHTTPOutput(server.URL, &HTTPOutputConfig{}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, 
Settings.Middleware) + emitter := emitter.New(&emitter.Config{ + RecognizeTCPSessions: true, + SplitOutput: true, + }) + go emitter.Start(plugins) uuid1 := []byte("1234567890123456789a0000") uuid2 := []byte("1234567890123456789d0000") for i := 0; i < 10; i++ { wg.Add(1) // OPTIONS should be ignored - copy(uuid1[20:], randByte(4)) + copy(uuid1[20:], proto.RandByte(4)) input.EmitBytes([]byte("1 " + string(uuid1) + " 1\n" + "GET / HTTP/1.1\r\n\r\n")) } for i := 0; i < 10; i++ { wg.Add(1) // OPTIONS should be ignored - copy(uuid2[20:], randByte(4)) + copy(uuid2[20:], proto.RandByte(4)) input.EmitBytes([]byte("1 " + string(uuid2) + " 1\n" + "GET / HTTP/1.1\r\n\r\n")) } wg.Wait() emitter.Close() - - Settings.RecognizeTCPSessions = false - Settings.SplitOutput = false } func BenchmarkHTTPOutput(b *testing.B) { @@ -187,17 +195,17 @@ func BenchmarkHTTPOutput(b *testing.B) { })) defer server.Close() - input := NewTestInput() + input := test.NewTestInput() output := NewHTTPOutput(server.URL, &HTTPOutputConfig{WorkersMax: 1}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) for i := 0; i < b.N; i++ { wg.Add(1) @@ -216,17 +224,17 @@ func BenchmarkHTTPOutputTLS(b *testing.B) { })) defer server.Close() - input := NewTestInput() + input := test.NewTestInput() output := NewHTTPOutput(server.URL, &HTTPOutputConfig{SkipVerify: true, WorkersMax: 1}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, 
Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) for i := 0; i < b.N; i++ { wg.Add(1) diff --git a/http_modifier.go b/pkg/http_modifier/http_modifier.go similarity index 98% rename from http_modifier.go rename to pkg/http_modifier/http_modifier.go index 859b4031d..db808bf06 100644 --- a/http_modifier.go +++ b/pkg/http_modifier/http_modifier.go @@ -1,11 +1,12 @@ -package goreplay +package http_modifier import ( "bytes" "encoding/base64" - "github.com/buger/goreplay/proto" "hash/fnv" "strings" + + proto "github.com/buger/goreplay/pkg/http_proto" ) type HTTPModifier struct { diff --git a/http_modifier_settings.go b/pkg/http_modifier/http_modifier_settings.go similarity index 98% rename from http_modifier_settings.go rename to pkg/http_modifier/http_modifier_settings.go index c77314848..66331fa63 100644 --- a/http_modifier_settings.go +++ b/pkg/http_modifier/http_modifier_settings.go @@ -1,4 +1,4 @@ -package goreplay +package http_modifier import ( "errors" @@ -124,13 +124,13 @@ func (h *HTTPHashFilters) Set(value string) error { } // Handling of --http-set-header option -type httpHeader struct { +type HTTPHeader struct { Name string Value string } // HTTPHeaders is a slice of headers that must appended -type HTTPHeaders []httpHeader +type HTTPHeaders []HTTPHeader func (h *HTTPHeaders) String() string { return fmt.Sprint(*h) @@ -143,7 +143,7 @@ func (h *HTTPHeaders) Set(value string) error { return errors.New("Expected `Key: Value`") } - header := httpHeader{ + header := HTTPHeader{ strings.TrimSpace(v[0]), strings.TrimSpace(v[1]), } diff --git a/http_modifier_settings_test.go b/pkg/http_modifier/http_modifier_settings_test.go similarity index 98% rename from http_modifier_settings_test.go rename to pkg/http_modifier/http_modifier_settings_test.go index 03547fd06..bb30a953f 100644 --- a/http_modifier_settings_test.go +++ b/pkg/http_modifier/http_modifier_settings_test.go @@ -1,4 +1,4 @@ -package goreplay +package http_modifier import ( 
"testing" diff --git a/http_modifier_test.go b/pkg/http_modifier/http_modifier_test.go similarity index 99% rename from http_modifier_test.go rename to pkg/http_modifier/http_modifier_test.go index e890248e3..532c5af84 100644 --- a/http_modifier_test.go +++ b/pkg/http_modifier/http_modifier_test.go @@ -1,9 +1,10 @@ -package goreplay +package http_modifier import ( "bytes" - "github.com/buger/goreplay/proto" "testing" + + proto "github.com/buger/goreplay/pkg/http_proto" ) func TestHTTPModifierWithoutConfig(t *testing.T) { diff --git a/proto/fuzz.go b/pkg/http_proto/fuzz.go similarity index 81% rename from proto/fuzz.go rename to pkg/http_proto/fuzz.go index 8521391d8..bb645b33b 100644 --- a/proto/fuzz.go +++ b/pkg/http_proto/fuzz.go @@ -1,6 +1,6 @@ //go:build gofuzz -package proto +package http_proto func Fuzz(data []byte) int { diff --git a/proto/proto.go b/pkg/http_proto/proto.go similarity index 99% rename from proto/proto.go rename to pkg/http_proto/proto.go index 1c3abe6ba..2978c37c5 100644 --- a/proto/proto.go +++ b/pkg/http_proto/proto.go @@ -14,11 +14,12 @@ Example of HTTP payload for future references, new line symbols escaped: \r\n \r\n */ -package proto +package http_proto import ( "bufio" "bytes" + "github.com/buger/goreplay/internal/byteutils" _ "fmt" diff --git a/proto/proto_test.go b/pkg/http_proto/proto_test.go similarity index 99% rename from proto/proto_test.go rename to pkg/http_proto/proto_test.go index 3afe1f62a..204194d25 100644 --- a/proto/proto_test.go +++ b/pkg/http_proto/proto_test.go @@ -1,4 +1,4 @@ -package proto +package http_proto import ( "bytes" diff --git a/input_kafka.go b/pkg/kafka/input_kafka.go similarity index 76% rename from input_kafka.go rename to pkg/kafka/input_kafka.go index a4c2fa225..dbe2db334 100644 --- a/input_kafka.go +++ b/pkg/kafka/input_kafka.go @@ -1,12 +1,15 @@ -package goreplay +package kafka import ( "encoding/json" - "log" "strings" "github.com/Shopify/sarama" "github.com/Shopify/sarama/mocks" + 
"github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + + "github.com/rs/zerolog/log" ) // KafkaInput is used for receiving Kafka messages and @@ -31,13 +34,13 @@ func NewKafkaInput(_ string, config *InputKafkaConfig, tlsConfig *KafkaTLSConfig con, err = sarama.NewConsumer(strings.Split(config.Host, ","), c) if err != nil { - log.Fatalln("Failed to start Sarama(Kafka) consumer:", err) + log.Fatal().Err(err).Msg("Failed to start Sarama(Kafka) consumer") } } partitions, err := con.Partitions(config.Topic) if err != nil { - log.Fatalln("Failed to collect Sarama(Kafka) partitions:", err) + log.Fatal().Err(err).Msg("Failed to collect Sarama(Kafka) partitions") } i := &KafkaInput{ @@ -50,7 +53,7 @@ func NewKafkaInput(_ string, config *InputKafkaConfig, tlsConfig *KafkaTLSConfig for index, partition := range partitions { consumer, err := con.ConsumePartition(config.Topic, partition, sarama.OffsetNewest) if err != nil { - log.Fatalln("Failed to start Sarama(Kafka) partition consumer:", err) + log.Fatal().Err(err).Msg("Failed to start Sarama(Kafka) partition consumer") } go func(consumer sarama.PartitionConsumer) { @@ -72,17 +75,17 @@ func NewKafkaInput(_ string, config *InputKafkaConfig, tlsConfig *KafkaTLSConfig // ErrorHandler should receive errors func (i *KafkaInput) ErrorHandler(consumer sarama.PartitionConsumer) { for err := range consumer.Errors() { - Debug(1, "Failed to read access log entry:", err) + log.Error().Err(err).Msg("Failed to read access log entry") } } // PluginRead a reads message from this plugin -func (i *KafkaInput) PluginRead() (*Message, error) { +func (i *KafkaInput) PluginRead() (*plugin.Message, error) { var message *sarama.ConsumerMessage - var msg Message + var msg plugin.Message select { case <-i.quit: - return nil, ErrorStopped + return nil, plugin.ErrorStopped case message = <-i.messages: } @@ -95,14 +98,14 @@ func (i *KafkaInput) PluginRead() (*Message, error) { var err error msg.Data, err = kafkaMessage.Dump() if 
err != nil { - Debug(1, "[INPUT-KAFKA] failed to decode access log entry:", err) + log.Error().Err(err).Msg("Failed to decode access log entry") return nil, err } } // does it have meta - if isOriginPayload(msg.Data) { - msg.Meta, msg.Data = payloadMetaWithBody(msg.Data) + if proto.IsOriginPayload(msg.Data) { + msg.Meta, msg.Data = proto.PayloadMetaWithBody(msg.Data) } return &msg, nil diff --git a/input_kafka_test.go b/pkg/kafka/input_kafka_test.go similarity index 98% rename from input_kafka_test.go rename to pkg/kafka/input_kafka_test.go index d632cf824..0fbcacbf9 100644 --- a/input_kafka_test.go +++ b/pkg/kafka/input_kafka_test.go @@ -1,4 +1,4 @@ -package goreplay +package kafka import ( "testing" diff --git a/kafka.go b/pkg/kafka/kafka.go similarity index 97% rename from kafka.go rename to pkg/kafka/kafka.go index f14977014..80c7b471c 100644 --- a/kafka.go +++ b/pkg/kafka/kafka.go @@ -1,4 +1,4 @@ -package goreplay +package kafka import ( "bytes" @@ -8,10 +8,11 @@ import ( "crypto/x509" "errors" "fmt" - "github.com/buger/goreplay/proto" "io/ioutil" "log" + "github.com/buger/goreplay/pkg/http_proto" + "github.com/Shopify/sarama" "github.com/xdg-go/scram" ) @@ -127,13 +128,13 @@ func (m KafkaMessage) Dump() ([]byte, error) { b.WriteString(fmt.Sprintf("%s %s %s\n", m.ReqType, m.ReqID, m.ReqTs)) b.WriteString(fmt.Sprintf("%s %s HTTP/1.1", m.ReqMethod, m.ReqURL)) - b.Write(proto.CRLF) + b.Write(http_proto.CRLF) for key, value := range m.ReqHeaders { b.WriteString(fmt.Sprintf("%s: %s", key, value)) - b.Write(proto.CRLF) + b.Write(http_proto.CRLF) } - b.Write(proto.CRLF) + b.Write(http_proto.CRLF) b.WriteString(m.ReqBody) return b.Bytes(), nil diff --git a/output_kafka.go b/pkg/kafka/output_kafka.go similarity index 75% rename from output_kafka.go rename to pkg/kafka/output_kafka.go index faa50519c..7db9791bd 100644 --- a/output_kafka.go +++ b/pkg/kafka/output_kafka.go @@ -1,15 +1,19 @@ -package goreplay +package kafka import ( "encoding/json" - 
"github.com/buger/goreplay/internal/byteutils" - "github.com/buger/goreplay/proto" - "log" "strings" "time" + "github.com/buger/goreplay/internal/byteutils" + "github.com/buger/goreplay/pkg/http_proto" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + "github.com/Shopify/sarama" "github.com/Shopify/sarama/mocks" + + "github.com/rs/zerolog/log" ) // KafkaOutput is used for sending payloads to kafka in JSON format. @@ -22,7 +26,7 @@ type KafkaOutput struct { const KafkaOutputFrequency = 500 // NewKafkaOutput creates instance of kafka producer client with TLS config -func NewKafkaOutput(_ string, config *OutputKafkaConfig, tlsConfig *KafkaTLSConfig) PluginWriter { +func NewKafkaOutput(_ string, config *OutputKafkaConfig, tlsConfig *KafkaTLSConfig) plugin.Writer { c := NewKafkaConfig(&config.SASLConfig, tlsConfig) var producer sarama.AsyncProducer @@ -39,7 +43,7 @@ func NewKafkaOutput(_ string, config *OutputKafkaConfig, tlsConfig *KafkaTLSConf var err error producer, err = sarama.NewAsyncProducer(brokerList, c) if err != nil { - log.Fatalln("Failed to start Sarama(Kafka) producer:", err) + log.Fatal().Err(err).Msg("Failed to start Sarama(Kafka) producer") } } @@ -57,33 +61,33 @@ func NewKafkaOutput(_ string, config *OutputKafkaConfig, tlsConfig *KafkaTLSConf // ErrorHandler should receive errors func (o *KafkaOutput) ErrorHandler() { for err := range o.producer.Errors() { - Debug(1, "Failed to write access log entry:", err) + log.Error().Err(err).Msg("Failed to write access log entry") } } // PluginWrite writes a message to this plugin -func (o *KafkaOutput) PluginWrite(msg *Message) (n int, err error) { +func (o *KafkaOutput) PluginWrite(msg *plugin.Message) (n int, err error) { var message sarama.StringEncoder if !o.config.UseJSON { message = sarama.StringEncoder(byteutils.SliceToString(msg.Meta) + byteutils.SliceToString(msg.Data)) } else { - mimeHeader := proto.ParseHeaders(msg.Data) + mimeHeader := 
http_proto.ParseHeaders(msg.Data) header := make(map[string]string) for k, v := range mimeHeader { header[k] = strings.Join(v, ", ") } - meta := payloadMeta(msg.Meta) + meta := proto.PayloadMeta(msg.Meta) req := msg.Data kafkaMessage := KafkaMessage{ - ReqURL: byteutils.SliceToString(proto.Path(req)), + ReqURL: byteutils.SliceToString(http_proto.Path(req)), ReqType: byteutils.SliceToString(meta[0]), ReqID: byteutils.SliceToString(meta[1]), ReqTs: byteutils.SliceToString(meta[2]), - ReqMethod: byteutils.SliceToString(proto.Method(req)), - ReqBody: byteutils.SliceToString(proto.Body(req)), + ReqMethod: byteutils.SliceToString(http_proto.Method(req)), + ReqBody: byteutils.SliceToString(http_proto.Body(req)), ReqHeaders: header, } jsonMessage, _ := json.Marshal(&kafkaMessage) diff --git a/output_kafka_test.go b/pkg/kafka/output_kafka_test.go similarity index 80% rename from output_kafka_test.go rename to pkg/kafka/output_kafka_test.go index b4481611b..24c372e37 100644 --- a/output_kafka_test.go +++ b/pkg/kafka/output_kafka_test.go @@ -1,10 +1,11 @@ -package goreplay +package kafka import ( "testing" "github.com/Shopify/sarama" "github.com/Shopify/sarama/mocks" + "github.com/buger/goreplay/pkg/plugin" ) func TestOutputKafkaRAW(t *testing.T) { @@ -19,7 +20,7 @@ func TestOutputKafkaRAW(t *testing.T) { UseJSON: false, }, nil) - output.PluginWrite(&Message{Meta: []byte("1 2 3\n"), Data: []byte("GET / HTTP1.1\r\nHeader: 1\r\n\r\n")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 2 3\n"), Data: []byte("GET / HTTP1.1\r\nHeader: 1\r\n\r\n")}) resp := <-producer.Successes() @@ -42,7 +43,7 @@ func TestOutputKafkaJSON(t *testing.T) { UseJSON: true, }, nil) - output.PluginWrite(&Message{Meta: []byte("1 2 3\n"), Data: []byte("GET / HTTP1.1\r\nHeader: 1\r\n\r\n")}) + output.PluginWrite(&plugin.Message{Meta: []byte("1 2 3\n"), Data: []byte("GET / HTTP1.1\r\nHeader: 1\r\n\r\n")}) resp := <-producer.Successes() diff --git a/middleware.go b/pkg/middleware/middleware.go similarity 
index 56% rename from middleware.go rename to pkg/middleware/middleware.go index 740b8132a..705e175ce 100644 --- a/middleware.go +++ b/pkg/middleware/middleware.go @@ -1,4 +1,4 @@ -package goreplay +package middleware import ( "bufio" @@ -11,30 +11,47 @@ import ( "strings" "sync" "syscall" + + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/prettify" + "github.com/buger/goreplay/pkg/proto" + + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" ) // Middleware represents a middleware object type Middleware struct { - command string - data chan *Message + Command string + Data chan *plugin.Message Stdin io.Writer Stdout io.Reader - commandCancel context.CancelFunc - stop chan bool // Channel used only to indicate goroutine should shutdown + CommandCancel context.CancelFunc + Stop chan bool // Channel used only to indicate goroutine should shutdown closed bool + config *MiddlewareConfig mu sync.RWMutex } +// MiddlewareConfig represents a middleware configuration +type MiddlewareConfig struct { + PrettifyHTTP bool +} + // NewMiddleware returns new middleware -func NewMiddleware(command string) *Middleware { +func NewMiddleware(command string, config *MiddlewareConfig) *Middleware { m := new(Middleware) - m.command = command - m.data = make(chan *Message, 1000) - m.stop = make(chan bool) + m.Command = command + m.Data = make(chan *plugin.Message, 1000) + m.Stop = make(chan bool) + m.config = config + if m.config == nil { + m.config = &MiddlewareConfig{} + } commands := strings.Split(command, " ") ctx, cancl := context.WithCancel(context.Background()) - m.commandCancel = cancl + m.CommandCancel = cancl cmd := exec.CommandContext(ctx, commands[0], commands[1:]...) 
m.Stdout, _ = cmd.StdoutPipe() @@ -42,7 +59,7 @@ func NewMiddleware(command string) *Middleware { cmd.Stderr = os.Stderr - go m.read(m.Stdout) + go m.Read(m.Stdout) go func() { defer m.Close() @@ -57,7 +74,8 @@ func NewMiddleware(command string) *Middleware { return } } - Debug(0, fmt.Sprintf("[MIDDLEWARE] command[%q] error: %q", command, err.Error())) + + log.Error().Err(err).Msgf("[MIDDLEWARE] command[%q] error", command) } }() @@ -65,12 +83,15 @@ func NewMiddleware(command string) *Middleware { } // ReadFrom start a worker to read from this plugin -func (m *Middleware) ReadFrom(plugin PluginReader) { - Debug(2, fmt.Sprintf("[MIDDLEWARE] command[%q] Starting reading from %q", m.command, plugin)) +func (m *Middleware) ReadFrom(plugin plugin.Reader) { + if log.Logger.GetLevel() == zerolog.DebugLevel { + log.Debug().Msgf("command[%q] Starting reading from %q", m.Command, plugin) + } + go m.copy(m.Stdin, plugin) } -func (m *Middleware) copy(to io.Writer, from PluginReader) { +func (m *Middleware) copy(to io.Writer, from plugin.Reader) { var buf, dst []byte for { @@ -82,8 +103,8 @@ func (m *Middleware) copy(to io.Writer, from PluginReader) { continue } buf = msg.Data - if Settings.PrettifyHTTP { - buf = prettifyHTTP(msg.Data) + if m.config != nil && m.config.PrettifyHTTP { + buf = prettify.PrettifyHTTP(msg.Data) } dstLen := (len(buf)+len(msg.Meta))*2 + 1 // if enough space was previously allocated use it instead @@ -104,7 +125,8 @@ func (m *Middleware) copy(to io.Writer, from PluginReader) { } } -func (m *Middleware) read(from io.Reader) { +// Read reads from this plugin +func (m *Middleware) Read(from io.Reader) { reader := bufio.NewReader(from) var line []byte var e error @@ -117,33 +139,33 @@ func (m *Middleware) read(from io.Reader) { } buf := make([]byte, (len(line)-1)/2) if _, err := hex.Decode(buf, line[:len(line)-1]); err != nil { - Debug(0, fmt.Sprintf("[MIDDLEWARE] command[%q] failed to decode err: %q", m.command, err)) + 
log.Error().Err(err).Msgf("[MIDDLEWARE] command[%q] failed to decode", m.Command) continue } - var msg Message - msg.Meta, msg.Data = payloadMetaWithBody(buf) + var msg plugin.Message + msg.Meta, msg.Data = proto.PayloadMetaWithBody(buf) select { - case <-m.stop: + case <-m.Stop: return - case m.data <- &msg: + case m.Data <- &msg: } } } // PluginRead reads message from this plugin -func (m *Middleware) PluginRead() (msg *Message, err error) { +func (m *Middleware) PluginRead() (msg *plugin.Message, err error) { select { - case <-m.stop: - return nil, ErrorStopped - case msg = <-m.data: + case <-m.Stop: + return nil, plugin.ErrorStopped + case msg = <-m.Data: } return } func (m *Middleware) String() string { - return fmt.Sprintf("Modifying traffic using %q command", m.command) + return fmt.Sprintf("Modifying traffic using %q command", m.Command) } func (m *Middleware) isClosed() bool { @@ -159,8 +181,8 @@ func (m *Middleware) Close() error { } m.mu.Lock() defer m.mu.Unlock() - m.commandCancel() - close(m.stop) + m.CommandCancel() + close(m.Stop) m.closed = true return nil } diff --git a/middleware_test.go b/pkg/middleware/middleware_test.go similarity index 54% rename from middleware_test.go rename to pkg/middleware/middleware_test.go index cf9807a1e..784ff0ca1 100644 --- a/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -1,95 +1,8 @@ -package goreplay - -import ( - "bytes" - "context" - "os" - "os/exec" - "strings" - "sync/atomic" - "syscall" - "testing" -) - -const echoSh = "./examples/middleware/echo.sh" -const tokenModifier = "go run ./examples/middleware/token_modifier.go" - -var withDebug = append(syscall.Environ(), "GOR_TEST=1") - -func initMiddleware(cmd *exec.Cmd, cancl context.CancelFunc, l PluginReader, c func(error)) *Middleware { - var m Middleware - m.data = make(chan *Message, 1000) - m.stop = make(chan bool) - m.commandCancel = cancl - m.Stdout, _ = cmd.StdoutPipe() - m.Stdin, _ = cmd.StdinPipe() - cmd.Stderr = os.Stderr - go 
m.read(m.Stdout) - go func() { - defer m.Close() - var err error - if err = cmd.Start(); err == nil { - err = cmd.Wait() - } - if err != nil { - c(err) - } - }() - m.ReadFrom(l) - return &m -} - -func initCmd(command string, env []string) (*exec.Cmd, context.CancelFunc) { - commands := strings.Split(command, " ") - ctx, cancl := context.WithCancel(context.Background()) - cmd := exec.CommandContext(ctx, commands[0], commands[1:]...) - cmd.Env = env - return cmd, cancl -} - -func TestMiddlewareEarlyClose(t *testing.T) { - quit := make(chan struct{}) - in := NewTestInput() - cmd, cancl := initCmd(echoSh, withDebug) - midd := initMiddleware(cmd, cancl, in, func(err error) { - if err != nil { - if e, ok := err.(*exec.ExitError); ok { - status := e.Sys().(syscall.WaitStatus) - if status.Signal() != syscall.SIGKILL { - t.Errorf("expected error to be signal killed. got %s", status.Signal().String()) - } - } - } - quit <- struct{}{} - }) - var body = []byte("OPTIONS / HTTP/1.1\r\nHost: example.org\r\n\r\n") - count := uint32(0) - out := NewTestOutput(func(msg *Message) { - if !bytes.Equal(body, msg.Data) { - t.Errorf("expected %q to equal %q", body, msg.Data) - } - atomic.AddUint32(&count, 1) - if atomic.LoadUint32(&count) == 5 { - quit <- struct{}{} - } - }) - pl := &InOutPlugins{} - pl.Inputs = []PluginReader{midd, in} - pl.Outputs = []PluginWriter{out} - pl.All = []interface{}{midd, out, in} - e := NewEmitter() - go e.Start(pl, "") - for i := 0; i < 5; i++ { - in.EmitBytes(body) - } - <-quit - midd.Close() - <-quit -} +package middleware //func TestTokenMiddleware(t *testing.T) { // quit := make(chan struct{}) -// in := NewTestInput() +// in := test.NewTestInput() // in.skipHeader = true // cmd, cancl := initCmd(tokenModifier, withDebug) // midd := initMiddleware(cmd, cancl, in, func(err error) {}) @@ -97,7 +10,7 @@ func TestMiddlewareEarlyClose(t *testing.T) { // res := []byte("2 932079936fa4306fc308d67588178d17d823647c 1439818823587396305 200\nHTTP/1.1 200 
OK\r\nContent-Length: 10\r\nContent-Type: text/plain; charset=utf-8\r\n\r\n17d823647c") // rep := []byte("3 932079936fa4306fc308d67588178d17d823647c 1439818823587396305 200\nHTTP/1.1 200 OK\r\nContent-Length: 15\r\nContent-Type: text/plain; charset=utf-8\r\n\r\n932079936fa4306") // count := uint32(0) -// out := NewTestOutput(func(msg *Message) { +// out := test.NewTestOutput(func(msg *Message) { // if msg.Meta[0] == '1' && !bytes.Equal(payloadID(msg.Meta), payloadID(req)) { // token, _, _ := proto.PathParam(msg.Data, []byte("token")) // if !bytes.Equal(token, proto.Body(rep)) { @@ -128,12 +41,12 @@ func TestMiddlewareEarlyClose(t *testing.T) { //func TestMiddlewareWithPrettify(t *testing.T) { // Settings.PrettifyHTTP = true // quit := make(chan struct{}) -// in := NewTestInput() +// in := test.NewTestInput() // cmd, cancl := initCmd(echoSh, withDebug) // midd := initMiddleware(cmd, cancl, in, func(err error) {}) // var b1 = []byte("POST / HTTP/1.1\r\nHost: example.org\r\nTransfer-Encoding: chunked\r\n\r\n4\r\nWiki\r\n5\r\npedia\r\nE\r\n in\r\n\r\nchunks.\r\n0\r\n\r\n") // var b2 = []byte("POST / HTTP/1.1\r\nHost: example.org\r\nContent-Length: 25\r\n\r\nWikipedia in\r\n\r\nchunks.") -// out := NewTestOutput(func(msg *Message) { +// out := test.NewTestOutput(func(msg *Message) { // if !bytes.Equal(proto.Body(b2), proto.Body(msg.Data)) { // t.Errorf("expected %q body to equal %q body", b2, msg.Data) // } diff --git a/output_null.go b/pkg/null/output_null.go similarity index 72% rename from output_null.go rename to pkg/null/output_null.go index 9d123325b..692c5e82c 100644 --- a/output_null.go +++ b/pkg/null/output_null.go @@ -1,4 +1,6 @@ -package goreplay +package null + +import "github.com/buger/goreplay/pkg/plugin" // NullOutput used for debugging, prints nothing type NullOutput struct { @@ -10,7 +12,7 @@ func NewNullOutput() (o *NullOutput) { } // PluginWrite writes message to this plugin -func (o *NullOutput) PluginWrite(msg *Message) (int, error) { +func (o 
*NullOutput) PluginWrite(msg *plugin.Message) (int, error) { return len(msg.Data) + len(msg.Meta), nil } diff --git a/limiter.go b/pkg/plugin/limiter.go similarity index 71% rename from limiter.go rename to pkg/plugin/limiter.go index 7ee485479..ca75c2bd3 100644 --- a/limiter.go +++ b/pkg/plugin/limiter.go @@ -1,4 +1,4 @@ -package goreplay +package plugin import ( "fmt" @@ -11,9 +11,9 @@ import ( // Limiter is a wrapper for input or output plugin which adds rate limiting type Limiter struct { - plugin interface{} - limit int - isPercent bool + Plugin interface{} + Limit int + IsPercent bool currentRPS int currentTime int64 @@ -33,15 +33,15 @@ func parseLimitOptions(options string) (limit int, isPercent bool) { // NewLimiter constructor for Limiter, accepts plugin and options // `options` allow to sprcify relatve or absolute limiting -func NewLimiter(plugin interface{}, options string) PluginReadWriter { +func NewLimiter(plugin interface{}, options string) ReadWriter { l := new(Limiter) - l.limit, l.isPercent = parseLimitOptions(options) - l.plugin = plugin + l.Limit, l.IsPercent = parseLimitOptions(options) + l.Plugin = plugin l.currentTime = time.Now().UnixNano() // FileInput have its own rate limiting. Unlike other inputs we not just dropping requests, we can slow down or speed up request emittion. 
- if fi, ok := l.plugin.(*FileInput); ok && l.isPercent { - fi.speedFactor = float64(l.limit) / float64(100) + if fi, ok := l.Plugin.(Limited); ok && l.IsPercent { + fi.SetLimit(float64(l.Limit) / float64(100)) } return l @@ -49,12 +49,12 @@ func NewLimiter(plugin interface{}, options string) PluginReadWriter { func (l *Limiter) isLimited() bool { // File input have its own limiting algorithm - if _, ok := l.plugin.(*FileInput); ok && l.isPercent { + if _, ok := l.Plugin.(Limited); ok && l.IsPercent { return false } - if l.isPercent { - return l.limit <= rand.Intn(100) + if l.IsPercent { + return l.Limit <= rand.Intn(100) } if (time.Now().UnixNano() - l.currentTime) > time.Second.Nanoseconds() { @@ -62,7 +62,7 @@ func (l *Limiter) isLimited() bool { l.currentRPS = 0 } - if l.currentRPS >= l.limit { + if l.currentRPS >= l.Limit { return true } @@ -76,7 +76,7 @@ func (l *Limiter) PluginWrite(msg *Message) (n int, err error) { if l.isLimited() { return 0, nil } - if w, ok := l.plugin.(PluginWriter); ok { + if w, ok := l.Plugin.(Writer); ok { return w.PluginWrite(msg) } // avoid further writing @@ -85,7 +85,7 @@ func (l *Limiter) PluginWrite(msg *Message) (n int, err error) { // PluginRead reads message from this plugin func (l *Limiter) PluginRead() (msg *Message, err error) { - if r, ok := l.plugin.(PluginReader); ok { + if r, ok := l.Plugin.(Reader); ok { msg, err = r.PluginRead() } else { // avoid further reading @@ -100,12 +100,12 @@ func (l *Limiter) PluginRead() (msg *Message, err error) { } func (l *Limiter) String() string { - return fmt.Sprintf("Limiting %s to: %d (isPercent: %v)", l.plugin, l.limit, l.isPercent) + return fmt.Sprintf("Limiting %s to: %d (isPercent: %v)", l.Plugin, l.Limit, l.IsPercent) } // Close closes the resources. 
func (l *Limiter) Close() error { - if fi, ok := l.plugin.(io.Closer); ok { + if fi, ok := l.Plugin.(io.Closer); ok { fi.Close() } return nil diff --git a/pkg/plugin/limiter_test.go b/pkg/plugin/limiter_test.go new file mode 100644 index 000000000..939bc597f --- /dev/null +++ b/pkg/plugin/limiter_test.go @@ -0,0 +1,106 @@ +//go:build !race + +package plugin + +// func TestOutputLimiter(t *testing.T) { +// wg := new(sync.WaitGroup) + +// input := test.NewTestInput() +// output := NewLimiter(test.NewTestOutput(func(*plugin.Message) { +// wg.Done() +// }), "10") +// wg.Add(10) + +// plugins := &InOutPlugins{ +// Inputs: []Reader{input}, +// Outputs: []Writer{output}, +// } +// plugins.All = append(plugins.All, input, output) + +// emitter := NewEmitter() +// go emitter.Start(plugins, "") + +// for i := 0; i < 100; i++ { +// input.EmitGET() +// } + +// wg.Wait() +// emitter.Close() +// } + +// func TestInputLimiter(t *testing.T) { +// wg := new(sync.WaitGroup) + +// input := NewLimiter(test.NewTestInput(), "10") +// output := test.NewTestOutput(func(*Message) { +// wg.Done() +// }) +// wg.Add(10) + +// plugins := &InOutPlugins{ +// Inputs: []Reader{input}, +// Outputs: []Writer{output}, +// } +// plugins.All = append(plugins.All, input, output) + +// emitter := NewEmitter() +// go emitter.Start(plugins, Settings.Middleware) + +// for i := 0; i < 100; i++ { +// input.(*Limiter).plugin.(*TestInput).EmitGET() +// } + +// wg.Wait() +// emitter.Close() +// } + +// // Should limit all requests +// func TestPercentLimiter1(t *testing.T) { +// wg := new(sync.WaitGroup) + +// input := test.NewTestInput() +// output := NewLimiter(NewTestOutput(func(*Message) { +// wg.Done() +// }), "0%") + +// plugins := &InOutPlugins{ +// Inputs: []Reader{input}, +// Outputs: []Writer{output}, +// } +// plugins.All = append(plugins.All, input, output) + +// emitter := NewEmitter() +// go emitter.Start(plugins, Settings.Middleware) + +// for i := 0; i < 100; i++ { +// input.EmitGET() +// } + +// 
wg.Wait() +// } + +// // Should not limit at all +// func TestPercentLimiter2(t *testing.T) { +// wg := new(sync.WaitGroup) + +// input := test.NewTestInput() +// output := NewLimiter(NewTestOutput(func(*Message) { +// wg.Done() +// }), "100%") +// wg.Add(100) + +// plugins := &InOutPlugins{ +// Inputs: []Reader{input}, +// Outputs: []Writer{output}, +// } +// plugins.All = append(plugins.All, input, output) + +// emitter := NewEmitter() +// go emitter.Start(plugins, Settings.Middleware) + +// for i := 0; i < 100; i++ { +// input.EmitGET() +// } + +// wg.Wait() +// } diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go new file mode 100644 index 000000000..80b2945d5 --- /dev/null +++ b/pkg/plugin/plugin.go @@ -0,0 +1,104 @@ +package plugin + +import ( + "errors" + "reflect" + "strings" +) + +// ErrorStopped is the error returned when the go routines reading the input is stopped. +var ErrorStopped = errors.New("reading stopped") + +// Message represents data across plugins +type Message struct { + Meta []byte // metadata + Data []byte // actual data +} + +// Reader is an interface for input plugins +type Reader interface { + PluginRead() (msg *Message, err error) +} + +// Writer is an interface for output plugins +type Writer interface { + PluginWrite(msg *Message) (n int, err error) +} + +// Limited is an interface for plugins that support limiting +type Limited interface { + Limited() bool + SetLimit(float64) +} + +// ReadWriter is an interface for plugins that support reading and writing +type ReadWriter interface { + Reader + Writer +} + +// Response is a response from a plugin +type Response struct { + Payload []byte + UUID []byte + StartedAt int64 + RoundTripTime int64 +} + +// extractLimitOptions detects if plugin get called with limiter support +// Returns address and limit +func extractLimitOptions(options string) (string, string) { + split := strings.Split(options, "|") + + if len(split) > 1 { + return split[0], split[1] + } + + return split[0], "" +} 
+ +// InOutPlugins struct for holding references to plugins +type InOutPlugins struct { + Inputs []Reader + Outputs []Writer + All []interface{} +} + +// RegisterPlugin automatically detects type of plugin and initialize it +// +// See this article if curious about reflect stuff below: http://blog.burntsushi.net/type-parametric-functions-golang +func (plugins *InOutPlugins) RegisterPlugin(constructor interface{}, options ...interface{}) { + var path, limit string + vc := reflect.ValueOf(constructor) + + // Pre-processing options to make it work with reflect + vo := []reflect.Value{} + for _, oi := range options { + vo = append(vo, reflect.ValueOf(oi)) + } + + if len(vo) > 0 { + // Removing limit options from path + path, limit = extractLimitOptions(vo[0].String()) + + // Writing value back without limiter "|" options + vo[0] = reflect.ValueOf(path) + } + + // Calling our constructor with list of given options + p := vc.Call(vo)[0].Interface() + + if limit != "" { + p = NewLimiter(p, limit) + } + + // Some of the output can be Readers as well because return responses + if r, ok := p.(Reader); ok { + plugins.Inputs = append(plugins.Inputs, r) + } + + if w, ok := p.(Writer); ok { + plugins.Outputs = append(plugins.Outputs, w) + } + plugins.All = append(plugins.All, p) +} diff --git a/pkg/prettify/http_prettifier.go b/pkg/prettify/http_prettifier.go new file mode 100644 index 000000000..93e0e156f --- /dev/null +++ b/pkg/prettify/http_prettifier.go @@ -0,0 +1,69 @@ +package prettify + +import ( + "bytes" + "compress/gzip" + "io/ioutil" + "net/http/httputil" + "strconv" + + "github.com/buger/goreplay/pkg/http_proto" + + "github.com/rs/zerolog/log" +) + +// PrettifyHTTP prettifies HTTP payload +func PrettifyHTTP(p []byte) []byte { + + tEnc := bytes.Equal(http_proto.Header(p, []byte("Transfer-Encoding")), []byte("chunked")) + cEnc := bytes.Equal(http_proto.Header(p, []byte("Content-Encoding")), []byte("gzip")) + + if !(tEnc || cEnc) { + return p + } + + headersPos := 
http_proto.MIMEHeadersEndPos(p) + + if headersPos < 5 || headersPos > len(p) { + return p + } + + headers := p[:headersPos] + content := p[headersPos:] + + if tEnc { + buf := bytes.NewReader(content) + r := httputil.NewChunkedReader(buf) + content, _ = ioutil.ReadAll(r) + + headers = http_proto.DeleteHeader(headers, []byte("Transfer-Encoding")) + + newLen := strconv.Itoa(len(content)) + headers = http_proto.SetHeader(headers, []byte("Content-Length"), []byte(newLen)) + } + + if cEnc { + buf := bytes.NewReader(content) + g, err := gzip.NewReader(buf) + + if err != nil { + log.Error().Err(err).Msg("GZIP encoding error") + return []byte{} + } + + content, err = ioutil.ReadAll(g) + if err != nil { + log.Error().Err(err).Msg("read error") + return p + } + + headers = http_proto.DeleteHeader(headers, []byte("Content-Encoding")) + + newLen := strconv.Itoa(len(content)) + headers = http_proto.SetHeader(headers, []byte("Content-Length"), []byte(newLen)) + } + + newPayload := append(headers, content...) + + return newPayload +} diff --git a/http_prettifier_test.go b/pkg/prettify/http_prettifier_test.go similarity index 86% rename from http_prettifier_test.go rename to pkg/prettify/http_prettifier_test.go index 8bd30b751..621ac378d 100644 --- a/http_prettifier_test.go +++ b/pkg/prettify/http_prettifier_test.go @@ -1,11 +1,12 @@ -package goreplay +package prettify import ( "bytes" "compress/gzip" - "github.com/buger/goreplay/proto" "strconv" "testing" + + proto "github.com/buger/goreplay/pkg/http_proto" ) func TestHTTPPrettifierGzip(t *testing.T) { @@ -19,7 +20,7 @@ func TestHTTPPrettifierGzip(t *testing.T) { payload := []byte("HTTP/1.1 200 OK\r\nContent-Length: " + size + "\r\nContent-Encoding: gzip\r\n\r\n") payload = append(payload, b.Bytes()...) 
- newPayload := prettifyHTTP(payload) + newPayload := PrettifyHTTP(payload) if string(newPayload) != "HTTP/1.1 200 OK\r\nContent-Length: 4\r\n\r\ntest" { t.Errorf("Payload not match %q", string(newPayload)) @@ -29,7 +30,7 @@ func TestHTTPPrettifierGzip(t *testing.T) { func TestHTTPPrettifierChunked(t *testing.T) { payload := []byte("POST / HTTP/1.1\r\nHost: www.w3.org\r\nTransfer-Encoding: chunked\r\n\r\n4\r\nWiki\r\n5\r\npedia\r\ne\r\n in\r\n\r\nchunks.\r\n0\r\n\r\n") - payload = prettifyHTTP(payload) + payload = PrettifyHTTP(payload) if string(proto.Header(payload, []byte("Content-Length"))) != "23" { t.Errorf("payload should have content length of 23") } diff --git a/pro.go b/pkg/pro/pro.go similarity index 58% rename from pro.go rename to pkg/pro/pro.go index b06b1f9ff..10d9b29bb 100644 --- a/pro.go +++ b/pkg/pro/pro.go @@ -1,5 +1,10 @@ -package goreplay +package pro // PRO this value indicates if goreplay is running in PRO mode.. // it must not be modified explicitly in production var PRO = false + +// Enable enables PRO mode. Can be used ony in tests. 
+func Enable() { + PRO = true +} diff --git a/protocol.go b/pkg/proto/proto.go similarity index 50% rename from protocol.go rename to pkg/proto/proto.go index fc9b9052e..a50171089 100644 --- a/protocol.go +++ b/pkg/proto/proto.go @@ -1,4 +1,4 @@ -package goreplay +package proto import ( "bytes" @@ -14,7 +14,8 @@ const ( ReplayedResponsePayload = '3' ) -func randByte(len int) []byte { +// RandByte generates a random byte array of length len +func RandByte(len int) []byte { b := make([]byte, len/2) rand.Read(b) @@ -24,20 +25,26 @@ func randByte(len int) []byte { return h } -func uuid() []byte { - return randByte(24) +// UUID generates a random 24 byte string +func UUID() []byte { + return RandByte(24) } -var payloadSeparator = "\nšŸµšŸ™ˆšŸ™‰\n" +// PayloadSeparator is used to separate payloads +var PayloadSeparator = "\nšŸµšŸ™ˆšŸ™‰\n" -func payloadScanner(data []byte, atEOF bool) (advance int, token []byte, err error) { +// PayloadSeparatorAsBytes is used to separate payloads +var PayloadSeparatorAsBytes = []byte(PayloadSeparator) + +// PayloadScanner is used to split payloads +func PayloadScanner(data []byte, atEOF bool) (advance int, token []byte, err error) { if atEOF && len(data) == 0 { return 0, nil, nil } - if i := bytes.Index(data, []byte(payloadSeparator)); i >= 0 { + if i := bytes.Index(data, []byte(PayloadSeparator)); i >= 0 { // We have a full newline-terminated line. 
- return i + len([]byte(payloadSeparator)), data[0:i], nil + return i + len([]byte(PayloadSeparator)), data[0:i], nil } if atEOF { @@ -47,18 +54,20 @@ func payloadScanner(data []byte, atEOF bool) (advance int, token []byte, err err } // Timing is request start or round-trip time, depending on payloadType -func payloadHeader(payloadType byte, uuid []byte, timing int64, latency int64) (header []byte) { +func PayloadHeader(payloadType byte, uuid []byte, timing int64, latency int64) (header []byte) { //Example: // 3 f45590522cd1838b4a0d5c5aab80b77929dea3b3 13923489726487326 1231\n return []byte(fmt.Sprintf("%c %s %d %d\n", payloadType, uuid, timing, latency)) } -func payloadBody(payload []byte) []byte { +// PayloadBody returns the body of the payload +func PayloadBody(payload []byte) []byte { headerSize := bytes.IndexByte(payload, '\n') return payload[headerSize+1:] } -func payloadMeta(payload []byte) [][]byte { +// PayloadMeta returns the meta data of the payload +func PayloadMeta(payload []byte) [][]byte { headerSize := bytes.IndexByte(payload, '\n') if headerSize < 0 { return nil @@ -66,7 +75,8 @@ func payloadMeta(payload []byte) [][]byte { return bytes.Split(payload[:headerSize], []byte{' '}) } -func payloadMetaWithBody(payload []byte) (meta, body []byte) { +// PayloadMetaWithBody returns the meta data and body of the payload +func PayloadMetaWithBody(payload []byte) (meta, body []byte) { if i := bytes.IndexByte(payload, '\n'); i > 0 && len(payload) > i+1 { meta = payload[:i+1] body = payload[i+1:] @@ -76,8 +86,9 @@ func payloadMetaWithBody(payload []byte) (meta, body []byte) { return nil, payload } -func payloadID(payload []byte) (id []byte) { - meta := payloadMeta(payload) +// PayloadID returns the ID of the payload +func PayloadID(payload []byte) (id []byte) { + meta := PayloadMeta(payload) if len(meta) < 2 { return @@ -85,10 +96,12 @@ func payloadID(payload []byte) (id []byte) { return meta[1] } -func isOriginPayload(payload []byte) bool { +// IsOriginPayload 
returns true if the payload is a request or response payload +func IsOriginPayload(payload []byte) bool { return payload[0] == RequestPayload || payload[0] == ResponsePayload } -func isRequestPayload(payload []byte) bool { +// IsRequestPayload returns true if the payload is a request payload +func IsRequestPayload(payload []byte) bool { return payload[0] == RequestPayload } diff --git a/input_raw.go b/pkg/raw/input_raw.go similarity index 78% rename from input_raw.go rename to pkg/raw/input_raw.go index a3de66f0c..311e108c4 100644 --- a/input_raw.go +++ b/pkg/raw/input_raw.go @@ -1,16 +1,20 @@ -package goreplay +package raw import ( "context" "fmt" - "github.com/buger/goreplay/internal/capture" - "github.com/buger/goreplay/internal/tcp" - "github.com/buger/goreplay/proto" - "log" "net" "strconv" "strings" "sync" + + "github.com/buger/goreplay/internal/capture" + "github.com/buger/goreplay/internal/tcp" + "github.com/buger/goreplay/pkg/http_proto" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + + "github.com/rs/zerolog/log" ) // RAWInputConfig represents configuration that can be applied on raw input @@ -49,7 +53,7 @@ func NewRAWInput(address string, config RAWInputConfig) (i *RAWInput) { host = address[:portIndex] _ports = address[portIndex+1:] } else { - log.Fatalf("input-raw: error while parsing address: %s", err) + log.Fatal().Err(err).Msg("input-raw: error while parsing address") } } @@ -64,7 +68,7 @@ func NewRAWInput(address string, config RAWInputConfig) (i *RAWInput) { for _, portStr := range portsStr { port, err := strconv.Atoi(strings.TrimSpace(portStr)) if err != nil { - log.Fatalf("parsing port error: %v", err) + log.Fatal().Err(err).Msg("input-raw: error while parsing port") } ports = append(ports, uint16(port)) @@ -80,32 +84,32 @@ func NewRAWInput(address string, config RAWInputConfig) (i *RAWInput) { } // PluginRead reads meassage from this plugin -func (i *RAWInput) PluginRead() (*Message, error) { +func (i 
*RAWInput) PluginRead() (*plugin.Message, error) { var msgTCP *tcp.Message - var msg Message + var msg plugin.Message select { case <-i.quit: - return nil, ErrorStopped + return nil, plugin.ErrorStopped case msgTCP = <-i.listener.Messages(): msg.Data = msgTCP.Data() } - var msgType byte = ResponsePayload + var msgType byte = proto.ResponsePayload if msgTCP.Direction == tcp.DirIncoming { - msgType = RequestPayload + msgType = proto.RequestPayload if i.config.RealIPHeader != "" { - msg.Data = proto.SetHeader(msg.Data, []byte(i.config.RealIPHeader), []byte(msgTCP.SrcAddr)) + msg.Data = http_proto.SetHeader(msg.Data, []byte(i.config.RealIPHeader), []byte(msgTCP.SrcAddr)) } } - msg.Meta = payloadHeader(msgType, msgTCP.UUID(), msgTCP.Start.UnixNano(), msgTCP.End.UnixNano()-msgTCP.Start.UnixNano()) + msg.Meta = proto.PayloadHeader(msgType, msgTCP.UUID(), msgTCP.Start.UnixNano(), msgTCP.End.UnixNano()-msgTCP.Start.UnixNano()) // to be removed.... if msgTCP.Truncated { - Debug(2, "[INPUT-RAW] message truncated, increase copy-buffer-size") + log.Debug().Msg("message truncated, increase copy-buffer-size") } // to be removed... 
if msgTCP.TimedOut { - Debug(2, "[INPUT-RAW] message timeout reached, increase input-raw-expire") + log.Debug().Msg("message timeout reached, increase input-raw-expire") } if i.config.Stats { stat := msgTCP.Stats @@ -119,19 +123,21 @@ func (i *RAWInput) listen(address string) { var err error i.listener, err = capture.NewListener(i.host, i.ports, i.config) if err != nil { - log.Fatal(err) + log.Fatal().Err(err).Msg("error while creating listener") } err = i.listener.Activate() if err != nil { - log.Fatal(err) + log.Fatal().Err(err).Msg("error while activating listener") } var ctx context.Context ctx, i.cancelListener = context.WithCancel(context.Background()) errCh := i.listener.ListenBackground(ctx) <-i.listener.Reading - Debug(1, i) + + log.Info().Msg(i.String()) + go func() { <-errCh // the listener closed voluntarily i.Close() diff --git a/input_raw_test.go b/pkg/raw/input_raw_test.go similarity index 82% rename from input_raw_test.go rename to pkg/raw/input_raw_test.go index cf5e66737..5f9bd4150 100644 --- a/input_raw_test.go +++ b/pkg/raw/input_raw_test.go @@ -1,10 +1,7 @@ -package goreplay +package raw import ( "bytes" - "github.com/buger/goreplay/internal/capture" - "github.com/buger/goreplay/internal/tcp" - "github.com/buger/goreplay/proto" "io/ioutil" "net" "net/http" @@ -16,6 +13,14 @@ import ( "sync/atomic" "testing" "time" + + "github.com/buger/goreplay/internal/capture" + "github.com/buger/goreplay/internal/tcp" + "github.com/buger/goreplay/pkg/emitter" + gor_http "github.com/buger/goreplay/pkg/http" + gor_proto "github.com/buger/goreplay/pkg/http_proto" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/test" ) const testRawExpire = time.Millisecond * 200 @@ -49,9 +54,9 @@ func TestRAWInputIPv4(t *testing.T) { } input := NewRAWInput(listener.Addr().String(), conf) - output := NewTestOutput(func(msg *Message) { + output := test.NewTestOutput(func(msg *plugin.Message) { if msg.Meta[0] == '1' { - if len(proto.Header(msg.Data, 
[]byte("X-Real-IP"))) == 0 { + if len(gor_proto.Header(msg.Data, []byte("X-Real-IP"))) == 0 { t.Error("Should have X-Real-IP header") } reqCounter++ @@ -62,16 +67,16 @@ func TestRAWInputIPv4(t *testing.T) { wg.Done() }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) addr := "http://127.0.0.1:" + port - emitter := NewEmitter() + emitter := emitter.New() defer emitter.Close() - go emitter.Start(plugins, Settings.Middleware) + go emitter.Start(plugins) // time.Sleep(time.Second) for i := 0; i < 1; i++ { @@ -118,7 +123,7 @@ func TestRAWInputNoKeepAlive(t *testing.T) { } input := NewRAWInput(":"+port, conf) var respCounter, reqCounter int64 - output := NewTestOutput(func(msg *Message) { + output := test.NewTestOutput(func(msg *plugin.Message) { if msg.Meta[0] == '1' { atomic.AddInt64(&reqCounter, 1) wg.Done() @@ -128,16 +133,16 @@ func TestRAWInputNoKeepAlive(t *testing.T) { } }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) addr := "http://127.0.0.1:" + port - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) for i := 0; i < 10; i++ { // request + response @@ -184,7 +189,7 @@ func TestRAWInputIPv6(t *testing.T) { } input := NewRAWInput(originAddr, conf) - output := NewTestOutput(func(msg *Message) { + output := test.NewTestOutput(func(msg *plugin.Message) { if msg.Meta[0] == '1' { atomic.AddInt64(&reqCounter, 1) } else { @@ -193,14 +198,14 @@ func TestRAWInputIPv6(t *testing.T) { wg.Done() }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + 
plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } - emitter := NewEmitter() + emitter := emitter.New() addr := "http://" + originAddr - go emitter.Start(plugins, Settings.Middleware) + go emitter.Start(plugins) for i := 0; i < 10; i++ { // request + response wg.Add(2) @@ -255,17 +260,17 @@ func TestInputRAWChunkedEncoding(t *testing.T) { })) defer replay.Close() - httpOutput := NewHTTPOutput(replay.URL, &HTTPOutputConfig{}) + httpOutput := gor_http.NewHTTPOutput(replay.URL, &gor_http.HTTPOutputConfig{}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{httpOutput}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{httpOutput}, } plugins.All = append(plugins.All, input, httpOutput) - emitter := NewEmitter() + emitter := emitter.New() defer emitter.Close() - go emitter.Start(plugins, Settings.Middleware) + go emitter.Start(plugins) wg.Add(2) curl := exec.Command("curl", "http://"+originAddr, "--header", "Transfer-Encoding: chunked", "--header", "Expect:", "--data-binary", "@README.md") @@ -321,7 +326,7 @@ func BenchmarkRAWInputWithReplay(b *testing.B) { } input := NewRAWInput(originAddr, conf) - testOutput := NewTestOutput(func(msg *Message) { + testOutput := test.NewTestOutput(func(msg *plugin.Message) { if msg.Meta[0] == '1' { reqCounter++ } else { @@ -329,15 +334,15 @@ func BenchmarkRAWInputWithReplay(b *testing.B) { } wg.Done() }) - httpOutput := NewHTTPOutput("http://"+replayAddr, &HTTPOutputConfig{}) + httpOutput := gor_http.NewHTTPOutput("http://"+replayAddr, &gor_http.HTTPOutputConfig{}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{testOutput, httpOutput}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{testOutput, httpOutput}, } - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go 
emitter.Start(plugins) addr := "http://" + originAddr b.ResetTimer() for i := 0; i < b.N; i++ { diff --git a/settings.go b/pkg/settings/settings.go similarity index 92% rename from settings.go rename to pkg/settings/settings.go index 884803e6a..b32a4681d 100644 --- a/settings.go +++ b/pkg/settings/settings.go @@ -1,13 +1,22 @@ -package goreplay +package settings import ( "flag" "fmt" - "github.com/buger/goreplay/internal/size" "os" "strconv" - "sync" "time" + + "github.com/buger/goreplay/internal/size" + "github.com/buger/goreplay/pkg/binary" + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/file" + "github.com/buger/goreplay/pkg/http" + "github.com/buger/goreplay/pkg/http_modifier" + "github.com/buger/goreplay/pkg/kafka" + "github.com/buger/goreplay/pkg/raw" + "github.com/buger/goreplay/pkg/tcp" + "github.com/buger/goreplay/pkg/ws" ) // DEMO indicates that goreplay is running in demo mode @@ -65,9 +74,7 @@ type AppSettings struct { Stats bool `json:"stats"` ExitAfter time.Duration `json:"exit-after"` - SplitOutput bool `json:"split-output"` - RecognizeTCPSessions bool `json:"recognize-tcp-sessions"` - Pprof string `json:"http-pprof"` + Pprof string `json:"http-pprof"` CopyBufferSize size.Size `json:"copy-buffer-size"` @@ -77,13 +84,13 @@ type AppSettings struct { OutputNull bool `json:"output-null"` InputTCP []string `json:"input-tcp"` - InputTCPConfig TCPInputConfig + InputTCPConfig tcp.TCPInputConfig OutputTCP []string `json:"output-tcp"` - OutputTCPConfig TCPOutputConfig + OutputTCPConfig tcp.TCPOutputConfig OutputTCPStats bool `json:"output-tcp-stats"` OutputWebSocket []string `json:"output-ws"` - OutputWebSocketConfig WebSocketOutputConfig + OutputWebSocketConfig ws.WebSocketOutputConfig OutputWebSocketStats bool `json:"output-ws-stats"` InputFile []string `json:"input-file"` @@ -92,32 +99,35 @@ type AppSettings struct { InputFileDryRun bool `json:"input-file-dry-run"` InputFileMaxWait time.Duration `json:"input-file-max-wait"` 
OutputFile []string `json:"output-file"` - OutputFileConfig FileOutputConfig + OutputFileConfig file.FileOutputConfig InputRAW []string `json:"input_raw"` - InputRAWConfig RAWInputConfig + InputRAWConfig raw.RAWInputConfig Middleware string `json:"middleware"` - InputHTTP []string - OutputHTTP []string `json:"output-http"` - PrettifyHTTP bool `json:"prettify-http"` + InputHTTP []string + OutputHTTP []string `json:"output-http"` - OutputHTTPConfig HTTPOutputConfig + OutputHTTPConfig http.HTTPOutputConfig + EmitterConfig emitter.Config OutputBinary []string `json:"output-binary"` - OutputBinaryConfig BinaryOutputConfig + OutputBinaryConfig binary.BinaryOutputConfig - ModifierConfig HTTPModifierConfig + ModifierConfig http_modifier.HTTPModifierConfig - InputKafkaConfig InputKafkaConfig - OutputKafkaConfig OutputKafkaConfig - KafkaTLSConfig KafkaTLSConfig + InputKafkaConfig kafka.InputKafkaConfig + OutputKafkaConfig kafka.OutputKafkaConfig + KafkaTLSConfig kafka.KafkaTLSConfig } // Settings holds Gor configuration var Settings AppSettings +// VERSION the current version of goreplay +var VERSION = "1.3.0" + func usage() { fmt.Printf("Gor is a simple http traffic replication tool written in Go. Its main goal is to replay traffic from production servers to staging and dev environments.\nProject page: https://github.com/buger/gor\nAuthor: leonsbox@gmail.com\nCurrent Version: v%s\n\n", VERSION) flag.PrintDefaults() @@ -136,8 +146,8 @@ func init() { Settings.ExitAfter = 5 * time.Minute } - flag.BoolVar(&Settings.SplitOutput, "split-output", false, "By default each output gets same traffic. If set to `true` it splits traffic equally among all outputs.") - flag.BoolVar(&Settings.RecognizeTCPSessions, "recognize-tcp-sessions", false, "[PRO] If turned on http output will create separate worker for each TCP session. Splitting output will session based as well.") + flag.BoolVar(&Settings.EmitterConfig.SplitOutput, "split-output", false, "By default each output gets same traffic. 
If set to `true` it splits traffic equally among all outputs.") + flag.BoolVar(&Settings.EmitterConfig.RecognizeTCPSessions, "recognize-tcp-sessions", false, "[PRO] If turned on http output will create separate worker for each TCP session. Splitting output will session based as well.") flag.Var(&MultiOption{&Settings.InputDummy}, "input-dummy", "Used for testing outputs. Emits 'Get /' request every 1s") flag.BoolVar(&Settings.OutputStdout, "output-stdout", false, "Used for testing inputs. Just prints to console data coming from inputs.") @@ -176,7 +186,7 @@ func init() { flag.StringVar(&Settings.OutputFileConfig.BufferPath, "output-file-buffer", "/tmp", "The path for temporary storing current buffer: \n\tgor --input-raw :80 --output-file s3://mybucket/logs/%Y-%m-%d.gz --output-file-buffer /mnt/logs") - flag.BoolVar(&Settings.PrettifyHTTP, "prettify-http", false, "If enabled, will automatically decode requests and responses with: Content-Encoding: gzip and Transfer-Encoding: chunked. Useful for debugging, in conjunction with --output-stdout") + flag.BoolVar(&Settings.EmitterConfig.PrettifyHTTP, "prettify-http", false, "If enabled, will automatically decode requests and responses with: Content-Encoding: gzip and Transfer-Encoding: chunked. Useful for debugging, in conjunction with --output-stdout") flag.Var(&Settings.CopyBufferSize, "copy-buffer-size", "Set the buffer size for an individual request (default 5MB)") @@ -287,18 +297,7 @@ func CheckSettings() { } } -var previousDebugTime = time.Now() -var debugMutex sync.Mutex - -// Debug take an effect only if --verbose greater than 0 is specified -func Debug(level int, args ...interface{}) { - if Settings.Verbose >= level { - debugMutex.Lock() - defer debugMutex.Unlock() - now := time.Now() - diff := now.Sub(previousDebugTime) - previousDebugTime = now - fmt.Fprintf(os.Stderr, "[DEBUG][elapsed %s]: ", diff) - fmt.Fprintln(os.Stderr, args...) 
- } +// Set sets the global settings +func Set(s AppSettings) { + Settings = s } diff --git a/settings_test.go b/pkg/settings/settings_test.go similarity index 90% rename from settings_test.go rename to pkg/settings/settings_test.go index 7d797fb1c..186b8b666 100644 --- a/settings_test.go +++ b/pkg/settings/settings_test.go @@ -1,4 +1,4 @@ -package goreplay +package settings import ( "encoding/json" diff --git a/pkg/stats/gor_stat.go b/pkg/stats/gor_stat.go new file mode 100644 index 000000000..139f41838 --- /dev/null +++ b/pkg/stats/gor_stat.go @@ -0,0 +1,66 @@ +package stats + +import ( + "runtime" + "strconv" + "time" + + "github.com/rs/zerolog/log" +) + +// Stats is a struct that holds the stats for a given statName +type Stats struct { + statName string + rateMs int + latest int + mean int + max int + count int +} + +// New creates a new Stats struct +func New(statName string, rateMs int) (s *Stats) { + s = new(Stats) + s.statName = statName + s.rateMs = rateMs + s.latest = 0 + s.mean = 0 + s.max = 0 + s.count = 0 + + go s.reportStats() + + return +} + +func (s *Stats) Write(latest int) { + if latest > s.max { + s.max = latest + } + if latest != 0 { + s.mean = ((s.mean * s.count) + latest) / (s.count + 1) + } + s.latest = latest + s.count = s.count + 1 +} + +// Reset resets the stats +func (s *Stats) Reset() { + s.latest = 0 + s.max = 0 + s.mean = 0 + s.count = 0 +} + +func (s *Stats) String() string { + return s.statName + ":" + strconv.Itoa(s.latest) + "," + strconv.Itoa(s.mean) + "," + strconv.Itoa(s.max) + "," + strconv.Itoa(s.count) + "," + strconv.Itoa(s.count/(s.rateMs/1000.0)) + "," + strconv.Itoa(runtime.NumGoroutine()) +} + +func (s *Stats) reportStats() { + log.Info().Msg(s.statName + ":latest,mean,max,count,count/second,gcount") + for { + log.Info().Msg(s.String()) + s.Reset() + time.Sleep(time.Duration(s.rateMs) * time.Millisecond) + } +} diff --git a/input_tcp.go b/pkg/tcp/input_tcp.go similarity index 76% rename from input_tcp.go rename to 
pkg/tcp/input_tcp.go index e31217cc4..bc237ab8b 100644 --- a/input_tcp.go +++ b/pkg/tcp/input_tcp.go @@ -1,18 +1,21 @@ -package goreplay +package tcp import ( "bufio" "bytes" "crypto/tls" - "fmt" "io" - "log" "net" + + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + + "github.com/rs/zerolog/log" ) // TCPInput used for internal communication type TCPInput struct { - data chan *Message + data chan *plugin.Message listener net.Listener address string config *TCPInputConfig @@ -29,7 +32,7 @@ type TCPInputConfig struct { // NewTCPInput constructor for TCPInput, accepts address with port func NewTCPInput(address string, config *TCPInputConfig) (i *TCPInput) { i = new(TCPInput) - i.data = make(chan *Message, 1000) + i.data = make(chan *plugin.Message, 1000) i.address = address i.config = config i.stop = make(chan bool) @@ -40,10 +43,10 @@ func NewTCPInput(address string, config *TCPInputConfig) (i *TCPInput) { } // PluginRead returns data and details read from plugin -func (i *TCPInput) PluginRead() (msg *Message, err error) { +func (i *TCPInput) PluginRead() (msg *plugin.Message, err error) { select { case <-i.stop: - return nil, ErrorStopped + return nil, plugin.ErrorStopped case msg = <-i.data: return msg, nil } @@ -61,19 +64,19 @@ func (i *TCPInput) listen(address string) { if i.config.Secure { cer, err := tls.LoadX509KeyPair(i.config.CertificatePath, i.config.KeyPath) if err != nil { - log.Fatalln("error while loading --input-tcp TLS certificate:", err) + log.Fatal().Err(err).Msg("error while loading --input-tcp TLS certificate") } config := &tls.Config{Certificates: []tls.Certificate{cer}} listener, err := tls.Listen("tcp", address, config) if err != nil { - log.Fatalln("[INPUT-TCP] failed to start INPUT-TCP listener:", err) + log.Fatal().Err(err).Msg("failed to start INPUT-TCP listener") } i.listener = listener } else { listener, err := net.Listen("tcp", address) if err != nil { - log.Fatalln("failed to start INPUT-TCP listener:", 
err) + log.Fatal().Err(err).Msg("failed to start INPUT-TCP listener") } i.listener = listener } @@ -88,15 +91,13 @@ func (i *TCPInput) listen(address string) { continue } if operr, ok := err.(*net.OpError); ok && operr.Err.Error() != "use of closed network connection" { - Debug(0, fmt.Sprintf("[INPUT-TCP] listener closed, err: %q", err)) + log.Error().Err(err).Msg("failed to accept connection") } break } }() } -var payloadSeparatorAsBytes = []byte(payloadSeparator) - func (i *TCPInput) handleConnection(conn net.Conn) { defer conn.Close() @@ -110,16 +111,16 @@ func (i *TCPInput) handleConnection(conn net.Conn) { continue } if err != io.EOF { - Debug(0, fmt.Sprintf("[INPUT-TCP] connection error: %q", err)) + log.Err(err).Msg("failed to read from connection") } break } - if bytes.Equal(payloadSeparatorAsBytes[1:], line) { + if bytes.Equal(proto.PayloadSeparatorAsBytes[1:], line) { // unread the '\n' before monkeys buffer.UnreadByte() - var msg Message - msg.Meta, msg.Data = payloadMetaWithBody(buffer.Bytes()) + var msg plugin.Message + msg.Meta, msg.Data = proto.PayloadMetaWithBody(buffer.Bytes()) i.data <- &msg buffer.Reset() } else { diff --git a/input_tcp_test.go b/pkg/tcp/input_tcp_test.go similarity index 79% rename from input_tcp_test.go rename to pkg/tcp/input_tcp_test.go index 543b0d08b..3751f19b8 100644 --- a/input_tcp_test.go +++ b/pkg/tcp/input_tcp_test.go @@ -1,4 +1,4 @@ -package goreplay +package tcp import ( "bytes" @@ -15,24 +15,29 @@ import ( "sync" "testing" "time" + + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/test" ) func TestTCPInput(t *testing.T) { wg := new(sync.WaitGroup) input := NewTCPInput("127.0.0.1:0", &TCPInputConfig{}) - output := NewTestOutput(func(*Message) { + output := test.NewTestOutput(func(*plugin.Message) { wg.Done() }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + 
plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) tcpAddr, err := net.ResolveTCPAddr("tcp", input.listener.Addr().String()) @@ -50,7 +55,7 @@ func TestTCPInput(t *testing.T) { for i := 0; i < 100; i++ { wg.Add(1) if _, err = conn.Write(msg); err == nil { - _, err = conn.Write(payloadSeparatorAsBytes) + _, err = conn.Write(proto.PayloadSeparatorAsBytes) } if err != nil { t.Error(err) @@ -106,18 +111,18 @@ func TestTCPInputSecure(t *testing.T) { CertificatePath: serverCertPemFile.Name(), KeyPath: serverPrivPemFile.Name(), }) - output := NewTestOutput(func(*Message) { + output := test.NewTestOutput(func(*plugin.Message) { wg.Done() }) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } plugins.All = append(plugins.All, input, output) - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) conf := &tls.Config{ InsecureSkipVerify: true, @@ -134,7 +139,7 @@ func TestTCPInputSecure(t *testing.T) { for i := 0; i < 100; i++ { wg.Add(1) conn.Write(msg) - conn.Write([]byte(payloadSeparator)) + conn.Write([]byte(proto.PayloadSeparator)) } wg.Wait() diff --git a/output_tcp.go b/pkg/tcp/output_tcp.go similarity index 69% rename from output_tcp.go rename to pkg/tcp/output_tcp.go index 2a3680cf1..993ab8b62 100644 --- a/output_tcp.go +++ b/pkg/tcp/output_tcp.go @@ -1,4 +1,4 @@ -package goreplay +package tcp import ( "context" @@ -7,16 +7,23 @@ import ( "hash/fnv" "net" "time" + + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/stats" + + "github.com/rs/zerolog/log" ) 
// TCPOutput used for sending raw tcp payloads // Currently used for internal communication between listener and replay server // Can be used for transferring binary payloads like protocol buffers type TCPOutput struct { - address string - limit int - buf []chan *Message - bufStats *GorStat + address string + limit int + + buf []chan *plugin.Message + bufStats *stats.Stats config *TCPOutputConfig workerIndex uint32 @@ -29,24 +36,25 @@ type TCPOutputConfig struct { Sticky bool `json:"output-tcp-sticky"` SkipVerify bool `json:"output-tcp-skip-verify"` Workers int `json:"output-tcp-workers"` + Stats bool `json:"output-tcp-stats"` } // NewTCPOutput constructor for TCPOutput // Initialize X workers which hold keep-alive connection -func NewTCPOutput(address string, config *TCPOutputConfig) PluginWriter { +func NewTCPOutput(address string, config *TCPOutputConfig) plugin.Writer { o := new(TCPOutput) o.address = address o.config = config - if Settings.OutputTCPStats { - o.bufStats = NewGorStat("output_tcp", 5000) + if config.Stats { + o.bufStats = stats.New("output_tcp", 5000) } // create X buffers and send the buffer index to the worker - o.buf = make([]chan *Message, o.config.Workers) + o.buf = make([]chan *plugin.Message, o.config.Workers) for i := 0; i < o.config.Workers; i++ { - o.buf[i] = make(chan *Message, 100) + o.buf[i] = make(chan *plugin.Message, 100) go o.worker(i) } @@ -65,7 +73,7 @@ func (o *TCPOutput) worker(bufferIndex int) { break } - Debug(1, fmt.Sprintf("Can't connect to aggregator instance, reconnecting in 1 second. Retries:%d", retries)) + log.Error().Msgf("Can't connect to aggregator instance, reconnecting in 1 second. 
Retries:%d", retries) time.Sleep(1 * time.Second) conn, err = o.connect(o.address) @@ -73,7 +81,7 @@ func (o *TCPOutput) worker(bufferIndex int) { } if retries > 0 { - Debug(2, fmt.Sprintf("Connected to aggregator instance after %d retries", retries)) + log.Info().Msgf("Connected to aggregator instance after %d retries", retries) } defer conn.Close() @@ -82,12 +90,12 @@ func (o *TCPOutput) worker(bufferIndex int) { msg := <-o.buf[bufferIndex] if _, err = conn.Write(msg.Meta); err == nil { if _, err = conn.Write(msg.Data); err == nil { - _, err = conn.Write(payloadSeparatorAsBytes) + _, err = conn.Write(proto.PayloadSeparatorAsBytes) } } if err != nil { - Debug(2, "INFO: TCP output connection closed, reconnecting") + log.Info().Err(err).Msg("INFO: TCP output connection closed, reconnecting") o.buf[bufferIndex] <- msg go o.worker(bufferIndex) break @@ -95,27 +103,27 @@ func (o *TCPOutput) worker(bufferIndex int) { } } -func (o *TCPOutput) getBufferIndex(msg *Message) int { +func (o *TCPOutput) getBufferIndex(msg *plugin.Message) int { if !o.config.Sticky { o.workerIndex++ return int(o.workerIndex) % o.config.Workers } hasher := fnv.New32a() - hasher.Write(payloadID(msg.Meta)) + hasher.Write(proto.PayloadID(msg.Meta)) return int(hasher.Sum32()) % o.config.Workers } // PluginWrite writes message to this plugin -func (o *TCPOutput) PluginWrite(msg *Message) (n int, err error) { - if !isOriginPayload(msg.Meta) { +func (o *TCPOutput) PluginWrite(msg *plugin.Message) (n int, err error) { + if !proto.IsOriginPayload(msg.Meta) { return len(msg.Data), nil } bufferIndex := o.getBufferIndex(msg) o.buf[bufferIndex] <- msg - if Settings.OutputTCPStats { + if o.config.Stats { o.bufStats.Write(len(o.buf[bufferIndex])) } diff --git a/output_tcp_test.go b/pkg/tcp/output_tcp_test.go similarity index 76% rename from output_tcp_test.go rename to pkg/tcp/output_tcp_test.go index 8a860b164..7ccf4b270 100644 --- a/output_tcp_test.go +++ b/pkg/tcp/output_tcp_test.go @@ -1,4 +1,4 @@ -package 
goreplay +package tcp import ( "bufio" @@ -7,6 +7,11 @@ import ( "sync" "testing" "time" + + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/test" ) func TestTCPOutput(t *testing.T) { @@ -15,16 +20,16 @@ func TestTCPOutput(t *testing.T) { listener := startTCP(func(data []byte) { wg.Done() }) - input := NewTestInput() + input := test.NewTestInput() output := NewTCPOutput(listener.Addr().String(), &TCPOutputConfig{Workers: 10}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) for i := 0; i < 10; i++ { wg.Add(1) @@ -50,7 +55,7 @@ func startTCP(cb func([]byte)) net.Listener { defer conn.Close() reader := bufio.NewReader(conn) scanner := bufio.NewScanner(reader) - scanner.Split(payloadScanner) + scanner.Split(proto.PayloadScanner) for scanner.Scan() { cb(scanner.Bytes()) @@ -68,23 +73,23 @@ func BenchmarkTCPOutput(b *testing.B) { listener := startTCP(func(data []byte) { wg.Done() }) - input := NewTestInput() - input.data = make(chan []byte, b.N) + input := test.NewTestInput() + input.Data = make(chan []byte, b.N) for i := 0; i < b.N; i++ { input.EmitGET() } wg.Add(b.N) output := NewTCPOutput(listener.Addr().String(), &TCPOutputConfig{Workers: 10}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } - emitter := NewEmitter() + emitter := emitter.New() // avoid counting above initialization b.ResetTimer() - go emitter.Start(plugins, Settings.Middleware) + go emitter.Start(plugins) wg.Wait() emitter.Close() @@ -125,9 +130,9 @@ func 
TestBufferDistribution(t *testing.T) { } } -func getTestBytes() *Message { - return &Message{ - Meta: payloadHeader(RequestPayload, uuid(), time.Now().UnixNano(), -1), +func getTestBytes() *plugin.Message { + return &plugin.Message{ + Meta: proto.PayloadHeader(proto.RequestPayload, proto.UUID(), time.Now().UnixNano(), -1), Data: []byte("GET / HTTP/1.1\r\nHost: www.w3.org\r\nUser-Agent: Go 1.1 package http\r\nAccept-Encoding: gzip\r\n\r\n"), } } diff --git a/test_input.go b/pkg/test/test_input.go similarity index 66% rename from test_input.go rename to pkg/test/test_input.go index 64e47a298..ce304d6f7 100644 --- a/test_input.go +++ b/pkg/test/test_input.go @@ -1,45 +1,44 @@ -package goreplay +package test import ( "encoding/base64" - "errors" "math/rand" "time" -) -// ErrorStopped is the error returned when the go routines reading the input is stopped. -var ErrorStopped = errors.New("reading stopped") + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" +) // TestInput used for testing purpose, it allows emitting requests on demand type TestInput struct { - data chan []byte - skipHeader bool + SkipHeader bool + Data chan []byte stop chan bool // Channel used only to indicate goroutine should shutdown } // NewTestInput constructor for TestInput func NewTestInput() (i *TestInput) { i = new(TestInput) - i.data = make(chan []byte, 100) + i.Data = make(chan []byte, 100) i.stop = make(chan bool) return } // PluginRead reads message from this plugin -func (i *TestInput) PluginRead() (*Message, error) { - var msg Message +func (i *TestInput) PluginRead() (*plugin.Message, error) { + var msg plugin.Message select { - case buf := <-i.data: + case buf := <-i.Data: msg.Data = buf - if !i.skipHeader { - msg.Meta = payloadHeader(RequestPayload, uuid(), time.Now().UnixNano(), -1) + if !i.SkipHeader { + msg.Meta = proto.PayloadHeader(proto.RequestPayload, proto.UUID(), time.Now().UnixNano(), -1) } else { - msg.Meta, msg.Data = 
payloadMetaWithBody(msg.Data) + msg.Meta, msg.Data = proto.PayloadMetaWithBody(msg.Data) } return &msg, nil case <-i.stop: - return nil, ErrorStopped + return nil, plugin.ErrorStopped } } @@ -51,22 +50,22 @@ func (i *TestInput) Close() error { // EmitBytes sends data func (i *TestInput) EmitBytes(data []byte) { - i.data <- data + i.Data <- data } // EmitGET emits GET request without headers func (i *TestInput) EmitGET() { - i.data <- []byte("GET / HTTP/1.1\r\n\r\n") + i.Data <- []byte("GET / HTTP/1.1\r\n\r\n") } // EmitPOST emits POST request with Content-Length func (i *TestInput) EmitPOST() { - i.data <- []byte("POST /pub/WWW/ HTTP/1.1\r\nContent-Length: 7\r\nHost: www.w3.org\r\n\r\na=1&b=2") + i.Data <- []byte("POST /pub/WWW/ HTTP/1.1\r\nContent-Length: 7\r\nHost: www.w3.org\r\n\r\na=1&b=2") } // EmitChunkedPOST emits POST request with `Transfer-Encoding: chunked` and chunked body func (i *TestInput) EmitChunkedPOST() { - i.data <- []byte("POST /pub/WWW/ HTTP/1.1\r\nHost: www.w3.org\r\nTransfer-Encoding: chunked\r\n\r\n4\r\nWiki\r\n5\r\npedia\r\ne\r\n in\r\n\r\nchunks.\r\n0\r\n\r\n") + i.Data <- []byte("POST /pub/WWW/ HTTP/1.1\r\nHost: www.w3.org\r\nTransfer-Encoding: chunked\r\n\r\n4\r\nWiki\r\n5\r\npedia\r\ne\r\n in\r\n\r\nchunks.\r\n0\r\n\r\n") } // EmitLargePOST emits POST request with large payload (5mb) @@ -77,7 +76,7 @@ func (i *TestInput) EmitLargePOST() { rs := base64.URLEncoding.EncodeToString(rb) - i.data <- []byte("POST / HTTP/1.1\r\nHost: www.w3.org\nContent-Length:5242880\r\n\r\n" + rs) + i.Data <- []byte("POST / HTTP/1.1\r\nHost: www.w3.org\nContent-Length:5242880\r\n\r\n" + rs) } // EmitSizedPOST emit a POST with a payload set to a supplied size @@ -87,12 +86,12 @@ func (i *TestInput) EmitSizedPOST(payloadSize int) { rs := base64.URLEncoding.EncodeToString(rb) - i.data <- []byte("POST / HTTP/1.1\r\nHost: www.w3.org\nContent-Length:5242880\r\n\r\n" + rs) + i.Data <- []byte("POST / HTTP/1.1\r\nHost: www.w3.org\nContent-Length:5242880\r\n\r\n" + rs) 
} // EmitOPTIONS emits OPTIONS request, similar to GET func (i *TestInput) EmitOPTIONS() { - i.data <- []byte("OPTIONS / HTTP/1.1\r\nHost: www.w3.org\r\n\r\n") + i.Data <- []byte("OPTIONS / HTTP/1.1\r\nHost: www.w3.org\r\n\r\n") } func (i *TestInput) String() string { diff --git a/test_output.go b/pkg/test/test_output.go similarity index 59% rename from test_output.go rename to pkg/test/test_output.go index 82c1301a2..5888f7ac2 100644 --- a/test_output.go +++ b/pkg/test/test_output.go @@ -1,14 +1,17 @@ -package goreplay +package test -type writeCallback func(*Message) +import "github.com/buger/goreplay/pkg/plugin" + +// WriteCallback callback for TestOutput +type WriteCallback func(*plugin.Message) // TestOutput used in testing to intercept any output into callback type TestOutput struct { - cb writeCallback + cb WriteCallback } // NewTestOutput constructor for TestOutput, accepts callback which get called on each incoming Write -func NewTestOutput(cb writeCallback) PluginWriter { +func NewTestOutput(cb WriteCallback) plugin.Writer { i := new(TestOutput) i.cb = cb @@ -16,7 +19,7 @@ func NewTestOutput(cb writeCallback) PluginWriter { } // PluginWrite write message to this plugin -func (i *TestOutput) PluginWrite(msg *Message) (int, error) { +func (i *TestOutput) PluginWrite(msg *plugin.Message) (int, error) { i.cb(msg) return len(msg.Data) + len(msg.Meta), nil diff --git a/output_ws.go b/pkg/ws/output_ws.go similarity index 72% rename from output_ws.go rename to pkg/ws/output_ws.go index 3f2a10255..733fddbc6 100644 --- a/output_ws.go +++ b/pkg/ws/output_ws.go @@ -1,17 +1,22 @@ -package goreplay +package ws import ( "context" "crypto/tls" "encoding/base64" "fmt" - "github.com/gorilla/websocket" "hash/fnv" - "log" "net/http" "net/url" "strings" "time" + + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/proto" + "github.com/buger/goreplay/pkg/stats" + "github.com/gorilla/websocket" + + "github.com/rs/zerolog/log" ) // WebSocketOutput used for 
sending raw tcp payloads @@ -19,8 +24,8 @@ import ( type WebSocketOutput struct { address string limit int - buf []chan *Message - bufStats *GorStat + buf []chan *plugin.Message + bufStats *stats.Stats config *WebSocketOutputConfig workerIndex uint32 headers http.Header @@ -33,16 +38,17 @@ type WebSocketOutputConfig struct { Sticky bool `json:"output-ws-sticky"` SkipVerify bool `json:"output-ws-skip-verify"` Workers int `json:"output-ws-workers"` + Stats bool `json:"output-ws-stats"` } // NewWebSocketOutput constructor for WebSocketOutput // Initialize X workers which hold keep-alive connection -func NewWebSocketOutput(address string, config *WebSocketOutputConfig) PluginWriter { +func NewWebSocketOutput(address string, config *WebSocketOutputConfig) plugin.Writer { o := new(WebSocketOutput) u, err := url.Parse(address) if err != nil { - log.Fatal(fmt.Sprintf("[OUTPUT-WS] parse WS output URL error[%q]", err)) + log.Fatal().Err(err).Msgf("parse WS output URL error") } o.config = config @@ -53,14 +59,14 @@ func NewWebSocketOutput(address string, config *WebSocketOutputConfig) PluginWri u.User = nil // must be after creating the headers o.address = u.String() - if Settings.OutputWebSocketStats { - o.bufStats = NewGorStat("output_ws", 5000) + if config.Stats { + o.bufStats = stats.New("output_ws", 5000) } // create X buffers and send the buffer index to the worker - o.buf = make([]chan *Message, o.config.Workers) + o.buf = make([]chan *plugin.Message, o.config.Workers) for i := 0; i < o.config.Workers; i++ { - o.buf[i] = make(chan *Message, 100) + o.buf[i] = make(chan *plugin.Message, 100) go o.worker(i) } @@ -79,7 +85,7 @@ func (o *WebSocketOutput) worker(bufferIndex int) { break } - Debug(1, fmt.Sprintf("Can't connect to aggregator instance, reconnecting in 1 second. Retries:%d", retries)) + log.Error().Err(err).Msgf("Can't connect to aggregator instance, reconnecting in 1 second. 
Retries:%d", retries) time.Sleep(1 * time.Second) conn, err = o.connect(o.address) @@ -87,7 +93,7 @@ func (o *WebSocketOutput) worker(bufferIndex int) { } if retries > 0 { - Debug(2, fmt.Sprintf("Connected to aggregator instance after %d retries", retries)) + log.Info().Msgf("Connected to aggregator instance after %d retries", retries) } defer conn.Close() @@ -96,7 +102,7 @@ func (o *WebSocketOutput) worker(bufferIndex int) { msg := <-o.buf[bufferIndex] err = conn.WriteMessage(websocket.BinaryMessage, append(msg.Meta, msg.Data...)) if err != nil { - Debug(2, "INFO: WebSocket output connection closed, reconnecting "+err.Error()) + log.Error().Err(err).Msgf("Can't write to aggregator instance, reconnecting in 1 second") o.buf[bufferIndex] <- msg go o.worker(bufferIndex) break @@ -104,27 +110,27 @@ func (o *WebSocketOutput) worker(bufferIndex int) { } } -func (o *WebSocketOutput) getBufferIndex(msg *Message) int { +func (o *WebSocketOutput) getBufferIndex(msg *plugin.Message) int { if !o.config.Sticky { o.workerIndex++ return int(o.workerIndex) % o.config.Workers } hasher := fnv.New32a() - hasher.Write(payloadID(msg.Meta)) + hasher.Write(proto.PayloadID(msg.Meta)) return int(hasher.Sum32()) % o.config.Workers } // PluginWrite writes message to this plugin -func (o *WebSocketOutput) PluginWrite(msg *Message) (n int, err error) { - if !isOriginPayload(msg.Meta) { +func (o *WebSocketOutput) PluginWrite(msg *plugin.Message) (n int, err error) { + if !proto.IsOriginPayload(msg.Meta) { return len(msg.Data), nil } bufferIndex := o.getBufferIndex(msg) o.buf[bufferIndex] <- msg - if Settings.OutputTCPStats { + if o.config.Stats { o.bufStats.Write(len(o.buf[bufferIndex])) } diff --git a/output_ws_test.go b/pkg/ws/output_ws_test.go similarity index 74% rename from output_ws_test.go rename to pkg/ws/output_ws_test.go index c80314df7..250717684 100644 --- a/output_ws_test.go +++ b/pkg/ws/output_ws_test.go @@ -1,11 +1,16 @@ -package goreplay +package ws import ( - 
"github.com/gorilla/websocket" "log" "net/http" "sync" "testing" + + "github.com/buger/goreplay/pkg/emitter" + "github.com/buger/goreplay/pkg/plugin" + "github.com/buger/goreplay/pkg/test" + + "github.com/gorilla/websocket" ) func TestWebSocketOutput(t *testing.T) { @@ -14,16 +19,16 @@ func TestWebSocketOutput(t *testing.T) { wsAddr := startWebsocket(func(data []byte) { wg.Done() }) - input := NewTestInput() + input := test.NewTestInput() output := NewWebSocketOutput(wsAddr, &WebSocketOutputConfig{Workers: 1}) - plugins := &InOutPlugins{ - Inputs: []PluginReader{input}, - Outputs: []PluginWriter{output}, + plugins := &plugin.InOutPlugins{ + Inputs: []plugin.Reader{input}, + Outputs: []plugin.Writer{output}, } - emitter := NewEmitter() - go emitter.Start(plugins, Settings.Middleware) + emitter := emitter.New() + go emitter.Start(plugins) for i := 0; i < 10; i++ { wg.Add(1) diff --git a/plugins.go b/plugins.go deleted file mode 100644 index d37b68324..000000000 --- a/plugins.go +++ /dev/null @@ -1,166 +0,0 @@ -package goreplay - -import ( - "reflect" - "strings" -) - -// Message represents data across plugins -type Message struct { - Meta []byte // metadata - Data []byte // actual data -} - -// PluginReader is an interface for input plugins -type PluginReader interface { - PluginRead() (msg *Message, err error) -} - -// PluginWriter is an interface for output plugins -type PluginWriter interface { - PluginWrite(msg *Message) (n int, err error) -} - -// PluginReadWriter is an interface for plugins that support reading and writing -type PluginReadWriter interface { - PluginReader - PluginWriter -} - -// InOutPlugins struct for holding references to plugins -type InOutPlugins struct { - Inputs []PluginReader - Outputs []PluginWriter - All []interface{} -} - -// extractLimitOptions detects if plugin get called with limiter support -// Returns address and limit -func extractLimitOptions(options string) (string, string) { - split := strings.Split(options, "|") - - if 
len(split) > 1 { - return split[0], split[1] - } - - return split[0], "" -} - -// Automatically detects type of plugin and initialize it -// -// See this article if curious about reflect stuff below: http://blog.burntsushi.net/type-parametric-functions-golang -func (plugins *InOutPlugins) registerPlugin(constructor interface{}, options ...interface{}) { - var path, limit string - vc := reflect.ValueOf(constructor) - - // Pre-processing options to make it work with reflect - vo := []reflect.Value{} - for _, oi := range options { - vo = append(vo, reflect.ValueOf(oi)) - } - - if len(vo) > 0 { - // Removing limit options from path - path, limit = extractLimitOptions(vo[0].String()) - - // Writing value back without limiter "|" options - vo[0] = reflect.ValueOf(path) - } - - // Calling our constructor with list of given options - plugin := vc.Call(vo)[0].Interface() - - if limit != "" { - plugin = NewLimiter(plugin, limit) - } - - // Some of the output can be Readers as well because return responses - if r, ok := plugin.(PluginReader); ok { - plugins.Inputs = append(plugins.Inputs, r) - } - - if w, ok := plugin.(PluginWriter); ok { - plugins.Outputs = append(plugins.Outputs, w) - } - plugins.All = append(plugins.All, plugin) -} - -// NewPlugins specify and initialize all available plugins -func NewPlugins() *InOutPlugins { - plugins := new(InOutPlugins) - - for _, options := range Settings.InputDummy { - plugins.registerPlugin(NewDummyInput, options) - } - - for range Settings.OutputDummy { - plugins.registerPlugin(NewDummyOutput) - } - - if Settings.OutputStdout { - plugins.registerPlugin(NewDummyOutput) - } - - if Settings.OutputNull { - plugins.registerPlugin(NewNullOutput) - } - - for _, options := range Settings.InputRAW { - plugins.registerPlugin(NewRAWInput, options, Settings.InputRAWConfig) - } - - for _, options := range Settings.InputTCP { - plugins.registerPlugin(NewTCPInput, options, &Settings.InputTCPConfig) - } - - for _, options := range 
Settings.OutputTCP { - plugins.registerPlugin(NewTCPOutput, options, &Settings.OutputTCPConfig) - } - - for _, options := range Settings.OutputWebSocket { - plugins.registerPlugin(NewWebSocketOutput, options, &Settings.OutputWebSocketConfig) - } - - for _, options := range Settings.InputFile { - plugins.registerPlugin(NewFileInput, options, Settings.InputFileLoop, Settings.InputFileReadDepth, Settings.InputFileMaxWait, Settings.InputFileDryRun) - } - - for _, path := range Settings.OutputFile { - if strings.HasPrefix(path, "s3://") { - plugins.registerPlugin(NewS3Output, path, &Settings.OutputFileConfig) - } else { - plugins.registerPlugin(NewFileOutput, path, &Settings.OutputFileConfig) - } - } - - for _, options := range Settings.InputHTTP { - plugins.registerPlugin(NewHTTPInput, options) - } - - // If we explicitly set Host header http output should not rewrite it - // Fix: https://github.com/buger/gor/issues/174 - for _, header := range Settings.ModifierConfig.Headers { - if header.Name == "Host" { - Settings.OutputHTTPConfig.OriginalHost = true - break - } - } - - for _, options := range Settings.OutputHTTP { - plugins.registerPlugin(NewHTTPOutput, options, &Settings.OutputHTTPConfig) - } - - for _, options := range Settings.OutputBinary { - plugins.registerPlugin(NewBinaryOutput, options, &Settings.OutputBinaryConfig) - } - - if Settings.OutputKafkaConfig.Host != "" && Settings.OutputKafkaConfig.Topic != "" { - plugins.registerPlugin(NewKafkaOutput, "", &Settings.OutputKafkaConfig, &Settings.KafkaTLSConfig) - } - - if Settings.InputKafkaConfig.Host != "" && Settings.InputKafkaConfig.Topic != "" { - plugins.registerPlugin(NewKafkaInput, "", &Settings.InputKafkaConfig, &Settings.KafkaTLSConfig) - } - - return plugins -} diff --git a/plugins_test.go b/plugins_test.go deleted file mode 100644 index 7e50d2393..000000000 --- a/plugins_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package goreplay - -import ( - "testing" -) - -func TestPluginsRegistration(t *testing.T) { 
- Settings.InputDummy = []string{"[]"} - Settings.OutputDummy = []string{"[]"} - Settings.OutputHTTP = []string{"www.example.com|10"} - Settings.InputFile = []string{"/dev/null"} - - plugins := NewPlugins() - - if len(plugins.Inputs) != 3 { - t.Errorf("Should be 3 inputs got %d", len(plugins.Inputs)) - } - - if _, ok := plugins.Inputs[0].(*DummyInput); !ok { - t.Errorf("First input should be DummyInput") - } - - if _, ok := plugins.Inputs[1].(*FileInput); !ok { - t.Errorf("Second input should be FileInput") - } - - if len(plugins.Outputs) != 2 { - t.Errorf("Should be 2 output %d", len(plugins.Outputs)) - } - - if _, ok := plugins.Outputs[0].(*DummyOutput); !ok { - t.Errorf("First output should be DummyOutput") - } - - if l, ok := plugins.Outputs[1].(*Limiter); ok { - if _, ok := l.plugin.(*HTTPOutput); !ok { - t.Errorf("HTTPOutput should be wrapped in limiter") - } - } else { - t.Errorf("Second output should be Limiter") - } - -} diff --git a/version.go b/version.go deleted file mode 100644 index cd449582b..000000000 --- a/version.go +++ /dev/null @@ -1,4 +0,0 @@ -package goreplay - -// VERSION the current version of goreplay -var VERSION = "1.3.0"