fix: read socket response data in chunks

Read the response data from the socket in chunks to prevent errors when
processing large payloads. The initial implementation worked around large
payloads by defining a single very large buffer, which does not scale. The
new code reads the socket data in a loop until the command terminator is
found, appending each chunk to a single byte slice.
Reduce the buffer size to `1024` bytes.
This commit is contained in:
Hector 2021-08-29 16:02:31 +01:00
parent e083b48461
commit e5714b7485

View File

@ -1,6 +1,7 @@
package socket package socket
import ( import (
"bufio"
"bytes" "bytes"
"fmt" "fmt"
"github.com/nlpodyssey/gopickle/pickle" "github.com/nlpodyssey/gopickle/pickle"
@ -9,7 +10,7 @@ import (
const ( const (
commandTerminator = "<F2B_END_COMMAND>" commandTerminator = "<F2B_END_COMMAND>"
pingCommand = "ping" pingCommand = "ping"
socketReadBufferSize = 10000 socketReadBufferSize = 1024
) )
func (s *Fail2BanSocket) sendCommand(command []string) (interface{}, error) { func (s *Fail2BanSocket) sendCommand(command []string) (interface{}, error) {
@ -33,13 +34,23 @@ func (s *Fail2BanSocket) write(command []string) error {
} }
func (s *Fail2BanSocket) read() (interface{}, error) { func (s *Fail2BanSocket) read() (interface{}, error) {
buf := make([]byte, socketReadBufferSize) reader := bufio.NewReader(s.socket)
_, err := s.socket.Read(buf)
if err != nil { data := []byte{}
return nil, err for {
buf := make([]byte, socketReadBufferSize)
_, err := reader.Read(buf)
if err != nil {
return nil, err
}
data = append(data, buf...)
containsTerminator := bytes.Contains(data, []byte(commandTerminator))
if containsTerminator {
break
}
} }
bufReader := bytes.NewReader(buf) bufReader := bytes.NewReader(data)
unpickler := pickle.NewUnpickler(bufReader) unpickler := pickle.NewUnpickler(bufReader)
unpickler.FindClass = func(module, name string) (interface{}, error) { unpickler.FindClass = func(module, name string) (interface{}, error) {