Skip to content

Commit 8b38b65

Browse files
authored
helper/hasher general improvements (#41863)
* Fix a TOCTOU by opening the file handle and then doing stat(), instead of doing stat() and then opening. * Make sure this is a regular file, otherwise you could trick auditbeat into hashing an infinite source like a pipe. * Allow for rate (but not file size) to be infinite, this is needed for an upcoming new backend for module/system/process. * Finally, fix error messages that show up on ECS, see below. before: ``` failed to hash executable /d/e/beats/x-pack/auditbeat/auditbeat for PID 50751: failed to hash file /d/e/beats/x-pack/auditbeat/auditbeat: hasher: file size 143673152 exceeds max file size ``` after: ``` failed to hash executable /d/e/beats/x-pack/auditbeat/auditbeat for PID 50804: size 143673152 exceeds max file size ```
1 parent ab0f3c8 commit 8b38b65

2 files changed

Lines changed: 30 additions & 14 deletions

File tree

CHANGELOG.next.asciidoc

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -123,6 +123,7 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
123123
- auditd: Request status from a separate socket to avoid data congestion {pull}41207[41207]
124124
- auditd: Use ECS `event.type: end` instead of `stop` for SERVICE_STOP, DAEMON_ABORT, and DAEMON_END messages. {pull}41558[41558]
125125
- auditd: Update syscall names for Linux 6.11. {pull}41558[41558]
126+
- hasher: General improvements and fixes. {pull}41863[41863]
126127

127128
*Filebeat*
128129

auditbeat/helper/hasher/hasher.go

Lines changed: 29 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@ import (
2626
"fmt"
2727
"hash"
2828
"io"
29-
"os"
3029
"strings"
3130
"time"
3231

@@ -124,7 +123,7 @@ type FileTooLargeError struct {
124123

125124
// Error returns the error message for FileTooLargeError.
126125
func (e FileTooLargeError) Error() string {
127-
return fmt.Sprintf("hasher: file size %d exceeds max file size", e.fileSize)
126+
return fmt.Sprintf("size %d exceeds max file size", e.fileSize)
128127
}
129128

130129
// Config contains the configuration of a FileHasher.
@@ -174,28 +173,46 @@ type FileHasher struct {
174173

175174
// NewFileHasher creates a new FileHasher.
176175
func NewFileHasher(c Config, done <-chan struct{}) (*FileHasher, error) {
176+
var limit rate.Limit
177+
178+
if c.ScanRateBytesPerSec == 0 {
179+
limit = rate.Inf
180+
} else {
181+
limit = rate.Limit(c.ScanRateBytesPerSec)
182+
}
183+
177184
return &FileHasher{
178185
config: c,
179186
limiter: rate.NewLimiter(
180-
rate.Limit(c.ScanRateBytesPerSec), // Rate
181-
int(c.MaxFileSizeBytes), // Burst
187+
limit, // Rate
188+
int(c.MaxFileSizeBytes), // Burst
182189
),
183190
done: done,
184191
}, nil
185192
}
186193

187194
// HashFile hashes the contents of a file.
188195
func (hasher *FileHasher) HashFile(path string) (map[HashType]Digest, error) {
189-
info, err := os.Stat(path)
196+
f, err := file.ReadOpen(path)
190197
if err != nil {
191-
return nil, fmt.Errorf("failed to stat file %v: %w", path, err)
198+
return nil, fmt.Errorf("open: %w", err)
199+
}
200+
defer f.Close()
201+
202+
info, err := f.Stat()
203+
if err != nil {
204+
return nil, fmt.Errorf("stat: %w", err)
205+
}
206+
if !info.Mode().IsRegular() {
207+
return nil, fmt.Errorf("not a regular file")
208+
192209
}
193210

194211
// Throttle reading and hashing rate.
195212
if len(hasher.config.HashTypes) > 0 {
196213
err = hasher.throttle(info.Size())
197214
if err != nil {
198-
return nil, fmt.Errorf("failed to hash file %v: %w", path, err)
215+
return nil, err
199216
}
200217
}
201218

@@ -210,15 +227,9 @@ func (hasher *FileHasher) HashFile(path string) (map[HashType]Digest, error) {
210227
}
211228

212229
if len(hashes) > 0 {
213-
f, err := file.ReadOpen(path)
214-
if err != nil {
215-
return nil, fmt.Errorf("failed to open file for hashing: %w", err)
216-
}
217-
defer f.Close()
218-
219230
hashWriter := multiWriter(hashes)
220231
if _, err := io.Copy(hashWriter, f); err != nil {
221-
return nil, fmt.Errorf("failed to calculate file hashes: %w", err)
232+
return nil, err
222233
}
223234

224235
nameToHash := make(map[HashType]Digest, len(hashes))
@@ -233,6 +244,10 @@ func (hasher *FileHasher) HashFile(path string) (map[HashType]Digest, error) {
233244
}
234245

235246
func (hasher *FileHasher) throttle(fileSize int64) error {
247+
// Burst is ignored if limit is infinite, so check it manually
248+
if hasher.limiter.Limit() == rate.Inf && int(fileSize) > hasher.limiter.Burst() {
249+
return FileTooLargeError{fileSize}
250+
}
236251
reservation := hasher.limiter.ReserveN(time.Now(), int(fileSize))
237252
if !reservation.OK() {
238253
// File is bigger than the max file size

0 commit comments

Comments
 (0)