list_objects.go
package ls3

import (
	"errors"
	"github.com/relvacode/ls3/exception"
	"io/fs"
	"net/http"
	"path"
	"sort"
	"strconv"
	"strings"
	"syscall"
	"time"
)

// errEndOfIteration is a sentinel error used to stop the filesystem walk early in ListObjectsV2.
var errEndOfIteration = errors.New("end iteration")
func listObjectsMaxKeys(r *http.Request) (int, error) {
	maxKeysQuery := r.URL.Query().Get("max-keys")
	if maxKeysQuery == "" {
		return 1000, nil
	}

	maxKeys, err := strconv.Atoi(maxKeysQuery)
	if err != nil || maxKeys < 0 {
		return 0, &exception.Error{
			ErrorCode: exception.InvalidArgument,
			Message:   "Invalid value for max-keys",
		}
	}

	return maxKeys, nil
}
func listObjectsUrlEncodingType(r *http.Request) (string, error) {
	switch r.URL.Query().Get("encoding-type") {
	case "url":
		return "url", nil
	case "":
		return "", nil
	default:
		return "", &exception.Error{
			ErrorCode: exception.InvalidArgument,
			Message:   "Only \"url\" is supported for encoding-type",
		}
	}
}
type Contents struct {
	ChecksumAlgorithm string
	ETag              string
	Key               string
	LastModified      time.Time
	Size              int
	StorageClass      string
}

type CommonPrefixes struct {
	Prefix string
}
func NewBucketIterator(fs fs.FS) *BucketIterator {
	return &BucketIterator{
		IsTruncated: false,
		Continue:    "",
		fs:          fs,
		prefixes:    make(map[string]struct{}),
	}
}

type BucketIterator struct {
	IsTruncated bool
	Continue    string
	seekObject  string
	fs          fs.FS
	prefixes    map[string]struct{}
}
func (it *BucketIterator) CommonPrefixes() (prefixes []CommonPrefixes) {
	var commonPrefixKeys = make([]string, 0, len(it.prefixes))
	for k := range it.prefixes {
		commonPrefixKeys = append(commonPrefixKeys, k)
	}
	sort.Strings(commonPrefixKeys)

	prefixes = make([]CommonPrefixes, 0, len(commonPrefixKeys))
	for _, k := range commonPrefixKeys {
		prefixes = append(prefixes, CommonPrefixes{
			Prefix: k,
		})
	}
	return
}
// Seek sets the starting object key to begin seeking from during the next PrefixScan.
// If set, all objects are discarded until the first occurrence of after.
func (it *BucketIterator) Seek(after string) {
	it.seekObject = after
}
func (it *BucketIterator) PrefixScan(prefix string, delimiter string, objectKeyEncoding bool, maxKeys int) ([]Contents, error) {
	var (
		contents     []Contents
		basePath     string
		objectPrefix string
		shouldSkip   = it.seekObject != ""
	)

	if prefix != "" {
		basePath, objectPrefix = path.Split(prefix)
	}

	var scanPath = strings.Trim(basePath, "/")
	if scanPath == "" {
		scanPath = "."
	}

	err := fs.WalkDir(it.fs, scanPath, func(filePath string, d fs.DirEntry, err error) error {
		if err != nil {
			// Unwrap the error looking for common filesystem errors
			for unwrap := err; unwrap != nil; unwrap = errors.Unwrap(unwrap) {
				switch unwrap {
				case syscall.EPERM:
					return fs.SkipDir
				}
			}
			return err
		}

		var relPath = strings.Trim(strings.TrimPrefix(filePath, scanPath), "/")
		var objectPath = path.Join(basePath, relPath)

		// If an iteration seek is requested,
		// then ignore all objects until the object path equals the seek object.
		if shouldSkip {
			if objectPath == it.seekObject {
				shouldSkip = false
			}
			return nil
		}

		// Directory handling
		if d.IsDir() {
			// Inner directory pruning, as long as this path is not the root path
			if filePath != scanPath {
				// If the entry is a directory and an object prefix is set,
				// signal to WalkDir that this directory should be skipped if it doesn't have the prefix.
				if objectPrefix != "" && !strings.HasPrefix(relPath, objectPrefix) {
					return fs.SkipDir
				}

				// If the entry is a directory, but not the root, and the delimiter is "/",
				// then the directory can be skipped entirely after adding it to the common prefixes.
				if delimiter == "/" {
					it.prefixes[encodePath(objectPath+"/")] = struct{}{}
					return fs.SkipDir
				}
			}
			return nil
		}

		// If a delimiter is provided, check if this relative path contains the delimiter.
		// If it does, don't add the object as a key; instead add it to the list of common prefixes.
		if delimiter != "" {
			ix := strings.Index(relPath, delimiter)
			if ix > -1 {
				it.prefixes[encodePath(prefix+relPath[:ix])] = struct{}{}
				return nil
			}
		}

		// The file is an object.
		// Ignore it if it doesn't have the prefix.
		if objectPrefix != "" && !strings.HasPrefix(relPath, objectPrefix) {
			return nil
		}

		fi, err := d.Info()
		if err != nil {
			return err
		}

		var urlEncodedObjectPath = objectPath
		if objectKeyEncoding {
			urlEncodedObjectPath = encodePath(urlEncodedObjectPath)
		}

		contents = append(contents, Contents{
			LastModified: fi.ModTime().UTC(),
			Size:         int(fi.Size()),
			Key:          urlEncodedObjectPath,
		})

		// If the number of objects has reached maxKeys then mark the result as truncated
		// and stop walking; the next scan continues from the object after this one.
		canContinue := len(contents) < maxKeys
		if !canContinue {
			it.IsTruncated = true
			it.Continue = objectPath
			return errEndOfIteration
		}
		return nil
	})

	// errEndOfIteration only signals early termination; any other walk error is a real failure.
	if err != nil && !errors.Is(err, errEndOfIteration) {
		return nil, err
	}
	return contents, nil
}
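
// Usage sketch (illustrative only, not part of the original file): how a
// ListObjectsV2 handler might drive BucketIterator using the helpers above.
// The handler shape, the bucket filesystem argument, and reading the
// continuation token straight from the query string are assumptions made
// for demonstration.
func exampleListObjectsV2(bucket fs.FS, r *http.Request) ([]Contents, []CommonPrefixes, error) {
	maxKeys, err := listObjectsMaxKeys(r)
	if err != nil {
		return nil, nil, err
	}

	encoding, err := listObjectsUrlEncodingType(r)
	if err != nil {
		return nil, nil, err
	}

	it := NewBucketIterator(bucket)

	// Resume a previous listing if the client supplied a continuation token
	// (assumed here to carry the last returned object key verbatim).
	if after := r.URL.Query().Get("continuation-token"); after != "" {
		it.Seek(after)
	}

	contents, err := it.PrefixScan(
		r.URL.Query().Get("prefix"),
		r.URL.Query().Get("delimiter"),
		encoding == "url",
		maxKeys,
	)
	if err != nil {
		return nil, nil, err
	}

	// After the scan, it.CommonPrefixes() holds the rolled-up directory prefixes,
	// and it.IsTruncated / it.Continue would feed IsTruncated and
	// NextContinuationToken in the XML response.
	return contents, it.CommonPrefixes(), nil
}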