
Commit 263b010

committed Feb 13, 2025
pack: support .packignore
In some cases there are files that may be useful during development but should not be part of the package artifact. This change adds support for a `.packignore` file: files and directories listed in it are excluded when packing.

Closes #812

@TarantoolBot document
Title: `tt pack` supports `.packignore`
Use `.packignore` in the same way as `.gitignore` to exclude unnecessary files while preparing an application package with the `tt pack` command.
1 parent 7e746c4 commit 263b010
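For example, a `.packignore` placed in the application root might look like this (the entries are purely illustrative, not part of the commit):

```
# Local artifacts that should not go into the package
*.log
tmp/
secrets/
# Negation works as in .gitignore: the last matching pattern wins.
!release-notes.log
```

Patterns follow the `.gitignore` rules implemented here: `*`, `?`, and character ranges, a trailing `/` to match directories only, and a leading `!` to re-include a previously ignored path, with the last matching pattern deciding the outcome.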

File tree: 5 files changed, +727 −12 lines

- CHANGELOG.md
- cli/pack/common.go
- cli/pack/ignore.go
- cli/pack/ignore_test.go
- cli/util/osfs.go
 

‎CHANGELOG.md

+2 −0

```diff
@@ -10,6 +10,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
 ### Added
 
 - `tt pack `: added TCM file packaging.
+- `tt pack `: support `.packignore` file to specify files that should not be included
+  in package (works the same as `.gitignore`).
 
 ### Changed
```

‎cli/pack/common.go

+22 −12

```diff
@@ -1,6 +1,7 @@
 package pack
 
 import (
+	"errors"
 	"fmt"
 	"io/fs"
 	"os"
@@ -33,6 +34,8 @@ const (
 	versionLuaFileName = "VERSION.lua"
 
 	rocksManifestPath = ".rocks/share/tarantool/rocks/manifest"
+
+	ignoreFile = ".packignore"
 )
 
 var (
@@ -51,6 +54,8 @@ var (
 	}
 )
 
+type skipFilter func(srcInfo os.FileInfo, src string) bool
+
 type RocksVersions map[string][]string
 
 // packFileInfo contains information to set for files/dirs in rpm/deb packages.
@@ -76,9 +81,8 @@ func skipDefaults(srcInfo os.FileInfo, src string) bool {
 }
 
 // appArtifactsFilters returns a slice of skip functions to avoid copying application artifacts.
-func appArtifactsFilters(cliOpts *config.CliOpts, srcAppPath string) []func(
-	srcInfo os.FileInfo, src string) bool {
-	filters := make([]func(srcInfo os.FileInfo, src string) bool, 0)
+func appArtifactsFilters(cliOpts *config.CliOpts, srcAppPath string) []skipFilter {
+	filters := make([]skipFilter, 0)
 	if cliOpts.App == nil {
 		return filters
 	}
@@ -102,9 +106,8 @@ func appArtifactsFilters(cliOpts *config.CliOpts, srcAppPath string) []func(
 }
 
 // ttEnvironmentFilters prepares a slice of filters for tt environment directories/files.
-func ttEnvironmentFilters(packCtx *PackCtx, cliOpts *config.CliOpts) []func(
-	srcInfo os.FileInfo, src string) bool {
-	filters := make([]func(srcInfo os.FileInfo, src string) bool, 0)
+func ttEnvironmentFilters(packCtx *PackCtx, cliOpts *config.CliOpts) []skipFilter {
+	filters := make([]skipFilter, 0)
 	if cliOpts == nil {
 		return filters
 	}
@@ -139,10 +142,9 @@ func ttEnvironmentFilters(packCtx *PackCtx, cliOpts *config.CliOpts) []func(
 }
 
 // previousPackageFilters returns filters for the previously built packages.
-func previousPackageFilters(packCtx *PackCtx) []func(
-	srcInfo os.FileInfo, src string) bool {
+func previousPackageFilters(packCtx *PackCtx) []skipFilter {
 	pkgName := packCtx.Name
-	return []func(srcInfo os.FileInfo, src string) bool{
+	return []skipFilter{
 		func(srcInfo os.FileInfo, src string) bool {
 			name := srcInfo.Name()
 			if strings.HasPrefix(name, pkgName) {
@@ -159,13 +161,18 @@ func previousPackageFilters(packCtx *PackCtx) []func(
 
 // appSrcCopySkip returns a filter func to filter out artifacts paths.
 func appSrcCopySkip(packCtx *PackCtx, cliOpts *config.CliOpts,
-	srcAppPath string) func(srcinfo os.FileInfo, src, dest string) (bool, error) {
+	srcAppPath string) (func(srcinfo os.FileInfo, src, dest string) (bool, error), error) {
 	appCopyFilters := appArtifactsFilters(cliOpts, srcAppPath)
 	appCopyFilters = append(appCopyFilters, ttEnvironmentFilters(packCtx, cliOpts)...)
 	appCopyFilters = append(appCopyFilters, previousPackageFilters(packCtx)...)
 	appCopyFilters = append(appCopyFilters, func(srcInfo os.FileInfo, src string) bool {
 		return skipDefaults(srcInfo, src)
 	})
+	if f, err := ignoreFilter(util.GetOsFS(), filepath.Join(srcAppPath, ignoreFile)); err == nil {
+		appCopyFilters = append(appCopyFilters, f)
+	} else if !errors.Is(err, fs.ErrNotExist) {
+		return nil, fmt.Errorf("failed to load %q: %w", ignoreFile, err)
+	}
 
 	return func(srcinfo os.FileInfo, src, dest string) (bool, error) {
 		for _, shouldSkip := range appCopyFilters {
@@ -174,7 +181,7 @@ func appSrcCopySkip(packCtx *PackCtx, cliOpts *config.CliOpts,
 			}
 		}
 		return false, nil
-	}
+	}, nil
 }
 
 // getAppNamesToPack generates application names list to pack.
@@ -430,7 +437,10 @@ func copyAppSrc(packCtx *PackCtx, cliOpts *config.CliOpts, srcAppPath, dstAppPat
 		return err
 	}
 
-	skipFunc := appSrcCopySkip(packCtx, cliOpts, resolvedAppPath)
+	skipFunc, err := appSrcCopySkip(packCtx, cliOpts, resolvedAppPath)
+	if err != nil {
+		return err
+	}
 
 	// Copying application.
 	log.Debugf("Copying application source %q -> %q", resolvedAppPath, dstAppPath)
```
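The new `skipFilter` type just names the callbacks that `appSrcCopySkip` collects and ORs together inside the returned copy-skip closure. A minimal standalone sketch of that composition pattern (the `composeSkipFilters` helper and the sample filters are illustrative, not part of the commit):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// skipFilter mirrors the callback type used in cli/pack: it reports whether
// a source path should be excluded from the package.
type skipFilter func(srcInfo os.FileInfo, src string) bool

// composeSkipFilters is a hypothetical helper: the resulting function skips a
// path as soon as any single filter asks to skip it.
func composeSkipFilters(filters ...skipFilter) func(srcInfo os.FileInfo, src string) (bool, error) {
	return func(srcInfo os.FileInfo, src string) (bool, error) {
		for _, shouldSkip := range filters {
			if shouldSkip(srcInfo, src) {
				return true, nil
			}
		}
		return false, nil
	}
}

func main() {
	// Two toy filters; they ignore the FileInfo argument entirely.
	skipLogs := func(_ os.FileInfo, src string) bool { return strings.HasSuffix(src, ".log") }
	skipTmp := func(_ os.FileInfo, src string) bool { return strings.HasPrefix(src, "tmp/") }

	skip := composeSkipFilters(skipLogs, skipTmp)
	excluded, _ := skip(nil, "app/server.log")
	fmt.Println(excluded) // true: excluded by the .log filter
}
```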

‎cli/pack/ignore.go

new file, +111 −0

```go
package pack

import (
	"bufio"
	"bytes"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"regexp"
	"slices"
	"strings"
)

type ignorePattern struct {
	re       *regexp.Regexp
	dirOnly  bool
	isNegate bool
}

func createIgnorePattern(pattern string, basepath string) (ignorePattern, error) {
	var p ignorePattern
	var err error

	pattern, p.dirOnly = strings.CutSuffix(pattern, "/")
	pattern, p.isNegate = strings.CutPrefix(pattern, "!")

	if !p.isNegate && (strings.HasPrefix(pattern, "\\!") || strings.HasPrefix(pattern, "\\#")) {
		pattern = pattern[1:]
	}

	expr := pattern
	expr, found := strings.CutSuffix(expr, "/**")
	if found {
		expr = expr + "/([^/]+/)*[^/]*"
	}
	expr = strings.ReplaceAll(expr, "**/", "([^/]+/)*")
	expr = strings.ReplaceAll(expr, "*", "[^/]*")
	expr = strings.ReplaceAll(expr, "?", "[^/]")

	basepath, _ = strings.CutSuffix(basepath, "/")
	if basepath == "." {
		basepath = ""
	}

	if strings.Contains(pattern, "/") {
		expr = basepath + expr
	} else {
		expr = basepath + "/?([^/]+/)*" + expr
	}

	p.re, err = regexp.Compile("^" + expr + "$")
	if err != nil {
		return ignorePattern{}, fmt.Errorf("failed to compile expression: %w", err)
	}

	return p, nil
}

// loadIgnorePatterns returns filter that excludes files based on the patterns.
func loadIgnorePatterns(fsys fs.FS, ignoreFile string) ([]ignorePattern, error) {
	contents, err := fs.ReadFile(fsys, ignoreFile)
	if err != nil {
		return nil, err
	}

	basepath := filepath.Dir(ignoreFile)

	var patterns []ignorePattern
	s := bufio.NewScanner(bytes.NewReader(contents))
	for s.Scan() {
		pattern := strings.TrimSpace(s.Text())
		if pattern == "" || strings.HasPrefix(pattern, "#") {
			continue
		}

		p, err := createIgnorePattern(pattern, basepath)
		if err != nil {
			return nil, err
		}

		patterns = append(patterns, p)
	}
	return patterns, nil
}

// ignoreFilter returns filter function that implements .gitignore approach of filtering files.
func ignoreFilter(fsys fs.FS, patternsFile string) (skipFilter, error) {
	patterns, err := loadIgnorePatterns(fsys, patternsFile)
	if err != nil {
		return nil, err
	}

	// According to .gitignore documentation "the last matching pattern decides the outcome"
	// so we need to iterate in reverse order until the first match.
	slices.Reverse(patterns)

	return func(srcInfo os.FileInfo, src string) bool {
		// Skip ignore file itself.
		if src == patternsFile {
			return true
		}
		for _, p := range patterns {
			isApplicable := srcInfo.IsDir() || !p.dirOnly
			if isApplicable && p.re.MatchString(src) {
				return !p.isNegate
			}
		}
		return false
	}, nil
}
```
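As a quick illustration of the last-match-wins behaviour, here is a hedged usage sketch against an in-memory filesystem (the same approach the tests below take; the file names and patterns are made up):

```go
package pack

import (
	"fmt"
	"io/fs"
	"testing/fstest"
)

// Example_ignoreFilter sketches how the filter reads .packignore from an fs.FS
// and decides which paths to skip. Names and patterns are illustrative only.
func Example_ignoreFilter() {
	fsys := fstest.MapFS{
		".packignore": &fstest.MapFile{Data: []byte("*.log\n!keep.log\n")},
		"debug.log":   &fstest.MapFile{},
		"keep.log":    &fstest.MapFile{},
	}

	skip, err := ignoreFilter(fsys, ".packignore")
	if err != nil {
		panic(err)
	}

	for _, name := range []string{"debug.log", "keep.log"} {
		// The FileInfo is only consulted for directory-only ("dir/") patterns.
		info, _ := fs.Stat(fsys, name)
		fmt.Println(name, skip(info, name))
	}
	// Output:
	// debug.log true
	// keep.log false
}
```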

‎cli/pack/ignore_test.go

new file, +570 −0

```go
package pack

import (
	"errors"
	"io/fs"
	"os"
	"path"
	"path/filepath"
	"strings"
	"testing"
	"testing/fstest"

	"github.com/apex/log"
	"github.com/otiai10/copy"
	"github.com/stretchr/testify/assert"
)

func Test_ignoreFilter(t *testing.T) {
	type testFile struct {
		path     string
		expected bool
	}

	type testCase struct {
		name     string
		patterns []string
		files    []testFile
	}

	createFS := func(tc testCase) fs.FS {
		fsys := fstest.MapFS{}
		if tc.patterns != nil {
			fsys[ignoreFile] = &fstest.MapFile{
				Data: []byte(strings.Join(tc.patterns, "\n")),
				Mode: fs.FileMode(0644),
			}
		}
		for _, file := range tc.files {
			fsys[file.path] = &fstest.MapFile{
				Mode: fs.FileMode(0644),
			}
		}
		return fsys
	}

	runTest := func(t *testing.T, dst string, tc testCase) {
		fsys := createFS(tc)

		filter, err := ignoreFilter(fsys, ignoreFile)
		assert.Nil(t, err)
		assert.NotNil(t, filter)

		dst = filepath.Join(dst, tc.name)
		log.Infof("do_test: dst=%s", dst)
		err = os.MkdirAll(dst, 0755)
		if err != nil {
			assert.Nil(t, err)
		}

		err = copy.Copy(".", dst, copy.Options{
			FS: fsys,
			Skip: func(srcinfo os.FileInfo, src, dest string) (bool, error) {
				return filter(srcinfo, src), nil
			},
			PermissionControl: copy.AddPermission(0755),
		})
		assert.Nil(t, err)
		for _, f := range tc.files {
			if f.expected {
				assert.NoFileExists(t, path.Join(dst, f.path))
			} else {
				assert.FileExists(t, path.Join(dst, f.path))
			}
		}
	}

	runTestSet := func(t *testing.T, dst string, name string, testCases []testCase) {
		for _, tc := range testCases {
			t.Run(name+"_"+tc.name, func(t *testing.T) {
				runTest(t, filepath.Join(dst, name), tc)
			})
		}
	}

	dst := t.TempDir()

	// No ignore file.
	t.Run("no ignore file", func(t *testing.T) {
		f, err := ignoreFilter(fstest.MapFS{}, ignoreFile)
		assert.NotNil(t, err)
		assert.True(t, errors.Is(err, fs.ErrNotExist))
		assert.Nil(t, f)
	})

	// Single ignore pattern.
	testCases_single := []testCase{
		{
			name: "simple",
			patterns: []string{
				"foo",
			},
			files: []testFile{
				{"foo", true},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/foo/bar", true},
				{"dir2/foo/blabla", true},
			},
		},
		{
			name: "question_prefix",
			patterns: []string{
				"?foo",
			},
			files: []testFile{
				{"foo", false},
				{".foo", true},
				{"2foo", true},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", false},
				{"dir/.foo", true},
				{"dir/2foo", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/2foo/bar", true},
				{"dir2/2foo/blabla", true},
			},
		},
		{
			name: "question_suffix",
			patterns: []string{
				"foo?",
			},
			files: []testFile{
				{"foo", false},
				{"foo.", true},
				{"foo2", true},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", false},
				{"dir/foo.", true},
				{"dir/foo2", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/foo2/bar", true},
				{"dir2/foo2/blabla", true},
			},
		},
		{
			name: "question",
			patterns: []string{
				"f?o",
			},
			files: []testFile{
				{"foo", true},
				{"f2o", true},
				{"fooo", false},
				{"2foo", false},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", true},
				{"dir/f2o", true},
				{"dir/fooo", false},
				{"dir/2foo", false},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/f2o/bar", true},
				{"dir2/f2o/blabla", true},
			},
		},
		{
			name: "asterisk_prefix",
			patterns: []string{
				"*foo",
			},
			files: []testFile{
				{"foo", true},
				{".foo", true},
				{"foo_blabla", false},
				{"blabla_foo", true},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", true},
				{"dir/.foo", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", true},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/blabla_foo/bar", true},
				{"dir2/blabla_foo/blabla", true},
			},
		},
		{
			name: "asterisk_suffix",
			patterns: []string{
				"foo*",
			},
			files: []testFile{
				{"foo", true},
				{"foo.", true},
				{"foo_blabla", true},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", true},
				{"dir/foo.", true},
				{"dir/foo_blabla", true},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/foo_blabla/bar", true},
				{"dir2/foo_blabla/blabla", true},
			},
		},
		{
			name: "asterisk",
			patterns: []string{
				"f*o",
			},
			files: []testFile{
				{"fo", true},
				{"foo", true},
				{"foo_blabla_foo", true},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/fo", true},
				{"dir/foo", true},
				{"dir/foo_blabla_foo", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/foo_blabla_foo/bar", true},
				{"dir2/foo_blabla_foo/blabla", true},
			},
		},
		{
			name: "range",
			patterns: []string{
				"f[m-p]o",
			},
			files: []testFile{
				{"foo", true},
				{"fmo", true},
				{"f2o", false},
				{"fo", false},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", true},
				{"dir/fmo", true},
				{"dir/f2o", false},
				{"dir/fo", false},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/foo/bar", true},
				{"dir2/foo/blabla", true},
			},
		},
		{
			name: "range_exclude",
			patterns: []string{
				"[^a-eg-z]oo",
			},
			files: []testFile{
				{"foo", true},
				{"goo", false},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", true},
				{"dir/goo", false},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/foo/bar", true},
				{"dir2/foo/blabla", true},
			},
		},
		{
			name: "mix",
			patterns: []string{
				"?o[2ox]*",
			},
			files: []testFile{
				{"foo", true},
				{"foo_blabla", true},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", true},
				{"dir/foo_blabla", true},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
				{"dir2/go2_blabla/bar", true},
				{"dir2/go2_blabla/blabla", true},
			},
		},
		{
			name: "dir_simple",
			patterns: []string{
				"dir/",
			},
			files: []testFile{
				{"foo", false},
				{"dir/foo", true},
				{"2dir/foo", false},
				{"dir2/foo", false},
			},
		},
		{
			name: "dir_question_prefix",
			patterns: []string{
				"?dir/",
			},
			files: []testFile{
				{"foo", false},
				{"dir/foo", false},
				{"2dir/foo", true},
				{"22dir/foo", false},
				{"dir2/foo", false},
			},
		},
		{
			name: "dir_question_suffix",
			patterns: []string{
				"dir?/",
			},
			files: []testFile{
				{"foo", false},
				{"dir/foo", false},
				{"2dir/foo", false},
				{"dir2/foo", true},
				{"dir22/foo", false},
			},
		},
		{
			name: "dir_question",
			patterns: []string{
				"d?r/",
			},
			files: []testFile{
				{"foo", false},
				{"dir/foo", true},
				{"d2r/foo", true},
				{"d22r/foo", false},
				{"2dir/foo", false},
				{"dir2/foo", false},
			},
		},
		{
			name: "dir_asterisk_prefix",
			patterns: []string{
				"*dir/",
			},
			files: []testFile{
				{"foo", false},
				{"dir/foo", true},
				{"2dir/foo", true},
				{"22dir/foo", true},
				{"dir2/foo", false},
			},
		},
		{
			name: "dir_asterisk_suffix",
			patterns: []string{
				"dir*/",
			},
			files: []testFile{
				{"foo", false},
				{"dir/foo", true},
				{"2dir/foo", false},
				{"dir2/foo", true},
				{"dir22/foo", true},
			},
		},
		{
			name: "dir_asterisk",
			patterns: []string{
				"d*r/",
			},
			files: []testFile{
				{"foo", false},
				{"dir/foo", true},
				{"d2r/foo", true},
				{"d22r/foo", true},
				{"2dir/foo", false},
				{"dir2/foo", false},
			},
		},
	}
	runTestSet(t, dst, "singlepattern", testCases_single)

	// An ignore pattern followed with the same but negated (started with '!').
	var testCases_selfNegate []testCase
	for _, tc := range testCases_single {
		var files []testFile
		for _, f := range tc.files {
			files = append(files, testFile{f.path, false})
		}
		testCases_selfNegate = append(testCases_selfNegate, testCase{
			name:     tc.name,
			patterns: []string{tc.patterns[0], "!" + tc.patterns[0]},
			files:    files,
		})
	}
	runTestSet(t, dst, "singlepattern_and_selfnegate", testCases_selfNegate)

	// An ignore pattern preceded with the same but negated (thus negated one has no effect).
	var testCases_selfNegateWrongOrder []testCase
	for i, tc := range testCases_selfNegate {
		testCases_selfNegateWrongOrder = append(testCases_selfNegateWrongOrder, testCase{
			name:     tc.name,
			patterns: []string{tc.patterns[1], tc.patterns[0]},
			files:    testCases_single[i].files,
		})
	}
	runTestSet(t, dst, "singlepattern_and_selfnegate_wrong_order", testCases_selfNegateWrongOrder)

	// Single ignore pattern that is negate.
	var testCases_negateOnly []testCase
	for _, tc := range testCases_selfNegate {
		testCases_negateOnly = append(testCases_negateOnly, testCase{
			name:     tc.name,
			patterns: []string{tc.patterns[1]},
			files:    tc.files,
		})
	}
	runTestSet(t, dst, "singlepattern_negate", testCases_negateOnly)

	// Multiple patterns.
	testCases_multi := []testCase{
		{
			name: "simple",
			patterns: []string{
				"foo",
				"dir",
			},
			files: []testFile{
				{"foo", true},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", false},
				{"dir/foo", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", false},
			},
		},
		{
			name: "question",
			patterns: []string{
				"?foo",
				"?bla?",
			},
			files: []testFile{
				{"foo", false},
				{".foo", true},
				{"2foo", true},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", true},
				{"dir/foo", false},
				{"dir/.foo", true},
				{"dir/2foo", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", true},
			},
		},
		{
			name: "asterisk",
			patterns: []string{
				"foo*",
				"*bla*",
			},
			files: []testFile{
				{"foo", false},
				{"foo.", true},
				{"foo2", true},
				{"foo_blabla", false},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", true},
				{"dir/foo", false},
				{"dir/foo.", true},
				{"dir/foo2", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", true},
			},
		},
		{
			name: "range",
			patterns: []string{
				"bar",
				"*foo",
			},
			files: []testFile{
				{"foo", true},
				{".foo", true},
				{"foo_blabla", false},
				{"blabla_foo", true},
				{"bla_foo_bla", false},
				{"bar", true},
				{"dir/foo", true},
				{"dir/.foo", true},
				{"dir/foo_blabla", false},
				{"dir/blabla_foo", true},
				{"dir/bla_foo_bla", false},
				{"dir/bar", true},
			},
		},
		{
			name: "asterisk_suffix",
			patterns: []string{
				"bar",
				"foo*",
			},
			files: []testFile{
				{"foo", true},
				{"foo.", true},
				{"foo_blabla", true},
				{"blabla_foo", false},
				{"bla_foo_bla", false},
				{"bar", true},
				{"dir/foo", true},
				{"dir/foo.", true},
				{"dir/foo_blabla", true},
				{"dir/blabla_foo", false},
				{"dir/bla_foo_bla", false},
				{"dir/bar", true},
			},
		},
	}
	runTestSet(t, dst, "multipatterns", testCases_multi)
}
```

‎cli/util/osfs.go

new file, +22 −0

```go
package util

import (
	"io/fs"
	"os"
)

type osFS struct{}

var osfs osFS

func GetOsFS() fs.FS {
	return osfs
}

func (fs osFS) Open(name string) (fs.File, error) {
	return os.Open(name)
}

func (fs osFS) ReadFile(name string) ([]byte, error) {
	return os.ReadFile(name)
}
```