mirror of
https://github.com/moby/moby.git
synced 2022-11-09 12:21:53 -05:00
pkg/archive:CopyTo(): fix for long dest filename
As reported in docker/for-linux/issues/484, since Docker 18.06 docker cp with a destination file name fails with the following error: > archive/tar: cannot encode header: Format specifies USTAR; and USTAR cannot encode Name="a_very_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx_long_filename_that_is_101_characters" The problem is caused by changes in Go 1.10 archive/tar, which mis-guesses the tar stream format as USTAR (rather than PAX), which, in turn, leads to inability to specify file names longer than 100 characters. This tar stream is sent by TarWithOptions() (which, since we switched to Go 1.10, explicitly sets format=PAX for every file, see FileInfoHeader(), and before Go 1.10 it was PAX by default). Unfortunately, the receiving side, RebaseArchiveEntries(), which calls tar.Next(), mistakenly guesses header format as USTAR, which leads to the above error. The fix is easy: set the format to PAX in RebaseArchiveEntries() where we read the tar stream and change the file name. A unit test is added to prevent future regressions. NOTE this code is not used by dockerd, but rather by docker cli (also possibly other clients), so this needs to be re-vendored to cli in order to take effect. Signed-off-by: Kir Kolyshkin <kolyshkin@gmail.com>
This commit is contained in:
parent
8d7889e510
commit
f55a4176fe
2 changed files with 32 additions and 0 deletions
|
@ -336,6 +336,14 @@ func RebaseArchiveEntries(srcContent io.Reader, oldBase, newBase string) io.Read
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// srcContent tar stream, as served by TarWithOptions(), is
|
||||||
|
// definitely in PAX format, but tar.Next() mistakenly guesses it
|
||||||
|
// as USTAR, which creates a problem: if the newBase is >100
|
||||||
|
// characters long, WriteHeader() returns an error like
|
||||||
|
// "archive/tar: cannot encode header: Format specifies USTAR; and USTAR cannot encode Name=...".
|
||||||
|
//
|
||||||
|
// To fix, set the format to PAX here. See docker/for-linux issue #484.
|
||||||
|
hdr.Format = tar.FormatPAX
|
||||||
hdr.Name = strings.Replace(hdr.Name, oldBase, newBase, 1)
|
hdr.Name = strings.Replace(hdr.Name, oldBase, newBase, 1)
|
||||||
if hdr.Typeflag == tar.TypeLink {
|
if hdr.Typeflag == tar.TypeLink {
|
||||||
hdr.Linkname = strings.Replace(hdr.Linkname, oldBase, newBase, 1)
|
hdr.Linkname = strings.Replace(hdr.Linkname, oldBase, newBase, 1)
|
||||||
|
|
|
@ -257,6 +257,30 @@ func TestCopyErrDstNotDir(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Test to check if CopyTo works with a long (>100 characters) destination file name.
|
||||||
|
// This is a regression (see https://github.com/docker/for-linux/issues/484).
|
||||||
|
func TestCopyLongDstFilename(t *testing.T) {
|
||||||
|
const longName = "a_very_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx_long_filename_that_is_101_characters"
|
||||||
|
tmpDirA, tmpDirB := getTestTempDirs(t)
|
||||||
|
defer removeAllPaths(tmpDirA, tmpDirB)
|
||||||
|
|
||||||
|
// Load A with some sample files and directories.
|
||||||
|
createSampleDir(t, tmpDirA)
|
||||||
|
|
||||||
|
srcInfo := CopyInfo{Path: filepath.Join(tmpDirA, "file1"), Exists: true, IsDir: false}
|
||||||
|
|
||||||
|
content, err := TarResource(srcInfo)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error %T: %s", err, err)
|
||||||
|
}
|
||||||
|
defer content.Close()
|
||||||
|
|
||||||
|
err = CopyTo(content, srcInfo, filepath.Join(tmpDirB, longName))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error %T: %s", err, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Possibilities are reduced to the remaining 10 cases:
|
// Possibilities are reduced to the remaining 10 cases:
|
||||||
//
|
//
|
||||||
// case | srcIsDir | onlyDirContents | dstExists | dstIsDir | dstTrSep | action
|
// case | srcIsDir | onlyDirContents | dstExists | dstIsDir | dstTrSep | action
|
||||||
|
|
Loading…
Reference in a new issue