2017-03-07 04:39:22 +08:00
|
|
|
package buildah
|
|
|
|
|
|
|
|
import (
|
|
|
|
"io"
|
|
|
|
"net/http"
|
2017-03-08 02:26:17 +08:00
|
|
|
"net/url"
|
2017-03-07 04:39:22 +08:00
|
|
|
"os"
|
|
|
|
"path"
|
|
|
|
"path/filepath"
|
|
|
|
"strings"
|
2017-07-17 22:42:58 +08:00
|
|
|
"syscall"
|
2017-03-28 15:01:59 +08:00
|
|
|
"time"
|
2017-03-07 04:39:22 +08:00
|
|
|
|
2019-03-25 18:23:56 +08:00
|
|
|
"github.com/containers/buildah/pkg/chrootuser"
|
2018-09-18 03:20:16 +08:00
|
|
|
"github.com/containers/buildah/util"
|
2017-03-07 04:39:22 +08:00
|
|
|
"github.com/containers/storage/pkg/archive"
|
2019-05-30 07:56:28 +08:00
|
|
|
"github.com/containers/storage/pkg/fileutils"
|
2018-03-17 05:19:29 +08:00
|
|
|
"github.com/containers/storage/pkg/idtools"
|
2017-11-30 22:34:02 +08:00
|
|
|
"github.com/opencontainers/runtime-spec/specs-go"
|
2017-06-02 03:23:02 +08:00
|
|
|
"github.com/pkg/errors"
|
2017-10-10 03:05:56 +08:00
|
|
|
"github.com/sirupsen/logrus"
|
2017-03-07 04:39:22 +08:00
|
|
|
)
|
|
|
|
|
2018-06-08 22:52:52 +08:00
|
|
|
// AddAndCopyOptions holds options for add and copy commands.
type AddAndCopyOptions struct {
	// Chown is a spec for the user who should be given ownership over the
	// newly-added content, potentially overriding permissions which would
	// otherwise match those of local files and directories being copied.
	Chown string
	// All of the data being copied will pass through Hasher, if set.
	// If the sources are URLs or files, their contents will be passed to
	// Hasher.
	// If the sources include directory trees, Hasher will be passed
	// tar-format archives of the directory trees.
	Hasher io.Writer
	// Excludes is the contents of the .dockerignore file.
	Excludes []string
	// ContextDir is the base directory for Excludes for content being copied.
	ContextDir string
	// IDMappingOptions are the ID mapping options to use when contents to be
	// copied are part of another container, and need ownerships to be mapped
	// from the host to that container's values before copying them into the
	// container.
	IDMappingOptions *IDMappingOptions
	// DryRun indicates that the content should be digested, but not actually
	// copied into the container.
	DryRun bool
}
|
|
|
|
|
2017-04-04 01:43:34 +08:00
|
|
|
// addURL copies the contents of the source URL to the destination. This is
|
2017-03-07 04:39:22 +08:00
|
|
|
// its own function so that deferred closes happen after we're done pulling
|
|
|
|
// down each item of potentially many.
|
2019-08-10 04:21:24 +08:00
|
|
|
func (b *Builder) addURL(destination, srcurl string, owner idtools.IDPair, hasher io.Writer, dryRun bool) error {
|
2017-03-07 04:39:22 +08:00
|
|
|
resp, err := http.Get(srcurl)
|
|
|
|
if err != nil {
|
2017-06-02 03:23:02 +08:00
|
|
|
return errors.Wrapf(err, "error getting %q", srcurl)
|
2017-03-07 04:39:22 +08:00
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
2019-06-08 06:02:50 +08:00
|
|
|
|
2019-08-10 04:21:24 +08:00
|
|
|
thisHasher := hasher
|
|
|
|
if thisHasher != nil && b.ContentDigester.Hash() != nil {
|
|
|
|
thisHasher = io.MultiWriter(thisHasher, b.ContentDigester.Hash())
|
|
|
|
}
|
|
|
|
if thisHasher == nil {
|
|
|
|
thisHasher = b.ContentDigester.Hash()
|
|
|
|
}
|
|
|
|
thisWriter := thisHasher
|
2019-06-08 06:02:50 +08:00
|
|
|
|
|
|
|
if !dryRun {
|
|
|
|
logrus.Debugf("saving %q to %q", srcurl, destination)
|
|
|
|
f, err := os.Create(destination)
|
|
|
|
if err != nil {
|
|
|
|
return errors.Wrapf(err, "error creating %q", destination)
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
if err = f.Chown(owner.UID, owner.GID); err != nil {
|
|
|
|
return errors.Wrapf(err, "error setting owner of %q to %d:%d", destination, owner.UID, owner.GID)
|
|
|
|
}
|
|
|
|
if last := resp.Header.Get("Last-Modified"); last != "" {
|
|
|
|
if mtime, err2 := time.Parse(time.RFC1123, last); err2 != nil {
|
|
|
|
logrus.Debugf("error parsing Last-Modified time %q: %v", last, err2)
|
|
|
|
} else {
|
|
|
|
defer func() {
|
|
|
|
if err3 := os.Chtimes(destination, time.Now(), mtime); err3 != nil {
|
|
|
|
logrus.Debugf("error setting mtime on %q to Last-Modified time %q: %v", destination, last, err3)
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
defer func() {
|
|
|
|
if err2 := f.Chmod(0600); err2 != nil {
|
|
|
|
logrus.Debugf("error setting permissions on %q: %v", destination, err2)
|
|
|
|
}
|
|
|
|
}()
|
2019-08-10 04:21:24 +08:00
|
|
|
thisWriter = io.MultiWriter(f, thisWriter)
|
2017-03-28 15:01:59 +08:00
|
|
|
}
|
2019-08-10 04:21:24 +08:00
|
|
|
|
2019-06-08 06:02:50 +08:00
|
|
|
n, err := io.Copy(thisWriter, resp.Body)
|
2017-04-04 05:44:23 +08:00
|
|
|
if err != nil {
|
2018-10-03 22:05:46 +08:00
|
|
|
return errors.Wrapf(err, "error reading contents for %q from %q", destination, srcurl)
|
2017-04-04 05:44:23 +08:00
|
|
|
}
|
2017-03-07 04:39:22 +08:00
|
|
|
if resp.ContentLength >= 0 && n != resp.ContentLength {
|
2018-10-03 22:05:46 +08:00
|
|
|
return errors.Errorf("error reading contents for %q from %q: wrong length (%d != %d)", destination, srcurl, n, resp.ContentLength)
|
2017-03-07 04:39:22 +08:00
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-03-24 01:48:23 +08:00
|
|
|
// Add copies the contents of the specified sources into the container's root
|
|
|
|
// filesystem, optionally extracting contents of local files that look like
|
|
|
|
// non-empty archives.
|
2017-11-30 22:34:02 +08:00
|
|
|
func (b *Builder) Add(destination string, extract bool, options AddAndCopyOptions, source ...string) error {
|
2019-05-30 07:56:28 +08:00
|
|
|
excludes, err := dockerIgnoreMatcher(options.Excludes, options.ContextDir)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2017-11-22 21:57:31 +08:00
|
|
|
mountPoint, err := b.Mount(b.MountLabel)
|
2017-03-24 01:48:23 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
2017-03-07 04:39:22 +08:00
|
|
|
}
|
2017-03-24 01:48:23 +08:00
|
|
|
defer func() {
|
|
|
|
if err2 := b.Unmount(); err2 != nil {
|
|
|
|
logrus.Errorf("error unmounting container: %v", err2)
|
|
|
|
}
|
|
|
|
}()
|
2018-01-12 18:54:30 +08:00
|
|
|
// Find out which user (and group) the destination should belong to.
|
2019-05-16 06:40:15 +08:00
|
|
|
user, _, err := b.user(mountPoint, options.Chown)
|
2018-01-12 18:54:30 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2018-03-17 05:19:29 +08:00
|
|
|
containerOwner := idtools.IDPair{UID: int(user.UID), GID: int(user.GID)}
|
2018-06-07 23:25:47 +08:00
|
|
|
hostUID, hostGID, err := util.GetHostIDs(b.IDMappingOptions.UIDMap, b.IDMappingOptions.GIDMap, user.UID, user.GID)
|
2018-03-17 05:19:29 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
hostOwner := idtools.IDPair{UID: int(hostUID), GID: int(hostGID)}
|
2017-03-24 01:48:23 +08:00
|
|
|
dest := mountPoint
|
2019-06-08 06:02:50 +08:00
|
|
|
if !options.DryRun {
|
|
|
|
// Resolve the destination if it was specified as a relative path.
|
|
|
|
if destination != "" && filepath.IsAbs(destination) {
|
|
|
|
dir := filepath.Dir(destination)
|
|
|
|
if dir != "." && dir != "/" {
|
|
|
|
if err = idtools.MkdirAllAndChownNew(filepath.Join(dest, dir), 0755, hostOwner); err != nil {
|
|
|
|
return errors.Wrapf(err, "error creating directory %q", filepath.Join(dest, dir))
|
|
|
|
}
|
2019-05-26 20:56:13 +08:00
|
|
|
}
|
2019-06-08 06:02:50 +08:00
|
|
|
dest = filepath.Join(dest, destination)
|
|
|
|
} else {
|
|
|
|
if err = idtools.MkdirAllAndChownNew(filepath.Join(dest, b.WorkDir()), 0755, hostOwner); err != nil {
|
|
|
|
return errors.Wrapf(err, "error creating directory %q", filepath.Join(dest, b.WorkDir()))
|
|
|
|
}
|
|
|
|
dest = filepath.Join(dest, b.WorkDir(), destination)
|
2019-05-26 20:56:13 +08:00
|
|
|
}
|
2019-06-08 06:02:50 +08:00
|
|
|
// If the destination was explicitly marked as a directory by ending it
|
|
|
|
// with a '/', create it so that we can be sure that it's a directory,
|
|
|
|
// and any files we're copying will be placed in the directory.
|
|
|
|
if len(destination) > 0 && destination[len(destination)-1] == os.PathSeparator {
|
|
|
|
if err = idtools.MkdirAllAndChownNew(dest, 0755, hostOwner); err != nil {
|
|
|
|
return errors.Wrapf(err, "error creating directory %q", dest)
|
|
|
|
}
|
2017-03-24 01:47:07 +08:00
|
|
|
}
|
2019-06-08 06:02:50 +08:00
|
|
|
// Make sure the destination's parent directory is usable.
|
|
|
|
if destpfi, err2 := os.Stat(filepath.Dir(dest)); err2 == nil && !destpfi.IsDir() {
|
|
|
|
return errors.Errorf("%q already exists, but is not a subdirectory)", filepath.Dir(dest))
|
2017-03-28 03:35:09 +08:00
|
|
|
}
|
|
|
|
}
|
2017-03-24 01:49:38 +08:00
|
|
|
// Now look at the destination itself.
|
|
|
|
destfi, err := os.Stat(dest)
|
|
|
|
if err != nil {
|
|
|
|
if !os.IsNotExist(err) {
|
2017-06-02 03:23:02 +08:00
|
|
|
return errors.Wrapf(err, "couldn't determine what %q is", dest)
|
2017-03-24 01:49:38 +08:00
|
|
|
}
|
|
|
|
destfi = nil
|
|
|
|
}
|
2017-05-24 03:00:57 +08:00
|
|
|
if len(source) > 1 && (destfi == nil || !destfi.IsDir()) {
|
2017-06-03 00:17:27 +08:00
|
|
|
return errors.Errorf("destination %q is not a directory", dest)
|
2017-03-08 02:26:17 +08:00
|
|
|
}
|
2019-06-08 06:02:50 +08:00
|
|
|
copyFileWithTar := b.copyFileWithTar(options.IDMappingOptions, &containerOwner, options.Hasher, options.DryRun)
|
|
|
|
copyWithTar := b.copyWithTar(options.IDMappingOptions, &containerOwner, options.Hasher, options.DryRun)
|
|
|
|
untarPath := b.untarPath(nil, options.Hasher, options.DryRun)
|
2019-08-10 04:21:24 +08:00
|
|
|
err = b.addHelper(excludes, extract, dest, destfi, hostOwner, options, copyFileWithTar, copyWithTar, untarPath, source...)
|
2019-03-20 02:28:54 +08:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// user returns the user (and group) information which the destination should belong to.
func (b *Builder) user(mountPoint string, userspec string) (specs.User, string, error) {
	// Fall back to the builder's configured user when no spec was given.
	if userspec == "" {
		userspec = b.User()
	}

	// Look the spec up against the container's own /etc/passwd and
	// /etc/group.  Note that we deliberately continue even if this fails:
	// u is populated with whatever IDs were resolved (zero values on
	// failure), and err is carried through to the final return.
	uid, gid, homeDir, err := chrootuser.GetUser(mountPoint, userspec)
	u := specs.User{
		UID: uid,
		GID: gid,
		Username: userspec,
	}
	// Only look up supplementary groups when the spec didn't pin an
	// explicit group with a "user:group" form.
	if !strings.Contains(userspec, ":") {
		groups, err2 := chrootuser.GetAdditionalGroupsForUser(mountPoint, uint64(u.UID))
		if err2 != nil {
			// "no such user" is expected for numeric-only specs; only
			// promote other lookup errors, and only if GetUser itself
			// didn't already fail.
			if errors.Cause(err2) != chrootuser.ErrNoSuchUser && err == nil {
				err = err2
			}
		} else {
			u.AdditionalGids = groups
		}

	}
	return u, homeDir, err
}
|
|
|
|
|
2019-05-30 07:56:28 +08:00
|
|
|
// dockerIgnoreMatcher returns a matcher based on the contents of the .dockerignore file under contextDir
|
|
|
|
func dockerIgnoreMatcher(lines []string, contextDir string) (*fileutils.PatternMatcher, error) {
|
|
|
|
// if there's no context dir, there's no .dockerignore file to consult
|
|
|
|
if contextDir == "" {
|
|
|
|
return nil, nil
|
|
|
|
}
|
|
|
|
patterns := []string{".dockerignore"}
|
|
|
|
for _, ignoreSpec := range lines {
|
|
|
|
ignoreSpec = strings.TrimSpace(ignoreSpec)
|
|
|
|
// ignore comments passed back from .dockerignore
|
|
|
|
if ignoreSpec == "" || ignoreSpec[0] == '#' {
|
2019-03-20 02:28:54 +08:00
|
|
|
continue
|
|
|
|
}
|
2019-05-30 07:56:28 +08:00
|
|
|
// if the spec starts with '!' it means the pattern
|
|
|
|
// should be included. make a note so that we can move
|
|
|
|
// it to the front of the updated pattern
|
|
|
|
includeFlag := ""
|
|
|
|
if strings.HasPrefix(ignoreSpec, "!") {
|
|
|
|
includeFlag = "!"
|
|
|
|
ignoreSpec = ignoreSpec[1:]
|
2019-03-20 02:28:54 +08:00
|
|
|
}
|
2019-05-30 07:56:28 +08:00
|
|
|
if ignoreSpec == "" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
patterns = append(patterns, includeFlag+filepath.Join(contextDir, ignoreSpec))
|
2019-03-20 02:28:54 +08:00
|
|
|
}
|
2019-05-30 07:56:28 +08:00
|
|
|
// if there are no patterns, save time by not constructing the object
|
|
|
|
if len(patterns) == 0 {
|
|
|
|
return nil, nil
|
2019-03-20 02:28:54 +08:00
|
|
|
}
|
2019-05-30 07:56:28 +08:00
|
|
|
// return a matcher object
|
|
|
|
matcher, err := fileutils.NewPatternMatcher(patterns)
|
2019-04-04 04:32:12 +08:00
|
|
|
if err != nil {
|
2019-05-30 07:56:28 +08:00
|
|
|
return nil, errors.Wrapf(err, "error creating file matcher using patterns %v", patterns)
|
2019-04-04 04:32:12 +08:00
|
|
|
}
|
2019-05-30 07:56:28 +08:00
|
|
|
return matcher, nil
|
|
|
|
}
|
|
|
|
|
2019-08-10 04:21:24 +08:00
|
|
|
// addHelper performs the actual copying for Add: for each source it decides
// whether the source is a URL, a directory, a plain file, or an archive to be
// extracted, and dispatches to the appropriate pre-bound copy helper.  The
// builder's ContentDigester is restarted per item so each item is digested
// separately.
func (b *Builder) addHelper(excludes *fileutils.PatternMatcher, extract bool, dest string, destfi os.FileInfo, hostOwner idtools.IDPair, options AddAndCopyOptions, copyFileWithTar, copyWithTar, untarPath func(src, dest string) error, source ...string) error {
	for n, src := range source {
		if strings.HasPrefix(src, "http://") || strings.HasPrefix(src, "https://") {
			b.ContentDigester.Start("")
			// We assume that source is a file, and we're copying
			// it to the destination. If the destination is
			// already a directory, create a file inside of it.
			// Otherwise, the destination is the file to which
			// we'll save the contents.
			url, err := url.Parse(src)
			if err != nil {
				return errors.Wrapf(err, "error parsing URL %q", src)
			}
			d := dest
			if destfi != nil && destfi.IsDir() {
				// Name the file after the last path component of the URL.
				d = filepath.Join(dest, path.Base(url.Path))
			}
			if err = b.addURL(d, src, hostOwner, options.Hasher, options.DryRun); err != nil {
				return err
			}
			continue
		}

		// Local sources may contain glob characters; expand them.
		glob, err := filepath.Glob(src)
		if err != nil {
			return errors.Wrapf(err, "invalid glob %q", src)
		}
		if len(glob) == 0 {
			return errors.Wrapf(syscall.ENOENT, "no files found matching %q", src)
		}

		for _, gsrc := range glob {
			// Resolve symlinks so that we stat and copy the real target.
			esrc, err := filepath.EvalSymlinks(gsrc)
			if err != nil {
				return errors.Wrapf(err, "error evaluating symlinks %q", gsrc)
			}
			srcfi, err := os.Stat(esrc)
			if err != nil {
				return errors.Wrapf(err, "error reading %q", esrc)
			}
			if srcfi.IsDir() {
				b.ContentDigester.Start("dir")
				// The source is a directory, so copy the contents of
				// the source directory into the target directory. Try
				// to create it first, so that if there's a problem,
				// we'll discover why that won't work.
				if !options.DryRun {
					if err = idtools.MkdirAllAndChownNew(dest, 0755, hostOwner); err != nil {
						return errors.Wrapf(err, "error creating directory %q", dest)
					}
				}
				logrus.Debugf("copying[%d] %q to %q", n, esrc+string(os.PathSeparator)+"*", dest+string(os.PathSeparator)+"*")
				// With no exclusion patterns to honor, a single tar
				// pass over the whole tree is sufficient (and faster).
				if excludes == nil || !excludes.Exclusions() {
					if err = copyWithTar(esrc, dest); err != nil {
						return errors.Wrapf(err, "error copying %q to %q", esrc, dest)
					}
					continue
				}
				// Otherwise walk the tree and copy each non-excluded
				// entry individually.
				err := filepath.Walk(esrc, func(path string, info os.FileInfo, err error) error {
					if err != nil {
						return err
					}
					skip, err := excludes.Matches(path)
					if err != nil {
						return errors.Wrapf(err, "error checking if %s is an excluded path", path)
					}
					if skip {
						return nil
					}
					// combine the source's basename with the dest directory
					fpath, err := filepath.Rel(esrc, path)
					if err != nil {
						return errors.Wrapf(err, "error converting %s to a path relative to %s", path, esrc)
					}
					if err = copyFileWithTar(path, filepath.Join(dest, fpath)); err != nil {
						return errors.Wrapf(err, "error copying %q to %q", path, dest)
					}
					return nil
				})
				if err != nil {
					return err
				}
				continue
			}

			b.ContentDigester.Start("file")

			if !extract || !archive.IsArchivePath(esrc) {
				// This source is a file, and either it's not an
				// archive, or we don't care whether or not it's an
				// archive.
				d := dest
				if destfi != nil && destfi.IsDir() {
					// Keep the original (pre-symlink-resolution) basename.
					d = filepath.Join(dest, filepath.Base(gsrc))
				}
				// Copy the file, preserving attributes.
				logrus.Debugf("copying[%d] %q to %q", n, esrc, d)
				if err = copyFileWithTar(esrc, d); err != nil {
					return errors.Wrapf(err, "error copying %q to %q", esrc, d)
				}
				continue
			}

			// We're extracting an archive into the destination directory.
			logrus.Debugf("extracting contents[%d] of %q into %q", n, esrc, dest)
			if err = untarPath(esrc, dest); err != nil {
				return errors.Wrapf(err, "error extracting %q into %q", esrc, dest)
			}
		}
	}
	return nil
}
|