Commit 081b78f1 authored by Joffrey F, committed by Joffrey F

Support building with Dockerfile outside of context

Signed-off-by: Joffrey F <joffrey@docker.com>
parent 12a6833e
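With this change, the dockerfile argument passed to APIClient.build() no longer has to live inside the build context directory: an out-of-context Dockerfile is read into memory and shipped inside the context archive under a randomly generated name. A minimal usage sketch of the resulting behaviour; the paths and tag below are illustrative, not part of this commit:

import docker

client = docker.APIClient()
stream = client.build(
    path='/srv/app/context',           # directory that becomes the build context
    dockerfile='/srv/app/Dockerfile',  # lives outside of path; now supported
    tag='example/app:latest',
    decode=True,
)
for chunk in stream:
    if 'stream' in chunk:
        print(chunk['stream'], end='')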
import json
import logging
import os
import random
from .. import auth
from .. import constants
@@ -148,6 +149,15 @@ class BuildApiMixin(object):
lambda x: x != '' and x[0] != '#',
[l.strip() for l in f.read().splitlines()]
))
if dockerfile and os.path.relpath(dockerfile, path).startswith(
'..'):
with open(dockerfile, 'r') as df:
dockerfile = (
'.dockerfile.{0:x}'.format(random.getrandbits(160)),
df.read()
)
else:
dockerfile = (dockerfile, None)
context = utils.tar(
path, exclude=exclude, dockerfile=dockerfile, gzip=gzip
)
......
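The hunk above relies on os.path.relpath: a Dockerfile outside the context resolves to a relative path that starts with '..', in which case its contents are read immediately and paired with a random hidden name for use inside the archive. A standalone sketch of that check; the helper name and paths are invented for illustration:

import os
import random

def is_outside_context(dockerfile_path, context_path):
    # relpath climbs out of the context with '..' when the file is not under it
    return os.path.relpath(dockerfile_path, context_path).startswith('..')

print(is_outside_context('/tmp/Dockerfile', '/srv/app'))      # True
print(is_outside_context('/srv/app/Dockerfile', '/srv/app'))  # False

# The in-archive copy is named '.dockerfile.' plus 160 random bits rendered as hex
print('.dockerfile.{0:x}'.format(random.getrandbits(160)))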
@@ -2,23 +2,33 @@ import os
import re
from ..constants import IS_WINDOWS_PLATFORM
from fnmatch import fnmatch
from itertools import chain
from .utils import create_archive

_SEP = re.compile('/|\\\\') if IS_WINDOWS_PLATFORM else re.compile('/')
def tar(path, exclude=None, dockerfile=None, fileobj=None, gzip=False):
root = os.path.abspath(path)
exclude = exclude or []
dockerfile = dockerfile or (None, None)
extra_files = []
if dockerfile[1] is not None:
dockerignore_contents = '\n'.join(
(exclude or ['.dockerignore']) + [dockerfile[0]]
)
extra_files = [
('.dockerignore', dockerignore_contents),
dockerfile,
]
    return create_archive(
        files=sorted(exclude_paths(root, exclude, dockerfile=dockerfile[0])),
        root=root, fileobj=fileobj, gzip=gzip, extra_files=extra_files
    )

def exclude_paths(root, patterns, dockerfile=None):
"""
Given a root directory path and a list of .dockerignore patterns, return
......
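tar() now treats dockerfile as a (name_in_archive, contents) tuple: contents is None in the normal in-context case, and a string when the API layer read an out-of-context Dockerfile. In the latter case it appends a synthetic .dockerignore, which preserves the caller's exclude patterns and also hides the renamed Dockerfile from COPY/ADD, plus the Dockerfile itself, as extra_files. A small sketch of just that bookkeeping, with invented values:

# Pretend the API layer already read an out-of-context Dockerfile:
dockerfile = ('.dockerfile.abc123', 'FROM busybox\nCOPY . /src\n')
exclude = ['*.pyc']

# Mirrors the logic added to tar() above.
dockerignore_contents = '\n'.join(
    (exclude or ['.dockerignore']) + [dockerfile[0]]
)
extra_files = [
    ('.dockerignore', dockerignore_contents),
    dockerfile,
]
print(dockerignore_contents)
# *.pyc
# .dockerfile.abc123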
@@ -88,13 +88,17 @@ def build_file_list(root):
return files
def create_archive(root, files=None, fileobj=None, gzip=False,
                   extra_files=None):
    extra_files = extra_files or []  # tolerate callers that pass no extra files
if not fileobj:
fileobj = tempfile.NamedTemporaryFile()
t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj)
if files is None:
files = build_file_list(root)
for path in files:
if path in [e[0] for e in extra_files]:
# Extra files override context files with the same name
continue
full_path = os.path.join(root, path)
i = t.gettarinfo(full_path, arcname=path)
@@ -123,6 +127,12 @@ def create_archive(root, files=None, fileobj=None, gzip=False):
else:
# Directories, FIFOs, symlinks... don't need to be read.
t.addfile(i, None)
    for name, contents in extra_files:
        info = tarfile.TarInfo(name)
        contents_encoded = contents.encode('utf-8')
        info.size = len(contents_encoded)  # tar entry size must be the encoded byte length
        t.addfile(info, io.BytesIO(contents_encoded))
t.close()
fileobj.seek(0)
return fileobj
......
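The extra_files plumbing in create_archive comes down to writing in-memory strings into the tar stream through tarfile.TarInfo objects instead of files on disk. A self-contained sketch of that technique, using invented file names and contents:

import io
import tarfile
import tempfile

extra_files = [
    ('.dockerignore', '*.pyc\n.dockerfile.abc123\n'),
    ('.dockerfile.abc123', 'FROM busybox\nCOPY . /src\n'),
]

fileobj = tempfile.NamedTemporaryFile()
with tarfile.open(mode='w', fileobj=fileobj) as t:
    for name, contents in extra_files:
        data = contents.encode('utf-8')
        info = tarfile.TarInfo(name)
        info.size = len(data)  # size must match the encoded payload
        t.addfile(info, io.BytesIO(data))
fileobj.seek(0)

with tarfile.open(fileobj=fileobj) as t:
    print(t.getnames())  # ['.dockerignore', '.dockerfile.abc123']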
@@ -407,3 +407,36 @@ class BuildTest(BaseAPIIntegrationTest):
assert excinfo.value.status_code == 400
assert 'invalid platform' in excinfo.exconly()
def test_build_out_of_context_dockerfile(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
f.write('hello world')
with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
f.write('.dockerignore\n')
df = tempfile.NamedTemporaryFile()
self.addCleanup(df.close)
df.write(('\n'.join([
'FROM busybox',
'COPY . /src',
'WORKDIR /src',
])).encode('utf-8'))
df.flush()
img_name = random_name()
self.tmp_imgs.append(img_name)
stream = self.client.build(
path=base_dir, dockerfile=df.name, tag=img_name,
decode=True
)
lines = []
for chunk in stream:
lines.append(chunk)
assert 'Successfully tagged' in lines[-1]['stream']
ctnr = self.client.create_container(img_name, 'ls -a')
self.tmp_containers.append(ctnr)
self.client.start(ctnr)
lsdata = self.client.logs(ctnr).strip().split(b'\n')
assert len(lsdata) == 3
assert sorted([b'.', b'..', b'file.txt']) == sorted(lsdata)
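The integration test above exercises the low-level APIClient; the same capability is reachable from the high-level SDK client, since images.build() forwards path and dockerfile to the API layer. A hedged sketch against the docker SDK 3.x interface, where images.build() returns an (image, logs) tuple; paths and tag are again illustrative:

import docker

client = docker.from_env()
image, logs = client.images.build(
    path='/srv/app/context',
    dockerfile='/srv/app/Dockerfile',  # outside the context directory
    tag='example/app:latest',
)
for chunk in logs:
    if 'stream' in chunk:
        print(chunk['stream'], end='')
print(image.id)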