From 4143e3b45c16cbae5e3e3419ef479a71810e7df3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 13 Nov 2017 08:13:38 -0800
Subject: [PATCH] New command: datasette package - packages a docker container

Example usage:

    datasette package fivethirtyeight.db \
        --tag fivethirtyeight \
        --metadata=538-metadata.json

This will create a temporary directory, generate a Dockerfile, copy in the
SQLite database and metadata file, then build it as a new Docker image and
tag it in your local Docker repository as fivethirtyeight:latest.

You can then run the image like so:

    docker run -p 8006:8001 fivethirtyeight

This will expose port 8001 in the container (the default) as port 8006 on
your host.

Closes #67
---
 datasette/cli.py   | 59 +++++++++++++++++++++++++++-------------------
 datasette/utils.py | 32 +++++++++++++++++++++++--
 2 files changed, 65 insertions(+), 26 deletions(-)

diff --git a/datasette/cli.py b/datasette/cli.py
index 2d1e7539..753731fa 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -1,13 +1,13 @@
 import click
 from click_default_group import DefaultGroup
 import json
-import os
 import shutil
 from subprocess import call
 import sys
-import tempfile
 from .app import Datasette
-from .utils import make_dockerfile
+from .utils import (
+    temporary_docker_directory,
+)
 
 
 @click.group(cls=DefaultGroup, default='serve', default_if_no_args=True)
@@ -46,28 +46,39 @@ def publish(files, name, metadata):
         )
         click.echo('Follow the instructions at https://zeit.co/now#whats-now', err=True)
         sys.exit(1)
-    tmp = tempfile.TemporaryDirectory()
-    # We create a datasette folder in there to get a nicer now deploy name
-    datasette_dir = os.path.join(tmp.name, name)
-    os.mkdir(datasette_dir)
-    saved_cwd = os.getcwd()
-    file_paths = [
-        os.path.join(saved_cwd, name)
-        for name in files
-    ]
-    file_names = [os.path.split(f)[-1] for f in files]
-    try:
-        dockerfile = make_dockerfile(file_names, metadata and 'metadata.json')
-        os.chdir(datasette_dir)
-        open('Dockerfile', 'w').write(dockerfile)
-        if metadata:
-            open('metadata.json', 'w').write(metadata.read())
-        for path, filename in zip(file_paths, file_names):
-            os.link(path, os.path.join(datasette_dir, filename))
+
+    with temporary_docker_directory(files, name, metadata):
         call('now')
-    finally:
-        tmp.cleanup()
-        os.chdir(saved_cwd)
+
+
+@cli.command()
+@click.argument('files', type=click.Path(exists=True), nargs=-1, required=True)
+@click.option(
+    '-t', '--tag',
+    help='Name for the resulting Docker container, can optionally use name:tag format'
+)
+@click.option(
+    '-m', '--metadata', type=click.File(mode='r'),
+    help='Path to JSON file containing metadata to publish'
+)
+def package(files, tag, metadata):
+    "Package specified SQLite files into a new datasette Docker container"
+    if not shutil.which('docker'):
+        click.secho(
+            ' The package command requires "docker" to be installed and configured ',
+            bg='red',
+            fg='white',
+            bold=True,
+            err=True,
+        )
+        sys.exit(1)
+    with temporary_docker_directory(files, 'datasette', metadata):
+        args = ['docker', 'build']
+        if tag:
+            args.append('-t')
+            args.append(tag)
+        args.append('.')
+        call(args)
 
 
 @cli.command()
diff --git a/datasette/utils.py b/datasette/utils.py
index a520843d..8fbaac3a 100644
--- a/datasette/utils.py
+++ b/datasette/utils.py
@@ -1,8 +1,10 @@
 from contextlib import contextmanager
 import base64
 import json
+import os
 import re
 import sqlite3
+import tempfile
 import time
 import urllib
 
@@ -140,9 +142,35 @@ COPY . /app
 WORKDIR /app
 RUN pip install https://static.simonwillison.net/static/2017/datasette-0.6-py3-none-any.whl
 RUN datasette build {} --inspect-file inspect-data.json
-EXPOSE 8006
-CMD ["datasette", "serve", {}, "--port", "8006", "--inspect-file", "inspect-data.json"{}]'''.format(
+EXPOSE 8001
+CMD ["datasette", "serve", {}, "--port", "8001", "--inspect-file", "inspect-data.json"{}]'''.format(
     ' '.join(files),
     '"' + '", "'.join(files) + '"',
     metadata_file and ', "--metadata", "{}"'.format(metadata_file) or '',
 ).strip()
+
+
+@contextmanager
+def temporary_docker_directory(files, name, metadata):
+    tmp = tempfile.TemporaryDirectory()
+    # We create a datasette folder in there to get a nicer now deploy name
+    datasette_dir = os.path.join(tmp.name, name)
+    os.mkdir(datasette_dir)
+    saved_cwd = os.getcwd()
+    file_paths = [
+        os.path.join(saved_cwd, name)
+        for name in files
+    ]
+    file_names = [os.path.split(f)[-1] for f in files]
+    try:
+        dockerfile = make_dockerfile(file_names, metadata and 'metadata.json')
+        os.chdir(datasette_dir)
+        open('Dockerfile', 'w').write(dockerfile)
+        if metadata:
+            open('metadata.json', 'w').write(metadata.read())
+        for path, filename in zip(file_paths, file_names):
+            os.link(path, os.path.join(datasette_dir, filename))
+        yield
+    finally:
+        tmp.cleanup()
+        os.chdir(saved_cwd)
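
For reference, here is a minimal sketch of how the new temporary_docker_directory
helper ties the two commands together. It is not part of the patch; the database
file name and image tag are illustrative, and the publish command uses the same
helper but runs `now` inside the block instead of `docker build`:

    # Sketch only: stage fivethirtyeight.db plus a generated Dockerfile in a
    # temporary directory, then build an image from it, mirroring what the
    # new `datasette package` command does internally.
    from subprocess import call

    from datasette.utils import temporary_docker_directory

    # Assumes fivethirtyeight.db exists in the current directory and docker
    # is installed; metadata is None so no metadata.json is written.
    with temporary_docker_directory(['fivethirtyeight.db'], 'datasette', None):
        # The current directory now holds the Dockerfile plus hard links to
        # the SQLite files; the original working directory is restored and
        # the temporary directory removed when the block exits.
        call(['docker', 'build', '-t', 'fivethirtyeight', '.'])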