datasette publish cloudrun (#434) - thanks, @rprimet

New publish subcommand that publishes using the
new Google Cloud Run platform.

    datasette publish cloudrun database.db
This commit is contained in:
Romain Primet 2019-05-03 15:59:01 +02:00 committed by Simon Willison
commit 75a21fc2a1
5 changed files with 152 additions and 13 deletions

View file

@@ -6,6 +6,7 @@ from . import hookspecs
# Plugin modules loaded automatically at startup; each entry is an
# importable module path registered with the pluggy plugin manager.
DEFAULT_PLUGINS = (
    "datasette.publish.heroku",
    "datasette.publish.now",
    "datasette.publish.cloudrun",
    "datasette.facets",
)

View file

@@ -0,0 +1,71 @@
from datasette import hookimpl
import click
import json
from subprocess import check_call, check_output
from .common import (
add_common_publish_arguments_and_options,
fail_if_publish_binary_not_installed,
)
from ..utils import temporary_docker_directory
@hookimpl
def publish_subcommand(publish):
    """Pluggy hook: register the ``cloudrun`` subcommand on the
    ``datasette publish`` click group passed in as *publish*."""

    @publish.command()
    @add_common_publish_arguments_and_options
    @click.option(
        "-n",
        "--name",
        default="datasette",
        help="Application name to use when deploying",
    )
    # Fixed typo: the extension is spelled "SpatiaLite", not "SpatialLite"
    @click.option("--spatialite", is_flag=True, help="Enable SpatiaLite extension")
    def cloudrun(
        files,
        metadata,
        extra_options,
        branch,
        template_dir,
        plugins_dir,
        static,
        install,
        version_note,
        title,
        license,
        license_url,
        source,
        source_url,
        about,
        about_url,
        name,
        spatialite,
    ):
        "Publish databases to Datasette running on Google Cloud Run"
        # Bail out early with a helpful message if the gcloud CLI is missing.
        fail_if_publish_binary_not_installed(
            "gcloud", "Google Cloud", "https://cloud.google.com/sdk/"
        )
        # Deploy into whichever project the local gcloud CLI is configured for.
        project = check_output(
            "gcloud config get-value project", shell=True, universal_newlines=True
        ).strip()
        with temporary_docker_directory(
            files,
            name,
            metadata,
            extra_options,
            branch,
            template_dir,
            plugins_dir,
            static,
            install,
            spatialite,
            version_note,
            {
                "title": title,
                "license": license,
                "license_url": license_url,
                "source": source,
                "source_url": source_url,
                "about": about,
                "about_url": about_url,
            },
        ):
            # Build the image with Cloud Build, then deploy it to Cloud Run.
            # NOTE(review): project/name are interpolated into shell=True
            # command strings unquoted; assumed safe because gcloud rejects
            # invalid project/service names — consider shlex.quote. TODO confirm.
            image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
            check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
            check_call(
                "gcloud beta run deploy --allow-unauthenticated --image {}".format(
                    image_id
                ),
                shell=True,
            )

View file

@@ -265,26 +265,28 @@ def escape_sqlite(s):
else:
return '[{}]'.format(s)
def make_dockerfile(files, metadata_file, extra_options, branch, template_dir, plugins_dir, static, install, spatialite, version_note):
cmd = ['"datasette"', '"serve"', '"--host"', '"0.0.0.0"']
cmd.append('"' + '", "'.join(files) + '"')
cmd.extend(['"--cors"', '"--port"', '"8001"', '"--inspect-file"', '"inspect-data.json"'])
cmd = ['datasette', 'serve', '--host', '0.0.0.0']
cmd.append('", "'.join(files))
cmd.extend(['--cors', '--inspect-file', 'inspect-data.json'])
if metadata_file:
cmd.extend(['"--metadata"', '"{}"'.format(metadata_file)])
cmd.extend(['--metadata', '{}'.format(metadata_file)])
if template_dir:
cmd.extend(['"--template-dir"', '"templates/"'])
cmd.extend(['--template-dir', 'templates/'])
if plugins_dir:
cmd.extend(['"--plugins-dir"', '"plugins/"'])
cmd.extend(['--plugins-dir', 'plugins/'])
if version_note:
cmd.extend(['"--version-note"', '"{}"'.format(version_note)])
cmd.extend(['--version-note', '{}'.format(version_note)])
if static:
for mount_point, _ in static:
cmd.extend(['"--static"', '"{}:{}"'.format(mount_point, mount_point)])
cmd.extend(['--static', '{}:{}'.format(mount_point, mount_point)])
if extra_options:
for opt in extra_options.split():
cmd.append('"{}"'.format(opt))
cmd.append('{}'.format(opt))
cmd = [shlex.quote(part) for part in cmd]
# port attribute is a (fixed) env variable and should not be quoted
cmd.extend(['--port', '$PORT'])
cmd = ' '.join(cmd)
if branch:
install = ['https://github.com/simonw/datasette/archive/{}.zip'.format(
branch
@@ -299,10 +301,11 @@ WORKDIR /app
{spatialite_extras}
RUN pip install -U {install_from}
RUN datasette inspect {files} --inspect-file inspect-data.json
ENV PORT 8001
EXPOSE 8001
CMD [{cmd}]'''.format(
CMD {cmd}'''.format(
files=' '.join(files),
cmd=', '.join(cmd),
cmd=cmd,
install_from=' '.join(install),
spatialite_extras=SPATIALITE_DOCKERFILE_EXTRAS if spatialite else '',
).strip()

View file

@@ -0,0 +1,41 @@
from click.testing import CliRunner
from datasette import cli
from unittest import mock
@mock.patch("shutil.which")
def test_publish_cloudrun_requires_gcloud(mock_which):
mock_which.return_value = False
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
assert result.exit_code == 1
assert "Publishing to Google Cloud requires gcloud" in result.output
@mock.patch("shutil.which")
def test_publish_cloudrun_invalid_database(mock_which):
mock_which.return_value = True
runner = CliRunner()
result = runner.invoke(cli.cli, ["publish", "cloudrun", "woop.db"])
assert result.exit_code == 2
assert 'Path "woop.db" does not exist' in result.output
@mock.patch("shutil.which")
@mock.patch("datasette.publish.cloudrun.check_output")
@mock.patch("datasette.publish.cloudrun.check_call")
def test_publish_cloudrun(mock_call, mock_output, mock_which):
mock_output.return_value = "myproject"
mock_which.return_value = True
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
assert 0 == result.exit_code
tag = "gcr.io/{}/datasette".format(mock_output.return_value)
mock_call.assert_has_calls([
mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
mock.call("gcloud beta run deploy --allow-unauthenticated --image {}".format(tag), shell=True)])

View file

@@ -256,6 +256,29 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
assert 1 == os.stat(hello).st_nlink
def test_temporary_docker_directory_quotes_args():
    """Dockerfile CMD arguments must be shell-quoted so values like $PWD or
    --$HOME are not expanded by the shell when the container starts."""
    with tempfile.TemporaryDirectory() as td:
        # NOTE(review): cwd is changed and never restored, so later tests
        # inherit the (soon-deleted) temp dir as cwd — consider try/finally.
        os.chdir(td)
        # Close file handles promptly (the original open(...).write(...) and
        # open(df).read() both leaked handles and raised ResourceWarning).
        with open('hello', 'w') as fp:
            fp.write('world')
        with utils.temporary_docker_directory(
            files=['hello'],
            name='t',
            metadata=None,
            extra_options='--$HOME',
            branch=None,
            template_dir=None,
            plugins_dir=None,
            static=[],
            install=[],
            spatialite=False,
            version_note='$PWD',
        ) as temp_docker:
            df = os.path.join(temp_docker, 'Dockerfile')
            with open(df) as fp:
                df_contents = fp.read()
            # Single quotes prove shlex.quote was applied to each argument.
            assert "'$PWD'" in df_contents
            assert "'--$HOME'" in df_contents
def test_compound_keys_after_sql():
assert '((a > :p0))' == utils.compound_keys_after_sql(['a'])
assert '''