from datasette import hookimpl
import click
from subprocess import check_call, check_output

from .common import (
    add_common_publish_arguments_and_options,
    fail_if_publish_binary_not_installed,
)
from ..utils import temporary_docker_directory


@hookimpl
def publish_subcommand(publish):
    """Plugin hook: register the ``publish cloudrun`` subcommand.

    ``datasette publish cloudrun database.db`` packages the given database
    files into a Docker image, builds it with Google Cloud Build and deploys
    the result to Google Cloud Run using the ``gcloud`` CLI.
    """

    @publish.command()
    @add_common_publish_arguments_and_options
    @click.option(
        "-n",
        "--name",
        default="datasette",
        help="Application name to use when deploying",
    )
    # Typo fix: the extension is called "SpatiaLite", not "SpatialLite"
    @click.option("--spatialite", is_flag=True, help="Enable SpatiaLite extension")
    def cloudrun(
        files,
        metadata,
        extra_options,
        branch,
        template_dir,
        plugins_dir,
        static,
        install,
        version_note,
        title,
        license,
        license_url,
        source,
        source_url,
        about,
        about_url,
        name,
        spatialite,
    ):
        # Fail fast with an install hint if the gcloud CLI is not on PATH
        fail_if_publish_binary_not_installed(
            "gcloud", "Google Cloud", "https://cloud.google.com/sdk/"
        )
        # The image is pushed to the user's currently configured gcloud project
        project = check_output(
            "gcloud config get-value project", shell=True, universal_newlines=True
        ).strip()

        with temporary_docker_directory(
            files,
            name,
            metadata,
            extra_options,
            branch,
            template_dir,
            plugins_dir,
            static,
            install,
            spatialite,
            version_note,
            {
                "title": title,
                "license": license,
                "license_url": license_url,
                "source": source,
                "source_url": source_url,
                "about": about,
                "about_url": about_url,
            },
        ):
            image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
            # Build the image remotely with Cloud Build, then deploy it to
            # Cloud Run; --allow-unauthenticated makes the service public
            check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
            check_call(
                "gcloud beta run deploy --allow-unauthenticated --image {}".format(
                    image_id
                ),
                shell=True,
            )
from click.testing import CliRunner
from datasette import cli
from unittest import mock


@mock.patch("shutil.which")
def test_publish_cloudrun_requires_gcloud(mock_which):
    """Missing gcloud binary -> exit code 1 with an install hint."""
    mock_which.return_value = False
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open("test.db", "w") as fp:
            fp.write("data")
        result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
        assert result.exit_code == 1
        assert "Publishing to Google Cloud requires gcloud" in result.output


@mock.patch("shutil.which")
def test_publish_cloudrun_invalid_database(mock_which):
    """A non-existent database path is rejected by click with exit code 2."""
    mock_which.return_value = True
    result = CliRunner().invoke(cli.cli, ["publish", "cloudrun", "woop.db"])
    assert result.exit_code == 2
    assert 'Path "woop.db" does not exist' in result.output


@mock.patch("shutil.which")
@mock.patch("datasette.publish.cloudrun.check_output")
@mock.patch("datasette.publish.cloudrun.check_call")
def test_publish_cloudrun(mock_call, mock_output, mock_which):
    """Happy path: a build then a deploy are issued against the gcloud project."""
    mock_output.return_value = "myproject"
    mock_which.return_value = True
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open("test.db", "w") as fp:
            fp.write("data")
        result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
        assert result.exit_code == 0
        tag = "gcr.io/{}/datasette".format(mock_output.return_value)
        expected = [
            mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
            mock.call(
                "gcloud beta run deploy --allow-unauthenticated --image {}".format(
                    tag
                ),
                shell=True,
            ),
        ]
        mock_call.assert_has_calls(expected)


# NOTE(review): the following test belongs in tests/test_utils.py, where
# `os`, `tempfile` and `utils` are already imported at module level.
def test_temporary_docker_directory_quotes_args():
    """Dockerfile CMD args containing shell metacharacters must be quoted."""
    with tempfile.TemporaryDirectory() as td:
        os.chdir(td)
        with open("hello", "w") as fp:
            fp.write("world")
        with utils.temporary_docker_directory(
            files=["hello"],
            name="t",
            metadata=None,
            extra_options="--$HOME",
            branch=None,
            template_dir=None,
            plugins_dir=None,
            static=[],
            install=[],
            spatialite=False,
            version_note="$PWD",
        ) as temp_docker:
            dockerfile = os.path.join(temp_docker, "Dockerfile")
            contents = open(dockerfile).read()
            assert "'$PWD'" in contents
            assert "'--$HOME'" in contents