Compare commits

...

12 commits

Author SHA1 Message Date
Simon Willison
e05998bc85 Added missing word, refs #1963 2022-12-17 19:08:01 -08:00
Simon Willison
a9ffcbd42e Release 0.63.3
Refs #1955, #1958, #1963
2022-12-17 19:06:03 -08:00
Simon Willison
d93f975b3d On publish run tests same way as for test 2022-12-17 18:52:52 -08:00
Simon Willison
e6d94f9ffa Run new HTTPS test in CI, refs #1955 2022-12-17 18:51:17 -08:00
Simon Willison
0bd3eaa2dd Move HTTPS test to a bash script
See https://github.com/simonw/datasette/issues/1955#issuecomment-1356627931
2022-12-17 18:51:17 -08:00
Simon Willison
5649e547ef Put AsgiLifespan back so server starts up again, refs #1955 2022-12-17 18:51:17 -08:00
Simon Willison
96b3a86d7f Replace AsgiLifespan with AsgiRunOnFirstRequest, refs #1955 2022-12-17 18:51:17 -08:00
Simon Willison
4ba8d57bb1 Try click.echo() instead
This ensures the URL is output correctly when running under Docker.

Closes #1958
2022-12-15 16:57:45 -08:00
Simon Willison
d67f812b73 Release 0.63.2
Refs #1904, #1905
2022-11-18 16:53:05 -08:00
Simon Willison
c4d002fef5 Pin httpx in Pyodide test, refs #1904
Should help get tests to pass for #1896 too
2022-11-18 16:52:09 -08:00
Simon Willison
a93ccc63c7 Upgrade to Python 3.11 on Heroku, refs #1905 2022-11-18 16:49:08 -08:00
Simon Willison
9cca381033 --generate-dir option to publish heroku, refs #1905 2022-11-18 16:49:02 -08:00
17 changed files with 172 additions and 87 deletions

View file

@@ -31,7 +31,10 @@ jobs:
         pip install -e '.[test]'
     - name: Run tests
       run: |
-        pytest
+        pytest -n auto -m "not serial"
+        pytest -m "serial"
+        # And the test that exercises a localhost HTTPS server
+        tests/test_datasette_https_server.sh

   deploy:
     runs-on: ubuntu-latest

View file

@@ -35,6 +35,8 @@ jobs:
       run: |
         pytest -n auto -m "not serial"
         pytest -m "serial"
+        # And the test that exercises a localhost HTTPS server
+        tests/test_datasette_https_server.sh
     - name: Check if cog needs to be run
       run: |
         cog --check docs/*.rst

View file

@@ -64,16 +64,14 @@ from .utils import (
 )
 from .utils.asgi import (
     AsgiLifespan,
-    Base400,
     Forbidden,
     NotFound,
     Request,
     Response,
+    AsgiRunOnFirstRequest,
     asgi_static,
     asgi_send,
     asgi_send_file,
-    asgi_send_html,
-    asgi_send_json,
     asgi_send_redirect,
 )
 from .utils.internal_db import init_internal_db, populate_schema_tables
@@ -1260,7 +1258,7 @@ class Datasette:
         async def setup_db():
             # First time server starts up, calculate table counts for immutable databases
-            for dbname, database in self.databases.items():
+            for database in self.databases.values():
                 if not database.is_mutable:
                     await database.table_counts(limit=60 * 60 * 1000)
@@ -1274,10 +1272,8 @@
         )
         if self.setting("trace_debug"):
             asgi = AsgiTracer(asgi)
-        asgi = AsgiLifespan(
-            asgi,
-            on_startup=setup_db,
-        )
+        asgi = AsgiLifespan(asgi)
+        asgi = AsgiRunOnFirstRequest(asgi, on_startup=[setup_db, self.invoke_startup])
         for wrapper in pm.hook.asgi_wrapper(datasette=self):
             asgi = wrapper(asgi)
         return asgi
@@ -1566,42 +1562,34 @@ class DatasetteClient:
         return path

     async def get(self, path, **kwargs):
-        await self.ds.invoke_startup()
         async with httpx.AsyncClient(app=self.app) as client:
             return await client.get(self._fix(path), **kwargs)

     async def options(self, path, **kwargs):
-        await self.ds.invoke_startup()
         async with httpx.AsyncClient(app=self.app) as client:
             return await client.options(self._fix(path), **kwargs)

     async def head(self, path, **kwargs):
-        await self.ds.invoke_startup()
         async with httpx.AsyncClient(app=self.app) as client:
             return await client.head(self._fix(path), **kwargs)

     async def post(self, path, **kwargs):
-        await self.ds.invoke_startup()
         async with httpx.AsyncClient(app=self.app) as client:
             return await client.post(self._fix(path), **kwargs)

     async def put(self, path, **kwargs):
-        await self.ds.invoke_startup()
         async with httpx.AsyncClient(app=self.app) as client:
             return await client.put(self._fix(path), **kwargs)

     async def patch(self, path, **kwargs):
-        await self.ds.invoke_startup()
         async with httpx.AsyncClient(app=self.app) as client:
             return await client.patch(self._fix(path), **kwargs)

     async def delete(self, path, **kwargs):
-        await self.ds.invoke_startup()
         async with httpx.AsyncClient(app=self.app) as client:
             return await client.delete(self._fix(path), **kwargs)

     async def request(self, method, path, **kwargs):
-        await self.ds.invoke_startup()
         avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None)
         async with httpx.AsyncClient(app=self.app) as client:
             return await client.request(

View file

@@ -607,7 +607,7 @@ def serve(
         url = "http://{}:{}{}?token={}".format(
             host, port, ds.urls.path("-/auth-token"), ds._root_token
         )
-        print(url)
+        click.echo(url)
     if open_browser:
         if url is None:
             # Figure out most convenient URL - to table, database or homepage

View file

@@ -3,7 +3,9 @@ from datasette import hookimpl
 import click
 import json
 import os
+import pathlib
 import shlex
+import shutil
 from subprocess import call, check_output
 import tempfile
@@ -28,6 +30,11 @@ def publish_subcommand(publish):
         "--tar",
         help="--tar option to pass to Heroku, e.g. --tar=/usr/local/bin/gtar",
     )
+    @click.option(
+        "--generate-dir",
+        type=click.Path(dir_okay=True, file_okay=False),
+        help="Output generated application files and stop without deploying",
+    )
     def heroku(
         files,
         metadata,
@@ -49,6 +56,7 @@ def publish_subcommand(publish):
         about_url,
         name,
         tar,
+        generate_dir,
     ):
         "Publish databases to Datasette running on Heroku"
         fail_if_publish_binary_not_installed(
@@ -105,6 +113,16 @@ def publish_subcommand(publish):
             secret,
             extra_metadata,
         ):
+            if generate_dir:
+                # Recursively copy files from current working directory to it
+                if pathlib.Path(generate_dir).exists():
+                    raise click.ClickException("Directory already exists")
+                shutil.copytree(".", generate_dir)
+                click.echo(
+                    f"Generated files written to {generate_dir}, stopping without deploying",
+                    err=True,
+                )
+                return
             app_name = None
             if name:
                 # Check to see if this app already exists
@@ -176,7 +194,7 @@ def temporary_heroku_directory(
             fp.write(json.dumps(metadata_content, indent=2))

         with open("runtime.txt", "w") as fp:
-            fp.write("python-3.8.10")
+            fp.write("python-3.11.0")

         if branch:
             install = [

View file

@@ -428,3 +428,18 @@ class AsgiFileDownload:
             content_type=self.content_type,
             headers=self.headers,
         )
+
+
+class AsgiRunOnFirstRequest:
+    def __init__(self, asgi, on_startup):
+        assert isinstance(on_startup, list)
+        self.asgi = asgi
+        self.on_startup = on_startup
+        self._started = False
+
+    async def __call__(self, scope, receive, send):
+        if not self._started:
+            self._started = True
+            for hook in self.on_startup:
+                await hook()
+        return await self.asgi(scope, receive, send)

View file

@@ -1,2 +1,2 @@
-__version__ = "0.63.1"
+__version__ = "0.63.3"
 __version_info__ = tuple(__version__.split("."))

View file

@@ -4,6 +4,22 @@
 Changelog
 =========

+.. _v0_63_3:
+
+0.63.3 (2022-12-17)
+-------------------
+
+- Fixed a bug where ``datasette --root``, when running in Docker, would only output the URL to sign in as root when the server shut down, not when it started up. (:issue:`1958`)
+- You no longer need to ensure ``await datasette.invoke_startup()`` has been called in order for Datasette to start correctly serving requests - this is now handled automatically the first time the server receives a request. This fixes a bug experienced when Datasette is served directly by an ASGI application server such as Uvicorn or Gunicorn. It also fixes a bug with the `datasette-gunicorn <https://datasette.io/plugins/datasette-gunicorn>`__ plugin. (:issue:`1955`)
+
+.. _v0_63_2:
+
+0.63.2 (2022-11-18)
+-------------------
+
+- Fixed a bug in ``datasette publish heroku`` where deployments failed due to an older version of Python being requested. (:issue:`1905`)
+- New ``datasette publish heroku --generate-dir <dir>`` option for generating a Heroku deployment directory without deploying it.
+
 .. _v0_63_1:

 0.63.1 (2022-11-10)

View file

@@ -501,6 +501,8 @@ See :ref:`publish_heroku`.
   -n, --name TEXT           Application name to use when deploying
   --tar TEXT                --tar option to pass to Heroku, e.g.
                             --tar=/usr/local/bin/gtar
+  --generate-dir DIRECTORY  Output generated application files and stop
+                            without deploying
   --help                    Show this message and exit.

View file

@@ -855,13 +855,14 @@ Potential use-cases:
 .. note::

-    If you are writing :ref:`unit tests <testing_plugins>` for a plugin that uses this hook you will need to explicitly call ``await ds.invoke_startup()`` in your tests. An example:
+    If you are writing :ref:`unit tests <testing_plugins>` for a plugin that uses this hook and doesn't exercise Datasette by sending
+    any simulated requests through it you will need to explicitly call ``await ds.invoke_startup()`` in your tests. An example:

     .. code-block:: python

         @pytest.mark.asyncio
         async def test_my_plugin():
-            ds = Datasette([], metadata={})
+            ds = Datasette()
             await ds.invoke_startup()
             # Rest of test goes here

View file

@@ -73,6 +73,10 @@ This will output some details about the new deployment, including a URL like thi
 You can specify a custom app name by passing ``-n my-app-name`` to the publish command. This will also allow you to overwrite an existing app.

+Rather than deploying directly you can use the ``--generate-dir`` option to output the files that would be deployed to a directory::
+
+    datasette publish heroku mydatabase.db --generate-dir=/tmp/deploy-this-to-heroku
+
 See :ref:`cli_help_publish_heroku___help` for the full list of options for this command.

 .. _publish_vercel:

View file

@@ -80,7 +80,7 @@ Creating a ``Datasette()`` instance like this as useful shortcut in tests, but t

 This method registers any :ref:`plugin_hook_startup` or :ref:`plugin_hook_prepare_jinja2_environment` plugins that might themselves need to make async calls.

-If you are using ``await datasette.client.get()`` and similar methods then you don't need to worry about this - those method calls ensure that ``.invoke_startup()`` has been called for you.
+If you are using ``await datasette.client.get()`` and similar methods then you don't need to worry about this - Datasette automatically calls ``invoke_startup()`` the first time it handles a request.

 .. _testing_plugins_pdb:

View file

@@ -25,6 +25,7 @@ async () => {
     let output = await pyodide.runPythonAsync(\`
     import micropip
     await micropip.install('h11==0.12.0')
+    await micropip.install('httpx==0.23')
     await micropip.install('http://localhost:8529/$wheel')
     import ssl
     import setuptools

View file

@@ -23,6 +23,17 @@ UNDOCUMENTED_PERMISSIONS = {
 }


+def wait_until_responds(url, timeout=5.0, client=httpx, **kwargs):
+    start = time.time()
+    while time.time() - start < timeout:
+        try:
+            client.get(url, **kwargs)
+            return
+        except httpx.ConnectError:
+            time.sleep(0.1)
+    raise AssertionError("Timed out waiting for {} to respond".format(url))
+
+
 def pytest_report_header(config):
     return "SQLite: {}".format(
         sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0]
@@ -111,13 +122,7 @@ def ds_localhost_http_server():
         # Avoid FileNotFoundError: [Errno 2] No such file or directory:
         cwd=tempfile.gettempdir(),
     )
-    # Loop until port 8041 serves traffic
-    while True:
-        try:
-            httpx.get("http://localhost:8041/")
-            break
-        except httpx.ConnectError:
-            time.sleep(0.1)
+    wait_until_responds("http://localhost:8041/")
     # Check it started successfully
     assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
     yield ds_proc
@@ -125,46 +130,6 @@ def ds_localhost_http_server():
     ds_proc.terminate()


-@pytest.fixture(scope="session")
-def ds_localhost_https_server(tmp_path_factory):
-    cert_directory = tmp_path_factory.mktemp("certs")
-    ca = trustme.CA()
-    server_cert = ca.issue_cert("localhost")
-    keyfile = str(cert_directory / "server.key")
-    certfile = str(cert_directory / "server.pem")
-    client_cert = str(cert_directory / "client.pem")
-    server_cert.private_key_pem.write_to_path(path=keyfile)
-    for blob in server_cert.cert_chain_pems:
-        blob.write_to_path(path=certfile, append=True)
-    ca.cert_pem.write_to_path(path=client_cert)
-    ds_proc = subprocess.Popen(
-        [
-            "datasette",
-            "--memory",
-            "-p",
-            "8042",
-            "--ssl-keyfile",
-            keyfile,
-            "--ssl-certfile",
-            certfile,
-        ],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        cwd=tempfile.gettempdir(),
-    )
-    while True:
-        try:
-            httpx.get("https://localhost:8042/", verify=client_cert)
-            break
-        except httpx.ConnectError:
-            time.sleep(0.1)
-    # Check it started successfully
-    assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
-    yield ds_proc, client_cert
-    # Shut it down at the end of the pytest session
-    ds_proc.terminate()
-
-
 @pytest.fixture(scope="session")
 def ds_unix_domain_socket_server(tmp_path_factory):
     # This used to use tmp_path_factory.mktemp("uds") but that turned out to
@@ -181,12 +146,7 @@ def ds_unix_domain_socket_server(tmp_path_factory):
     # Poll until available
     transport = httpx.HTTPTransport(uds=uds)
     client = httpx.Client(transport=transport)
-    while True:
-        try:
-            client.get("http://localhost/_memory.json")
-            break
-        except httpx.ConnectError:
-            time.sleep(0.1)
+    wait_until_responds("http://localhost/_memory.json", client=client)
     # Check it started successfully
     assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
     yield ds_proc, uds

View file

@@ -13,17 +13,6 @@ def test_serve_localhost_http(ds_localhost_http_server):
     }.items() <= response.json().items()


-@pytest.mark.serial
-def test_serve_localhost_https(ds_localhost_https_server):
-    _, client_cert = ds_localhost_https_server
-    response = httpx.get("https://localhost:8042/_memory.json", verify=client_cert)
-    assert {
-        "database": "_memory",
-        "path": "/_memory",
-        "tables": [],
-    }.items() <= response.json().items()
-
-
 @pytest.mark.serial
 @pytest.mark.skipif(
     not hasattr(socket, "AF_UNIX"), reason="Requires socket.AF_UNIX support"

View file

@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Generate certificates
+python -m trustme
+# This creates server.pem, server.key, client.pem
+
+# Start the server in the background
+datasette --memory \
+    --ssl-keyfile=server.key \
+    --ssl-certfile=server.pem \
+    -p 8152 &
+
+# Store the background process ID in a variable
+server_pid=$!
+
+# Wait for the server to start
+sleep 2
+
+# Make a test request using curl
+curl -f --cacert client.pem 'https://localhost:8152/_memory.json'
+
+# Save curl's exit code (-f option causes it to return one on HTTP errors)
+curl_exit_code=$?
+
+# Shut down the server
+kill $server_pid
+sleep 1
+
+# Clean up the certificates
+rm server.pem server.key client.pem
+
+echo $curl_exit_code
+exit $curl_exit_code

View file

@@ -2,6 +2,7 @@ from click.testing import CliRunner
 from datasette import cli
 from unittest import mock
 import os
+import pathlib
 import pytest

@@ -128,3 +129,55 @@ def test_publish_heroku_plugin_secrets(
             mock.call(["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]),
         ]
     )
+
+
+@pytest.mark.serial
+@mock.patch("shutil.which")
+@mock.patch("datasette.publish.heroku.check_output")
+@mock.patch("datasette.publish.heroku.call")
+def test_publish_heroku_generate_dir(
+    mock_call, mock_check_output, mock_which, tmp_path_factory
+):
+    mock_which.return_value = True
+    mock_check_output.side_effect = lambda s: {
+        "['heroku', 'plugins']": b"heroku-builds",
+    }[repr(s)]
+    runner = CliRunner()
+    os.chdir(tmp_path_factory.mktemp("runner"))
+    with open("test.db", "w") as fp:
+        fp.write("data")
+    output = str(tmp_path_factory.mktemp("generate_dir") / "output")
+    result = runner.invoke(
+        cli.cli,
+        [
+            "publish",
+            "heroku",
+            "test.db",
+            "--generate-dir",
+            output,
+        ],
+    )
+    assert result.exit_code == 0
+    path = pathlib.Path(output)
+    assert path.exists()
+    file_names = {str(r.relative_to(path)) for r in path.glob("*")}
+    assert file_names == {
+        "requirements.txt",
+        "bin",
+        "runtime.txt",
+        "Procfile",
+        "test.db",
+    }
+    for name, expected in (
+        ("requirements.txt", "datasette"),
+        ("runtime.txt", "python-3.11.0"),
+        (
+            "Procfile",
+            (
+                "web: datasette serve --host 0.0.0.0 -i test.db "
+                "--cors --port $PORT --inspect-file inspect-data.json"
+            ),
+        ),
+    ):
+        with open(path / name) as fp:
+            assert fp.read().strip() == expected