diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index fa608055..f1beef5c 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -31,11 +31,17 @@ jobs: pip install -e '.[test]' - name: Run tests run: | - pytest + pytest -n auto -m "not serial" + pytest -m "serial" + # And the test that exercises a localhost HTTPS server + tests/test_datasette_https_server.sh deploy: runs-on: ubuntu-latest needs: [test] + environment: release + permissions: + id-token: write steps: - uses: actions/checkout@v3 - name: Set up Python @@ -51,14 +57,12 @@ jobs: ${{ runner.os }}-publish-pip- - name: Install dependencies run: | - pip install setuptools wheel twine - - name: Publish - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + pip install setuptools wheel build + - name: Build run: | - python setup.py sdist bdist_wheel - twine upload dist/* + python -m build + - name: Publish + uses: pypa/gh-action-pypi-publish@release/v1 deploy_static_docs: runs-on: ubuntu-latest @@ -69,7 +73,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.10' + python-version: '3.9' - uses: actions/cache@v2 name: Configure pip caching with: @@ -90,7 +94,7 @@ jobs: - name: Set up Cloud Run uses: google-github-actions/setup-gcloud@v0 with: - version: '275.0.0' + version: '318.0.0' service_account_email: ${{ secrets.GCP_SA_EMAIL }} service_account_key: ${{ secrets.GCP_SA_KEY }} - name: Deploy stable-docs.datasette.io to Cloud Run diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 886f649a..4c4b7534 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest 
strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -35,6 +35,8 @@ jobs: run: | pytest -n auto -m "not serial" pytest -m "serial" + # And the test that exercises a localhost HTTPS server + tests/test_datasette_https_server.sh - name: Check if cog needs to be run run: | cog --check docs/*.rst diff --git a/datasette/app.py b/datasette/app.py index 246269f3..6b889f08 100644 --- a/datasette/app.py +++ b/datasette/app.py @@ -64,16 +64,14 @@ from .utils import ( ) from .utils.asgi import ( AsgiLifespan, - Base400, Forbidden, NotFound, Request, Response, + AsgiRunOnFirstRequest, asgi_static, asgi_send, asgi_send_file, - asgi_send_html, - asgi_send_json, asgi_send_redirect, ) from .utils.internal_db import init_internal_db, populate_schema_tables @@ -118,6 +116,11 @@ SETTINGS = ( True, "Allow users to specify columns to facet using ?_facet= parameter", ), + Setting( + "default_allow_sql", + True, + "Allow anyone to run arbitrary SQL queries", + ), Setting( "allow_download", True, @@ -215,6 +218,8 @@ class Datasette: self.config_dir = config_dir self.pdb = pdb self._secret = secret or secrets.token_hex(32) + if files is not None and isinstance(files, str): + raise ValueError("files= must be a list of paths, not a string") self.files = tuple(files or []) + tuple(immutables or []) if config_dir: db_files = [] @@ -1260,7 +1265,7 @@ class Datasette: async def setup_db(): # First time server starts up, calculate table counts for immutable databases - for dbname, database in self.databases.items(): + for database in self.databases.values(): if not database.is_mutable: await database.table_counts(limit=60 * 60 * 1000) @@ -1274,10 +1279,8 @@ class Datasette: ) if self.setting("trace_debug"): asgi = AsgiTracer(asgi) - asgi = AsgiLifespan( - asgi, - on_startup=setup_db, - ) + asgi = AsgiLifespan(asgi) + asgi = 
AsgiRunOnFirstRequest(asgi, on_startup=[setup_db, self.invoke_startup]) for wrapper in pm.hook.asgi_wrapper(datasette=self): asgi = wrapper(asgi) return asgi @@ -1566,42 +1569,34 @@ class DatasetteClient: return path async def get(self, path, **kwargs): - await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.get(self._fix(path), **kwargs) async def options(self, path, **kwargs): - await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.options(self._fix(path), **kwargs) async def head(self, path, **kwargs): - await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.head(self._fix(path), **kwargs) async def post(self, path, **kwargs): - await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.post(self._fix(path), **kwargs) async def put(self, path, **kwargs): - await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.put(self._fix(path), **kwargs) async def patch(self, path, **kwargs): - await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.patch(self._fix(path), **kwargs) async def delete(self, path, **kwargs): - await self.ds.invoke_startup() async with httpx.AsyncClient(app=self.app) as client: return await client.delete(self._fix(path), **kwargs) async def request(self, method, path, **kwargs): - await self.ds.invoke_startup() avoid_path_rewrites = kwargs.pop("avoid_path_rewrites", None) async with httpx.AsyncClient(app=self.app) as client: return await client.request( diff --git a/datasette/cli.py b/datasette/cli.py index 6eb42712..fd65ea94 100644 --- a/datasette/cli.py +++ b/datasette/cli.py @@ -4,6 +4,7 @@ import click from click import formatting from click.types import CompositeParamType from click_default_group import DefaultGroup +import functools import json 
import os import pathlib @@ -11,6 +12,7 @@ import shutil from subprocess import call import sys from runpy import run_module +import textwrap import webbrowser from .app import ( OBSOLETE_SETTINGS, @@ -126,7 +128,7 @@ class Setting(CompositeParamType): def sqlite_extensions(fn): - return click.option( + fn = click.option( "sqlite_extensions", "--load-extension", type=LoadExtension(), @@ -135,6 +137,26 @@ def sqlite_extensions(fn): help="Path to a SQLite extension to load, and optional entrypoint", )(fn) + # Wrap it in a custom error handler + @functools.wraps(fn) + def wrapped(*args, **kwargs): + try: + return fn(*args, **kwargs) + except AttributeError as e: + if "enable_load_extension" in str(e): + raise click.ClickException( + textwrap.dedent( + """ + Your Python installation does not have the ability to load SQLite extensions. + + More information: https://datasette.io/help/extensions + """ + ).strip() + ) + raise + + return wrapped + @click.group(cls=DefaultGroup, default="serve", default_if_no_args=True) @click.version_option(version=__version__) @@ -607,7 +629,7 @@ def serve( url = "http://{}:{}{}?token={}".format( host, port, ds.urls.path("-/auth-token"), ds._root_token ) - print(url) + click.echo(url) if open_browser: if url is None: # Figure out most convenient URL - to table, database or homepage diff --git a/datasette/default_permissions.py b/datasette/default_permissions.py index b58d8d1b..a0681e83 100644 --- a/datasette/default_permissions.py +++ b/datasette/default_permissions.py @@ -36,12 +36,16 @@ def permission_allowed(datasette, actor, action, resource): return None return actor_matches_allow(actor, allow) elif action == "execute-sql": + # Only use default_allow_sql setting if it is set to False: + default_allow_sql = ( + None if datasette.setting("default_allow_sql") else False + ) # Use allow_sql block from database block, or from top-level database_allow_sql = datasette.metadata("allow_sql", database=resource) if database_allow_sql is None: 
database_allow_sql = datasette.metadata("allow_sql") if database_allow_sql is None: - return None + return default_allow_sql return actor_matches_allow(actor, database_allow_sql) return inner diff --git a/datasette/filters.py b/datasette/filters.py index 5ea3488b..73eea857 100644 --- a/datasette/filters.py +++ b/datasette/filters.py @@ -80,9 +80,9 @@ def search_filters(request, database, table, datasette): "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format( fts_table=escape_sqlite(fts_table), fts_pk=escape_sqlite(fts_pk), - match_clause=":search" - if search_mode_raw - else "escape_fts(:search)", + match_clause=( + ":search" if search_mode_raw else "escape_fts(:search)" + ), ) ) human_descriptions.append(f'search matches "{search}"') @@ -99,9 +99,11 @@ def search_filters(request, database, table, datasette): "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format( fts_table=escape_sqlite(fts_table), search_col=escape_sqlite(search_col), - match_clause=":search_{}".format(i) - if search_mode_raw - else "escape_fts(:search_{})".format(i), + match_clause=( + ":search_{}".format(i) + if search_mode_raw + else "escape_fts(:search_{})".format(i) + ), ) ) human_descriptions.append( diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py index 77274eb0..760ff0d1 100644 --- a/datasette/publish/cloudrun.py +++ b/datasette/publish/cloudrun.py @@ -173,7 +173,7 @@ def publish_subcommand(publish): print(fp.read()) print("\n====================\n") - image_id = f"gcr.io/{project}/{name}" + image_id = f"gcr.io/{project}/datasette-{service}" check_call( "gcloud builds submit --tag {}{}".format( image_id, " --timeout {}".format(timeout) if timeout else "" diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py index 171252ce..f576a346 100644 --- a/datasette/publish/heroku.py +++ b/datasette/publish/heroku.py @@ -3,7 +3,9 @@ from datasette import hookimpl import click import 
json import os +import pathlib import shlex +import shutil from subprocess import call, check_output import tempfile @@ -28,6 +30,11 @@ def publish_subcommand(publish): "--tar", help="--tar option to pass to Heroku, e.g. --tar=/usr/local/bin/gtar", ) + @click.option( + "--generate-dir", + type=click.Path(dir_okay=True, file_okay=False), + help="Output generated application files and stop without deploying", + ) def heroku( files, metadata, @@ -49,6 +56,7 @@ def publish_subcommand(publish): about_url, name, tar, + generate_dir, ): "Publish databases to Datasette running on Heroku" fail_if_publish_binary_not_installed( @@ -105,6 +113,16 @@ def publish_subcommand(publish): secret, extra_metadata, ): + if generate_dir: + # Recursively copy files from current working directory to it + if pathlib.Path(generate_dir).exists(): + raise click.ClickException("Directory already exists") + shutil.copytree(".", generate_dir) + click.echo( + f"Generated files written to {generate_dir}, stopping without deploying", + err=True, + ) + return app_name = None if name: # Check to see if this app already exists @@ -176,7 +194,7 @@ def temporary_heroku_directory( fp.write(json.dumps(metadata_content, indent=2)) with open("runtime.txt", "w") as fp: - fp.write("python-3.8.10") + fp.write("python-3.11.0") if branch: install = [ diff --git a/datasette/static/app.css b/datasette/static/app.css index 712b9925..71437bd4 100644 --- a/datasette/static/app.css +++ b/datasette/static/app.css @@ -573,6 +573,9 @@ form button[type=button] { display: inline-block; margin-right: 0.3em; } +.select-wrapper:focus-within { + border: 1px solid black; +} .select-wrapper.filter-op { width: 80px; } diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py index 5acfb8b4..168dc22f 100644 --- a/datasette/utils/__init__.py +++ b/datasette/utils/__init__.py @@ -402,9 +402,9 @@ def make_dockerfile( apt_get_extras = apt_get_extras_ if spatialite: apt_get_extras.extend(["python3-dev", "gcc", 
"libsqlite3-mod-spatialite"]) - environment_variables[ - "SQLITE_EXTENSIONS" - ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" + environment_variables["SQLITE_EXTENSIONS"] = ( + "/usr/lib/x86_64-linux-gnu/mod_spatialite.so" + ) return """ FROM python:3.11.0-slim-bullseye COPY . /app @@ -416,9 +416,11 @@ RUN datasette inspect {files} --inspect-file inspect-data.json ENV PORT {port} EXPOSE {port} CMD {cmd}""".format( - apt_get_extras=APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras)) - if apt_get_extras - else "", + apt_get_extras=( + APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras)) + if apt_get_extras + else "" + ), environment_variables="\n".join( [ "ENV {} '{}'".format(key, value) @@ -1114,17 +1116,24 @@ class StartupError(Exception): pass -_re_named_parameter = re.compile(":([a-zA-Z0-9_]+)") +_single_line_comment_re = re.compile(r"--.*") +_multi_line_comment_re = re.compile(r"/\*.*?\*/", re.DOTALL) +_single_quote_re = re.compile(r"'(?:''|[^'])*'") +_double_quote_re = re.compile(r'"(?:\"\"|[^"])*"') +_named_param_re = re.compile(r":(\w+)") async def derive_named_parameters(db, sql): - explain = "explain {}".format(sql.strip().rstrip(";")) - possible_params = _re_named_parameter.findall(sql) - try: - results = await db.execute(explain, {p: None for p in possible_params}) - return [row["p4"].lstrip(":") for row in results if row["opcode"] == "Variable"] - except sqlite3.DatabaseError: - return possible_params + # Remove single-line comments + sql = _single_line_comment_re.sub("", sql) + # Remove multi-line comments + sql = _multi_line_comment_re.sub("", sql) + # Remove single-quoted strings + sql = _single_quote_re.sub("", sql) + # Remove double-quoted strings + sql = _double_quote_re.sub("", sql) + # Extract parameters from what is left + return _named_param_re.findall(sql) def add_cors_headers(headers): diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py index 8a2fa060..16f90077 100644 --- a/datasette/utils/asgi.py +++ 
b/datasette/utils/asgi.py @@ -428,3 +428,18 @@ class AsgiFileDownload: content_type=self.content_type, headers=self.headers, ) + + +class AsgiRunOnFirstRequest: + def __init__(self, asgi, on_startup): + assert isinstance(on_startup, list) + self.asgi = asgi + self.on_startup = on_startup + self._started = False + + async def __call__(self, scope, receive, send): + if not self._started: + self._started = True + for hook in self.on_startup: + await hook() + return await self.asgi(scope, receive, send) diff --git a/datasette/utils/shutil_backport.py b/datasette/utils/shutil_backport.py index dbe22404..d1fd1bd7 100644 --- a/datasette/utils/shutil_backport.py +++ b/datasette/utils/shutil_backport.py @@ -4,6 +4,7 @@ Backported from Python 3.8. This code is licensed under the Python License: https://github.com/python/cpython/blob/v3.8.3/LICENSE """ + import os from shutil import copy, copy2, copystat, Error diff --git a/datasette/version.py b/datasette/version.py index 3a4f06dc..235781e4 100644 --- a/datasette/version.py +++ b/datasette/version.py @@ -1,2 +1,2 @@ -__version__ = "0.63.1" +__version__ = "0.64.8" __version_info__ = tuple(__version__.split(".")) diff --git a/datasette/views/base.py b/datasette/views/base.py index 6b01fdd2..0ccf3c9c 100644 --- a/datasette/views/base.py +++ b/datasette/views/base.py @@ -308,9 +308,11 @@ class DataView(BaseView): if cell is None: new_row.extend(("", "")) else: - assert isinstance(cell, dict) - new_row.append(cell["value"]) - new_row.append(cell["label"]) + if not isinstance(cell, dict): + new_row.extend((cell, "")) + else: + new_row.append(cell["value"]) + new_row.append(cell["label"]) else: new_row.append(cell) await writer.writerow(new_row) @@ -340,7 +342,7 @@ class DataView(BaseView): try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database_route)) + raise NotFound("Database not found") database = db.name _format = request.url_vars["format"] diff --git 
a/datasette/views/database.py b/datasette/views/database.py index 8e08c3b1..34d4d603 100644 --- a/datasette/views/database.py +++ b/datasette/views/database.py @@ -37,7 +37,7 @@ class DatabaseView(DataView): try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database_route)) + raise NotFound("Database not found") database = db.name visible, private = await self.ds.check_visibility( @@ -226,7 +226,7 @@ class QueryView(DataView): try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database_route)) + raise NotFound("Database not found") database = db.name params = {key: request.args.get(key) for key in request.args} if "sql" in params: @@ -431,9 +431,11 @@ class QueryView(DataView): display_value = markupsafe.Markup( '<Binary: {:,} byte{}>'.format( blob_url, - ' title="{}"'.format(formatted) - if "bytes" not in formatted - else "", + ( + ' title="{}"'.format(formatted) + if "bytes" not in formatted + else "" + ), len(value), "" if len(value) == 1 else "s", ) diff --git a/datasette/views/index.py b/datasette/views/index.py index 1f366a49..0b86376f 100644 --- a/datasette/views/index.py +++ b/datasette/views/index.py @@ -105,9 +105,11 @@ class IndexView(BaseView): { "name": name, "hash": db.hash, - "color": db.hash[:6] - if db.hash - else hashlib.md5(name.encode("utf8")).hexdigest()[:6], + "color": ( + db.hash[:6] + if db.hash + else hashlib.md5(name.encode("utf8")).hexdigest()[:6] + ), "path": self.ds.urls.database(name), "tables_and_views_truncated": tables_and_views_truncated, "tables_and_views_more": (len(visible_tables) + len(views)) diff --git a/datasette/views/row.py b/datasette/views/row.py index cdbf0990..4d317470 100644 --- a/datasette/views/row.py +++ b/datasette/views/row.py @@ -19,7 +19,7 @@ class RowView(DataView): try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: 
{}".format(database_route)) + raise NotFound("Database not found") database = db.name # Ensure user has permission to view this row @@ -38,14 +38,14 @@ class RowView(DataView): try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database_route)) + raise NotFound("Database not found") database = db.name sql, params, pks = await _sql_params_pks(db, table, pk_values) results = await db.execute(sql, params, truncate=True) columns = [r[0] for r in results.description] rows = list(results.rows) if not rows: - raise NotFound(f"Record not found: {pk_values}") + raise NotFound(f"Record not found") async def template_data(): display_columns, display_rows = await display_columns_and_rows( diff --git a/datasette/views/table.py b/datasette/views/table.py index e80ed217..17d1b248 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -96,7 +96,7 @@ class TableView(DataView): try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database_route)) + raise NotFound("Database not found") database_name = db.name table_name = tilde_decode(request.url_vars["table"]) # Handle POST to a canned query @@ -169,23 +169,17 @@ class TableView(DataView): try: db = self.ds.get_database(route=database_route) except KeyError: - raise NotFound("Database not found: {}".format(database_route)) + raise NotFound("Database not found") database_name = db.name - # For performance profiling purposes, ?_noparallel=1 turns off asyncio.gather - async def _gather_parallel(*args): - return await asyncio.gather(*args) - - async def _gather_sequential(*args): + # We always now run queries sequentially, rather than with asyncio.gather() - + # see https://github.com/simonw/datasette/issues/2189 + async def gather(*args): results = [] for fn in args: results.append(await fn) return results - gather = ( - _gather_sequential if request.args.get("_noparallel") else 
_gather_parallel - ) - # If this is a canned query, not a table, then dispatch to QueryView instead canned_query = await self.ds.get_canned_query( database_name, table_name, request.actor @@ -210,7 +204,7 @@ class TableView(DataView): # If table or view not found, return 404 if not is_view and not table_exists: - raise NotFound(f"Table not found: {table_name}") + raise NotFound(f"Table not found") # Ensure user has permission to view this table visible, private = await self.ds.check_visibility( @@ -345,9 +339,11 @@ class TableView(DataView): from_sql = "from {table_name} {where}".format( table_name=escape_sqlite(table_name), - where=("where {} ".format(" and ".join(where_clauses))) - if where_clauses - else "", + where=( + ("where {} ".format(" and ".join(where_clauses))) + if where_clauses + else "" + ), ) # Copy of params so we can mutate them later: from_sql_params = dict(**params) @@ -412,10 +408,12 @@ class TableView(DataView): column=escape_sqlite(sort or sort_desc), op=">" if sort else "<", p=len(params), - extra_desc_only="" - if sort - else " or {column2} is null".format( - column2=escape_sqlite(sort or sort_desc) + extra_desc_only=( + "" + if sort + else " or {column2} is null".format( + column2=escape_sqlite(sort or sort_desc) + ) ), next_clauses=" and ".join(next_by_pk_clauses), ) @@ -778,9 +776,9 @@ class TableView(DataView): "metadata": metadata, "view_definition": await db.get_view_definition(table_name), "table_definition": await db.get_table_definition(table_name), - "datasette_allow_facet": "true" - if self.ds.setting("allow_facet") - else "false", + "datasette_allow_facet": ( + "true" if self.ds.setting("allow_facet") else "false" + ), } d.update(extra_context_from_filters) return d @@ -939,9 +937,11 @@ async def display_columns_and_rows( path_from_row_pks(row, pks, not pks), column, ), - ' title="{}"'.format(formatted) - if "bytes" not in formatted - else "", + ( + ' title="{}"'.format(formatted) + if "bytes" not in formatted + else "" + ), 
len(value), "" if len(value) == 1 else "s", ) @@ -992,9 +992,9 @@ async def display_columns_and_rows( "column": column, "value": display_value, "raw": value, - "value_type": "none" - if value is None - else str(type(value).__name__), + "value_type": ( + "none" if value is None else str(type(value).__name__) + ), } ) cell_rows.append(Row(cells)) diff --git a/docs/authentication.rst b/docs/authentication.rst index 685dab15..37703307 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -307,7 +307,21 @@ To limit access to the ``add_name`` canned query in your ``dogs.db`` database to Controlling the ability to execute arbitrary SQL ------------------------------------------------ -The ``"allow_sql"`` block can be used to control who is allowed to execute arbitrary SQL queries, both using the form on the database page e.g. https://latest.datasette.io/fixtures or by appending a ``?_where=`` parameter to the table page as seen on https://latest.datasette.io/fixtures/facetable?_where=city_id=1. +Datasette defaults to allowing any site visitor to execute their own custom SQL queries, for example using the form on `the database page `__ or by appending a ``?_where=`` parameter to the table page `like this `__. + +Access to this ability is controlled by the :ref:`permissions_execute_sql` permission. + +The easiest way to disable arbitrary SQL queries is using the :ref:`default_allow_sql setting ` when you first start Datasette running. + +You can alternatively use an ``"allow_sql"`` block to control who is allowed to execute arbitrary SQL queries. + +To prevent any user from executing arbitrary SQL queries, use this: + +.. code-block:: json + + { + "allow_sql": false + } To enable just the :ref:`root user` to execute SQL for all databases in your instance, use the following: @@ -515,7 +529,7 @@ Actor is allowed to run arbitrary SQL queries against a specific database, e.g. ``resource`` - string The name of the database -Default *allow*. +Default *allow*. 
See also :ref:`the default_allow_sql setting `. .. _permissions_permissions_debug: diff --git a/docs/changelog.rst b/docs/changelog.rst index 0e0393ef..d3772e73 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,6 +4,91 @@ Changelog ========= +.. _v0_64_8: + +0.64.8 (2024-06-21) +------------------- + +- Security improvement: 404 pages used to reflect content from the URL path, which could be used to display misleading information to Datasette users. 404 errors no longer display additional information from the URL. (:issue:`2359`) +- Backported a better fix for correctly extracting named parameters from canned query SQL against SQLite 3.46.0. (:issue:`2353`) + +.. _v0_64_7: + +0.64.7 (2024-06-12) +------------------- + +- Fixed a bug where canned queries with named parameters threw an error when run against SQLite 3.46.0. (:issue:`2353`) + +.. _v0_64_6: + +0.64.6 (2023-12-22) +------------------- + +- Fixed a bug where CSV export with expanded labels could fail if a foreign key reference did not correctly resolve. (:issue:`2214`) + +.. _v0_64_5: + +0.64.5 (2023-10-08) +------------------- + +- Dropped dependency on ``click-default-group-wheel``, which could cause a dependency conflict. (:issue:`2197`) + +.. _v0_64_4: + +0.64.4 (2023-09-21) +------------------- + +- Fix for a crashing bug caused by viewing the table page for a named in-memory database. (:issue:`2189`) + +.. _v0_64_3: + +0.64.3 (2023-04-27) +------------------- + +- Added ``pip`` and ``setuptools`` as explicit dependencies. This fixes a bug where Datasette could not be installed using `Rye `__. (:issue:`2065`) + +.. _v0_64_2: + +0.64.2 (2023-03-08) +------------------- + +- Fixed a bug with ``datasette publish cloudrun`` where deploys all used the same Docker image tag. 
This was mostly inconsequential as the service is deployed as soon as the image has been pushed to the registry, but could result in the incorrect image being deployed if two different deploys for two separate services ran at exactly the same time. (:issue:`2036`) + +.. _v0_64_1: + +0.64.1 (2023-01-11) +------------------- + +- Documentation now links to a current source of information for installing Python 3. (:issue:`1987`) +- Incorrectly calling the Datasette constructor using ``Datasette("path/to/data.db")`` instead of ``Datasette(["path/to/data.db"])`` now returns a useful error message. (:issue:`1985`) + +.. _v0_64: + +0.64 (2023-01-09) +----------------- + +- Datasette now **strongly recommends against allowing arbitrary SQL queries if you are using SpatiaLite**. SpatiaLite includes SQL functions that could cause the Datasette server to crash. See :ref:`spatialite` for more details. +- New :ref:`setting_default_allow_sql` setting, providing an easier way to disable all arbitrary SQL execution by end users: ``datasette --setting default_allow_sql off``. See also :ref:`authentication_permissions_execute_sql`. (:issue:`1409`) +- `Building a location to time zone API with SpatiaLite `__ is a new Datasette tutorial showing how to safely use SpatiaLite to create a location to time zone API. +- New documentation about :ref:`how to debug problems loading SQLite extensions `. The error message shown when an extension cannot be loaded has also been improved. (:issue:`1979`) +- Fixed an accessibility issue: the ``