Upgrade to latest Black, closes #2239

commit 5c64af6936
Author: Simon Willison
Date: 2024-01-30 19:55:26 -08:00
16 changed files with 93 additions and 66 deletions
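Most of the hunks below reflect one change in Black 24's stable style: a conditional expression that must span multiple lines is now wrapped in its own parentheses instead of dangling across bare continuation lines under a keyword argument or dict key. A minimal sketch of the before/after, using hypothetical names rather than code from this diff:

```python
# Illustrative sketch only; build_clause and raw_mode are hypothetical.
def build_clause(match):
    return f"rowid in (select rowid from docs where docs match {match})"

raw_mode = False

# Black 23.x layout: the conditional dangles under the keyword argument.
old_style = build_clause(
    match=":search"
    if raw_mode
    else "escape_fts(:search)",
)

# Black 24.x layout: the multi-line conditional gets its own parentheses.
new_style = build_clause(
    match=(
        ":search" if raw_mode else "escape_fts(:search)"
    ),
)

assert old_style == new_style  # formatting only; runtime behavior is identical
```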

View file

@@ -80,9 +80,9 @@ def search_filters(request, database, table, datasette):
             "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format(
                 fts_table=escape_sqlite(fts_table),
                 fts_pk=escape_sqlite(fts_pk),
-                match_clause=":search"
-                if search_mode_raw
-                else "escape_fts(:search)",
+                match_clause=(
+                    ":search" if search_mode_raw else "escape_fts(:search)"
+                ),
             )
         )
         human_descriptions.append(f'search matches "{search}"')
@@ -99,9 +99,11 @@ def search_filters(request, database, table, datasette):
                 "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format(
                     fts_table=escape_sqlite(fts_table),
                     search_col=escape_sqlite(search_col),
-                    match_clause=":search_{}".format(i)
-                    if search_mode_raw
-                    else "escape_fts(:search_{})".format(i),
+                    match_clause=(
+                        ":search_{}".format(i)
+                        if search_mode_raw
+                        else "escape_fts(:search_{})".format(i)
+                    ),
                 )
             )
             human_descriptions.append(

View file

@@ -402,9 +402,9 @@ def make_dockerfile(
     apt_get_extras = apt_get_extras_
     if spatialite:
         apt_get_extras.extend(["python3-dev", "gcc", "libsqlite3-mod-spatialite"])
-        environment_variables[
-            "SQLITE_EXTENSIONS"
-        ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
+        environment_variables["SQLITE_EXTENSIONS"] = (
+            "/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
+        )
     return """
 FROM python:3.11.0-slim-bullseye
 COPY . /app
@@ -416,9 +416,11 @@ RUN datasette inspect {files} --inspect-file inspect-data.json
 ENV PORT {port}
 EXPOSE {port}
 CMD {cmd}""".format(
-        apt_get_extras=APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras))
-        if apt_get_extras
-        else "",
+        apt_get_extras=(
+            APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras))
+            if apt_get_extras
+            else ""
+        ),
         environment_variables="\n".join(
             [
                 "ENV {} '{}'".format(key, value)

View file

@@ -4,6 +4,7 @@ Backported from Python 3.8.
 This code is licensed under the Python License:
 https://github.com/python/cpython/blob/v3.8.3/LICENSE
 """
+
 import os
 from shutil import copy, copy2, copystat, Error
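The one-line additions in this hunk and in the test-module hunks further down come from Black 24 enforcing a single blank line after a module docstring. A trivial sketch of the rule, using a hypothetical module rather than the backported shutil code:

```python
"""
A hypothetical module docstring, not from this repository.
"""

# Black 24.x inserts the blank line above between the docstring and the first
# import; Black 23.x accepted the import sitting directly under the docstring.
import os

print(os.getcwd())
```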

View file

@@ -126,9 +126,9 @@ class DatabaseView(View):
             "views": sql_views,
             "queries": canned_queries,
             "allow_execute_sql": allow_execute_sql,
-            "table_columns": await _table_columns(datasette, database)
-            if allow_execute_sql
-            else {},
+            "table_columns": (
+                await _table_columns(datasette, database) if allow_execute_sql else {}
+            ),
         }
 
         if format_ == "json":
@@ -719,9 +719,11 @@ class QueryView(View):
                     display_rows=await display_rows(
                         datasette, database, request, rows, columns
                     ),
-                    table_columns=await _table_columns(datasette, database)
-                    if allow_execute_sql
-                    else {},
+                    table_columns=(
+                        await _table_columns(datasette, database)
+                        if allow_execute_sql
+                        else {}
+                    ),
                     columns=columns,
                     renderers=renderers,
                     url_csv=datasette.urls.path(
@@ -1036,9 +1038,11 @@ async def display_rows(datasette, database, request, rows, columns):
                 display_value = markupsafe.Markup(
                     '<a class="blob-download" href="{}"{}>&lt;Binary:&nbsp;{:,}&nbsp;byte{}&gt;</a>'.format(
                         blob_url,
-                        ' title="{}"'.format(formatted)
-                        if "bytes" not in formatted
-                        else "",
+                        (
+                            ' title="{}"'.format(formatted)
+                            if "bytes" not in formatted
+                            else ""
+                        ),
                         len(value),
                         "" if len(value) == 1 else "s",
                     )

View file

@@ -236,9 +236,11 @@ async def display_columns_and_rows(
                         path_from_row_pks(row, pks, not pks),
                         column,
                     ),
-                    ' title="{}"'.format(formatted)
-                    if "bytes" not in formatted
-                    else "",
+                    (
+                        ' title="{}"'.format(formatted)
+                        if "bytes" not in formatted
+                        else ""
+                    ),
                     len(value),
                     "" if len(value) == 1 else "s",
                 )
@@ -289,9 +291,9 @@ async def display_columns_and_rows(
                     "column": column,
                     "value": display_value,
                     "raw": value,
-                    "value_type": "none"
-                    if value is None
-                    else str(type(value).__name__),
+                    "value_type": (
+                        "none" if value is None else str(type(value).__name__)
+                    ),
                 }
             )
         cell_rows.append(Row(cells))
@@ -974,9 +976,9 @@ async def table_view_data(
     from_sql = "from {table_name} {where}".format(
         table_name=escape_sqlite(table_name),
-        where=("where {} ".format(" and ".join(where_clauses)))
-        if where_clauses
-        else "",
+        where=(
+            ("where {} ".format(" and ".join(where_clauses))) if where_clauses else ""
+        ),
     )
     # Copy of params so we can mutate them later:
     from_sql_params = dict(**params)
@@ -1040,10 +1042,12 @@ async def table_view_data(
                 column=escape_sqlite(sort or sort_desc),
                 op=">" if sort else "<",
                 p=len(params),
-                extra_desc_only=""
-                if sort
-                else " or {column2} is null".format(
-                    column2=escape_sqlite(sort or sort_desc)
-                ),
+                extra_desc_only=(
+                    ""
+                    if sort
+                    else " or {column2} is null".format(
+                        column2=escape_sqlite(sort or sort_desc)
+                    )
+                ),
                 next_clauses=" and ".join(next_by_pk_clauses),
             )

View file

@@ -85,7 +85,7 @@ setup(
             "pytest-xdist>=2.2.1",
             "pytest-asyncio>=0.17",
             "beautifulsoup4>=4.8.1",
-            "black==23.9.1",
+            "black==24.1.1",
             "blacken-docs==1.16.0",
             "pytest-timeout>=1.4.2",
             "trustme>=0.7",

View file

@@ -39,9 +39,9 @@ def extra_css_urls(template, database, table, view_name, columns, request, datas
             "database": database,
             "table": table,
             "view_name": view_name,
-            "request_path": request.path
-            if request is not None
-            else None,
+            "request_path": (
+                request.path if request is not None else None
+            ),
             "added": (
                 await datasette.get_database().execute("select 3 * 5")
             ).first()[0],

View file

@@ -279,9 +279,11 @@ async def test_insert_or_upsert_row_errors(
         json=input,
         headers={
             "Authorization": "Bearer {}".format(token),
-            "Content-Type": "text/plain"
-            if special_case == "invalid_content_type"
-            else "application/json",
+            "Content-Type": (
+                "text/plain"
+                if special_case == "invalid_content_type"
+                else "application/json"
+            ),
         },
     )

View file

@@ -335,9 +335,11 @@ def test_serve_create(tmpdir):
 def test_serve_config(tmpdir, argument, format_):
     config_path = tmpdir / "datasette.{}".format(format_)
     config_path.write_text(
-        "settings:\n default_page_size: 5\n"
-        if format_ == "yaml"
-        else '{"settings": {"default_page_size": 5}}',
+        (
+            "settings:\n default_page_size: 5\n"
+            if format_ == "yaml"
+            else '{"settings": {"default_page_size": 5}}'
+        ),
         "utf-8",
     )
     runner = CliRunner()

View file

@@ -1,6 +1,7 @@
 """
 Tests to ensure certain things are documented.
 """
+
 from datasette import app, utils
 from datasette.app import Datasette
 from datasette.filters import Filters

View file

@@ -1,6 +1,7 @@
 """
 Tests for the datasette.database.Database class
 """
+
 from datasette.app import Datasette
 from datasette.database import Database, Results, MultipleValues
 from datasette.utils.sqlite import sqlite3

View file

@@ -1,6 +1,7 @@
 """
 Tests for the datasette.app.Datasette class
 """
+
 import dataclasses
 from datasette import Forbidden, Context
 from datasette.app import Datasette, Database

View file

@@ -381,9 +381,11 @@ async def test_permissions_debug(ds_client):
         {
             "action": div.select_one(".check-action").text,
             # True = green tick, False = red cross, None = gray None
-            "result": None
-            if div.select(".check-result-no-opinion")
-            else bool(div.select(".check-result-true")),
+            "result": (
+                None
+                if div.select(".check-result-no-opinion")
+                else bool(div.select(".check-result-true"))
+            ),
             "used_default": bool(div.select(".check-used-default")),
         }
         for div in check_divs

View file

@@ -1096,24 +1096,26 @@ async def test_hook_filters_from_request(ds_client):
 @pytest.mark.parametrize("extra_metadata", (False, True))
 async def test_hook_register_permissions(extra_metadata):
     ds = Datasette(
-        config={
-            "plugins": {
-                "datasette-register-permissions": {
-                    "permissions": [
-                        {
-                            "name": "extra-from-metadata",
-                            "abbr": "efm",
-                            "description": "Extra from metadata",
-                            "takes_database": False,
-                            "takes_resource": False,
-                            "default": True,
-                        }
-                    ]
-                }
-            }
-        }
-        if extra_metadata
-        else None,
+        config=(
+            {
+                "plugins": {
+                    "datasette-register-permissions": {
+                        "permissions": [
+                            {
+                                "name": "extra-from-metadata",
+                                "abbr": "efm",
+                                "description": "Extra from metadata",
+                                "takes_database": False,
+                                "takes_resource": False,
+                                "default": True,
+                            }
+                        ]
+                    }
+                }
+            }
+            if extra_metadata
+            else None
+        ),
         plugins_dir=PLUGINS_DIR,
     )
     await ds.invoke_startup()

View file

@@ -305,9 +305,11 @@ async def test_paginate_compound_keys_with_extra_filters(ds_client):
         "_sort_desc=sortable_with_nulls",
         lambda row: (
             1 if row["sortable_with_nulls"] is None else 0,
-            -row["sortable_with_nulls"]
-            if row["sortable_with_nulls"] is not None
-            else 0,
+            (
+                -row["sortable_with_nulls"]
+                if row["sortable_with_nulls"] is not None
+                else 0
+            ),
             row["content"],
         ),
         "sorted by sortable_with_nulls descending",

View file

@@ -1,6 +1,7 @@
 """
 Tests for various datasette helper functions.
 """
+
 from datasette.app import Datasette
 from datasette import utils
 from datasette.utils.asgi import Request