Compare commits

...

29 commits

Author SHA1 Message Date
Simon Willison
ee24ea9452 Add pip as a dependency too, for Rye - refs #2065 2023-07-26 07:10:27 -07:00
Simon Willison
e17a1373b3 Add setuptools to dependencies
Refs #2065
2023-07-26 07:10:27 -07:00
Simon Willison
e8ac498e24 Work in progress on query view, refs #2049 2023-07-26 07:10:24 -07:00
Simon Willison
d7aa14b17f Homepage test now just asserts isinstance(x, int) - closes #2092 2023-07-26 06:51:25 -07:00
Simon Willison
b5647ebd53 Fix all E741 Ambiguous variable name warnings, refs #2090 2023-07-26 06:51:25 -07:00
Simon Willison
737a1a7fd2 Fixed spelling error, refs #2089
Also ensure codespell runs as part of just lint
2023-07-26 06:51:25 -07:00
Simon Willison
c0c764727f Justfile I use for local development
Now with codespell, refs #2089
2023-07-26 06:51:25 -07:00
Simon Willison
71491551e0 codespell>=2.2.5, also spellcheck README - refs #2089 2023-07-26 06:51:25 -07:00
dependabot[bot]
a88cd45ae5 Bump blacken-docs from 1.13.0 to 1.14.0 (#2083)
Bumps [blacken-docs](https://github.com/asottile/blacken-docs) from 1.13.0 to 1.14.0.
- [Changelog](https://github.com/adamchainz/blacken-docs/blob/main/CHANGELOG.rst)
- [Commits](https://github.com/asottile/blacken-docs/compare/1.13.0...1.14.0)

---
updated-dependencies:
- dependency-name: blacken-docs
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-07-26 06:51:25 -07:00
Simon Willison
ba7bc2ab0f Better docs for startup() hook 2023-07-26 06:51:25 -07:00
Simon Willison
007294008d Merge branch 'main' into json-extras-query 2023-05-25 17:25:56 -07:00
Simon Willison
8ea00e038d New View base class (#2080)
* New View base class, closes #2078
* Use new View subclass for PatternPortfolioView
2023-05-25 17:23:53 -07:00
Simon Willison
94882aa72b --cors Access-Control-Max-Age: 3600, closes #2079 2023-05-25 17:23:53 -07:00
Simon Willison
0805771061 Rename callable.py to check_callable.py, refs #2078 2023-05-25 17:23:53 -07:00
Simon Willison
3f39fba7ea datasette.utils.check_callable(obj) - refs #2078 2023-05-25 17:23:53 -07:00
Simon Willison
59c52b5874 Action: Deploy a Datasette branch preview to Vercel
Closes #2070
2023-05-25 17:23:53 -07:00
Simon Willison
01353c7ee8 Build docs with 3.11 on ReadTheDocs
Inspired by https://github.com/simonw/sqlite-utils/issues/540
2023-05-25 17:23:53 -07:00
Simon Willison
305655c816 Add pip as a dependency too, for Rye - refs #2065 2023-05-25 17:23:53 -07:00
Simon Willison
3a7be0c5b1 Hopeful fix for Python 3.7 httpx failure, refs #2066 2023-05-25 17:23:53 -07:00
Simon Willison
bbbfdb034c Add setuptools to dependencies
Refs #2065
2023-05-25 17:23:52 -07:00
Simon Willison
6ae5312158 ?sql=... now displays HTML 2023-05-22 18:44:07 -07:00
Simon Willison
fdb141f622 shape_arrayfirst for query view 2023-05-08 17:52:22 -07:00
Simon Willison
6f903d5a98 Fixed a test 2023-05-08 17:51:19 -07:00
Simon Willison
3304fd43a2 refresh_schemas() on database view 2023-05-08 17:51:05 -07:00
Simon Willison
8b86fb7fb4 Better debugging 2023-05-08 17:50:12 -07:00
Simon Willison
a706f34b92 Remove debug lines 2023-04-26 22:07:05 -07:00
Simon Willison
026429fadd Work in progress on query view, refs #2049 2023-04-26 20:47:03 -07:00
Simon Willison
40dc5f5c50 WIP 2023-04-12 17:04:26 -07:00
Simon Willison
7b41521b33 WIP new JSON for queries, refs #2049 2023-04-05 16:25:29 -07:00
13 changed files with 971 additions and 30 deletions


@@ -26,5 +26,6 @@ jobs:
pip install -e '.[docs]'
- name: Check spelling
run: |
codespell README.md --ignore-words docs/codespell-ignore-words.txt
codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt

Justfile (new file, 41 lines)

@@ -0,0 +1,41 @@
export DATASETTE_SECRET := "not_a_secret"
# Run tests and linters
@default: test lint
# Setup project
@init:
pipenv run pip install -e '.[test,docs]'
# Run pytest with supplied options
@test *options:
pipenv run pytest {{options}}
@codespell:
pipenv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
pipenv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
pipenv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
# Run linters: black, flake8, mypy, cog
@lint: codespell
pipenv run black . --check
pipenv run flake8
pipenv run cog --check README.md docs/*.rst
# Rebuild docs with cog
@cog:
pipenv run cog -r README.md docs/*.rst
# Serve live docs on localhost:8000
@docs: cog
pipenv run blacken-docs -l 60 docs/*.rst
cd docs && pipenv run make livehtml
# Apply Black
@black:
pipenv run black .
@serve:
pipenv run sqlite-utils create-database data.db
pipenv run sqlite-utils create-table data.db docs id integer title text --pk id --ignore
pipenv run python -m datasette data.db --root --reload


@@ -34,7 +34,12 @@ from jinja2.environment import Template
from jinja2.exceptions import TemplateNotFound
from .views.base import ureg
from .views.database import DatabaseDownload, DatabaseView, TableCreateView
from .views.database import (
DatabaseDownload,
DatabaseView,
TableCreateView,
database_view,
)
from .views.index import IndexView
from .views.special import (
JsonDataView,
@@ -1366,8 +1371,12 @@ class Datasette:
r"/-/patterns$",
)
add_route(DatabaseDownload.as_view(self), r"/(?P<database>[^\/\.]+)\.db$")
# add_route(
# DatabaseView.as_view(self), r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$"
# )
add_route(
DatabaseView.as_view(self), r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$"
wrap_view(database_view, self),
r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$",
)
add_route(TableCreateView.as_view(self), r"/(?P<database>[^\/\.]+)/-/create$")
add_route(


@@ -16,6 +16,9 @@ class TestResponse:
def status(self):
return self.httpx_response.status_code
def __repr__(self):
return "<TestResponse {} [{}]>".format(self.httpx_response.url, self.status)
# Supports both for test-writing convenience
@property
def status_code(self):

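For context, the __repr__ added here slots into a small wrapper around an httpx response. A minimal self-contained sketch, assuming only what the fragment above shows (the httpx construction at the end is illustrative, not from the test suite):

import httpx


class TestResponse:
    def __init__(self, httpx_response):
        self.httpx_response = httpx_response

    @property
    def status(self):
        return self.httpx_response.status_code

    def __repr__(self):
        return "<TestResponse {} [{}]>".format(self.httpx_response.url, self.status)

    # Supports both .status and .status_code for test-writing convenience
    @property
    def status_code(self):
        return self.httpx_response.status_code


response = TestResponse(
    httpx.Response(200, request=httpx.Request("GET", "https://example.com/"))
)
print(response)  # <TestResponse https://example.com/ [200]>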

@@ -1,3 +1,4 @@
from asyncinject import Registry
import os
import hashlib
import itertools
@@ -11,9 +12,12 @@ import markupsafe
from datasette.utils import (
add_cors_headers,
append_querystring,
await_me_maybe,
call_with_supported_arguments,
derive_named_parameters,
format_bytes,
path_with_replaced_args,
tilde_decode,
to_css_class,
validate_sql_select,
@@ -757,3 +761,887 @@ async def _table_columns(datasette, database_name):
for view_name in await db.view_names():
table_columns[view_name] = []
return table_columns
async def database_view(request, datasette):
return await database_view_impl(request, datasette)
async def database_index_view(request, datasette, db):
database = db.name
visible, private = await datasette.check_visibility(
request.actor,
permissions=[
("view-database", database),
"view-instance",
],
)
if not visible:
raise Forbidden("You do not have permission to view this database")
metadata = (datasette.metadata("databases") or {}).get(database, {})
datasette.update_with_inherited_metadata(metadata)
table_counts = await db.table_counts(5)
hidden_table_names = set(await db.hidden_table_names())
all_foreign_keys = await db.get_all_foreign_keys()
views = []
for view_name in await db.view_names():
view_visible, view_private = await datasette.check_visibility(
request.actor,
permissions=[
("view-table", (database, view_name)),
("view-database", database),
"view-instance",
],
)
if view_visible:
views.append(
{
"name": view_name,
"private": view_private,
}
)
tables = []
for table in table_counts:
table_visible, table_private = await datasette.check_visibility(
request.actor,
permissions=[
("view-table", (database, table)),
("view-database", database),
"view-instance",
],
)
if not table_visible:
continue
table_columns = await db.table_columns(table)
tables.append(
{
"name": table,
"columns": table_columns,
"primary_keys": await db.primary_keys(table),
"count": table_counts[table],
"hidden": table in hidden_table_names,
"fts_table": await db.fts_table(table),
"foreign_keys": all_foreign_keys[table],
"private": table_private,
}
)
tables.sort(key=lambda t: (t["hidden"], t["name"]))
canned_queries = []
for query in (await datasette.get_canned_queries(database, request.actor)).values():
query_visible, query_private = await datasette.check_visibility(
request.actor,
permissions=[
("view-query", (database, query["name"])),
("view-database", database),
"view-instance",
],
)
if query_visible:
canned_queries.append(dict(query, private=query_private))
async def database_actions():
links = []
for hook in pm.hook.database_actions(
datasette=datasette,
database=database,
actor=request.actor,
request=request,
):
extra_links = await await_me_maybe(hook)
if extra_links:
links.extend(extra_links)
return links
attached_databases = [d.name for d in await db.attached_databases()]
allow_execute_sql = await datasette.permission_allowed(
request.actor, "execute-sql", database
)
return Response.json(
{
"database": db.name,
"private": private,
"path": datasette.urls.database(database),
"size": db.size,
"tables": tables,
"hidden_count": len([t for t in tables if t["hidden"]]),
"views": views,
"queries": canned_queries,
"allow_execute_sql": allow_execute_sql,
"table_columns": await _table_columns(datasette, database)
if allow_execute_sql
else {},
}
)
async def query_view(
request,
datasette,
canned_query=None,
_size=None,
named_parameters=None,
write=False,
):
print("query_view")
db = await datasette.resolve_database(request)
database = db.name
# TODO: Why do I do this? Is it to eliminate multi-args?
# It's going to break ?_extra=...&_extra=...
params = {key: request.args.get(key) for key in request.args}
sql = ""
if "sql" in params:
sql = params.pop("sql")
# TODO: Behave differently for canned query here:
await datasette.ensure_permissions(request.actor, [("execute-sql", database)])
_shape = None
if "_shape" in params:
_shape = params.pop("_shape")
# ?_shape=arrays - "rows" is the default option, shown above
# ?_shape=objects - "rows" is a list of JSON key/value objects
# ?_shape=array - a JSON array of objects
# ?_shape=array&_nl=on - a newline-separated list of JSON objects
# ?_shape=arrayfirst - a flat JSON array containing just the first value from each row
# ?_shape=object - a JSON object keyed using the primary keys of the rows
async def _results(_sql, _params):
# Returns (results, error (can be None))
try:
return await db.execute(_sql, _params, truncate=True), None
except Exception as e:
return None, e
async def shape_arrays(_results):
results, error = _results
if error:
return {"ok": False, "error": str(error)}
return {
"ok": True,
"rows": [list(r) for r in results.rows],
"truncated": results.truncated,
}
async def shape_objects(_results):
results, error = _results
if error:
return {"ok": False, "error": str(error)}
return {
"ok": True,
"rows": [dict(r) for r in results.rows],
"truncated": results.truncated,
}
async def shape_array(_results):
results, error = _results
if error:
return {"ok": False, "error": str(error)}
return [dict(r) for r in results.rows]
shape_fn = {
"arrays": shape_arrays,
"objects": shape_objects,
"array": shape_array,
# "arrayfirst": shape_arrayfirst,
# "object": shape_object,
}[_shape or "objects"]
registry = Registry.from_dict(
{
"_results": _results,
"_shape": shape_fn,
},
parallel=False,
)
results = await registry.resolve_multi(
["_shape"],
results={
"_sql": sql,
"_params": params,
},
)
# If "shape" does not include "rows" we return that as the response
# because it's likely [{...}] or similar, with no room to attach extras
if "rows" not in results["_shape"]:
return Response.json(results["_shape"])
output = results["_shape"]
# Include the extras:
output.update(dict((k, v) for k, v in results.items() if not k.startswith("_")))
return Response.json(output)
async def database_view_impl(
request,
datasette,
canned_query=None,
_size=None,
named_parameters=None,
write=False,
):
db = await datasette.resolve_database(request)
format_ = request.url_vars.get("format") or "html"
force_shape = None
if format_ == "html":
force_shape = "arrays"
data = await query_view_data(
request,
datasette,
canned_query=canned_query,
_size=_size,
named_parameters=named_parameters,
write=write,
force_shape=force_shape,
)
if format_ == "csv":
raise NotImplementedError("CSV format not yet implemented")
elif format_ in datasette.renderers.keys():
# Dispatch request to the correct output format renderer
# (CSV is not handled here due to streaming)
result = call_with_supported_arguments(
datasette.renderers[format_][0],
datasette=datasette,
columns=columns,
rows=rows,
sql=sql,
query_name=None,
database=db.name,
table=None,
request=request,
view_name="table", # TODO: should this be "query"?
# These will be deprecated in Datasette 1.0:
args=request.args,
data={
"rows": rows,
}, # TODO what should this be?
)
result = await await_me_maybe(result)
if result is None:
raise NotFound("No data")
if isinstance(result, dict):
r = Response(
body=result.get("body"),
status=result.get("status_code") or 200,
content_type=result.get("content_type", "text/plain"),
headers=result.get("headers"),
)
elif isinstance(result, Response):
r = result
# if status_code is not None:
# # Over-ride the status code
# r.status = status_code
else:
assert False, f"{result} should be dict or Response"
elif format_ == "html":
headers = {}
templates = [f"query-{to_css_class(db.name)}.html", "query.html"]
template = datasette.jinja_env.select_template(templates)
alternate_url_json = datasette.absolute_url(
request,
datasette.urls.path(path_with_format(request=request, format="json")),
)
headers.update(
{
"Link": '{}; rel="alternate"; type="application/json+datasette"'.format(
alternate_url_json
)
}
)
metadata = (datasette.metadata("databases") or {}).get(db.name, {})
datasette.update_with_inherited_metadata(metadata)
r = Response.html(
await datasette.render_template(
template,
dict(
data,
database=db.name,
database_color=lambda database: "ff0000",
metadata=metadata,
display_rows=data["rows"],
renderers={},
query={
"sql": request.args.get("sql"),
},
editable=True,
append_querystring=append_querystring,
path_with_replaced_args=path_with_replaced_args,
fix_path=datasette.urls.path,
settings=datasette.settings_dict(),
# TODO: review all of these hacks:
alternate_url_json=alternate_url_json,
datasette_allow_facet=(
"true" if datasette.setting("allow_facet") else "false"
),
is_sortable=False,
allow_execute_sql=await datasette.permission_allowed(
request.actor, "execute-sql", db.name
),
query_ms=1.2,
select_templates=[
f"{'*' if template_name == template.name else ''}{template_name}"
for template_name in templates
],
),
request=request,
view_name="table",
),
headers=headers,
)
else:
assert False, "Invalid format: {}".format(format_)
# if next_url:
# r.headers["link"] = f'<{next_url}>; rel="next"'
return r
response = Response.json(data)
if isinstance(data, dict) and data.get("ok") is False:
# TODO: Other error codes?
response.status_code = 400
if datasette.cors:
add_cors_headers(response.headers)
return response
async def query_view_data(
request,
datasette,
canned_query=None,
_size=None,
named_parameters=None,
write=False,
force_shape=None,
):
db = await datasette.resolve_database(request)
database = db.name
# TODO: Why do I do this? Is it to eliminate multi-args?
# It's going to break ?_extra=...&_extra=...
if request.args.get("sql", "").strip():
return await query_view(
request, datasette, canned_query, _size, named_parameters, write
)
# Index page shows the tables/views/canned queries for this database
params = {key: request.args.get(key) for key in request.args}
sql = ""
if "sql" in params:
sql = params.pop("sql")
# TODO: Behave differently for canned query here:
await datasette.ensure_permissions(request.actor, [("execute-sql", database)])
_shape = force_shape
if "_shape" in params:
_shape = params.pop("_shape")
# ?_shape=arrays
# ?_shape=objects - "rows" is a list of JSON key/value objects
# ?_shape=array - a JSON array of objects
# ?_shape=array&_nl=on - a newline-separated list of JSON objects
# ?_shape=arrayfirst - a flat JSON array containing just the first value from each row
# ?_shape=object - a JSON object keyed using the primary keys of the rows
async def _results(_sql, _params):
# Returns (results, error (can be None))
try:
return await db.execute(_sql, _params, truncate=True), None
except Exception as e:
return None, e
async def shape_arrays(_results):
results, error = _results
if error:
return {"ok": False, "error": str(error)}
return {
"ok": True,
"columns": [r[0] for r in results.description],
"rows": [list(r) for r in results.rows],
"truncated": results.truncated,
}
async def shape_objects(_results):
results, error = _results
if error:
return {"ok": False, "error": str(error)}
return {
"ok": True,
"rows": [dict(r) for r in results.rows],
"truncated": results.truncated,
}
async def shape_array(_results):
results, error = _results
if error:
return {"ok": False, "error": str(error)}
return [dict(r) for r in results.rows]
async def shape_arrayfirst(_results):
results, error = _results
if error:
return {"ok": False, "error": str(error)}
return [r[0] for r in results.rows]
shape_fn = {
"arrays": shape_arrays,
"objects": shape_objects,
"array": shape_array,
"arrayfirst": shape_arrayfirst,
# "object": shape_object,
}[_shape or "objects"]
registry = Registry.from_dict(
{
"_results": _results,
"_shape": shape_fn,
},
parallel=False,
)
results = await registry.resolve_multi(
["_shape"],
results={
"_sql": sql,
"_params": params,
},
)
# If "shape" does not include "rows" we return that as the response
if "rows" not in results["_shape"]:
return Response.json(results["_shape"])
output = results["_shape"]
output.update(dict((k, v) for k, v in results.items() if not k.startswith("_")))
return output
async def database_view_impl(
request,
datasette,
canned_query=None,
_size=None,
named_parameters=None,
write=False,
):
await datasette.refresh_schemas()
db = await datasette.resolve_database(request)
database = db.name
if request.args.get("sql", "").strip():
return await query_view(
request, datasette, canned_query, _size, named_parameters, write
)
# Index page shows the tables/views/canned queries for this database
params = {key: request.args.get(key) for key in request.args}
sql = ""
if "sql" in params:
sql = params.pop("sql")
_shape = None
if "_shape" in params:
_shape = params.pop("_shape")
private = False
if canned_query:
# Respect canned query permissions
visible, private = await datasette.check_visibility(
request.actor,
permissions=[
("view-query", (database, canned_query)),
("view-database", database),
"view-instance",
],
)
if not visible:
raise Forbidden("You do not have permission to view this query")
else:
await datasette.ensure_permissions(request.actor, [("execute-sql", database)])
# If there's no sql, show the database index page
if not sql:
return await database_index_view(request, datasette, db)
validate_sql_select(sql)
# Extract any :named parameters
named_parameters = named_parameters or await derive_named_parameters(db, sql)
named_parameter_values = {
named_parameter: params.get(named_parameter) or ""
for named_parameter in named_parameters
if not named_parameter.startswith("_")
}
# Set to blank string if missing from params
for named_parameter in named_parameters:
if named_parameter not in params and not named_parameter.startswith("_"):
params[named_parameter] = ""
extra_args = {}
if params.get("_timelimit"):
extra_args["custom_time_limit"] = int(params["_timelimit"])
if _size:
extra_args["page_size"] = _size
templates = [f"query-{to_css_class(database)}.html", "query.html"]
if canned_query:
templates.insert(
0,
f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html",
)
query_error = None
# Execute query - as write or as read
if write:
raise NotImplementedError("Write queries not yet implemented")
# if request.method == "POST":
# # If database is immutable, return an error
# if not db.is_mutable:
# raise Forbidden("Database is immutable")
# body = await request.post_body()
# body = body.decode("utf-8").strip()
# if body.startswith("{") and body.endswith("}"):
# params = json.loads(body)
# # But we want key=value strings
# for key, value in params.items():
# params[key] = str(value)
# else:
# params = dict(parse_qsl(body, keep_blank_values=True))
# # Should we return JSON?
# should_return_json = (
# request.headers.get("accept") == "application/json"
# or request.args.get("_json")
# or params.get("_json")
# )
# if canned_query:
# params_for_query = MagicParameters(params, request, self.ds)
# else:
# params_for_query = params
# ok = None
# try:
# cursor = await self.ds.databases[database].execute_write(
# sql, params_for_query
# )
# message = metadata.get(
# "on_success_message"
# ) or "Query executed, {} row{} affected".format(
# cursor.rowcount, "" if cursor.rowcount == 1 else "s"
# )
# message_type = self.ds.INFO
# redirect_url = metadata.get("on_success_redirect")
# ok = True
# except Exception as e:
# message = metadata.get("on_error_message") or str(e)
# message_type = self.ds.ERROR
# redirect_url = metadata.get("on_error_redirect")
# ok = False
# if should_return_json:
# return Response.json(
# {
# "ok": ok,
# "message": message,
# "redirect": redirect_url,
# }
# )
# else:
# self.ds.add_message(request, message, message_type)
# return self.redirect(request, redirect_url or request.path)
# else:
# async def extra_template():
# return {
# "request": request,
# "db_is_immutable": not db.is_mutable,
# "path_with_added_args": path_with_added_args,
# "path_with_removed_args": path_with_removed_args,
# "named_parameter_values": named_parameter_values,
# "canned_query": canned_query,
# "success_message": request.args.get("_success") or "",
# "canned_write": True,
# }
# return (
# {
# "database": database,
# "rows": [],
# "truncated": False,
# "columns": [],
# "query": {"sql": sql, "params": params},
# "private": private,
# },
# extra_template,
# templates,
# )
# Not a write
rows = []
if canned_query:
params_for_query = MagicParameters(params, request, datasette)
else:
params_for_query = params
try:
results = await datasette.execute(
database, sql, params_for_query, truncate=True, **extra_args
)
columns = [r[0] for r in results.description]
rows = list(results.rows)
except sqlite3.DatabaseError as e:
query_error = e
results = None
columns = []
allow_execute_sql = await datasette.permission_allowed(
request.actor, "execute-sql", database
)
format_ = request.url_vars.get("format") or "html"
if format_ == "csv":
raise NotImplementedError("CSV format not yet implemented")
elif format_ in datasette.renderers.keys():
# Dispatch request to the correct output format renderer
# (CSV is not handled here due to streaming)
result = call_with_supported_arguments(
datasette.renderers[format_][0],
datasette=datasette,
columns=columns,
rows=rows,
sql=sql,
query_name=None,
database=db.name,
table=None,
request=request,
view_name="table", # TODO: should this be "query"?
# These will be deprecated in Datasette 1.0:
args=request.args,
data={
"rows": rows,
}, # TODO what should this be?
)
result = await await_me_maybe(result)
if result is None:
raise NotFound("No data")
if isinstance(result, dict):
r = Response(
body=result.get("body"),
status=result.get("status_code") or 200,
content_type=result.get("content_type", "text/plain"),
headers=result.get("headers"),
)
elif isinstance(result, Response):
r = result
# if status_code is not None:
# # Over-ride the status code
# r.status = status_code
else:
assert False, f"{result} should be dict or Response"
elif format_ == "html":
headers = {}
templates = [f"query-{to_css_class(database)}.html", "query.html"]
template = datasette.jinja_env.select_template(templates)
alternate_url_json = datasette.absolute_url(
request,
datasette.urls.path(path_with_format(request=request, format="json")),
)
headers.update(
{
"Link": '{}; rel="alternate"; type="application/json+datasette"'.format(
alternate_url_json
)
}
)
r = Response.html(
await datasette.render_template(
template,
dict(
data,
append_querystring=append_querystring,
path_with_replaced_args=path_with_replaced_args,
fix_path=datasette.urls.path,
settings=datasette.settings_dict(),
# TODO: review all of these hacks:
alternate_url_json=alternate_url_json,
datasette_allow_facet=(
"true" if datasette.setting("allow_facet") else "false"
),
is_sortable=any(c["sortable"] for c in data["display_columns"]),
allow_execute_sql=await datasette.permission_allowed(
request.actor, "execute-sql", resolved.db.name
),
query_ms=1.2,
select_templates=[
f"{'*' if template_name == template.name else ''}{template_name}"
for template_name in templates
],
),
request=request,
view_name="table",
),
headers=headers,
)
else:
assert False, "Invalid format: {}".format(format_)
# if next_url:
# r.headers["link"] = f'<{next_url}>; rel="next"'
return r
async def extra_template():
display_rows = []
truncate_cells = datasette.setting("truncate_cells_html")
for row in results.rows if results else []:
display_row = []
for column, value in zip(results.columns, row):
display_value = value
# Let the plugins have a go
# pylint: disable=no-member
plugin_display_value = None
for candidate in pm.hook.render_cell(
row=row,
value=value,
column=column,
table=None,
database=database,
datasette=self.ds,
request=request,
):
candidate = await await_me_maybe(candidate)
if candidate is not None:
plugin_display_value = candidate
break
if plugin_display_value is not None:
display_value = plugin_display_value
else:
if value in ("", None):
display_value = Markup("&nbsp;")
elif is_url(str(display_value).strip()):
display_value = markupsafe.Markup(
'<a href="{url}">{truncated_url}</a>'.format(
url=markupsafe.escape(value.strip()),
truncated_url=markupsafe.escape(
truncate_url(value.strip(), truncate_cells)
),
)
)
elif isinstance(display_value, bytes):
blob_url = path_with_format(
request=request,
format="blob",
extra_qs={
"_blob_column": column,
"_blob_hash": hashlib.sha256(display_value).hexdigest(),
},
)
formatted = format_bytes(len(value))
display_value = markupsafe.Markup(
'<a class="blob-download" href="{}"{}>&lt;Binary:&nbsp;{:,}&nbsp;byte{}&gt;</a>'.format(
blob_url,
' title="{}"'.format(formatted)
if "bytes" not in formatted
else "",
len(value),
"" if len(value) == 1 else "s",
)
)
else:
display_value = str(value)
if truncate_cells and len(display_value) > truncate_cells:
display_value = display_value[:truncate_cells] + "\u2026"
display_row.append(display_value)
display_rows.append(display_row)
# Show 'Edit SQL' button only if:
# - User is allowed to execute SQL
# - SQL is an approved SELECT statement
# - No magic parameters, so no :_ in the SQL string
edit_sql_url = None
is_validated_sql = False
try:
validate_sql_select(sql)
is_validated_sql = True
except InvalidSql:
pass
if allow_execute_sql and is_validated_sql and ":_" not in sql:
edit_sql_url = (
self.ds.urls.database(database)
+ "?"
+ urlencode(
{
**{
"sql": sql,
},
**named_parameter_values,
}
)
)
show_hide_hidden = ""
if metadata.get("hide_sql"):
if bool(params.get("_show_sql")):
show_hide_link = path_with_removed_args(request, {"_show_sql"})
show_hide_text = "hide"
show_hide_hidden = '<input type="hidden" name="_show_sql" value="1">'
else:
show_hide_link = path_with_added_args(request, {"_show_sql": 1})
show_hide_text = "show"
else:
if bool(params.get("_hide_sql")):
show_hide_link = path_with_removed_args(request, {"_hide_sql"})
show_hide_text = "show"
show_hide_hidden = '<input type="hidden" name="_hide_sql" value="1">'
else:
show_hide_link = path_with_added_args(request, {"_hide_sql": 1})
show_hide_text = "hide"
hide_sql = show_hide_text == "show"
return {
"display_rows": display_rows,
"custom_sql": True,
"named_parameter_values": named_parameter_values,
"editable": editable,
"canned_query": canned_query,
"edit_sql_url": edit_sql_url,
"metadata": metadata,
"settings": self.ds.settings_dict(),
"request": request,
"show_hide_link": self.ds.urls.path(show_hide_link),
"show_hide_text": show_hide_text,
"show_hide_hidden": markupsafe.Markup(show_hide_hidden),
"hide_sql": hide_sql,
"table_columns": await _table_columns(self.ds, database)
if allow_execute_sql
else {},
}
return (
{
"ok": not query_error,
"database": database,
"query_name": canned_query,
"rows": results.rows if results else [],
"truncated": results.truncated if results else False,
"columns": columns,
"query": {"sql": sql, "params": params},
"error": str(query_error) if query_error else None,
"private": private,
"allow_execute_sql": allow_execute_sql,
},
extra_template,
templates,
400 if query_error else 200,
)
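The query code in this file resolves its shape function through asyncinject's Registry (matching the asyncinject>=0.6 bump in setup.py further down). A minimal runnable sketch of that dependency-injection pattern, with a stub standing in for db.execute() and illustrative data rather than anything from Datasette:

import asyncio

from asyncinject import Registry


async def _results(_sql, _params):
    # Stub for db.execute(): returns (results, error), like the code above
    return [(1, "hello"), (2, "world")], None


async def _shape(_results):
    # The _results parameter is resolved by calling the function above
    results, error = _results
    if error:
        return {"ok": False, "error": str(error)}
    return {"ok": True, "rows": [list(r) for r in results]}


async def demo():
    registry = Registry.from_dict(
        {"_results": _results, "_shape": _shape},
        parallel=False,
    )
    # Seed _sql and _params, then ask for _shape; the registry calls
    # _results first because _shape's parameter names it
    resolved = await registry.resolve_multi(
        ["_shape"],
        results={"_sql": "select 1", "_params": {}},
    )
    print(resolved["_shape"])
    # {'ok': True, 'rows': [[1, 'hello'], [2, 'world']]}


asyncio.run(demo())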


@@ -9,7 +9,6 @@ import markupsafe
from datasette.plugins import pm
from datasette.database import QueryInterrupted
from datasette import tracer
from datasette.renderer import json_renderer
from datasette.utils import (
add_cors_headers,
await_me_maybe,


@@ -189,7 +189,7 @@ Or use ``"sort_desc"`` to sort in descending order:
Setting a custom page size
--------------------------
Datasette defaults to displaing 100 rows per page, for both tables and views. You can change this default page size on a per-table or per-view basis using the ``"size"`` key in ``metadata.json``:
Datasette defaults to displaying 100 rows per page, for both tables and views. You can change this default page size on a per-table or per-view basis using the ``"size"`` key in ``metadata.json``:
.. code-block:: json
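The JSON block under this heading is collapsed in the diff. A sketch of the shape it documents, with placeholder database and table names:

{
    "databases": {
        "mydatabase": {
            "tables": {
                "example_table": {
                    "size": 10
                }
            }
        }
    }
}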


@@ -869,7 +869,9 @@ Examples: `datasette-cors <https://datasette.io/plugins/datasette-cors>`__, `dat
startup(datasette)
------------------
This hook fires when the Datasette application server first starts up. You can implement a regular function, for example to validate required plugin configuration:
This hook fires when the Datasette application server first starts up.
Here is an example that validates required plugin configuration. The server will fail to start and show an error if the validation check fails:
.. code-block:: python
@@ -880,7 +882,7 @@ This hook fires when the Datasette application server first starts up. You can i
"required-setting" in config
), "my-plugin requires setting required-setting"
Or you can return an async function which will be awaited on startup. Use this option if you need to make any database queries:
You can also return an async function, which will be awaited on startup. Use this option if you need to execute any database queries, for example this function which creates the ``my_table`` database table if it does not yet exist:
.. code-block:: python
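Both code blocks referenced by this hunk are collapsed in the diff. A sketch of the synchronous example, reconstructed from the assert fragment visible above (the plugin and setting names come from that fragment):

from datasette import hookimpl


@hookimpl
def startup(datasette):
    # Validate required plugin configuration; a failing assert here
    # stops the server from starting
    config = datasette.plugin_config("my-plugin") or {}
    assert (
        "required-setting" in config
    ), "my-plugin requires setting required-setting"

And a sketch of the async variant described by the new prose, creating my_table if it does not yet exist (the table and column names are illustrative):

from datasette import hookimpl


@hookimpl
def startup(datasette):
    # Returning an async function causes it to be awaited on startup
    async def inner():
        db = datasette.get_database()
        if "my_table" not in await db.table_names():
            await db.execute_write("create table my_table (mycol text)")

    return inner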


@@ -58,7 +58,9 @@ setup(
"mergedeep>=1.1.1",
"itsdangerous>=1.1",
"sqlite-utils>=3.30",
"asyncinject>=0.5",
"setuptools",
"pip",
"asyncinject>=0.6",
"setuptools",
"pip",
],
@@ -72,7 +74,7 @@ setup(
"Sphinx==6.1.3",
"furo==2023.3.27",
"sphinx-autobuild",
"codespell",
"codespell>=2.2.5",
"blacken-docs",
"sphinx-copybutton",
],
@@ -82,7 +84,7 @@ setup(
"pytest-asyncio>=0.17",
"beautifulsoup4>=4.8.1",
"black==23.3.0",
"blacken-docs==1.13.0",
"blacken-docs==1.14.0",
"pytest-timeout>=1.4.2",
"trustme>=0.7",
"cogapp>=3.3.0",


@@ -32,14 +32,12 @@ async def test_homepage(ds_client):
assert data.keys() == {"fixtures": 0}.keys()
d = data["fixtures"]
assert d["name"] == "fixtures"
assert d["tables_count"] == 24
assert len(d["tables_and_views_truncated"]) == 5
assert isinstance(d["tables_count"], int)
assert isinstance(len(d["tables_and_views_truncated"]), int)
assert d["tables_and_views_more"] is True
# 4 hidden FTS tables + no_primary_key (hidden in metadata)
assert d["hidden_tables_count"] == 6
# 201 in no_primary_key, plus 6 in other hidden tables:
assert d["hidden_table_rows_sum"] == 207, data
assert d["views_count"] == 4
assert isinstance(d["hidden_tables_count"], int)
assert isinstance(d["hidden_table_rows_sum"], int)
assert isinstance(d["views_count"], int)
@pytest.mark.asyncio
@@ -643,9 +641,6 @@ async def test_custom_sql(ds_client):
"/fixtures.json?sql=select+content+from+simple_primary_key&_shape=objects"
)
data = response.json()
assert {"sql": "select content from simple_primary_key", "params": {}} == data[
"query"
]
assert [
{"content": "hello"},
{"content": "world"},
@@ -653,8 +648,6 @@ async def test_custom_sql(ds_client):
{"content": "RENDER_CELL_DEMO"},
{"content": "RENDER_CELL_ASYNC"},
] == data["rows"]
assert ["content"] == data["columns"]
assert "fixtures" == data["database"]
assert not data["truncated"]


@@ -1,6 +1,7 @@
from datasette.cli import cli, serve
from datasette.plugins import pm
from click.testing import CliRunner
from unittest.mock import ANY
import textwrap
import json
@@ -35,11 +36,11 @@ def test_serve_with_get(tmp_path_factory):
],
)
assert 0 == result.exit_code, result.output
assert {
"database": "_memory",
assert json.loads(result.output) == {
"ok": True,
"rows": [{"sqlite_version()": ANY}],
"truncated": False,
"columns": ["sqlite_version()"],
}.items() <= json.loads(result.output).items()
}
# The plugin should have created hello.txt
assert (plugins_dir / "hello.txt").read_text() == "hello"


@@ -115,7 +115,9 @@ async def test_hook_extra_css_urls(ds_client, path, expected_decoded_object):
assert response.status_code == 200
links = Soup(response.text, "html.parser").findAll("link")
special_href = [
l for l in links if l.attrs["href"].endswith("/extra-css-urls-demo.css")
link
for link in links
if link.attrs["href"].endswith("/extra-css-urls-demo.css")
][0]["href"]
# This link has a base64-encoded JSON blob in it
encoded = special_href.split("/")[3]
@@ -543,7 +545,7 @@ async def test_hook_register_output_renderer_can_render(ds_client):
.find("p", {"class": "export-links"})
.findAll("a")
)
actual = [l["href"] for l in links]
actual = [link["href"] for link in links]
# Should not be present because we sent ?_no_can_render=1
assert "/fixtures/facetable.testall?_labels=on" not in actual
# Check that it was passed the values we expected
@@ -940,7 +942,7 @@ async def test_hook_table_actions(ds_client, table_or_view):
response_2 = await ds_client.get(f"/fixtures/{table_or_view}?_bot=1&_hello=BOB")
assert sorted(
get_table_actions_links(response_2.text), key=lambda l: l["label"]
get_table_actions_links(response_2.text), key=lambda link: link["label"]
) == [
{"label": "Database: fixtures", "href": "/"},
{"label": "From async BOB", "href": "/"},


@@ -481,7 +481,7 @@ async def test_table_csv_json_export_interface(ds_client):
.find("p", {"class": "export-links"})
.findAll("a")
)
actual = [l["href"] for l in links]
actual = [link["href"] for link in links]
expected = [
"/fixtures/simple_primary_key.json?id__gt=2",
"/fixtures/simple_primary_key.testall?id__gt=2",
@@ -521,7 +521,7 @@ async def test_csv_json_export_links_include_labels_if_foreign_keys(ds_client):
.find("p", {"class": "export-links"})
.findAll("a")
)
actual = [l["href"] for l in links]
actual = [link["href"] for link in links]
expected = [
"/fixtures/facetable.json?_labels=on",
"/fixtures/facetable.testall?_labels=on",