diff --git a/datasette/app.py b/datasette/app.py
index 1f80c5a9..8bc6518f 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -34,7 +34,12 @@ from jinja2.environment import Template
 from jinja2.exceptions import TemplateNotFound
 
 from .views.base import ureg
-from .views.database import DatabaseDownload, DatabaseView, TableCreateView
+from .views.database import (
+    DatabaseDownload,
+    DatabaseView,
+    TableCreateView,
+    database_view,
+)
 from .views.index import IndexView
 from .views.special import (
     JsonDataView,
@@ -1366,8 +1371,12 @@ class Datasette:
             r"/-/patterns$",
         )
         add_route(DatabaseDownload.as_view(self), r"/(?P<database>[^\/\.]+)\.db$")
+        # add_route(
+        #     DatabaseView.as_view(self), r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$"
+        # )
         add_route(
-            DatabaseView.as_view(self), r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$"
+            wrap_view(database_view, self),
+            r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$",
         )
         add_route(TableCreateView.as_view(self), r"/(?P<database>[^\/\.]+)/-/create$")
         add_route(
diff --git a/datasette/context.py b/datasette/context.py
new file mode 100644
index 00000000..c5927d1d
--- /dev/null
+++ b/datasette/context.py
@@ -0,0 +1,97 @@
+from typing import List, Dict, Optional, Any
+from dataclasses import dataclass, field
+
+
+def doc(documentation):
+    return field(metadata={"doc": documentation})
+
+
+def is_builtin_type(obj):
+    return isinstance(
+        obj,
+        tuple(
+            x.__class__
+            for x in (int, float, str, bool, bytes, list, tuple, dict, set, frozenset)
+        ),
+    )
+
+
+def rst_docs_for_dataclass(klass: Any) -> str:
+    """Generate reStructuredText (reST) docs for a dataclass."""
+    docs = []
+
+    # Class name and docstring
+    docs.append(klass.__name__)
+    docs.append("-" * len(klass.__name__))
+    docs.append("")
+    if klass.__doc__:
+        docs.append(klass.__doc__)
+        docs.append("")
+
+    # Dataclass fields
+    docs.append("Fields")
+    docs.append("~~~~~~")
+    docs.append("")
+
+    for name, field_info in klass.__dataclass_fields__.items():
+        if is_builtin_type(field_info.type):
+            # <class 'str'>
+            type_name = field_info.type.__name__
+        else:
+            # List[str]
+            type_name = str(field_info.type).replace("typing.", "")
+        docs.append(f':{name} - ``{type_name}``: {field_info.metadata.get("doc", "")}')
+
+    return "\n".join(docs)
+
+
+@dataclass
+class ForeignKey:
+    incoming: List[Dict]
+    outgoing: List[Dict]
+
+
+@dataclass
+class Table:
+    "A table is a useful thing"
+    name: str = doc("The name of the table")
+    columns: List[str] = doc("List of column names in the table")
+    primary_keys: List[str] = doc("List of column names that are primary keys")
+    count: int = doc("Number of rows in the table")
+    hidden: bool = doc(
+        "Should this table default to being hidden in the main database UI?"
+    )
+    fts_table: Optional[str] = doc(
+        "If this table has FTS support, the accompanying FTS table name"
+    )
+    foreign_keys: ForeignKey = doc("List of foreign keys for this table")
+    private: bool = doc("Private tables are not visible to signed-out anonymous users")
+
+
+@dataclass
+class View:
+    name: str
+    private: bool
+
+
+@dataclass
+class Query:
+    title: str
+    sql: str
+    name: str
+    private: bool
+
+
+@dataclass
+class Database:
+    content: str
+    private: bool
+    path: str
+    size: int
+    tables: List[Table]
+    hidden_count: int
+    views: List[View]
+    queries: List[Query]
+    allow_execute_sql: bool
+    table_columns: Dict[str, List[str]]
+    query_ms: float
diff --git a/datasette/utils/testing.py b/datasette/utils/testing.py
index d4990784..7747f7ce 100644
--- a/datasette/utils/testing.py
+++ b/datasette/utils/testing.py
@@ -16,6 +16,9 @@ class TestResponse:
     def status(self):
         return self.httpx_response.status_code
 
+    def __repr__(self):
+        return "<TestResponse {} {}>".format(self.httpx_response.url, self.status)
+
     # Supports both for test-writing convenience
     @property
     def status_code(self):
diff --git a/datasette/views/database.py b/datasette/views/database.py
index dda82510..455ebd1f 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -1,3 +1,4 @@
+from asyncinject import Registry
 import os
 import hashlib
 import itertools
@@ -11,9 +12,12 @@ import markupsafe
 
 from datasette.utils import (
     add_cors_headers,
+    append_querystring,
     await_me_maybe,
+    call_with_supported_arguments,
     derive_named_parameters,
     format_bytes,
+    path_with_replaced_args,
     tilde_decode,
     to_css_class,
     validate_sql_select,
@@ -757,3 +761,1077 @@ async def _table_columns(datasette, database_name):
     for view_name in await db.view_names():
         table_columns[view_name] = []
     return table_columns
+
+
+async def database_view(request, datasette):
+    return await database_view_impl(request, datasette)
+
+
+async def database_index_view(request, datasette, db):
+    database = db.name
+    visible, private = await datasette.check_visibility(
+        request.actor,
+        permissions=[
+            ("view-database", database),
+            "view-instance",
+        ],
+    )
+    if not visible:
+        raise Forbidden("You do not have permission to view this database")
+
+    metadata = (datasette.metadata("databases") or {}).get(database, {})
+    datasette.update_with_inherited_metadata(metadata)
+
+    table_counts = await db.table_counts(5)
+    hidden_table_names = set(await db.hidden_table_names())
+    all_foreign_keys = await db.get_all_foreign_keys()
+
+    views = []
+    for view_name in await db.view_names():
+        view_visible, view_private = await datasette.check_visibility(
+            request.actor,
+            permissions=[
+                ("view-table", (database, view_name)),
+                ("view-database", database),
+                "view-instance",
+            ],
+        )
+        if view_visible:
+            views.append(
+                {
+                    "name": view_name,
+                    "private": view_private,
+                }
+            )
+
+    tables = []
+    for table in table_counts:
+        table_visible, table_private = await datasette.check_visibility(
+            request.actor,
+            permissions=[
+                ("view-table", (database, table)),
+                ("view-database", database),
+                "view-instance",
+            ],
+        )
+        if not table_visible:
+            continue
+        table_columns = await db.table_columns(table)
+        tables.append(
+            {
+                "name": table,
+                "columns": table_columns,
+                "primary_keys": await db.primary_keys(table),
+                "count": table_counts[table],
+                "hidden": table in hidden_table_names,
+                "fts_table": await db.fts_table(table),
+                "foreign_keys": all_foreign_keys[table],
+                "private": table_private,
+            }
+        )
+
+    tables.sort(key=lambda t: (t["hidden"], t["name"]))
+    canned_queries = []
+    for query in (
+        await datasette.get_canned_queries(database, request.actor)
+    ).values():
+        query_visible, query_private = await datasette.check_visibility(
+            request.actor,
+            permissions=[
+                ("view-query", (database, query["name"])),
+                ("view-database", database),
+                "view-instance",
+            ],
+        )
+        if query_visible:
+            canned_queries.append(dict(query, private=query_private))
+
+    async def database_actions():
+        links = []
+        for hook in pm.hook.database_actions(
+            datasette=datasette,
+            database=database,
+            actor=request.actor,
+            request=request,
+        ):
+            extra_links = await await_me_maybe(hook)
+            if extra_links:
+                links.extend(extra_links)
+        return links
+
+    attached_databases = [d.name for d in await db.attached_databases()]
+
+    allow_execute_sql = await datasette.permission_allowed(
+        request.actor, "execute-sql", database
+    )
+    return Response.json(
+        {
+            "database": db.name,
+            "private": private,
+            "path": datasette.urls.database(database),
+            "size": db.size,
+            "tables": tables,
+            "hidden_count": len([t for t in tables if t["hidden"]]),
+            "views": views,
+            "queries": canned_queries,
+            "allow_execute_sql": allow_execute_sql,
+            "table_columns": await _table_columns(datasette, database)
+            if allow_execute_sql
+            else {},
+        }
+    )
+
+
+async def query_view(
+    request,
+    datasette,
+    canned_query=None,
+    _size=None,
+    named_parameters=None,
+    write=False,
+):
+    db = await datasette.resolve_database(request)
+
+    format_ = request.url_vars.get("format") or "html"
+    force_shape = None
+    if format_ == "html":
+        force_shape = "arrays"
+
+    data = await query_view_data(
+        request,
+        datasette,
+        canned_query=canned_query,
+        _size=_size,
+        named_parameters=named_parameters,
+        write=write,
+        force_shape=force_shape,
+    )
+    if format_ == "csv":
+        raise NotImplementedError("CSV format not yet implemented")
+    elif format_ in datasette.renderers.keys():
+        # Dispatch request to the correct output format renderer
+        # (CSV is not handled here due to streaming)
+        result = call_with_supported_arguments(
+            datasette.renderers[format_][0],
+            datasette=datasette,
+            columns=columns,
+            rows=rows,
+            sql=sql,
+            query_name=None,
+            database=db.name,
+            table=None,
+            request=request,
+            view_name="table",  # TODO: should this be "query"?
+            # These will be deprecated in Datasette 1.0:
+            args=request.args,
+            data={
+                "rows": rows,
+            },  # TODO what should this be?
+        )
+        result = await await_me_maybe(result)
+        if result is None:
+            raise NotFound("No data")
+        if isinstance(result, dict):
+            r = Response(
+                body=result.get("body"),
+                status=result.get("status_code") or 200,
+                content_type=result.get("content_type", "text/plain"),
+                headers=result.get("headers"),
+            )
+        elif isinstance(result, Response):
+            r = result
+            # if status_code is not None:
+            #     # Over-ride the status code
+            #     r.status = status_code
+        else:
+            assert False, f"{result} should be dict or Response"
+    elif format_ == "html":
+        headers = {}
+        templates = [f"query-{to_css_class(db.name)}.html", "query.html"]
+        template = datasette.jinja_env.select_template(templates)
+        alternate_url_json = datasette.absolute_url(
+            request,
+            datasette.urls.path(path_with_format(request=request, format="json")),
+        )
+        headers.update(
+            {
+                "Link": '<{}>; rel="alternate"; type="application/json+datasette"'.format(
+                    alternate_url_json
+                )
+            }
+        )
+        metadata = (datasette.metadata("databases") or {}).get(db.name, {})
+        datasette.update_with_inherited_metadata(metadata)
+
+        r = Response.html(
+            await datasette.render_template(
+                template,
+                dict(
+                    data,
+                    database=db.name,
+                    database_color=lambda database: "ff0000",
+                    metadata=metadata,
+                    display_rows=data["rows"],
+                    renderers={},
+                    query={
+                        "sql": request.args.get("sql"),
+                    },
+                    editable=True,
+                    append_querystring=append_querystring,
+                    path_with_replaced_args=path_with_replaced_args,
+                    fix_path=datasette.urls.path,
+                    settings=datasette.settings_dict(),
+                    # TODO: review up all of these hacks:
+                    alternate_url_json=alternate_url_json,
+                    datasette_allow_facet=(
+                        "true" if datasette.setting("allow_facet") else "false"
+                    ),
+                    is_sortable=False,
+                    allow_execute_sql=await datasette.permission_allowed(
+                        request.actor, "execute-sql", db.name
+                    ),
+                    query_ms=1.2,
+                    select_templates=[
+                        f"{'*' if template_name == template.name else ''}{template_name}"
+                        for template_name in templates
+                    ],
+                ),
+                request=request,
+                view_name="table",
+            ),
+            headers=headers,
+        )
+    else:
+        assert False, "Invalid format: {}".format(format_)
+    # if next_url:
+    #     r.headers["link"] = f'<{next_url}>; rel="next"'
+    return r
+
+    response = Response.json(data)
+
+    if isinstance(data, dict) and data.get("ok") is False:
+        # TODO: Other error codes?
+
+        response.status_code = 400
+
+    if datasette.cors:
+        add_cors_headers(response.headers)
+
+    return response
+
+
+async def query_view_data(
+    request,
+    datasette,
+    canned_query=None,
+    _size=None,
+    named_parameters=None,
+    write=False,
+    force_shape=None,
+):
+    db = await datasette.resolve_database(request)
+    database = db.name
+    # TODO: Why do I do this? Is it to eliminate multi-args?
+    # It's going to break ?_extra=...&_extra=...
+    params = {key: request.args.get(key) for key in request.args}
+    sql = ""
+    if "sql" in params:
+        sql = params.pop("sql")
+
+    # TODO: Behave differently for canned query here:
+    await datasette.ensure_permissions(request.actor, [("execute-sql", database)])
+
+    _shape = force_shape
+    if "_shape" in params:
+        _shape = params.pop("_shape")
+
+    # ?_shape=arrays
+    # ?_shape=objects - "rows" is a list of JSON key/value objects
+    # ?_shape=array - a JSON array of objects
+    # ?_shape=array&_nl=on - a newline-separated list of JSON objects
+    # ?_shape=arrayfirst - a flat JSON array containing just the first value from each row
+    # ?_shape=object - a JSON object keyed using the primary keys of the rows
+    async def _results(_sql, _params):
+        # Returns (results, error (can be None))
+        try:
+            return await db.execute(_sql, _params, truncate=True), None
+        except Exception as e:
+            return None, e
+
+    async def shape_arrays(_results):
+        results, error = _results
+        if error:
+            return {"ok": False, "error": str(error)}
+        return {
+            "ok": True,
+            "columns": [r[0] for r in results.description],
+            "rows": [list(r) for r in results.rows],
+            "truncated": results.truncated,
+        }
+
+    async def shape_objects(_results):
+        results, error = _results
+        if error:
+            return {"ok": False, "error": str(error)}
+        return {
+            "ok": True,
+            "rows": [dict(r) for r in results.rows],
+            "truncated": results.truncated,
+        }
+
+    async def shape_array(_results):
+        results, error = _results
+        if error:
+            return {"ok": False, "error": str(error)}
+        return [dict(r) for r in results.rows]
+
+    async def shape_arrayfirst(_results):
+        results, error = _results
+        if error:
+            return {"ok": False, "error": str(error)}
+        return [r[0] for r in results.rows]
+
+    shape_fn = {
+        "arrays": shape_arrays,
+        "objects": shape_objects,
+        "array": shape_array,
+        "arrayfirst": shape_arrayfirst,
+        # "object": shape_object,
+    }[_shape or "objects"]
+
+    registry = Registry.from_dict(
+        {
+            "_results": _results,
+            "_shape": shape_fn,
+        },
+        parallel=False,
+    )
+
+    results = await registry.resolve_multi(
+        ["_shape"],
+        results={
+            "_sql": sql,
+            "_params": params,
+        },
+    )
+
+    # If "shape" does not include "rows" we return that as the response
+    if "rows" not in results["_shape"]:
+        return Response.json(results["_shape"])
+
+    output = results["_shape"]
+    output.update(dict((k, v) for k, v in results.items() if not k.startswith("_")))
+
+    return output
+
+    # registry = Registry(
+    #     extra_count,
+    #     extra_facet_results,
+    #     extra_facets_timed_out,
+    #     extra_suggested_facets,
+    #     facet_instances,
+    #     extra_human_description_en,
+    #     extra_next_url,
+    #     extra_columns,
+    #     extra_primary_keys,
+    #     run_display_columns_and_rows,
+    #     extra_display_columns,
+    #     extra_display_rows,
+    #     extra_debug,
+    #     extra_request,
+    #     extra_query,
+    #     extra_metadata,
+    #     extra_extras,
+    #     extra_database,
+    #     extra_table,
+    #     extra_database_color,
+    #     extra_table_actions,
+    #     extra_filters,
+    #     extra_renderers,
+    #     extra_custom_table_templates,
+    #     extra_sorted_facet_results,
+    #     extra_table_definition,
+    #     extra_view_definition,
+    #     extra_is_view,
+    #     extra_private,
+    #     extra_expandable_columns,
+    #     extra_form_hidden_args,
+    # )
+
+    results = await registry.resolve_multi(
+        ["extra_{}".format(extra) for extra in extras]
+    )
+    data = {
+        "ok": True,
+        "next": next_value and str(next_value) or None,
+    }
+    data.update(
+        {
+            key.replace("extra_", ""): value
+            for key, value in results.items()
+            if key.startswith("extra_") and key.replace("extra_", "") in extras
+        }
+    )
+    raw_sqlite_rows = rows[:page_size]
+    data["rows"] = [dict(r) for r in raw_sqlite_rows]
+
+    private = False
+    if canned_query:
+        # Respect canned query permissions
+        visible, private = await datasette.check_visibility(
+            request.actor,
+            permissions=[
+                ("view-query", (database, canned_query)),
+                ("view-database", database),
+                "view-instance",
+            ],
+        )
+        if not visible:
+            raise Forbidden("You do not have permission to view this query")
+
+    else:
+        await datasette.ensure_permissions(request.actor, [("execute-sql", database)])
+
+    # If there's no sql, show the database index page
+    if not sql:
+        return await database_index_view(request, datasette, db)
+
+    validate_sql_select(sql)
+
+    # Extract any :named parameters
+    named_parameters = named_parameters or await derive_named_parameters(db, sql)
+    named_parameter_values = {
+        named_parameter: params.get(named_parameter) or ""
+        for named_parameter in named_parameters
+        if not named_parameter.startswith("_")
+    }
+
+    # Set to blank string if missing from params
+    for named_parameter in named_parameters:
+        if named_parameter not in params and not named_parameter.startswith("_"):
+            params[named_parameter] = ""
+
+    extra_args = {}
+    if params.get("_timelimit"):
+        extra_args["custom_time_limit"] = int(params["_timelimit"])
+    if _size:
+        extra_args["page_size"] = _size
+
+    templates = [f"query-{to_css_class(database)}.html", "query.html"]
+    if canned_query:
+        templates.insert(
+            0,
+            f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html",
+        )
+
+    query_error = None
+
+    # Execute query - as write or as read
+    if write:
+        raise NotImplementedError("Write queries not yet implemented")
+        # if request.method == "POST":
+        #     # If database is immutable, return an error
+        #     if not db.is_mutable:
+        #         raise Forbidden("Database is immutable")
+        #     body = await request.post_body()
+        #     body = body.decode("utf-8").strip()
+        #     if body.startswith("{") and body.endswith("}"):
+        #         params = json.loads(body)
+        #         # But we want key=value strings
+        #         for key, value in params.items():
+        #             params[key] = str(value)
+        #     else:
+        #         params = dict(parse_qsl(body, keep_blank_values=True))
+        #     # Should we return JSON?
+        #     should_return_json = (
+        #         request.headers.get("accept") == "application/json"
+        #         or request.args.get("_json")
+        #         or params.get("_json")
+        #     )
+        #     if canned_query:
+        #         params_for_query = MagicParameters(params, request, self.ds)
+        #     else:
+        #         params_for_query = params
+        #     ok = None
+        #     try:
+        #         cursor = await self.ds.databases[database].execute_write(
+        #             sql, params_for_query
+        #         )
+        #         message = metadata.get(
+        #             "on_success_message"
+        #         ) or "Query executed, {} row{} affected".format(
+        #             cursor.rowcount, "" if cursor.rowcount == 1 else "s"
+        #         )
+        #         message_type = self.ds.INFO
+        #         redirect_url = metadata.get("on_success_redirect")
+        #         ok = True
+        #     except Exception as e:
+        #         message = metadata.get("on_error_message") or str(e)
+        #         message_type = self.ds.ERROR
+        #         redirect_url = metadata.get("on_error_redirect")
+        #         ok = False
+        #     if should_return_json:
+        #         return Response.json(
+        #             {
+        #                 "ok": ok,
+        #                 "message": message,
+        #                 "redirect": redirect_url,
+        #             }
+        #         )
+        #     else:
+        #         self.ds.add_message(request, message, message_type)
+        #         return self.redirect(request, redirect_url or request.path)
+        # else:
+
+    # async def extra_template():
+    #     return {
+    #         "request": request,
+    #         "db_is_immutable": not db.is_mutable,
+    #         "path_with_added_args": path_with_added_args,
+    #         "path_with_removed_args": path_with_removed_args,
+    #         "named_parameter_values": named_parameter_values,
+    #         "canned_query": canned_query,
+    #         "success_message": request.args.get("_success") or "",
+    #         "canned_write": True,
+    #     }
+
+    # return (
+    #     {
+    #         "database": database,
+    #         "rows": [],
+    #         "truncated": False,
+    #         "columns": [],
+    #         "query": {"sql": sql, "params": params},
+    #         "private": private,
+    #     },
+    #     extra_template,
+    #     templates,
+    # )
+
+    # Not a write
+    rows = []
+    if canned_query:
+        params_for_query = MagicParameters(params, request, datasette)
+    else:
+        params_for_query = params
+    try:
+        results = await datasette.execute(
+            database, sql, params_for_query, truncate=True, **extra_args
+        )
+        columns = [r[0] for r in results.description]
+        rows = list(results.rows)
+    except sqlite3.DatabaseError as e:
+        query_error = e
+        results = None
+        columns = []
+
+    allow_execute_sql = await datasette.permission_allowed(
+        request.actor, "execute-sql", database
+    )
+
+    format_ = request.url_vars.get("format") or "html"
+
+    if format_ == "csv":
+        raise NotImplementedError("CSV format not yet implemented")
+    elif format_ in datasette.renderers.keys():
+        # Dispatch request to the correct output format renderer
+        # (CSV is not handled here due to streaming)
+        result = call_with_supported_arguments(
+            datasette.renderers[format_][0],
+            datasette=datasette,
+            columns=columns,
+            rows=rows,
+            sql=sql,
+            query_name=None,
+            database=db.name,
+            table=None,
+            request=request,
+            view_name="table",  # TODO: should this be "query"?
+            # These will be deprecated in Datasette 1.0:
+            args=request.args,
+            data={
+                "rows": rows,
+            },  # TODO what should this be?
+        )
+        result = await await_me_maybe(result)
+        if result is None:
+            raise NotFound("No data")
+        if isinstance(result, dict):
+            r = Response(
+                body=result.get("body"),
+                status=result.get("status_code") or 200,
+                content_type=result.get("content_type", "text/plain"),
+                headers=result.get("headers"),
+            )
+        elif isinstance(result, Response):
+            r = result
+            # if status_code is not None:
+            #     # Over-ride the status code
+            #     r.status = status_code
+        else:
+            assert False, f"{result} should be dict or Response"
+    elif format_ == "html":
+        headers = {}
+        templates = [f"query-{to_css_class(database)}.html", "query.html"]
+        template = datasette.jinja_env.select_template(templates)
+        alternate_url_json = datasette.absolute_url(
+            request,
+            datasette.urls.path(path_with_format(request=request, format="json")),
+        )
+        headers.update(
+            {
+                "Link": '<{}>; rel="alternate"; type="application/json+datasette"'.format(
+                    alternate_url_json
+                )
+            }
+        )
+        r = Response.html(
+            await datasette.render_template(
+                template,
+                dict(
+                    data,
+                    append_querystring=append_querystring,
+                    path_with_replaced_args=path_with_replaced_args,
+                    fix_path=datasette.urls.path,
+                    settings=datasette.settings_dict(),
+                    # TODO: review up all of these hacks:
+                    alternate_url_json=alternate_url_json,
+                    datasette_allow_facet=(
+                        "true" if datasette.setting("allow_facet") else "false"
+                    ),
+                    is_sortable=any(c["sortable"] for c in data["display_columns"]),
+                    allow_execute_sql=await datasette.permission_allowed(
+                        request.actor, "execute-sql", resolved.db.name
+                    ),
+                    query_ms=1.2,
+                    select_templates=[
+                        f"{'*' if template_name == template.name else ''}{template_name}"
+                        for template_name in templates
+                    ],
+                ),
+                request=request,
+                view_name="table",
+            ),
+            headers=headers,
+        )
+    else:
+        assert False, "Invalid format: {}".format(format_)
+    # if next_url:
+    #     r.headers["link"] = f'<{next_url}>; rel="next"'
+    return r
+
+
+async def database_view_impl(
+    request,
+    datasette,
+    canned_query=None,
+    _size=None,
+    named_parameters=None,
+    write=False,
+):
+    await datasette.refresh_schemas()
+    db = await datasette.resolve_database(request)
+    database = db.name
+
+    if request.args.get("sql", "").strip():
+        return await query_view(
+            request, datasette, canned_query, _size, named_parameters, write
+        )
+
+    # Index page shows the tables/views/canned queries for this database
+
+    params = {key: request.args.get(key) for key in request.args}
+    sql = ""
+    if "sql" in params:
+        sql = params.pop("sql")
+
+    _shape = None
+    if "_shape" in params:
+        _shape = params.pop("_shape")
+
+    private = False
+    if canned_query:
+        # Respect canned query permissions
+        visible, private = await datasette.check_visibility(
+            request.actor,
+            permissions=[
+                ("view-query", (database, canned_query)),
+                ("view-database", database),
+                "view-instance",
+            ],
+        )
+        if not visible:
+            raise Forbidden("You do not have permission to view this query")
+
+    else:
+        await datasette.ensure_permissions(request.actor, [("execute-sql", database)])
+
+    # If there's no sql, show the database index page
+    if not sql:
+        return await database_index_view(request, datasette, db)
+
+    validate_sql_select(sql)
+
+    # Extract any :named parameters
+    named_parameters = named_parameters or await derive_named_parameters(db, sql)
+    named_parameter_values = {
+        named_parameter: params.get(named_parameter) or ""
+        for named_parameter in named_parameters
+        if not named_parameter.startswith("_")
+    }
+
+    # Set to blank string if missing from params
+    for named_parameter in named_parameters:
+        if named_parameter not in params and not named_parameter.startswith("_"):
+            params[named_parameter] = ""
+
+    extra_args = {}
+    if params.get("_timelimit"):
+        extra_args["custom_time_limit"] = int(params["_timelimit"])
+    if _size:
+        extra_args["page_size"] = _size
+
+    templates = [f"query-{to_css_class(database)}.html", "query.html"]
+    if canned_query:
+        templates.insert(
+            0,
+            f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html",
+        )
+
+    query_error = None
+
+    # Execute query - as write or as read
+    if write:
+        raise NotImplementedError("Write queries not yet implemented")
+        # if request.method == "POST":
+        #     # If database is immutable, return an error
+        #     if not db.is_mutable:
+        #         raise Forbidden("Database is immutable")
+        #     body = await request.post_body()
+        #     body = body.decode("utf-8").strip()
+        #     if body.startswith("{") and body.endswith("}"):
+        #         params = json.loads(body)
+        #         # But we want key=value strings
+        #         for key, value in params.items():
+        #             params[key] = str(value)
+        #     else:
+        #         params = dict(parse_qsl(body, keep_blank_values=True))
+        #     # Should we return JSON?
+        #     should_return_json = (
+        #         request.headers.get("accept") == "application/json"
+        #         or request.args.get("_json")
+        #         or params.get("_json")
+        #     )
+        #     if canned_query:
+        #         params_for_query = MagicParameters(params, request, self.ds)
+        #     else:
+        #         params_for_query = params
+        #     ok = None
+        #     try:
+        #         cursor = await self.ds.databases[database].execute_write(
+        #             sql, params_for_query
+        #         )
+        #         message = metadata.get(
+        #             "on_success_message"
+        #         ) or "Query executed, {} row{} affected".format(
+        #             cursor.rowcount, "" if cursor.rowcount == 1 else "s"
+        #         )
+        #         message_type = self.ds.INFO
+        #         redirect_url = metadata.get("on_success_redirect")
+        #         ok = True
+        #     except Exception as e:
+        #         message = metadata.get("on_error_message") or str(e)
+        #         message_type = self.ds.ERROR
+        #         redirect_url = metadata.get("on_error_redirect")
+        #         ok = False
+        #     if should_return_json:
+        #         return Response.json(
+        #             {
+        #                 "ok": ok,
+        #                 "message": message,
+        #                 "redirect": redirect_url,
+        #             }
+        #         )
+        #     else:
+        #         self.ds.add_message(request, message, message_type)
+        #         return self.redirect(request, redirect_url or request.path)
+        # else:
+
+    # async def extra_template():
+    #     return {
+    #         "request": request,
+    #         "db_is_immutable": not db.is_mutable,
+    #         "path_with_added_args": path_with_added_args,
+    #         "path_with_removed_args": path_with_removed_args,
+    #         "named_parameter_values": named_parameter_values,
+    #         "canned_query": canned_query,
+    #         "success_message": request.args.get("_success") or "",
+    #         "canned_write": True,
+    #     }
+
+    # return (
+    #     {
+    #         "database": database,
+    #         "rows": [],
+    #         "truncated": False,
+    #         "columns": [],
+    #         "query": {"sql": sql, "params": params},
+    #         "private": private,
+    #     },
+    #     extra_template,
+    #     templates,
+    # )
+
+    # Not a write
+    rows = []
+    if canned_query:
+        params_for_query = MagicParameters(params, request, datasette)
+    else:
+        params_for_query = params
+    try:
+        results = await datasette.execute(
+            database, sql, params_for_query, truncate=True, **extra_args
+        )
+        columns = [r[0] for r in results.description]
+        rows = list(results.rows)
+    except sqlite3.DatabaseError as e:
+        query_error = e
+        results = None
+        columns = []
+
+    allow_execute_sql = await datasette.permission_allowed(
+        request.actor, "execute-sql", database
+    )
+
+    format_ = request.url_vars.get("format") or "html"
+
+    if format_ == "csv":
+        raise NotImplementedError("CSV format not yet implemented")
+    elif format_ in datasette.renderers.keys():
+        # Dispatch request to the correct output format renderer
+        # (CSV is not handled here due to streaming)
+        result = call_with_supported_arguments(
+            datasette.renderers[format_][0],
+            datasette=datasette,
+            columns=columns,
+            rows=rows,
+            sql=sql,
+            query_name=None,
+            database=db.name,
+            table=None,
+            request=request,
+            view_name="table",  # TODO: should this be "query"?
+            # These will be deprecated in Datasette 1.0:
+            args=request.args,
+            data={
+                "rows": rows,
+            },  # TODO what should this be?
+        )
+        result = await await_me_maybe(result)
+        if result is None:
+            raise NotFound("No data")
+        if isinstance(result, dict):
+            r = Response(
+                body=result.get("body"),
+                status=result.get("status_code") or 200,
+                content_type=result.get("content_type", "text/plain"),
+                headers=result.get("headers"),
+            )
+        elif isinstance(result, Response):
+            r = result
+            # if status_code is not None:
+            #     # Over-ride the status code
+            #     r.status = status_code
+        else:
+            assert False, f"{result} should be dict or Response"
+    elif format_ == "html":
+        headers = {}
+        templates = [f"query-{to_css_class(database)}.html", "query.html"]
+        template = datasette.jinja_env.select_template(templates)
+        alternate_url_json = datasette.absolute_url(
+            request,
+            datasette.urls.path(path_with_format(request=request, format="json")),
+        )
+        headers.update(
+            {
+                "Link": '<{}>; rel="alternate"; type="application/json+datasette"'.format(
+                    alternate_url_json
+                )
+            }
+        )
+        r = Response.html(
+            await datasette.render_template(
+                template,
+                dict(
+                    data,
+                    append_querystring=append_querystring,
+                    path_with_replaced_args=path_with_replaced_args,
+                    fix_path=datasette.urls.path,
+                    settings=datasette.settings_dict(),
+                    # TODO: review up all of these hacks:
+                    alternate_url_json=alternate_url_json,
+                    datasette_allow_facet=(
+                        "true" if datasette.setting("allow_facet") else "false"
+                    ),
+                    is_sortable=any(c["sortable"] for c in data["display_columns"]),
+                    allow_execute_sql=await datasette.permission_allowed(
+                        request.actor, "execute-sql", resolved.db.name
+                    ),
+                    query_ms=1.2,
+                    select_templates=[
+                        f"{'*' if template_name == template.name else ''}{template_name}"
+                        for template_name in templates
+                    ],
+                ),
+                request=request,
+                view_name="table",
+            ),
+            headers=headers,
+        )
+    else:
+        assert False, "Invalid format: {}".format(format_)
+    # if next_url:
+    #     r.headers["link"] = f'<{next_url}>; rel="next"'
+    return r
+
+    async def extra_template():
+        display_rows = []
+        truncate_cells = datasette.setting("truncate_cells_html")
+        for row in results.rows if results else []:
+            display_row = []
+            for column, value in zip(results.columns, row):
+                display_value = value
+                # Let the plugins have a go
+                # pylint: disable=no-member
+                plugin_display_value = None
+                for candidate in pm.hook.render_cell(
+                    row=row,
+                    value=value,
+                    column=column,
+                    table=None,
+                    database=database,
+                    datasette=self.ds,
+                    request=request,
+                ):
+                    candidate = await await_me_maybe(candidate)
+                    if candidate is not None:
+                        plugin_display_value = candidate
+                        break
+                if plugin_display_value is not None:
+                    display_value = plugin_display_value
+                else:
+                    if value in ("", None):
+                        display_value = Markup("&nbsp;")
+                    elif is_url(str(display_value).strip()):
+                        display_value = markupsafe.Markup(
+                            '<a href="{url}">{truncated_url}</a>'.format(
+                                url=markupsafe.escape(value.strip()),
+                                truncated_url=markupsafe.escape(
+                                    truncate_url(value.strip(), truncate_cells)
+                                ),
+                            )
+                        )
+                    elif isinstance(display_value, bytes):
+                        blob_url = path_with_format(
+                            request=request,
+                            format="blob",
+                            extra_qs={
+                                "_blob_column": column,
"_blob_hash": hashlib.sha256(display_value).hexdigest(), + }, + ) + formatted = format_bytes(len(value)) + display_value = markupsafe.Markup( + '<Binary: {:,} byte{}>'.format( + blob_url, + ' title="{}"'.format(formatted) + if "bytes" not in formatted + else "", + len(value), + "" if len(value) == 1 else "s", + ) + ) + else: + display_value = str(value) + if truncate_cells and len(display_value) > truncate_cells: + display_value = display_value[:truncate_cells] + "\u2026" + display_row.append(display_value) + display_rows.append(display_row) + + # Show 'Edit SQL' button only if: + # - User is allowed to execute SQL + # - SQL is an approved SELECT statement + # - No magic parameters, so no :_ in the SQL string + edit_sql_url = None + is_validated_sql = False + try: + validate_sql_select(sql) + is_validated_sql = True + except InvalidSql: + pass + if allow_execute_sql and is_validated_sql and ":_" not in sql: + edit_sql_url = ( + self.ds.urls.database(database) + + "?" + + urlencode( + { + **{ + "sql": sql, + }, + **named_parameter_values, + } + ) + ) + + show_hide_hidden = "" + if metadata.get("hide_sql"): + if bool(params.get("_show_sql")): + show_hide_link = path_with_removed_args(request, {"_show_sql"}) + show_hide_text = "hide" + show_hide_hidden = '' + else: + show_hide_link = path_with_added_args(request, {"_show_sql": 1}) + show_hide_text = "show" + else: + if bool(params.get("_hide_sql")): + show_hide_link = path_with_removed_args(request, {"_hide_sql"}) + show_hide_text = "show" + show_hide_hidden = '' + else: + show_hide_link = path_with_added_args(request, {"_hide_sql": 1}) + show_hide_text = "hide" + hide_sql = show_hide_text == "show" + return { + "display_rows": display_rows, + "custom_sql": True, + "named_parameter_values": named_parameter_values, + "editable": editable, + "canned_query": canned_query, + "edit_sql_url": edit_sql_url, + "metadata": metadata, + "settings": self.ds.settings_dict(), + "request": request, + "show_hide_link": self.ds.urls.path(show_hide_link), + "show_hide_text": show_hide_text, + "show_hide_hidden": markupsafe.Markup(show_hide_hidden), + "hide_sql": hide_sql, + "table_columns": await _table_columns(self.ds, database) + if allow_execute_sql + else {}, + } + + return ( + { + "ok": not query_error, + "database": database, + "query_name": canned_query, + "rows": results.rows if results else [], + "truncated": results.truncated if results else False, + "columns": columns, + "query": {"sql": sql, "params": params}, + "error": str(query_error) if query_error else None, + "private": private, + "allow_execute_sql": allow_execute_sql, + }, + extra_template, + templates, + 400 if query_error else 200, + ) diff --git a/datasette/views/table.py b/datasette/views/table.py index c102c103..e367a075 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -9,7 +9,6 @@ import markupsafe from datasette.plugins import pm from datasette.database import QueryInterrupted from datasette import tracer -from datasette.renderer import json_renderer from datasette.utils import ( add_cors_headers, await_me_maybe, diff --git a/docs/conf.py b/docs/conf.py index c25d8a95..5423fa2a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -31,7 +31,12 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
-extensions = ["sphinx.ext.extlinks", "sphinx.ext.autodoc", "sphinx_copybutton"]
+extensions = [
+    "sphinx.ext.extlinks",
+    "sphinx.ext.autodoc",
+    "sphinx_copybutton",
+    "jsoncontext",
+]
 
 extlinks = {
     "issue": ("https://github.com/simonw/datasette/issues/%s", "#%s"),
diff --git a/docs/index.rst b/docs/index.rst
index 5a9cc7ed..254ed3da 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -57,6 +57,7 @@ Contents
    settings
    introspection
    custom_templates
+   template_context
    plugins
    writing_plugins
    plugin_hooks
diff --git a/docs/jsoncontext.py b/docs/jsoncontext.py
new file mode 100644
index 00000000..d759f89f
--- /dev/null
+++ b/docs/jsoncontext.py
@@ -0,0 +1,28 @@
+from docutils import nodes
+from sphinx.util.docutils import SphinxDirective
+from importlib import import_module
+import json
+
+
+class JSONContextDirective(SphinxDirective):
+    required_arguments = 1
+
+    def run(self):
+        module_path, class_name = self.arguments[0].rsplit(".", 1)
+        try:
+            module = import_module(module_path)
+            dataclass = getattr(module, class_name)
+        except ImportError:
+            warning = f"Unable to import {self.arguments[0]}"
+            return [nodes.error(None, nodes.paragraph(text=warning))]
+
+        doc = json.dumps(
+            dataclass.__annotations__, indent=4, sort_keys=True, default=repr
+        )
+        doc_node = nodes.literal_block(text=doc)
+
+        return [doc_node]
+
+
+def setup(app):
+    app.add_directive("jsoncontext", JSONContextDirective)
diff --git a/docs/template_context.rst b/docs/template_context.rst
new file mode 100644
index 00000000..f1de4f9e
--- /dev/null
+++ b/docs/template_context.rst
@@ -0,0 +1,29 @@
+.. _template_context:
+
+Template context
+================
+
+This page describes the variables made available to templates used by Datasette to render different pages of the application.
+
+
+.. [[[cog
+    from datasette.context import rst_docs_for_dataclass, Table
+    cog.out(rst_docs_for_dataclass(Table))
+.. ]]]
+Table
+-----
+
+A table is a useful thing
+
+Fields
+~~~~~~
+
+:name - ``str``: The name of the table
+:columns - ``List[str]``: List of column names in the table
+:primary_keys - ``List[str]``: List of column names that are primary keys
+:count - ``int``: Number of rows in the table
+:hidden - ``bool``: Should this table default to being hidden in the main database UI?
+:fts_table - ``Optional[str]``: If this table has FTS support, the accompanying FTS table name
+:foreign_keys - ``ForeignKey``: List of foreign keys for this table
+:private - ``bool``: Private tables are not visible to signed-out anonymous users
+.. [[[end]]]
diff --git a/setup.py b/setup.py
index d6824255..0eedadb2 100644
--- a/setup.py
+++ b/setup.py
@@ -58,7 +58,7 @@ setup(
         "mergedeep>=1.1.1",
         "itsdangerous>=1.1",
         "sqlite-utils>=3.30",
-        "asyncinject>=0.5",
+        "asyncinject>=0.6",
         "setuptools",
         "pip",
     ],
diff --git a/tests/test_api.py b/tests/test_api.py
index 247fdd5c..e7d8d849 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -643,9 +643,6 @@ async def test_custom_sql(ds_client):
         "/fixtures.json?sql=select+content+from+simple_primary_key&_shape=objects"
     )
     data = response.json()
-    assert {"sql": "select content from simple_primary_key", "params": {}} == data[
-        "query"
-    ]
     assert [
         {"content": "hello"},
         {"content": "world"},
@@ -653,8 +650,6 @@
         {"content": "RENDER_CELL_DEMO"},
         {"content": "RENDER_CELL_ASYNC"},
     ] == data["rows"]
-    assert ["content"] == data["columns"]
-    assert "fixtures" == data["database"]
     assert not data["truncated"]
diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py
index ac44e1e2..e484a6db 100644
--- a/tests/test_cli_serve_get.py
+++ b/tests/test_cli_serve_get.py
@@ -1,6 +1,7 @@
 from datasette.cli import cli, serve
 from datasette.plugins import pm
 from click.testing import CliRunner
+from unittest.mock import ANY
 import textwrap
 import json
 
@@ -35,11 +36,11 @@ def test_serve_with_get(tmp_path_factory):
         ],
     )
     assert 0 == result.exit_code, result.output
-    assert {
-        "database": "_memory",
+    assert json.loads(result.output) == {
+        "ok": True,
+        "rows": [{"sqlite_version()": ANY}],
         "truncated": False,
-        "columns": ["sqlite_version()"],
-    }.items() <= json.loads(result.output).items()
+    }
 
     # The plugin should have created hello.txt
     assert (plugins_dir / "hello.txt").read_text() == "hello"
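
Note: the `docs/template_context.rst` page added in this patch is generated by running cog against the dataclasses in `datasette/context.py`. The generator can also be exercised directly — a minimal sketch, assuming this branch's `datasette` package is importable from the working checkout:

```python
# Minimal sketch: render the reST docs for the Table dataclass directly,
# outside of Sphinx/cog, to preview what cog will inline into the docs.
from datasette.context import rst_docs_for_dataclass, Table

print(rst_docs_for_dataclass(Table))
# Expected output is the same block cog writes into docs/template_context.rst:
#   Table
#   -----
#
#   A table is a useful thing
#
#   Fields
#   ~~~~~~
#
#   :name - ``str``: The name of the table
#   ... (one ":field - ``type``: doc" line per dataclass field)
```
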