From 1600d2a3ec3ada1f6fb5b1eb73bdaeccb5f80530 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 Jan 2021 14:48:56 -0800
Subject: [PATCH 0001/1323] Renamed /:memory: to /_memory, with redirects - closes #1205

---
 README.md                     |  2 +-
 datasette/app.py              | 23 +++++++++++++++++++----
 datasette/cli.py              |  2 +-
 datasette/views/database.py   |  4 ++--
 docs/datasette-serve-help.txt |  2 +-
 tests/test_api.py             | 26 +++++++++++++++++++++-----
 tests/test_cli.py             |  2 +-
 tests/test_cli_serve_get.py   |  4 ++--
 tests/test_html.py            |  6 +++---
 tests/test_internals_urls.py  | 28 ++++++++++++++--------------
 10 files changed, 65 insertions(+), 34 deletions(-)

diff --git a/README.md b/README.md
index 16fc8f0e..3d44d0e1 100644
--- a/README.md
+++ b/README.md
@@ -78,7 +78,7 @@ Now visiting http://localhost:8001/History/downloads will show you a web interfa
    --plugins-dir DIRECTORY   Path to directory containing custom plugins
    --static STATIC MOUNT     mountpoint:path-to-directory for serving static files
-   --memory                  Make :memory: database available
+   --memory                  Make /_memory database available
    --config CONFIG           Set config option using configname:value
                              docs.datasette.io/en/stable/config.html
    --version-note TEXT       Additional note to show on /-/versions
diff --git a/datasette/app.py b/datasette/app.py
index cfce8e0b..9e15a162 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -218,7 +218,7 @@ class Datasette:
         self.immutables = set(immutables or [])
         self.databases = collections.OrderedDict()
         if memory or not self.files:
-            self.add_database(Database(self, is_memory=True), name=":memory:")
+            self.add_database(Database(self, is_memory=True), name="_memory")
         # memory_name is a random string so that each Datasette instance gets its own
         # unique in-memory named database - otherwise unit tests can fail with weird
         # errors when different instances accidentally share an in-memory database
@@ -633,7 +633,7 @@ class Datasette:
 
     def _versions(self):
         conn = sqlite3.connect(":memory:")
-        self._prepare_connection(conn, ":memory:")
+        self._prepare_connection(conn, "_memory")
         sqlite_version = conn.execute("select sqlite_version()").fetchone()[0]
         sqlite_extensions = {}
         for extension, testsql, hasversion in (
@@ -927,6 +927,12 @@ class Datasette:
                 plugin["name"].replace("-", "_")
             ),
         )
+        add_route(
+            permanent_redirect(
+                "/_memory", forward_query_string=True, forward_rest=True
+            ),
+            r"/:memory:(?P<rest>.*)$",
+        )
         add_route(
             JsonDataView.as_view(self, "metadata.json", lambda: self._metadata),
             r"/-/metadata(?P<format>(\.json)?)$",
@@ -1290,9 +1296,18 @@ def wrap_view(view_fn, datasette):
     return async_view_fn
 
 
-def permanent_redirect(path):
+def permanent_redirect(path, forward_query_string=False, forward_rest=False):
     return wrap_view(
-        lambda request, send: Response.redirect(path, status=301),
+        lambda request, send: Response.redirect(
+            path
+            + (request.url_vars["rest"] if forward_rest else "")
+            + (
+                ("?" + request.query_string)
+                if forward_query_string and request.query_string
+                else ""
+            ),
+            status=301,
+        ),
         datasette=None,
     )
 
diff --git a/datasette/cli.py b/datasette/cli.py
index eade19a0..25d6acea 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -365,7 +365,7 @@ def uninstall(packages, yes):
     help="Serve static files from this directory at /MOUNT/...",
     multiple=True,
 )
-@click.option("--memory", is_flag=True, help="Make :memory: database available")
+@click.option("--memory", is_flag=True, help="Make /_memory database available")
 @click.option(
     "--config",
     type=Config(),
diff --git a/datasette/views/database.py b/datasette/views/database.py
index f6fd579c..75eb8f02 100644
--- a/datasette/views/database.py
+++ b/datasette/views/database.py
@@ -138,7 +138,7 @@ class DatabaseView(DataView):
                 "metadata": metadata,
                 "allow_download": self.ds.setting("allow_download")
                 and not db.is_mutable
-                and database != ":memory:",
+                and not db.is_memory,
             },
             (f"database-{to_css_class(database)}.html", "database.html"),
         )
@@ -160,7 +160,7 @@ class DatabaseDownload(DataView):
             raise DatasetteError("Invalid database", status=404)
         db = self.ds.databases[database]
         if db.is_memory:
-            raise DatasetteError("Cannot download :memory: database", status=404)
+            raise DatasetteError("Cannot download in-memory databases", status=404)
         if not self.ds.setting("allow_download") or db.is_mutable:
             raise Forbidden("Database download is forbidden")
         if not db.path:
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index 079ec9f8..257d38c6 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -24,7 +24,7 @@ Options:
   --template-dir DIRECTORY  Path to directory containing custom templates
   --plugins-dir DIRECTORY   Path to directory containing custom plugins
   --static MOUNT:DIRECTORY  Serve static files from this directory at /MOUNT/...
-  --memory                  Make :memory: database available
+  --memory                  Make /_memory database available
   --config CONFIG           Deprecated: set config option using configname:value. Use
                             --setting instead.
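
The redirect registered in ``datasette/app.py`` above rebuilds the original URL from two pieces: the ``rest`` capture group from the route and the incoming query string. A minimal sketch of that assembly, using the same request attributes as the patched ``permanent_redirect()`` — ``build_location`` is a hypothetical helper for illustration, not part of Datasette:

```python
def build_location(path, rest="", query_string=""):
    # A request for /:memory:.json?sql=select+1 matches the route with
    # rest=".json" and query_string="sql=select+1", producing the new URL.
    location = path + rest
    if query_string:
        location += "?" + query_string
    return location


assert build_location("/_memory", ".json", "sql=select+1") == "/_memory.json?sql=select+1"
```

The ``test_old_memory_urls_redirect`` test below exercises exactly these combinations against the live route.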
diff --git a/tests/test_api.py b/tests/test_api.py
index 0d1bddd3..0b5401d6 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -608,11 +608,11 @@ def test_no_files_uses_memory_database(app_client_no_files):
     response = app_client_no_files.get("/.json")
     assert response.status == 200
     assert {
-        ":memory:": {
-            "name": ":memory:",
+        "_memory": {
+            "name": "_memory",
             "hash": None,
-            "color": "f7935d",
-            "path": "/%3Amemory%3A",
+            "color": "a6c7b9",
+            "path": "/_memory",
             "tables_and_views_truncated": [],
             "tables_and_views_more": False,
             "tables_count": 0,
@@ -626,12 +626,28 @@ def test_no_files_uses_memory_database(app_client_no_files):
     } == response.json
     # Try that SQL query
     response = app_client_no_files.get(
-        "/:memory:.json?sql=select+sqlite_version()&_shape=array"
+        "/_memory.json?sql=select+sqlite_version()&_shape=array"
     )
     assert 1 == len(response.json)
     assert ["sqlite_version()"] == list(response.json[0].keys())
 
 
+@pytest.mark.parametrize(
+    "path,expected_redirect",
+    (
+        ("/:memory:", "/_memory"),
+        ("/:memory:.json", "/_memory.json"),
+        ("/:memory:?sql=select+1", "/_memory?sql=select+1"),
+        ("/:memory:.json?sql=select+1", "/_memory.json?sql=select+1"),
+        ("/:memory:.csv?sql=select+1", "/_memory.csv?sql=select+1"),
+    ),
+)
+def test_old_memory_urls_redirect(app_client_no_files, path, expected_redirect):
+    response = app_client_no_files.get(path, allow_redirects=False)
+    assert response.status == 301
+    assert response.headers["location"] == expected_redirect
+
+
 def test_database_page_for_database_with_dot_in_name(app_client_with_dot):
     response = app_client_with_dot.get("/fixtures.dot.json")
     assert 200 == response.status
diff --git a/tests/test_cli.py b/tests/test_cli.py
index c42c22ea..33c2c8ac 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -221,7 +221,7 @@ def test_config_deprecated(ensure_eventloop):
 
 def test_sql_errors_logged_to_stderr(ensure_eventloop):
     runner = CliRunner(mix_stderr=False)
-    result = runner.invoke(cli, ["--get", "/:memory:.json?sql=select+blah"])
+    result = runner.invoke(cli, ["--get", "/_memory.json?sql=select+blah"])
     assert result.exit_code == 1
     assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr
diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py
index 39236dd8..aaa692e5 100644
--- a/tests/test_cli_serve_get.py
+++ b/tests/test_cli_serve_get.py
@@ -30,12 +30,12 @@ def test_serve_with_get(tmp_path_factory):
             "--plugins-dir",
             str(plugins_dir),
             "--get",
-            "/:memory:.json?sql=select+sqlite_version()",
+            "/_memory.json?sql=select+sqlite_version()",
         ],
     )
     assert 0 == result.exit_code, result.output
     assert {
-        "database": ":memory:",
+        "database": "_memory",
         "truncated": False,
         "columns": ["sqlite_version()"],
     }.items() <= json.loads(result.output).items()
diff --git a/tests/test_html.py b/tests/test_html.py
index 6c33fba7..9f7a987d 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -88,7 +88,7 @@ def test_static_mounts():
 
 def test_memory_database_page():
     with make_app_client(memory=True) as client:
-        response = client.get("/:memory:")
+        response = client.get("/_memory")
         assert response.status == 200
 
 
@@ -1056,10 +1056,10 @@ def test_database_download_disallowed_for_mutable(app_client):
 def test_database_download_disallowed_for_memory():
     with make_app_client(memory=True) as client:
         # Memory page should NOT have a download link
-        response = client.get("/:memory:")
+        response = client.get("/_memory")
         soup = Soup(response.body, "html.parser")
         assert 0 == len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
-        assert 404 == client.get("/:memory:.db").status
+        assert 404 == client.get("/_memory.db").status
 
 
 def test_allow_download_off():
diff --git a/tests/test_internals_urls.py b/tests/test_internals_urls.py
index e6f405b3..e486e4c9 100644
--- a/tests/test_internals_urls.py
+++ b/tests/test_internals_urls.py
@@ -103,14 +103,14 @@ def test_logout(ds, base_url, expected):
 @pytest.mark.parametrize(
     "base_url,format,expected",
     [
-        ("/", None, "/%3Amemory%3A"),
-        ("/prefix/", None, "/prefix/%3Amemory%3A"),
-        ("/", "json", "/%3Amemory%3A.json"),
+        ("/", None, "/_memory"),
+        ("/prefix/", None, "/prefix/_memory"),
+        ("/", "json", "/_memory.json"),
     ],
 )
 def test_database(ds, base_url, format, expected):
     ds._settings["base_url"] = base_url
-    actual = ds.urls.database(":memory:", format=format)
+    actual = ds.urls.database("_memory", format=format)
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)
 
@@ -118,18 +118,18 @@ def test_database(ds, base_url, format, expected):
 @pytest.mark.parametrize(
     "base_url,name,format,expected",
     [
-        ("/", "name", None, "/%3Amemory%3A/name"),
-        ("/prefix/", "name", None, "/prefix/%3Amemory%3A/name"),
-        ("/", "name", "json", "/%3Amemory%3A/name.json"),
-        ("/", "name.json", "json", "/%3Amemory%3A/name.json?_format=json"),
+        ("/", "name", None, "/_memory/name"),
+        ("/prefix/", "name", None, "/prefix/_memory/name"),
+        ("/", "name", "json", "/_memory/name.json"),
+        ("/", "name.json", "json", "/_memory/name.json?_format=json"),
     ],
 )
 def test_table_and_query(ds, base_url, name, format, expected):
     ds._settings["base_url"] = base_url
-    actual1 = ds.urls.table(":memory:", name, format=format)
+    actual1 = ds.urls.table("_memory", name, format=format)
     assert actual1 == expected
     assert isinstance(actual1, PrefixedUrlString)
-    actual2 = ds.urls.query(":memory:", name, format=format)
+    actual2 = ds.urls.query("_memory", name, format=format)
     assert actual2 == expected
     assert isinstance(actual2, PrefixedUrlString)
 
@@ -137,14 +137,14 @@ def test_table_and_query(ds, base_url, name, format, expected):
 @pytest.mark.parametrize(
     "base_url,format,expected",
     [
-        ("/", None, "/%3Amemory%3A/facetable/1"),
-        ("/prefix/", None, "/prefix/%3Amemory%3A/facetable/1"),
-        ("/", "json", "/%3Amemory%3A/facetable/1.json"),
+        ("/", None, "/_memory/facetable/1"),
+        ("/prefix/", None, "/prefix/_memory/facetable/1"),
+        ("/", "json", "/_memory/facetable/1.json"),
     ],
 )
 def test_row(ds, base_url, format, expected):
     ds._settings["base_url"] = base_url
-    actual = ds.urls.row(":memory:", "facetable", "1", format=format)
+    actual = ds.urls.row("_memory", "facetable", "1", format=format)
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)

From dde3c500c73ace33529672f7d862b76753d309cc Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 28 Jan 2021 18:12:32 -0800
Subject: [PATCH 0002/1323] Using pdb for errors thrown inside Datasette

Closes #1207
---
 docs/testing_plugins.rst | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst
index 4261f639..d3f78a97 100644
--- a/docs/testing_plugins.rst
+++ b/docs/testing_plugins.rst
@@ -51,6 +51,25 @@ Then run the tests using pytest like so::
 
     pytest
 
+.. _testing_plugins_pdb:
+
+Using pdb for errors thrown inside Datasette
+--------------------------------------------
+
+If an exception occurs within Datasette itself during a test, the response returned to your plugin will have a ``response.status_code`` value of 500.
+
+You can add ``pdb=True`` to the ``Datasette`` constructor to drop into a Python debugger session inside your test run instead of getting back a 500 response code. This is equivalent to running the ``datasette`` command-line tool with the ``--pdb`` option.
+
+Here's what that looks like in a test function:
+
+.. code-block:: python
+
+    def test_that_opens_the_debugger_or_errors():
+        ds = Datasette([db_path], pdb=True)
+        response = await ds.client.get("/")
+
+If you use this pattern you will need to run ``pytest`` with the ``-s`` option to avoid capturing stdin/stdout in order to interact with the debugger prompt.
+
 .. _testing_plugins_fixtures:
 
 Using pytest fixtures

From beb98bf45488dcb7b8f0a70e13dbf7b2473c998c Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 31 Jan 2021 00:49:09 -0800
Subject: [PATCH 0003/1323] Fixed typo in code example

---
 docs/testing_plugins.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst
index d3f78a97..8ea5e79b 100644
--- a/docs/testing_plugins.rst
+++ b/docs/testing_plugins.rst
@@ -204,7 +204,7 @@ Here's a test for that plugin that mocks the HTTPX outbound request:
         response = await datasette.client.post("/-/fetch-url", data={
             "url": "https://www.example.com/"
         })
-        asert response.text == "Hello world"
+        assert response.text == "Hello world"
         outbound_request = httpx_mock.get_request()
         assert outbound_request.url == "https://www.example.com/"

From 53d8cd708a2f29539512adb9e01eb951ec06c5be Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 31 Jan 2021 00:49:09 -0800
Subject: [PATCH 0004/1323] Fixed typo in code example

---
 docs/testing_plugins.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst
index 4261f639..23e72214 100644
--- a/docs/testing_plugins.rst
+++ b/docs/testing_plugins.rst
@@ -185,7 +185,7 @@ Here's a test for that plugin that mocks the HTTPX outbound request:
         response = await datasette.client.post("/-/fetch-url", data={
             "url": "https://www.example.com/"
         })
-        asert response.text == "Hello world"
+        assert response.text == "Hello world"
         outbound_request = httpx_mock.get_request()
         assert outbound_request.url == "https://www.example.com/"

From 6f0f85bcf12f52f6a372046cc643419d3d5817ab Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Feb 2021 13:20:53 -0800
Subject: [PATCH 0005/1323] Fixed bug with ?_sort= and ?_search=, closes #1214

---
 datasette/views/table.py |  2 +-
 tests/test_html.py       | 17 +++++++++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/datasette/views/table.py b/datasette/views/table.py
index 0a3504b3..48792284 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -815,7 +815,7 @@ class TableView(RowTableShared):
 
         form_hidden_args = []
         for key in request.args:
-            if key.startswith("_"):
+            if key.startswith("_") and key not in ("_sort", "_search"):
                 for value in request.args.getlist(key):
                     form_hidden_args.append((key, value))
 
diff --git a/tests/test_html.py b/tests/test_html.py
index 6c33fba7..1aa9639e 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -1272,6 +1272,23 @@ def test_other_hidden_form_fields(app_client, path, expected_hidden):
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden
 
 
+@pytest.mark.parametrize(
+    "path,expected_hidden",
+    [
+        ("/fixtures/searchable?_search=terry", []),
+        ("/fixtures/searchable?_sort=text2", []),
+        ("/fixtures/searchable?_sort=text2&_where=1", [("_where", "1")]),
+    ],
+)
+def test_search_and_sort_fields_not_duplicated(app_client, path, expected_hidden):
+    # https://github.com/simonw/datasette/issues/1214
+    response = app_client.get(path)
+    soup = Soup(response.body, "html.parser")
+    inputs = soup.find("form").findAll("input")
+    hiddens = [i for i in inputs if i["type"] == "hidden"]
+    assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden
+
+
 def test_binary_data_display_in_table(app_client):
     response = app_client.get("/fixtures/binary_data")
     assert response.status == 200

From 7a2ed9f8a119e220b66d67c7b9e07cbab47b1196 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Feb 2021 13:20:53 -0800
Subject: [PATCH 0006/1323] Fixed bug with ?_sort= and ?_search=, closes #1214

---
 datasette/views/table.py |  2 +-
 tests/test_html.py       | 17 +++++++++++++++++
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/datasette/views/table.py b/datasette/views/table.py
index 0a3504b3..48792284 100644
--- a/datasette/views/table.py
+++ b/datasette/views/table.py
@@ -815,7 +815,7 @@ class TableView(RowTableShared):
 
         form_hidden_args = []
         for key in request.args:
-            if key.startswith("_"):
+            if key.startswith("_") and key not in ("_sort", "_search"):
                 for value in request.args.getlist(key):
                     form_hidden_args.append((key, value))
 
diff --git a/tests/test_html.py b/tests/test_html.py
index 9f7a987d..e21bd64d 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -1272,6 +1272,23 @@ def test_other_hidden_form_fields(app_client, path, expected_hidden):
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden
 
 
+@pytest.mark.parametrize(
+    "path,expected_hidden",
+    [
+        ("/fixtures/searchable?_search=terry", []),
+        ("/fixtures/searchable?_sort=text2", []),
+        ("/fixtures/searchable?_sort=text2&_where=1", [("_where", "1")]),
+    ],
+)
+def test_search_and_sort_fields_not_duplicated(app_client, path, expected_hidden):
+    # https://github.com/simonw/datasette/issues/1214
+    response = app_client.get(path)
+    soup = Soup(response.body, "html.parser")
+    inputs = soup.find("form").findAll("input")
+    hiddens = [i for i in inputs if i["type"] == "hidden"]
+    assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden
+
+
 def test_binary_data_display_in_table(app_client):
     response = app_client.get("/fixtures/binary_data")
     assert response.status == 200

From dd730a391adfb6daf3fb454001edf74f39a39d00 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Feb 2021 13:23:39 -0800
Subject: [PATCH 0007/1323] Release 0.54.1

Refs #1214
---
 datasette/version.py | 2 +-
 docs/changelog.rst   | 7 +++++++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/datasette/version.py b/datasette/version.py
index 8fb7217d..bf182d9f 100644
--- a/datasette/version.py
+++ b/datasette/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.54"
+__version__ = "0.54.1"
 __version_info__ = tuple(__version__.split("."))
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 54c59036..9d0c636a 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
 Changelog
 =========
 
+.. _v0_54_1:
+
+0.54.1 (2021-02-02)
+-------------------
+
+- Fixed a bug where ``?_search=`` and ``?_sort=`` parameters were incorrectly duplicated when the filter form on the table page was re-submitted. (`#1214 `__)
+
 .. _v0_54:
 
 0.54 (2021-01-25)

From 3a3de76009ea482911c22f921c3f48cb1337e01a Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 2 Feb 2021 13:23:39 -0800
Subject: [PATCH 0008/1323] Release 0.54.1

Refs #1214
---
 datasette/version.py | 2 +-
 docs/changelog.rst   | 7 +++++++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/datasette/version.py b/datasette/version.py
index 8fb7217d..bf182d9f 100644
--- a/datasette/version.py
+++ b/datasette/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.54"
+__version__ = "0.54.1"
 __version_info__ = tuple(__version__.split("."))
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 54c59036..9d0c636a 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -4,6 +4,13 @@
 Changelog
 =========
 
+.. _v0_54_1:
+
+0.54.1 (2021-02-02)
+-------------------
+
+- Fixed a bug where ``?_search=`` and ``?_sort=`` parameters were incorrectly duplicated when the filter form on the table page was re-submitted. (`#1214 `__)
+
 .. _v0_54:
 
 0.54 (2021-01-25)

From aa1fe0692c2abb901216738bfb35f9fcc5090e7d Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 7 Feb 2021 19:27:02 -0800
Subject: [PATCH 0009/1323] Updated demo and video links

---
 README.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 3d44d0e1..a4fe36c0 100644
--- a/README.md
+++ b/README.md
@@ -14,8 +14,9 @@ Datasette is a tool for exploring and publishing data. It helps people take data
 
 Datasette is aimed at data journalists, museum curators, archivists, local governments and anyone else who has data that they wish to share with the world.
 
-[Explore a demo](https://fivethirtyeight.datasettes.com/fivethirtyeight), watch [a video about the project](https://www.youtube.com/watch?v=pTr1uLQTJNE) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/).
+[Explore a demo](https://global-power-plants.datasettes.com/global-power-plants/global-power-plants), watch [a video about the project](https://simonwillison.net/2021/Feb/7/video/) or try it out by [uploading and publishing your own CSV data](https://simonwillison.net/2019/Apr/23/datasette-glitch/).
 
+* [datasette.io](https://datasette.io/) is the official project website
 * Latest [Datasette News](https://datasette.io/news)
 * Comprehensive documentation: https://docs.datasette.io/
 * Examples: https://datasette.io/examples

From eda652cf6ee28a0babfb30ce3834512e9e33fb8e Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 11 Feb 2021 16:52:16 -0800
Subject: [PATCH 0010/1323] --ssl-keyfile and --ssl-certfile options to "datasette serve"

Closes #1221
---
 datasette/cli.py              | 19 +++++++++--
 docs/datasette-serve-help.txt |  2 ++
 setup.py                      |  1 +
 tests/conftest.py             | 60 +++++++++++++++++++++++++++++++++++
 tests/test_cli.py             |  2 ++
 5 files changed, 82 insertions(+), 2 deletions(-)

diff --git a/datasette/cli.py b/datasette/cli.py
index 25d6acea..0a355edb 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -408,6 +408,14 @@ def uninstall(packages, yes):
     is_flag=True,
     help="Create database files if they do not exist",
 )
+@click.option(
+    "--ssl-keyfile",
+    help="SSL key file",
+)
+@click.option(
+    "--ssl-certfile",
+    help="SSL certificate file",
+)
 def serve(
     files,
     immutable,
@@ -432,6 +440,8 @@ def serve(
     pdb,
     open_browser,
     create,
+    ssl_keyfile,
+    ssl_certfile,
     return_instance=False,
 ):
     """Serve up specified SQLite database files with a web UI"""
@@ -546,9 +556,14 @@ def serve(
         )
         url = f"http://{host}:{port}{path}"
         webbrowser.open(url)
-    uvicorn.run(
-        ds.app(), host=host, port=port, log_level="info", lifespan="on", workers=1
+    uvicorn_kwargs = dict(
+        host=host, port=port, log_level="info", lifespan="on", workers=1
     )
+    if ssl_keyfile:
+        uvicorn_kwargs["ssl_keyfile"] = ssl_keyfile
+    if ssl_certfile:
+        uvicorn_kwargs["ssl_certfile"] = ssl_certfile
+    uvicorn.run(ds.app(), **uvicorn_kwargs)
 
 
 async def check_databases(ds):
diff --git a/docs/datasette-serve-help.txt b/docs/datasette-serve-help.txt
index 257d38c6..62d59794 100644
--- a/docs/datasette-serve-help.txt
+++ b/docs/datasette-serve-help.txt
@@ -41,4 +41,6 @@ Options:
   --pdb                Launch debugger on any errors
   -o, --open           Open Datasette in your web browser
   --create             Create database files if they do not exist
+  --ssl-keyfile TEXT   SSL key file
+  --ssl-certfile TEXT  SSL certificate file
   --help               Show this message and exit.
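
These two options are passed straight through to Uvicorn, so serving over HTTPS only requires a key and certificate on disk. A minimal sketch of generating a throwaway ``localhost`` certificate with ``trustme`` — the same calls the session fixture below makes — and pointing ``datasette serve`` at it; the file names here are illustrative:

```python
import trustme

ca = trustme.CA()
server_cert = ca.issue_cert("localhost")
# Write the private key, the certificate chain, and a CA certificate
# that an HTTPS client can use to verify the server.
server_cert.private_key_pem.write_to_path(path="server.key")
for blob in server_cert.cert_chain_pems:
    blob.write_to_path(path="server.pem", append=True)
ca.cert_pem.write_to_path(path="client.pem")

# Then, from the shell:
#   datasette --memory -p 8042 --ssl-keyfile server.key --ssl-certfile server.pem
```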
diff --git a/setup.py b/setup.py
index be94c1c6..34b6b396 100644
--- a/setup.py
+++ b/setup.py
@@ -73,6 +73,7 @@ setup(
             "beautifulsoup4>=4.8.1,<4.10.0",
             "black==20.8b1",
             "pytest-timeout>=1.4.2,<1.5",
+            "trustme>=0.7,<0.8",
         ],
     },
     tests_require=["datasette[test]"],
diff --git a/tests/conftest.py b/tests/conftest.py
index a963a4fd..b00ea006 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -2,6 +2,11 @@ import os
 import pathlib
 import pytest
 import re
+import subprocess
+import tempfile
+import time
+import trustme
+
 
 try:
     import pysqlite3 as sqlite3
@@ -91,3 +96,58 @@ def check_permission_actions_are_documented():
     pm.add_hookcall_monitoring(
         before=before, after=lambda outcome, hook_name, hook_impls, kwargs: None
     )
+
+
+@pytest.fixture(scope="session")
+def ds_localhost_http_server():
+    ds_proc = subprocess.Popen(
+        ["datasette", "--memory", "-p", "8041"],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        # Avoid FileNotFoundError: [Errno 2] No such file or directory:
+        cwd=tempfile.gettempdir(),
+    )
+    # Give the server time to start
+    time.sleep(1.5)
+    # Check it started successfully
+    assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
+    yield ds_proc
+    # Shut it down at the end of the pytest session
+    ds_proc.terminate()
+
+
+@pytest.fixture(scope="session")
+def ds_localhost_https_server(tmp_path_factory):
+    cert_directory = tmp_path_factory.mktemp("certs")
+    ca = trustme.CA()
+    server_cert = ca.issue_cert("localhost")
+    keyfile = str(cert_directory / "server.key")
+    certfile = str(cert_directory / "server.pem")
+    client_cert = str(cert_directory / "client.pem")
+    server_cert.private_key_pem.write_to_path(path=keyfile)
+    for blob in server_cert.cert_chain_pems:
+        blob.write_to_path(path=certfile, append=True)
+    ca.cert_pem.write_to_path(path=client_cert)
+
+    ds_proc = subprocess.Popen(
+        [
+            "datasette",
+            "--memory",
+            "-p",
+            "8042",
+            "--ssl-keyfile",
+            keyfile,
+            "--ssl-certfile",
+            certfile,
+        ],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        cwd=tempfile.gettempdir(),
+    )
+    # Give the server time to start
+    time.sleep(1.5)
+    # Check it started successfully
+    assert not ds_proc.poll(), ds_proc.stdout.read().decode("utf-8")
+    yield ds_proc, client_cert
+    # Shut it down at the end of the pytest session
+    ds_proc.terminate()
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 33c2c8ac..2ca500cf 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -149,6 +149,8 @@ def test_metadata_yaml():
         pdb=False,
         open_browser=False,
         create=False,
+        ssl_keyfile=None,
+        ssl_certfile=None,
         return_instance=True,
     )
     client = _TestClient(ds)

From 9603d893b9b72653895318c9104d754229fdb146 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Thu, 11 Feb 2021 16:53:20 -0800
Subject: [PATCH 0011/1323] Tests for --ssl-keyfile and --ssl-certfile, refs #1221

---
 tests/test_cli_serve_server.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)
 create mode 100644 tests/test_cli_serve_server.py

diff --git a/tests/test_cli_serve_server.py b/tests/test_cli_serve_server.py
new file mode 100644
index 00000000..6962d2fd
--- /dev/null
+++ b/tests/test_cli_serve_server.py
@@ -0,0 +1,20 @@
+import httpx
+
+
+def test_serve_localhost_http(ds_localhost_http_server):
+    response = httpx.get("http://localhost:8041/_memory.json")
+    assert {
+        "database": "_memory",
+        "path": "/_memory",
+        "tables": [],
+    }.items() <= response.json().items()
+
+
+def test_serve_localhost_https(ds_localhost_https_server):
+    _, client_cert = ds_localhost_https_server
+    response = httpx.get("https://localhost:8042/_memory.json", verify=client_cert)
+    assert {
+        "database": "_memory",
+        "path": "/_memory",
+        "tables": [],
+    }.items() <= response.json().items()

From d2d53a5559f3014cccba2cba7e1eab1e5854c759 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 17 Feb 2021 17:20:15 -0800
Subject: [PATCH 0012/1323] New :issue: Sphinx macro, closes #1227

---
 docs/changelog.rst | 480 ++++++++++++++++++++++-----------------------
 docs/conf.py       |   6 +-
 2 files changed, 245 insertions(+), 241 deletions(-)

diff --git a/docs/changelog.rst b/docs/changelog.rst
index 9d0c636a..fe69686e 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -9,7 +9,7 @@ Changelog
 0.54.1 (2021-02-02)
 -------------------
 
-- Fixed a bug where ``?_search=`` and ``?_sort=`` parameters were incorrectly duplicated when the filter form on the table page was re-submitted. (`#1214 `__)
+- Fixed a bug where ``?_search=`` and ``?_sort=`` parameters were incorrectly duplicated when the filter form on the table page was re-submitted. (:issue:`1214`)
 
 .. _v0_54:
 
@@ -23,7 +23,7 @@ For additional commentary on this release, see `Datasette 0.54, the annotated re
 The _internal database
 ~~~~~~~~~~~~~~~~~~~~~~
 
-As part of ongoing work to help Datasette handle much larger numbers of connected databases and tables (see `Datasette Library `__) Datasette now maintains an in-memory SQLite database with details of all of the attached databases, tables, columns, indexes and foreign keys. (`#1150 `__)
+As part of ongoing work to help Datasette handle much larger numbers of connected databases and tables (see `Datasette Library `__) Datasette now maintains an in-memory SQLite database with details of all of the attached databases, tables, columns, indexes and foreign keys. (:issue:`1150`)
 
 This will support future improvements such as a searchable, paginated homepage of all available tables.
 
@@ -34,7 +34,7 @@ Plugins can use these tables to introspect attached data in an efficient way. Pl
 Named in-memory database support
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-As part of the work building the ``_internal`` database, Datasette now supports named in-memory databases that can be shared across multiple connections. This allows plugins to create in-memory databases which will persist data for the lifetime of the Datasette server process. (`#1151 `__)
+As part of the work building the ``_internal`` database, Datasette now supports named in-memory databases that can be shared across multiple connections. This allows plugins to create in-memory databases which will persist data for the lifetime of the Datasette server process. (:issue:`1151`)
 
 The new ``memory_name=`` parameter to the :ref:`internals_database` can be used to create named, shared in-memory databases.
@@ -45,28 +45,28 @@ JavaScript modules
 To use modules, JavaScript needs to be included in ``
+
diff --git a/datasette/templates/_codemirror_foot.html b/datasette/templates/_codemirror_foot.html
index 4019d448..ee09cff1 100644
--- a/datasette/templates/_codemirror_foot.html
+++ b/datasette/templates/_codemirror_foot.html
@@ -23,6 +23,7 @@ window.onload = () => {
             editor.setValue(sqlFormatter.format(editor.getValue()));
         })
     }
+    cmResize(editor, {resizableWidth: false});
 }
 if (sqlFormat && readOnly) {
     const formatted = sqlFormatter.format(readOnly.innerHTML);

From 42caabf7e9e6e4d69ef6dd7de16f2cd96bc79d5b Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 22 Feb 2021 09:35:41 -0800
Subject: [PATCH 0021/1323] Fixed typo

---
 docs/testing_plugins.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/testing_plugins.rst b/docs/testing_plugins.rst
index 8ea5e79b..1291a875 100644
--- a/docs/testing_plugins.rst
+++ b/docs/testing_plugins.rst
@@ -79,7 +79,7 @@ Using pytest fixtures
 
 A common pattern for Datasette plugins is to create a fixture which sets up a temporary test database and wraps it in a Datasette instance.
 
-Here's an example that uses the `sqlite-utils library `__ to populate a temporary test database. It also sets the title of that table using a simulated ``metadata.json`` congiguration:
+Here's an example that uses the `sqlite-utils library `__ to populate a temporary test database. It also sets the title of that table using a simulated ``metadata.json`` configuration:

From 726f781c50e88f557437f6490b8479c3d6fabfc2 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 22 Feb 2021 16:22:47 -0800
Subject: [PATCH 0022/1323] Fix for arraycontains bug, closes #1239

---
 datasette/filters.py  | 4 ++--
 tests/test_filters.py | 8 ++++++++
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/datasette/filters.py b/datasette/filters.py
index 152a26b4..2b859d99 100644
--- a/datasette/filters.py
+++ b/datasette/filters.py
@@ -150,7 +150,7 @@ class Filters:
             "arraycontains",
             "array contains",
             """rowid in (
-                select {t}.rowid from {t}, json_each({t}.{c}) j
+                select {t}.rowid from {t}, json_each([{t}].[{c}]) j
                 where j.value = :{p}
             )""",
             '{c} contains "{v}"',
@@ -159,7 +159,7 @@ class Filters:
             "arraynotcontains",
             "array does not contain",
             """rowid not in (
-                select {t}.rowid from {t}, json_each({t}.{c}) j
+                select {t}.rowid from {t}, json_each([{t}].[{c}]) j
                 where j.value = :{p}
             )""",
             '{c} does not contain "{v}"',
diff --git a/tests/test_filters.py b/tests/test_filters.py
index 75a779b9..f22b7b5c 100644
--- a/tests/test_filters.py
+++ b/tests/test_filters.py
@@ -56,6 +56,14 @@ import pytest
         # Not in, and JSON array not in
         ((("foo__notin", "1,2,3"),), ["foo not in (:p0, :p1, :p2)"], ["1", "2", "3"]),
         ((("foo__notin", "[1,2,3]"),), ["foo not in (:p0, :p1, :p2)"], [1, 2, 3]),
+        # JSON arraycontains
+        (
+            (("Availability+Info__arraycontains", "yes"),),
+            [
+                "rowid in (\n select table.rowid from table, json_each([table].[Availability+Info]) j\n where j.value = :p0\n )"
+            ],
+            ["yes"],
+        ),
     ],
 )
 def test_build_where(args, expected_where, expected_params):

From afed51b1e36cf275c39e71c7cb262d6c5bdbaa31 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Fri, 26 Feb 2021 09:27:09 -0800
Subject: [PATCH 0023/1323] Note about where to find plugin examples, closes #1244

---
 docs/writing_plugins.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/writing_plugins.rst b/docs/writing_plugins.rst
index b43ecb27..6afee1c3 100644
--- a/docs/writing_plugins.rst
+++ b/docs/writing_plugins.rst
@@ -5,6 +5,8 @@ Writing plugins
 
 You can write one-off plugins that apply to just one Datasette instance, or you can write plugins which can be installed using ``pip`` and can be shipped to the Python Package Index (`PyPI `__) for other people to install.
 
+Want to start by looking at an example? The `Datasette plugins directory `__ lists more than 50 open source plugins with code you can explore. The :ref:`plugin hooks ` page includes links to example plugins for each of the documented hooks.
+
 .. _writing_plugins_one_off:
 
 Writing one-off plugins

From cc6774cbaaba2359e0a92cfcc41ad988680075d6 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 28 Feb 2021 14:34:44 -0800
Subject: [PATCH 0024/1323] Upgrade httpx and remove xfail from tests, refs #1005

---
 setup.py           | 2 +-
 tests/test_api.py  | 2 --
 tests/test_html.py | 3 ---
 3 files changed, 1 insertion(+), 6 deletions(-)

diff --git a/setup.py b/setup.py
index 34b6b396..15ee63fe 100644
--- a/setup.py
+++ b/setup.py
@@ -48,7 +48,7 @@ setup(
         "click-default-group~=1.2.2",
         "Jinja2>=2.10.3,<2.12.0",
         "hupper~=1.9",
-        "httpx>=0.15",
+        "httpx>=0.17",
         "pint~=0.9",
         "pluggy~=0.13.0",
         "uvicorn~=0.11",
diff --git a/tests/test_api.py b/tests/test_api.py
index 0b5401d6..caf23329 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -810,7 +810,6 @@ def test_table_shape_object_compound_primary_key(app_client):
     assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json
 
 
-@pytest.mark.xfail
 def test_table_with_slashes_in_name(app_client):
     response = app_client.get(
         "/fixtures/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json"
@@ -1286,7 +1285,6 @@ def test_row_format_in_querystring(app_client):
     assert [{"id": "1", "content": "hello"}] == response.json["rows"]
 
 
-@pytest.mark.xfail
 def test_row_strange_table_name(app_client):
     response = app_client.get(
         "/fixtures/table%2Fwith%2Fslashes.csv/3.json?_shape=objects"
diff --git a/tests/test_html.py b/tests/test_html.py
index e21bd64d..3482ec35 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -158,7 +158,6 @@ def test_row_redirects_with_url_hash(app_client_with_hash):
     assert response.status == 200
 
 
-@pytest.mark.xfail
 def test_row_strange_table_name_with_url_hash(app_client_with_hash):
     response = app_client_with_hash.get(
         "/fixtures/table%2Fwith%2Fslashes.csv/3", allow_redirects=False
@@ -552,7 +551,6 @@ def test_facets_persist_through_filter_form(app_client):
     ]
 
 
-@pytest.mark.xfail
 @pytest.mark.parametrize(
     "path,expected_classes",
     [
@@ -584,7 +582,6 @@ def test_css_classes_on_body(app_client, path, expected_classes):
     assert classes == expected_classes
 
 
-@pytest.mark.xfail
 @pytest.mark.parametrize(
     "path,expected_considered",
     [

From 47eb885cc2c3aafa03645c330c6f597bee9b3b25 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 28 Feb 2021 19:44:04 -0800
Subject: [PATCH 0025/1323] JSON faceting now suggested even if column has blank strings, closes #1246

---
 datasette/facets.py  | 11 ++++++++---
 tests/test_facets.py | 22 ++++++++++++++++++++++
 2 files changed, 30 insertions(+), 3 deletions(-)

diff --git a/datasette/facets.py b/datasette/facets.py
index 207d819d..01628760 100644
--- a/datasette/facets.py
+++ b/datasette/facets.py
@@ -279,6 +279,7 @@ class ArrayFacet(Facet):
             suggested_facet_sql = """
                 select distinct json_type({column})
                 from ({sql})
+                where {column} is not null and {column} != ''
             """.format(
                 column=escape_sqlite(column), sql=self.sql
             )
@@ -298,9 +299,13 @@ class ArrayFacet(Facet):
                 v[0]
                 for v in await self.ds.execute(
                     self.database,
-                    "select {column} from ({sql}) where {column} is not null and json_array_length({column}) > 0 limit 100".format(
-                        column=escape_sqlite(column), sql=self.sql
-                    ),
+                    (
+                        "select {column} from ({sql}) "
+                        "where {column} is not null "
+                        "and {column} != '' "
+                        "and json_array_length({column}) > 0 "
+                        "limit 100"
+                    ).format(column=escape_sqlite(column), sql=self.sql),
                     self.params,
                     truncate=False,
                     custom_time_limit=self.ds.setting(
diff --git a/tests/test_facets.py b/tests/test_facets.py
index 1e19dc3a..31518682 100644
--- a/tests/test_facets.py
+++ b/tests/test_facets.py
@@ -1,3 +1,5 @@
+from datasette.app import Datasette
+from datasette.database import Database
 from datasette.facets import ColumnFacet, ArrayFacet, DateFacet
 from datasette.utils.asgi import Request
 from datasette.utils import detect_json1
@@ -325,3 +327,23 @@ async def test_date_facet_results(app_client):
             "truncated": False,
         }
     } == buckets
+
+
+@pytest.mark.asyncio
+async def test_json_array_with_blanks_and_nulls():
+    ds = Datasette([], memory=True)
+    db = ds.add_database(Database(ds, memory_name="test_json_array"))
+    await db.execute_write("create table foo(json_column text)", block=True)
+    for value in ('["a", "b", "c"]', '["a", "b"]', "", None):
+        await db.execute_write(
+            "insert into foo (json_column) values (?)", [value], block=True
+        )
+    response = await ds.client.get("/test_json_array/foo.json")
+    data = response.json()
+    assert data["suggested_facets"] == [
+        {
+            "name": "json_column",
+            "type": "array",
+            "toggle_url": "http://localhost/test_json_array/foo.json?_facet_array=json_column",
+        }
+    ]

From 7c87532acc4e9d92caa1c4ee29a3446200928018 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 28 Feb 2021 20:02:18 -0800
Subject: [PATCH 0026/1323] New .add_memory_database() method, closes #1247

---
 datasette/app.py                 |  3 +++
 docs/internals.rst               | 29 ++++++++++++++++++++---------
 tests/test_internals_database.py |  4 ++--
 3 files changed, 25 insertions(+), 11 deletions(-)

diff --git a/datasette/app.py b/datasette/app.py
index e3272c6e..02d432df 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -390,6 +390,9 @@ class Datasette:
             self.databases[name] = db
         return db
 
+    def add_memory_database(self, memory_name):
+        return self.add_database(Database(self, memory_name=memory_name))
+
     def remove_database(self, name):
         self.databases.pop(name)
 
diff --git a/docs/internals.rst b/docs/internals.rst
index 713f5d7d..e3bb83fd 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -273,7 +273,25 @@ The ``db`` parameter should be an instance of the ``datasette.database.Database``
 
 This will add a mutable database and serve it at ``/my-new-database``.
 
-To create a shared in-memory database named ``statistics``, use the following:
+``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example:
+
+.. code-block:: python
+
+    db = datasette.add_database(Database(datasette, memory_name="statistics"))
+    await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True)
+
+.. _datasette_add_memory_database:
+
+.add_memory_database(name)
+--------------------------
+
+Adds a shared in-memory database with the specified name:
+
+.. code-block:: python
+
+    datasette.add_memory_database("statistics")
+
+This is a shortcut for the following:
 
 .. code-block:: python
 
@@ -284,14 +302,7 @@ To create a shared in-memory database named ``statistics``, use the following:
         memory_name="statistics"
     ))
 
-This database will be served at ``/statistics``.
-
-``.add_database()`` returns the Database instance, with its name set as the ``database.name`` attribute. Any time you are working with a newly added database you should use the return value of ``.add_database()``, for example:
-
-.. code-block:: python
-
-    db = datasette.add_database(Database(datasette, memory_name="statistics"))
-    await db.execute_write("CREATE TABLE foo(id integer primary key)", block=True)
+Using either of these pattern will result in the in-memory database being served at ``/statistics``.
 
diff --git a/tests/test_internals_database.py b/tests/test_internals_database.py
index 086f1a48..b60aaa8e 100644
--- a/tests/test_internals_database.py
+++ b/tests/test_internals_database.py
@@ -479,9 +479,9 @@ async def test_attached_databases(app_client_two_attached_databases_crossdb_enabled):
 async def test_database_memory_name(app_client):
     ds = app_client.ds
     foo1 = ds.add_database(Database(ds, memory_name="foo"))
-    foo2 = ds.add_database(Database(ds, memory_name="foo"))
+    foo2 = ds.add_memory_database("foo")
     bar1 = ds.add_database(Database(ds, memory_name="bar"))
-    bar2 = ds.add_database(Database(ds, memory_name="bar"))
+    bar2 = ds.add_memory_database("bar")
     for db in (foo1, foo2, bar1, bar2):
         table_names = await db.table_names()
         assert table_names == []

From 4f9a2f1f47dcf7e8561d68a8a07f5009a13cfdb3 Mon Sep 17 00:00:00 2001
From: David Boucha
Date: Wed, 3 Mar 2021 22:46:10 -0700
Subject: [PATCH 0027/1323] Fix small typo (#1243)

Thanks, @UtahDave
---
 docs/deploying.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/deploying.rst b/docs/deploying.rst
index 4e04ea1d..0f892f83 100644
--- a/docs/deploying.rst
+++ b/docs/deploying.rst
@@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data
 
     $ python3 -c 'import secrets; print(secrets.token_hex(32))'
 
-This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details.
+This configuration will run Datasette against all database files contained in the ``/home/ubuntu/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details.
 
 You can start the Datasette process running using the following::

From d0fd833b8cdd97e1b91d0f97a69b494895d82bee Mon Sep 17 00:00:00 2001
From: Bob Whitelock
Date: Sun, 7 Mar 2021 07:41:17 +0000
Subject: [PATCH 0028/1323] Add compile option to Dockerfile to fix failing test (fixes #696) (#1223)

This test was failing when run inside the Docker container:
`test_searchable[/fixtures/searchable.json?_search=te*+AND+do*&_searchmode=raw-expected_rows3]`,
with this error:

```
    def test_searchable(app_client, path, expected_rows):
        response = app_client.get(path)
>       assert expected_rows == response.json["rows"]
E       AssertionError: assert [[1, 'barry c...sel', 'puma']] == []
E         Left contains 2 more items, first extra item: [1, 'barry cat', 'terry dog', 'panther']
E         Full diff:
E         + []
E         - [[1, 'barry cat', 'terry dog', 'panther'],
E         -  [2, 'terry dog', 'sara weasel', 'puma']]
```

The issue was that the version of sqlite3 built inside the Docker container was
built with FTS3 and FTS4 enabled, but without the `SQLITE_ENABLE_FTS3_PARENTHESIS`
compile option passed, which adds support for using `AND` and `NOT` within
`match` expressions (see
https://sqlite.org/fts3.html#compiling_and_enabling_fts3_and_fts4 and
https://www.sqlite.org/compile.html).

Without this, the `AND` used in the search in this test was being interpreted as
a literal string, and so no matches were found. Adding this compile option fixes
this.

Thanks, @bobwhitelock
---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index aba701ab..f4b14146 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,7 +7,7 @@ RUN apt update \
 
 RUN wget "https://www.sqlite.org/2020/sqlite-autoconf-3310100.tar.gz" && tar xzf sqlite-autoconf-3310100.tar.gz \
-    && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \
+    && cd sqlite-autoconf-3310100 && ./configure --disable-static --enable-fts5 --enable-json1 CFLAGS="-g -O2 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_JSON1" \
     && make && make install
 
 RUN wget "http://www.gaia-gis.it/gaia-sins/freexl-sources/freexl-1.0.5.tar.gz" && tar zxf freexl-1.0.5.tar.gz \

From a1bcd2fbe5e47bb431045f65eeceb5eb3a6718d5 Mon Sep 17 00:00:00 2001
From: Jean-Baptiste Pressac
Date: Wed, 10 Mar 2021 19:26:39 +0100
Subject: [PATCH 0029/1323] Minor typo in IP adress (#1256)

127.0.01 replaced by 127.0.0.1
---
 docs/deploying.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/deploying.rst b/docs/deploying.rst
index 0f892f83..48261b59 100644
--- a/docs/deploying.rst
+++ b/docs/deploying.rst
@@ -67,7 +67,7 @@ You can start the Datasette process running using the following::
 
 You can confirm that Datasette is running on port 8000 like so::
 
-    curl 127.0.01:8000/-/versions.json
+    curl 127.0.0.1:8000/-/versions.json
     # Should output JSON showing the installed version
 
 Datasette will not be accessible from outside the server because it is listening on ``127.0.0.1``. You can expose it by instead listening on ``0.0.0.0``, but a better way is to set up a proxy such as ``nginx``.
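
The patch that follows converts every bare ``open()`` call in the codebase to one of two patterns: a ``with`` block for plain file paths, or ``pathlib.Path.read_text()``/``write_text()`` when a ``Path`` object is already in hand. A minimal illustration of both patterns, assuming only the standard library — the file names are illustrative:

```python
import json
import pathlib

# Pattern 1: a context manager closes the handle even if an exception is raised.
with open("inspect-data.json") as fp:
    inspect_data = json.load(fp)

# Pattern 2: Path.read_text() opens, reads and closes the file in one call.
config = json.loads(pathlib.Path("settings.json").read_text())
```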
From 8e18c7943181f228ce5ebcea48deb59ce50bee1f Mon Sep 17 00:00:00 2001
From: Konstantin Baikov <4488943+kbaikov@users.noreply.github.com>
Date: Thu, 11 Mar 2021 17:15:49 +0100
Subject: [PATCH 0030/1323] Use context manager instead of plain open (#1211)

Context manager with open closes the files after usage.

When the object is already a pathlib.Path i used read_text write_text functions.

In some cases pathlib.Path.open were used in context manager, it is basically the same as builtin open.

Thanks, Konstantin Baikov!
---
 datasette/app.py               | 13 ++++++-------
 datasette/cli.py               | 13 +++++++------
 datasette/publish/cloudrun.py  |  6 ++++--
 datasette/publish/heroku.py    | 17 ++++++++++-------
 datasette/utils/__init__.py    |  6 ++++--
 setup.py                       |  3 ++-
 tests/conftest.py              |  6 ++----
 tests/fixtures.py              |  5 +++--
 tests/test_cli.py              |  3 ++-
 tests/test_cli_serve_get.py    |  3 ++-
 tests/test_docs.py             |  8 ++++----
 tests/test_package.py          |  6 ++++--
 tests/test_plugins.py          |  3 ++-
 tests/test_publish_cloudrun.py | 32 ++++++++++++++++++++------------
 tests/test_publish_heroku.py   | 12 ++++++++----
 tests/test_utils.py            | 18 ++++++++++++------
 update-docs-help.py            |  2 +-
 17 files changed, 93 insertions(+), 63 deletions(-)

diff --git a/datasette/app.py b/datasette/app.py
index 02d432df..f43ec205 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -212,7 +212,7 @@ class Datasette:
             and (config_dir / "inspect-data.json").exists()
             and not inspect_data
         ):
-            inspect_data = json.load((config_dir / "inspect-data.json").open())
+            inspect_data = json.loads((config_dir / "inspect-data.json").read_text())
         if immutables is None:
             immutable_filenames = [i["file"] for i in inspect_data.values()]
             immutables = [
@@ -269,7 +269,7 @@ class Datasette:
         if config_dir and (config_dir / "config.json").exists():
             raise StartupError("config.json should be renamed to settings.json")
         if config_dir and (config_dir / "settings.json").exists() and not config:
-            config = json.load((config_dir / "settings.json").open())
+            config = json.loads((config_dir / "settings.json").read_text())
         self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions
         self.version_note = version_note
@@ -450,11 +450,10 @@ class Datasette:
 
     def app_css_hash(self):
         if not hasattr(self, "_app_css_hash"):
-            self._app_css_hash = hashlib.sha1(
-                open(os.path.join(str(app_root), "datasette/static/app.css"))
-                .read()
-                .encode("utf8")
-            ).hexdigest()[:6]
+            with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp:
+                self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[
+                    :6
+                ]
         return self._app_css_hash
 
     async def get_canned_queries(self, database_name, actor):
diff --git a/datasette/cli.py b/datasette/cli.py
index 96a41740..2fa039a0 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -125,13 +125,13 @@ def cli():
 @sqlite_extensions
 def inspect(files, inspect_file, sqlite_extensions):
     app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
-    if inspect_file == "-":
-        out = sys.stdout
-    else:
-        out = open(inspect_file, "w")
     loop = asyncio.get_event_loop()
     inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
-    out.write(json.dumps(inspect_data, indent=2))
+    if inspect_file == "-":
+        sys.stdout.write(json.dumps(inspect_data, indent=2))
+    else:
+        with open(inspect_file, "w") as fp:
+            fp.write(json.dumps(inspect_data, indent=2))
 
 
 async def inspect_(files, sqlite_extensions):
@@ -475,7 +475,8 @@ def serve(
 
     inspect_data = None
     if inspect_file:
-        inspect_data = json.load(open(inspect_file))
+        with open(inspect_file) as fp:
+            inspect_data = json.load(fp)
 
     metadata_data = None
     if metadata:
diff --git a/datasette/publish/cloudrun.py b/datasette/publish/cloudrun.py
index 7f9e89e2..bad223a1 100644
--- a/datasette/publish/cloudrun.py
+++ b/datasette/publish/cloudrun.py
@@ -141,9 +141,11 @@ def publish_subcommand(publish):
         if show_files:
             if os.path.exists("metadata.json"):
                 print("=== metadata.json ===\n")
-                print(open("metadata.json").read())
+                with open("metadata.json") as fp:
+                    print(fp.read())
             print("\n==== Dockerfile ====\n")
-            print(open("Dockerfile").read())
+            with open("Dockerfile") as fp:
+                print(fp.read())
             print("\n====================\n")
 
         image_id = f"gcr.io/{project}/{name}"
diff --git a/datasette/publish/heroku.py b/datasette/publish/heroku.py
index c0c70e12..19fe3fbe 100644
--- a/datasette/publish/heroku.py
+++ b/datasette/publish/heroku.py
@@ -171,9 +171,11 @@ def temporary_heroku_directory(
         os.chdir(tmp.name)
 
         if metadata_content:
-            open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
+            with open("metadata.json", "w") as fp:
+                fp.write(json.dumps(metadata_content, indent=2))
 
-        open("runtime.txt", "w").write("python-3.8.7")
+        with open("runtime.txt", "w") as fp:
+            fp.write("python-3.8.7")
 
         if branch:
             install = [
@@ -182,11 +184,11 @@ def temporary_heroku_directory(
         else:
             install = ["datasette"] + list(install)
 
-        open("requirements.txt", "w").write("\n".join(install))
+        with open("requirements.txt", "w") as fp:
+            fp.write("\n".join(install))
         os.mkdir("bin")
-        open("bin/post_compile", "w").write(
-            "datasette inspect --inspect-file inspect-data.json"
-        )
+        with open("bin/post_compile", "w") as fp:
+            fp.write("datasette inspect --inspect-file inspect-data.json")
 
         extras = []
         if template_dir:
@@ -218,7 +220,8 @@ def temporary_heroku_directory(
         procfile_cmd = "web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}".format(
             quoted_files=quoted_files, extras=" ".join(extras)
         )
-        open("Procfile", "w").write(procfile_cmd)
+        with open("Procfile", "w") as fp:
+            fp.write(procfile_cmd)
 
         for path, filename in zip(file_paths, file_names):
             link_or_copy(path, os.path.join(tmp.name, filename))
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 47ca0551..1fedb69c 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -428,8 +428,10 @@ def temporary_docker_directory(
         )
         os.chdir(datasette_dir)
         if metadata_content:
-            open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
-        open("Dockerfile", "w").write(dockerfile)
+            with open("metadata.json", "w") as fp:
+                fp.write(json.dumps(metadata_content, indent=2))
+            with open("Dockerfile", "w") as fp:
+                fp.write(dockerfile)
         for path, filename in zip(file_paths, file_names):
            link_or_copy(path, os.path.join(datasette_dir, filename))
         if template_dir:
diff --git a/setup.py b/setup.py
index 15ee63fe..3540e30a 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,8 @@ def get_version():
         os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py"
     )
     g = {}
-    exec(open(path).read(), g)
+    with open(path) as fp:
+        exec(fp.read(), g)
     return g["__version__"]
diff --git a/tests/conftest.py b/tests/conftest.py
index b00ea006..ad3eb9f1 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -75,10 +75,8 @@ def check_permission_actions_are_documented():
     from datasette.plugins import pm
 
     content = (
-        (pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst")
-        .open()
-        .read()
-    )
+        pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst"
+    ).read_text()
     permissions_re = re.compile(r"\.\. _permissions_([^\s:]+):")
     documented_permission_actions = set(permissions_re.findall(content)).union(
         UNDOCUMENTED_PERMISSIONS
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 30113ff2..2fd8e9cb 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -789,7 +789,8 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
         conn.executescript(GENERATED_COLUMNS_SQL)
     print(f"Test tables written to {db_filename}")
     if metadata:
-        open(metadata, "w").write(json.dumps(METADATA, indent=4))
+        with open(metadata, "w") as fp:
+            fp.write(json.dumps(METADATA, indent=4))
         print(f"- metadata written to {metadata}")
     if plugins_path:
         path = pathlib.Path(plugins_path)
@@ -798,7 +799,7 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
         test_plugins = pathlib.Path(__file__).parent / "plugins"
         for filepath in test_plugins.glob("*.py"):
             newpath = path / filepath.name
-            newpath.write_text(filepath.open().read())
+            newpath.write_text(filepath.read_text())
             print(f"    Wrote plugin: {newpath}")
     if extra_db_filename:
         if pathlib.Path(extra_db_filename).exists():
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 8ddd32f6..e094ccb6 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -49,7 +49,8 @@ def test_inspect_cli_writes_to_file(app_client):
         cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"]
     )
     assert 0 == result.exit_code, result.output
-    data = json.load(open("foo.json"))
+    with open("foo.json") as fp:
+        data = json.load(fp)
     assert ["fixtures"] == list(data.keys())
diff --git a/tests/test_cli_serve_get.py b/tests/test_cli_serve_get.py
index aaa692e5..90fbfe3b 100644
--- a/tests/test_cli_serve_get.py
+++ b/tests/test_cli_serve_get.py
@@ -14,7 +14,8 @@ def test_serve_with_get(tmp_path_factory):
 
         @hookimpl
         def startup(datasette):
-            open("{}", "w").write("hello")
+            with open("{}", "w") as fp:
+                fp.write("hello")
     """.format(
             str(plugins_dir / "hello.txt")
         ),
diff --git a/tests/test_docs.py b/tests/test_docs.py
index 44b0810a..efd267b9 100644
--- a/tests/test_docs.py
+++ b/tests/test_docs.py
@@ -19,13 +19,13 @@ def get_headings(content, underline="-"):
 
 
 def get_labels(filename):
-    content = (docs_path / filename).open().read()
+    content = (docs_path / filename).read_text()
     return set(label_re.findall(content))
 
 
 @pytest.fixture(scope="session")
 def settings_headings():
-    return get_headings((docs_path / "settings.rst").open().read(), "~")
+    return get_headings((docs_path / "settings.rst").read_text(), "~")
 
 
 @pytest.mark.parametrize("setting", app.SETTINGS)
@@ -43,7 +43,7 @@ def test_settings_are_documented(settings_headings, setting):
     ),
 )
 def test_help_includes(name, filename):
-    expected = open(str(docs_path / filename)).read()
+    expected = (docs_path / filename).read_text()
     runner = CliRunner()
     result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88)
     actual = f"$ datasette {name} --help\n\n{result.output}"
@@ -55,7 +55,7 @@ def test_help_includes(name, filename):
 
 @pytest.fixture(scope="session")
 def plugin_hooks_content():
-    return (docs_path / "plugin_hooks.rst").open().read()
+    return (docs_path / "plugin_hooks.rst").read_text()
 
 
 @pytest.mark.parametrize(
diff --git a/tests/test_package.py b/tests/test_package.py
index 3248b3a4..bb939643 100644
--- a/tests/test_package.py
+++ b/tests/test_package.py
@@ -32,7 +32,8 @@ def test_package(mock_call, mock_which):
     capture = CaptureDockerfile()
     mock_call.side_effect = capture
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"])
         assert 0 == result.exit_code
         mock_call.assert_has_calls([mock.call(["docker", "build", "."])])
@@ -47,7 +48,8 @@ def test_package_with_port(mock_call, mock_which):
     mock_call.side_effect = capture
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"]
         )
diff --git a/tests/test_plugins.py b/tests/test_plugins.py
index 715c7c17..ee6f1efa 100644
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -225,7 +225,8 @@ def test_plugin_config_env_from_list(app_client):
 
 
 def test_plugin_config_file(app_client):
-    open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
+    with open(TEMP_PLUGIN_SECRET_FILE, "w") as fp:
+        fp.write("FROM_FILE")
     assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
     # Ensure secrets aren't visible in /-/metadata.json
     metadata = app_client.get("/-/metadata.json")
diff --git a/tests/test_publish_cloudrun.py b/tests/test_publish_cloudrun.py
index 2ef90705..7881ebae 100644
--- a/tests/test_publish_cloudrun.py
+++ b/tests/test_publish_cloudrun.py
@@ -11,7 +11,8 @@ def test_publish_cloudrun_requires_gcloud(mock_which):
     mock_which.return_value = False
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
         assert result.exit_code == 1
         assert "Publishing to Google Cloud requires gcloud" in result.output
@@ -40,7 +41,8 @@ def test_publish_cloudrun_prompts_for_service(
     mock_which.return_value = True
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli, ["publish", "cloudrun", "test.db"], input="input-service"
         )
@@ -81,7 +83,8 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
     mock_which.return_value = True
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
         )
@@ -120,7 +123,8 @@ def test_publish_cloudrun_memory(
     mock_which.return_value = True
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli,
             ["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory],
@@ -152,17 +156,19 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
 
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
-        open("metadata.yml", "w").write(
-            textwrap.dedent(
-                """
+        with open("test.db", "w") as fp:
+            fp.write("data")
+        with open("metadata.yml", "w") as fp:
+            fp.write(
+                textwrap.dedent(
+                    """
             title: Hello from metadata YAML
             plugins:
               datasette-auth-github:
                 foo: bar
             """
-            ).strip()
-        )
+                ).strip()
+            )
         result = runner.invoke(
             cli.cli,
             [
@@ -228,7 +234,8 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which):
 
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli,
             [
@@ -295,7 +302,8 @@ def test_publish_cloudrun_extra_options(
 
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli,
             [
diff --git a/tests/test_publish_heroku.py b/tests/test_publish_heroku.py
index c7a38031..c011ab43 100644
--- a/tests/test_publish_heroku.py
+++ b/tests/test_publish_heroku.py
@@ -8,7 +8,8 @@ def test_publish_heroku_requires_heroku(mock_which):
     mock_which.return_value = False
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"])
         assert result.exit_code == 1
         assert "Publishing to Heroku requires heroku" in result.output
@@ -22,7 +23,8 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which):
     mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)]
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("t.db", "w").write("data")
+        with open("t.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n")
         assert 0 != result.exit_code
     mock_check_output.assert_has_calls(
@@ -54,7 +56,8 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
     }[repr(s)]
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"]
         )
@@ -88,7 +91,8 @@ def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which):
     }[repr(s)]
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli,
             [
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 56306339..ecef6f7a 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -232,7 +232,8 @@ def test_to_css_class(s, expected):
 def test_temporary_docker_directory_uses_hard_link():
     with tempfile.TemporaryDirectory() as td:
         os.chdir(td)
-        open("hello", "w").write("world")
+        with open("hello", "w") as fp:
+            fp.write("world")
         # Default usage of this should use symlink
         with utils.temporary_docker_directory(
             files=["hello"],
@@ -249,7 +250,8 @@ def test_temporary_docker_directory_uses_hard_link():
             secret="secret",
         ) as temp_docker:
             hello = os.path.join(temp_docker, "hello")
-            assert "world" == open(hello).read()
+            with open(hello) as fp:
+                assert "world" == fp.read()
             # It should be a hard link
             assert 2 == os.stat(hello).st_nlink
 
@@ -260,7 +262,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
     mock_link.side_effect = OSError
     with tempfile.TemporaryDirectory() as td:
         os.chdir(td)
-        open("hello", "w").write("world")
+        with open("hello", "w") as fp:
+            fp.write("world")
         # Default usage of this should use symlink
         with utils.temporary_docker_directory(
             files=["hello"],
@@ -277,7 +280,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
             secret=None,
         ) as temp_docker:
             hello = os.path.join(temp_docker, "hello")
-            assert "world" == open(hello).read()
+            with open(hello) as fp:
+                assert "world" == fp.read()
             # It should be a copy, not a hard link
             assert 1 == os.stat(hello).st_nlink
 
@@ -285,7 +289,8 @@ def 
test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link): def test_temporary_docker_directory_quotes_args(): with tempfile.TemporaryDirectory() as td: os.chdir(td) - open("hello", "w").write("world") + with open("hello", "w") as fp: + fp.write("world") with utils.temporary_docker_directory( files=["hello"], name="t", @@ -301,7 +306,8 @@ def test_temporary_docker_directory_quotes_args(): secret="secret", ) as temp_docker: df = os.path.join(temp_docker, "Dockerfile") - df_contents = open(df).read() + with open(df) as fp: + df_contents = fp.read() assert "'$PWD'" in df_contents assert "'--$HOME'" in df_contents assert "ENV DATASETTE_SECRET 'secret'" in df_contents diff --git a/update-docs-help.py b/update-docs-help.py index 3a192575..292d1dcd 100644 --- a/update-docs-help.py +++ b/update-docs-help.py @@ -18,7 +18,7 @@ def update_help_includes(): result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88) actual = f"$ datasette {name} --help\n\n{result.output}" actual = actual.replace("Usage: cli ", "Usage: datasette ") - open(docs_path / filename, "w").write(actual) + (docs_path / filename).write_text(actual) if __name__ == "__main__": From c4f1ec7f33fd7d5b93f0f895dafb5351cc3bfc5b Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 20 Mar 2021 14:32:23 -0700 Subject: [PATCH 0031/1323] Documentation for Response.asgi_send(), closes #1266 --- docs/internals.rst | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/docs/internals.rst b/docs/internals.rst index e3bb83fd..18032406 100644 --- a/docs/internals.rst +++ b/docs/internals.rst @@ -138,6 +138,28 @@ Each of these responses will use the correct corresponding content-type - ``text Each of the helper methods take optional ``status=`` and ``headers=`` arguments, documented above. +.. _internals_response_asgi_send: + +Returning a response with .asgi_send(send) +------------------------------------------ + + +In most cases you will return ``Response`` objects from your own view functions. You can also use a ``Response`` instance to respond at a lower level via ASGI, for example if you are writing code that uses the :ref:`plugin_asgi_wrapper` hook. + +Create a ``Response`` object and then use ``await response.asgi_send(send)``, passing the ASGI ``send`` function. For example: + +.. code-block:: python + + async def require_authorization(scope, recieve, send): + response = Response.text( + "401 Authorization Required", + headers={ + "www-authenticate": 'Basic realm="Datasette", charset="UTF-8"' + }, + status=401, + ) + await response.asgi_send(send) + .. 
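+The same pattern works inside ASGI middleware registered with the
+:ref:`plugin_asgi_wrapper` hook. The following is a minimal sketch rather than
+part of Datasette itself - the ``wrap_with_auth_check`` and ``check_auth``
+names are illustrative, and it reuses the ``require_authorization`` function
+defined above:
+
+.. code-block:: python
+
+    from datasette import hookimpl
+
+
+    @hookimpl
+    def asgi_wrapper(datasette):
+        def wrap_with_auth_check(app):
+            async def check_auth(scope, receive, send):
+                # Only guard HTTP requests - lifespan messages pass straight
+                # through to the wrapped application
+                if scope["type"] == "http":
+                    headers = dict(scope.get("headers") or [])
+                    if b"authorization" not in headers:
+                        # Respond directly at the ASGI level
+                        await require_authorization(scope, receive, send)
+                        return
+                await app(scope, receive, send)
+
+            return check_auth
+
+        return wrap_with_auth_check
+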
 .. _internals_response_set_cookie:

 Setting cookies with response.set_cookie()

From 6ad544df5e6bd027a8e27317041e6168aee07459 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Tue, 23 Mar 2021 09:19:41 -0700
Subject: [PATCH 0032/1323] Fixed master -> main in a bunch of places, mainly docs

---
 datasette/cli.py                         |  2 +-
 datasette/publish/common.py              |  2 +-
 datasette/templates/patterns.html        | 16 ++++++++--------
 docs/contributing.rst                    |  2 +-
 docs/custom_templates.rst                |  2 +-
 docs/datasette-package-help.txt          |  2 +-
 docs/datasette-publish-cloudrun-help.txt |  2 +-
 docs/datasette-publish-heroku-help.txt   |  2 +-
 docs/plugin_hooks.rst                    |  4 ++--
 docs/publish.rst                         |  4 ++--
 docs/spatialite.rst                      |  2 +-
 tests/fixtures.py                        |  4 ++--
 tests/test_html.py                       |  9 ++++-----
 13 files changed, 26 insertions(+), 27 deletions(-)

diff --git a/datasette/cli.py b/datasette/cli.py
index 2fa039a0..42b5c115 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -191,7 +191,7 @@ def plugins(all, plugins_dir):
     help="Path to JSON/YAML file containing metadata to publish",
 )
 @click.option("--extra-options", help="Extra options to pass to datasette serve")
-@click.option("--branch", help="Install datasette from a GitHub branch e.g. master")
+@click.option("--branch", help="Install datasette from a GitHub branch e.g. main")
 @click.option(
     "--template-dir",
     type=click.Path(exists=True, file_okay=False, dir_okay=True),
diff --git a/datasette/publish/common.py b/datasette/publish/common.py
index b6570290..29665eb3 100644
--- a/datasette/publish/common.py
+++ b/datasette/publish/common.py
@@ -19,7 +19,7 @@ def add_common_publish_arguments_and_options(subcommand):
             "--extra-options", help="Extra options to pass to datasette serve"
         ),
         click.option(
-            "--branch", help="Install datasette from a GitHub branch e.g. master"
+            "--branch", help="Install datasette from a GitHub branch e.g. main"
         ),
         click.option(
             "--template-dir",
diff --git a/datasette/templates/patterns.html b/datasette/templates/patterns.html
index 984c1bf6..3f9b5a16 100644
--- a/datasette/templates/patterns.html
+++ b/datasette/templates/patterns.html
@@ -70,10 +70,10 @@

             Data license:
-                <a href="https://github.com/simonw/datasette/blob/master/LICENSE">Apache License 2.0</a>
+                <a href="https://github.com/simonw/datasette/blob/main/LICENSE">Apache License 2.0</a>
             ·
             Data source:
-                <a href="https://github.com/simonw/datasette/blob/master/tests/fixtures.py"
+                <a href="https://github.com/simonw/datasette/blob/main/tests/fixtures.py"
                 >tests/fixtures.py</a>
             ·
             About:
@@ -118,10 +118,10 @@

             Data license:
-                <a href="https://github.com/simonw/datasette/blob/master/LICENSE">Apache License 2.0</a>
+                <a href="https://github.com/simonw/datasette/blob/main/LICENSE">Apache License 2.0</a>
             ·
             Data source:
-                <a href="https://github.com/simonw/datasette/blob/master/tests/fixtures.py"
+                <a href="https://github.com/simonw/datasette/blob/main/tests/fixtures.py"
                 >tests/fixtures.py</a>
             ·
             About:
@@ -177,10 +177,10 @@

             Data license:
-                <a href="https://github.com/simonw/datasette/blob/master/LICENSE">Apache License 2.0</a>
+                <a href="https://github.com/simonw/datasette/blob/main/LICENSE">Apache License 2.0</a>
             ·
             Data source:
-                <a href="https://github.com/simonw/datasette/blob/master/tests/fixtures.py"
+                <a href="https://github.com/simonw/datasette/blob/main/tests/fixtures.py"
                 >tests/fixtures.py</a>
             ·
             About:
@@ -478,10 +478,10 @@