Mirror of https://github.com/simonw/datasette.git (synced 2025-12-10 16:51:24 +01:00)
Upgrade to Black 20.8b1, closes #958
parent 26b2922f17
commit a648bb82ba
22 changed files with 203 additions and 58 deletions
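Nearly every hunk below is mechanical reformatting from the new Black release. Black 20.8b1 introduces the "magic trailing comma": a call, list, or dict that already ends with a trailing comma is kept exploded, one element per line, where 19.10b0 packed it onto a single indented line. A minimal sketch of the rule, using hypothetical names rather than code from this repository:

# Hypothetical example only, to illustrate the formatting change driving this diff.
def configure(name, port, debug):
    return {"name": name, "port": port, "debug": debug}


# Black 19.10b0 allowed a wrapped call with a trailing comma to keep its
# arguments on one line:
settings_old = configure(
    name="demo", port=8001, debug=True,
)

# Black 20.8b1 sees the trailing comma and forces one argument per line,
# which is the shape most hunks below converge on:
settings_new = configure(
    name="demo",
    port=8001,
    debug=True,
)

The setup.py change near the end pins the new formatter version; the docstring hunks are covered by a second note further down.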
@@ -396,7 +396,9 @@ class Datasette:
     async def get_canned_queries(self, database_name, actor):
         queries = self.metadata("queries", database=database_name, fallback=False) or {}
         for more_queries in pm.hook.canned_queries(
-            datasette=self, database=database_name, actor=actor,
+            datasette=self,
+            database=database_name,
+            actor=actor,
         ):
             more_queries = await await_me_maybe(more_queries)
             queries.update(more_queries or {})
@@ -468,7 +470,10 @@ class Datasette:
         "Check permissions using the permissions_allowed plugin hook"
         result = None
         for check in pm.hook.permission_allowed(
-            datasette=self, actor=actor, action=action, resource=resource,
+            datasette=self,
+            actor=actor,
+            action=action,
+            resource=resource,
         ):
             check = await await_me_maybe(check)
             if check is not None:
@@ -861,22 +866,28 @@ class Datasette:
             r"/-/actor(?P<as_format>(\.json)?)$",
         )
         add_route(
-            AuthTokenView.as_view(self), r"/-/auth-token$",
+            AuthTokenView.as_view(self),
+            r"/-/auth-token$",
         )
         add_route(
-            LogoutView.as_view(self), r"/-/logout$",
+            LogoutView.as_view(self),
+            r"/-/logout$",
         )
         add_route(
-            PermissionsDebugView.as_view(self), r"/-/permissions$",
+            PermissionsDebugView.as_view(self),
+            r"/-/permissions$",
         )
         add_route(
-            MessagesDebugView.as_view(self), r"/-/messages$",
+            MessagesDebugView.as_view(self),
+            r"/-/messages$",
         )
         add_route(
-            AllowDebugView.as_view(self), r"/-/allow-debug$",
+            AllowDebugView.as_view(self),
+            r"/-/allow-debug$",
        )
         add_route(
-            PatternPortfolioView.as_view(self), r"/-/patterns$",
+            PatternPortfolioView.as_view(self),
+            r"/-/patterns$",
         )
         add_route(
             DatabaseDownload.as_view(self), r"/(?P<db_name>[^/]+?)(?P<as_db>\.db)$"
@@ -1079,7 +1090,12 @@ class DatasetteRouter:
         if status != 500:
             templates = ["{}.html".format(status)] + templates
         info.update(
-            {"ok": False, "error": message, "status": status, "title": title,}
+            {
+                "ok": False,
+                "error": message,
+                "status": status,
+                "title": title,
+            }
         )
         headers = {}
         if self.ds.cors:
@@ -174,7 +174,10 @@ def plugins(all, plugins_dir):
     default=lambda: os.urandom(32).hex(),
 )
 @click.option(
-    "-p", "--port", default=8001, help="Port to run the server on, defaults to 8001",
+    "-p",
+    "--port",
+    default=8001,
+    help="Port to run the server on, defaults to 8001",
 )
 @click.option("--title", help="Title for metadata")
 @click.option("--license", help="License label for metadata")
@@ -344,7 +347,8 @@ def uninstall(packages, yes):
     is_flag=True,
 )
 @click.option(
-    "--get", help="Run an HTTP GET request against this path, print results and exit",
+    "--get",
+    help="Run an HTTP GET request against this path, print results and exit",
 )
 @click.option("--version-note", help="Additional note to show on /-/versions")
 @click.option("--help-config", is_flag=True, help="Show available config options")
@@ -66,8 +66,8 @@ def urlsafe_components(token):
 
 
 def path_from_row_pks(row, pks, use_rowid, quote=True):
-    """ Generate an optionally URL-quoted unique identifier
-        for a row from its primary keys."""
+    """Generate an optionally URL-quoted unique identifier
+    for a row from its primary keys."""
     if use_rowid:
         bits = [row["rowid"]]
     else:
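Beyond the trailing comma rule, this Black release also normalizes docstrings: the space after the opening triple quote is dropped and continuation lines are re-indented to line up with the opening quotes, which is presumably why near-identical docstring lines appear on both the removed and added sides of this hunk and of the get_format hunk further down. A small illustration with hypothetical functions:

def summarize_old():
    """ Summary line as Black 19.10b0 left it,
        with the continuation indented past the quotes."""
    return None


def summarize_new():
    """Summary line after Black 20.8b1,
    with the leading space gone and the continuation aligned."""
    return None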
@@ -839,7 +839,9 @@ def check_connection(conn):
     ]
     for table in tables:
         try:
-            conn.execute("PRAGMA table_info({});".format(escape_sqlite(table)),)
+            conn.execute(
+                "PRAGMA table_info({});".format(escape_sqlite(table)),
+            )
         except sqlite3.OperationalError as e:
             if e.args[0] == "no such module: VirtualSpatialIndex":
                 raise SpatialiteConnectionProblem(e)
@@ -915,12 +917,18 @@ def actor_matches_allow(actor, allow):
 async def check_visibility(datasette, actor, action, resource, default=True):
     "Returns (visible, private) - visible = can you see it, private = can others see it too"
     visible = await datasette.permission_allowed(
-        actor, action, resource=resource, default=default,
+        actor,
+        action,
+        resource=resource,
+        default=default,
     )
     if not visible:
         return (False, False)
     private = not await datasette.permission_allowed(
-        None, action, resource=resource, default=default,
+        None,
+        action,
+        resource=resource,
+        default=default,
     )
     return visible, private
 
@@ -65,7 +65,10 @@ class BaseView:
 
     async def check_permission(self, request, action, resource=None):
         ok = await self.ds.permission_allowed(
-            request.actor, action, resource=resource, default=True,
+            request.actor,
+            action,
+            resource=resource,
+            default=True,
         )
         if not ok:
             raise Forbidden(action)
@@ -85,7 +88,10 @@ class BaseView:
                 repr(permission)
             )
             ok = await self.ds.permission_allowed(
-                request.actor, action, resource=resource, default=None,
+                request.actor,
+                action,
+                resource=resource,
+                default=None,
             )
             if ok is not None:
                 if ok:
@@ -343,10 +349,10 @@ class DataView(BaseView):
         return AsgiStream(stream_fn, headers=headers, content_type=content_type)
 
     async def get_format(self, request, database, args):
-        """ Determine the format of the response from the request, from URL
-            parameters or from a file extension.
+        """Determine the format of the response from the request, from URL
+        parameters or from a file extension.
 
-            `args` is a dict of the path components parsed from the URL by the router.
+        `args` is a dict of the path components parsed from the URL by the router.
         """
         # If ?_format= is provided, use that as the format
         _format = request.args.get("_format", None)
@@ -21,7 +21,11 @@ class DatabaseView(DataView):
 
     async def data(self, request, database, hash, default_labels=False, _size=None):
         await self.check_permissions(
-            request, [("view-database", database), "view-instance",],
+            request,
+            [
+                ("view-database", database),
+                "view-instance",
+            ],
         )
         metadata = (self.ds.metadata("databases") or {}).get(database, {})
         self.ds.update_with_inherited_metadata(metadata)
@@ -42,17 +46,26 @@ class DatabaseView(DataView):
         views = []
         for view_name in await db.view_names():
             visible, private = await check_visibility(
-                self.ds, request.actor, "view-table", (database, view_name),
+                self.ds,
+                request.actor,
+                "view-table",
+                (database, view_name),
             )
             if visible:
                 views.append(
-                    {"name": view_name, "private": private,}
+                    {
+                        "name": view_name,
+                        "private": private,
+                    }
                 )
 
         tables = []
         for table in table_counts:
             visible, private = await check_visibility(
-                self.ds, request.actor, "view-table", (database, table),
+                self.ds,
+                request.actor,
+                "view-table",
+                (database, table),
             )
             if not visible:
                 continue
@@ -76,7 +89,10 @@ class DatabaseView(DataView):
             await self.ds.get_canned_queries(database, request.actor)
         ).values():
             visible, private = await check_visibility(
-                self.ds, request.actor, "view-query", (database, query["name"]),
+                self.ds,
+                request.actor,
+                "view-query",
+                (database, query["name"]),
             )
             if visible:
                 canned_queries.append(dict(query, private=private))
@@ -26,7 +26,10 @@ class IndexView(BaseView):
         databases = []
         for name, db in self.ds.databases.items():
             visible, database_private = await check_visibility(
-                self.ds, request.actor, "view-database", name,
+                self.ds,
+                request.actor,
+                "view-database",
+                name,
             )
             if not visible:
                 continue
@@ -36,7 +39,10 @@ class IndexView(BaseView):
             views = []
             for view_name in await db.view_names():
                 visible, private = await check_visibility(
-                    self.ds, request.actor, "view-table", (name, view_name),
+                    self.ds,
+                    request.actor,
+                    "view-table",
+                    (name, view_name),
                 )
                 if visible:
                     views.append({"name": view_name, "private": private})
@@ -52,7 +58,10 @@ class IndexView(BaseView):
             tables = {}
             for table in table_names:
                 visible, private = await check_visibility(
-                    self.ds, request.actor, "view-table", (name, table),
+                    self.ds,
+                    request.actor,
+                    "view-table",
+                    (name, table),
                 )
                 if not visible:
                     continue
@@ -82,7 +82,11 @@ class LogoutView(BaseView):
     async def get(self, request):
         if not request.actor:
             return Response.redirect("/")
-        return await self.render(["logout.html"], request, {"actor": request.actor},)
+        return await self.render(
+            ["logout.html"],
+            request,
+            {"actor": request.actor},
+        )
 
     async def post(self, request):
         response = Response.redirect("/")
@@ -350,7 +350,10 @@ class TableView(RowTableShared):
         # Add _where= from querystring
         if "_where" in request.args:
             if not await self.ds.permission_allowed(
-                request.actor, "execute-sql", resource=database, default=True,
+                request.actor,
+                "execute-sql",
+                resource=database,
+                default=True,
             ):
                 raise DatasetteError("_where= is not allowed", status=403)
             else:
setup.py (2 changed lines)
@@ -71,7 +71,7 @@ setup(
             "pytest>=5.2.2,<6.1.0",
             "pytest-asyncio>=0.10,<0.15",
             "beautifulsoup4>=4.8.1,<4.10.0",
-            "black~=19.10b0",
+            "black==20.8b1",
             "pytest-timeout>=1.4.2,<1.5",
         ],
     },
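The only non-mechanical change is this setup.py line: the development dependency moves from a compatible-release range (black~=19.10b0) to an exact pin (black==20.8b1). Since formatting output differs between Black's beta releases, an exact pin keeps CI and contributors producing identical results. As a hedged sketch, not part of this commit, a contributor could verify the installed version against that pin before reformatting:

# Illustrative sketch only; the expected version mirrors the pin added above.
import black

EXPECTED = "20.8b1"

if black.__version__ != EXPECTED:
    raise SystemExit(
        "Found black {}, expected {}: install the pinned test extras first.".format(
            black.__version__, EXPECTED
        )
    )
print("black {} matches the setup.py pin.".format(EXPECTED))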
@@ -700,7 +700,9 @@ def assert_permissions_checked(datasette, actions):
         ], """Missing expected permission check: action={}, resource={}
 Permission checks seen: {}
 """.format(
-            action, resource, json.dumps(list(datasette._permission_checks), indent=4),
+            action,
+            resource,
+            json.dumps(list(datasette._permission_checks), indent=4),
         )
 
 
@@ -56,7 +56,10 @@ def extra_css_urls(template, database, table, view_name, columns, request, datasette):
 @hookimpl
 def extra_js_urls():
     return [
-        {"url": "https://plugin-example.com/jquery.js", "sri": "SRIHASH",},
+        {
+            "url": "https://plugin-example.com/jquery.js",
+            "sri": "SRIHASH",
+        },
         "https://plugin-example.com/plugin1.js",
     ]
 
@@ -73,7 +76,9 @@ def extra_body_script(
                 "database": database,
                 "table": table,
                 "config": datasette.plugin_config(
-                    "name-of-plugin", database=database, table=table,
+                    "name-of-plugin",
+                    database=database,
+                    table=table,
                 ),
                 "view_name": view_name,
                 "request_path": request.path if request is not None else None,
@@ -99,7 +104,9 @@ def render_cell(value, column, table, database, datasette):
             "table": table,
             "database": database,
             "config": datasette.plugin_config(
-                "name-of-plugin", database=database, table=table,
+                "name-of-plugin",
+                database=database,
+                table=table,
             ),
         }
     )
@@ -7,7 +7,10 @@ import json
 @hookimpl
 def extra_js_urls():
     return [
-        {"url": "https://plugin-example.com/jquery.js", "sri": "SRIHASH",},
+        {
+            "url": "https://plugin-example.com/jquery.js",
+            "sri": "SRIHASH",
+        },
         "https://plugin-example.com/plugin2.js",
     ]
 
@@ -1786,7 +1786,11 @@ def test_null_foreign_keys_are_not_expanded(app_client):
             "foreign_key_with_label": {"value": "1", "label": "hello"},
             "foreign_key_with_no_label": {"value": "1", "label": "1"},
         },
-        {"pk": "2", "foreign_key_with_label": None, "foreign_key_with_no_label": None,},
+        {
+            "pk": "2",
+            "foreign_key_with_label": None,
+            "foreign_key_with_no_label": None,
+        },
     ] == response.json
 
 
@@ -8,7 +8,10 @@ def test_auth_token(app_client):
     "The /-/auth-token endpoint sets the correct cookie"
     assert app_client.ds._root_token is not None
     path = "/-/auth-token?token={}".format(app_client.ds._root_token)
-    response = app_client.get(path, allow_redirects=False,)
+    response = app_client.get(
+        path,
+        allow_redirects=False,
+    )
     assert 302 == response.status
     assert "/" == response.headers["Location"]
     assert {"a": {"id": "root"}} == app_client.ds.unsign(
@@ -16,7 +19,13 @@ def test_auth_token(app_client):
     )
     # Check that a second with same token fails
     assert app_client.ds._root_token is None
-    assert 403 == app_client.get(path, allow_redirects=False,).status
+    assert (
+        403
+        == app_client.get(
+            path,
+            allow_redirects=False,
+        ).status
+    )
 
 
 def test_actor_cookie(app_client):
@@ -38,7 +47,11 @@ def test_actor_cookie_invalid(app_client):
 
 
 @pytest.mark.parametrize(
-    "offset,expected", [((24 * 60 * 60), {"id": "test"}), (-(24 * 60 * 60), None),]
+    "offset,expected",
+    [
+        ((24 * 60 * 60), {"id": "test"}),
+        (-(24 * 60 * 60), None),
+    ],
 )
 def test_actor_cookie_that_expires(app_client, offset, expected):
     expires_at = int(time.time()) + offset
@@ -72,7 +72,11 @@ def test_insert(canned_write_client):
 
 @pytest.mark.parametrize(
     "query_name,expect_csrf_hidden_field",
-    [("canned_read", False), ("add_name_specify_id", True), ("add_name", True),],
+    [
+        ("canned_read", False),
+        ("add_name_specify_id", True),
+        ("add_name", True),
+    ],
 )
 def test_canned_query_form_csrf_hidden_field(
     canned_write_client, query_name, expect_csrf_hidden_field
@@ -16,7 +16,8 @@ def custom_pages_client(tmp_path_factory):
         "utf-8",
     )
     (pages_dir / "atom.html").write_text(
-        '{{ custom_header("content-type", "application/xml") }}<?xml ...>', "utf-8",
+        '{{ custom_header("content-type", "application/xml") }}<?xml ...>',
+        "utf-8",
     )
     (pages_dir / "redirect.html").write_text(
         '{{ custom_redirect("/example") }}', "utf-8"
@@ -33,7 +33,11 @@ import pytest
         ["2%2", "3%3"],
     ),
     # notlike:
-    ((("foo__notlike", "2%2"),), ['"foo" not like :p0'], ["2%2"],),
+    (
+        (("foo__notlike", "2%2"),),
+        ['"foo" not like :p0'],
+        ["2%2"],
+    ),
     (
         (("foo__isnull", "1"), ("baz__isnull", "1"), ("bar__gt", "10")),
         ['"bar" > :p0', '"baz" is null', '"foo" is null'],
@@ -149,7 +149,8 @@ async def test_execute_write_block_true(db):
 @pytest.mark.asyncio
 async def test_execute_write_block_false(db):
     await db.execute_write(
-        "update roadside_attractions set name = ? where pk = ?", ["Mystery!", 1],
+        "update roadside_attractions set name = ? where pk = ?",
+        ["Mystery!", 1],
     )
     time.sleep(0.1)
     rows = await db.execute("select name from roadside_attractions where pk = 1")
@@ -7,7 +7,11 @@ import urllib
 
 @pytest.mark.parametrize(
     "allow,expected_anon,expected_auth",
-    [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
+    [
+        (None, 200, 200),
+        ({}, 403, 403),
+        ({"id": "root"}, 403, 200),
+    ],
 )
 def test_view_instance(allow, expected_anon, expected_auth):
     with make_app_client(metadata={"allow": allow}) as client:
@@ -23,7 +27,8 @@ def test_view_instance(allow, expected_anon, expected_auth):
                 # Should be no padlock
                 assert "<h1>Datasette 🔒</h1>" not in anon_response.text
             auth_response = client.get(
-                path, cookies={"ds_actor": client.actor_cookie({"id": "root"})},
+                path,
+                cookies={"ds_actor": client.actor_cookie({"id": "root"})},
             )
             assert expected_auth == auth_response.status
             # Check for the padlock
@@ -33,7 +38,11 @@ def test_view_instance(allow, expected_anon, expected_auth):
 
 @pytest.mark.parametrize(
     "allow,expected_anon,expected_auth",
-    [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
+    [
+        (None, 200, 200),
+        ({}, 403, 403),
+        ({"id": "root"}, 403, 200),
+    ],
 )
 def test_view_database(allow, expected_anon, expected_auth):
     with make_app_client(
@@ -50,7 +59,8 @@ def test_view_database(allow, expected_anon, expected_auth):
                 # Should be no padlock
                 assert ">fixtures 🔒</h1>" not in anon_response.text
             auth_response = client.get(
-                path, cookies={"ds_actor": client.actor_cookie({"id": "root"})},
+                path,
+                cookies={"ds_actor": client.actor_cookie({"id": "root"})},
             )
             assert expected_auth == auth_response.status
             if (
@@ -71,7 +81,8 @@ def test_database_list_respects_view_database():
         assert '<a href="/data">data</a></h2>' in anon_response.text
         assert '<a href="/fixtures">fixtures</a>' not in anon_response.text
        auth_response = client.get(
-            "/", cookies={"ds_actor": client.actor_cookie({"id": "root"})},
+            "/",
+            cookies={"ds_actor": client.actor_cookie({"id": "root"})},
         )
         assert '<a href="/data">data</a></h2>' in auth_response.text
         assert '<a href="/fixtures">fixtures</a> 🔒</h2>' in auth_response.text
@@ -102,7 +113,8 @@ def test_database_list_respects_view_table():
         for html_fragment in html_fragments:
             assert html_fragment not in anon_response_text
         auth_response_text = client.get(
-            "/", cookies={"ds_actor": client.actor_cookie({"id": "root"})},
+            "/",
+            cookies={"ds_actor": client.actor_cookie({"id": "root"})},
         ).text
         for html_fragment in html_fragments:
             assert html_fragment in auth_response_text
@@ -110,7 +122,11 @@ def test_database_list_respects_view_table():
 
 @pytest.mark.parametrize(
     "allow,expected_anon,expected_auth",
-    [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
+    [
+        (None, 200, 200),
+        ({}, 403, 403),
+        ({"id": "root"}, 403, 200),
+    ],
 )
 def test_view_table(allow, expected_anon, expected_auth):
     with make_app_client(
@@ -166,7 +182,11 @@ def test_table_list_respects_view_table():
 
 @pytest.mark.parametrize(
     "allow,expected_anon,expected_auth",
-    [(None, 200, 200), ({}, 403, 403), ({"id": "root"}, 403, 200),],
+    [
+        (None, 200, 200),
+        ({}, 403, 403),
+        ({"id": "root"}, 403, 200),
+    ],
 )
 def test_view_query(allow, expected_anon, expected_auth):
     with make_app_client(
@@ -332,7 +352,10 @@ def test_allow_debug(app_client, actor, allow, expected_fragment):
 
 @pytest.mark.parametrize(
     "allow,expected",
-    [({"id": "root"}, 403), ({"id": "root", "unauthenticated": True}, 200),],
+    [
+        ({"id": "root"}, 403),
+        ({"id": "root", "unauthenticated": True}, 200),
+    ],
 )
 def test_allow_unauthenticated(allow, expected):
     with make_app_client(metadata={"allow": allow}) as client:
@@ -420,7 +443,8 @@ def test_permissions_cascade(cascade_app_client, path, expected_status, permissions):
         ] = (allow if "query" in permissions else deny)
         cascade_app_client.ds._metadata = updated_metadata
         response = cascade_app_client.get(
-            path, cookies={"ds_actor": cascade_app_client.actor_cookie({"id": "test"})},
+            path,
+            cookies={"ds_actor": cascade_app_client.actor_cookie({"id": "test"})},
         )
         assert expected_status == response.status
     finally:
@@ -737,7 +737,9 @@ def test_hook_register_magic_parameters(restore_working_directory):
                         "sql": "insert into logs (line) values (:_request_http_version)",
                         "write": True,
                     },
-                    "get_uuid": {"sql": "select :_uuid_new",},
+                    "get_uuid": {
+                        "sql": "select :_uuid_new",
+                    },
                 }
             }
         }
@@ -105,7 +105,13 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
 @mock.patch("datasette.publish.cloudrun.check_call")
 @pytest.mark.parametrize(
     "memory,should_fail",
-    [["1Gi", False], ["2G", False], ["256Mi", False], ["4", True], ["GB", True],],
+    [
+        ["1Gi", False],
+        ["2G", False],
+        ["256Mi", False],
+        ["4", True],
+        ["GB", True],
+    ],
 )
 def test_publish_cloudrun_memory(
     mock_call, mock_output, mock_which, memory, should_fail
@@ -59,7 +59,15 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
     assert 0 == result.exit_code, result.output
     mock_call.assert_has_calls(
         [
-            mock.call(["heroku", "config:set", "-a", "f", "WEB_CONCURRENCY=1",]),
+            mock.call(
+                [
+                    "heroku",
+                    "config:set",
+                    "-a",
+                    "f",
+                    "WEB_CONCURRENCY=1",
+                ]
+            ),
             mock.call(
                 ["heroku", "builds:create", "-a", "f", "--include-vcs-ignore"]
             ),