Use dash encoding for database names, too, refs #1439

Commit 32548b88fd (parent b760264ae6) in https://github.com/simonw/datasette
5 changed files with 28 additions and 14 deletions
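This commit swaps urllib.parse.quote / urllib.parse.unquote_plus for dash_encode / dash_decode when building and resolving database URLs. The helpers themselves live in datasette.utils and are not part of this diff; the sketch below is a minimal reconstruction of the behaviour the tests imply, assuming dash encoding works like percent-encoding with "-" in place of "%" and a safe set of letters, digits and underscore (the real safe set may differ):

import string
import urllib.parse

# Assumed safe set for this sketch. "-" must be unsafe because it is the
# escape character itself: the tests expect "foo-bar" to encode as "foo-2Dbar".
_DASH_SAFE = frozenset(string.ascii_letters + string.digits + "_")


def dash_encode(s: str) -> str:
    # Encode each unsafe UTF-8 byte as "-XX" (uppercase hex).
    return "".join(
        chr(b) if chr(b) in _DASH_SAFE else "-{:02X}".format(b)
        for b in s.encode("utf-8")
    )


def dash_decode(s: str) -> str:
    # Map "-" back to "%" and reuse the standard percent-decoder.
    return urllib.parse.unquote(s.replace("-", "%"))


assert dash_encode("extra database") == "extra-20database"  # as in the tests below
assert dash_encode("foo-bar") == "foo-2Dbar"
assert dash_decode(dash_encode("f~/c.d")) == "f~/c.d"

Unlike percent-encoding, the encoded result contains no characters that need further URL escaping, so encoded names can be dropped straight into a path.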
@@ -31,10 +31,10 @@ class Urls:
         db = self.ds.databases[database]
         if self.ds.setting("hash_urls") and db.hash:
             path = self.path(
-                f"{urllib.parse.quote(database)}-{db.hash[:HASH_LENGTH]}", format=format
+                f"{dash_encode(database)}-{db.hash[:HASH_LENGTH]}", format=format
             )
         else:
-            path = self.path(urllib.parse.quote(database), format=format)
+            path = self.path(dash_encode(database), format=format)
         return path

     def table(self, database, table, format=None):
@@ -205,17 +205,17 @@ class DataView(BaseView):
     async def resolve_db_name(self, request, db_name, **kwargs):
         hash = None
         name = None
-        db_name = urllib.parse.unquote_plus(db_name)
-        if db_name not in self.ds.databases and "-" in db_name:
+        decoded_name = dash_decode(db_name)
+        if decoded_name not in self.ds.databases and "-" in db_name:
             # No matching DB found, maybe it's a name-hash?
             name_bit, hash_bit = db_name.rsplit("-", 1)
-            if name_bit not in self.ds.databases:
+            if dash_decode(name_bit) not in self.ds.databases:
                 raise NotFound(f"Database not found: {name}")
             else:
-                name = name_bit
+                name = dash_decode(name_bit)
                 hash = hash_bit
         else:
-            name = db_name
+            name = decoded_name

         try:
             db = self.ds.databases[name]
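With hash_urls enabled, urls.database() now produces dash_encode(name) + "-" + hash, and resolve_db_name() above decodes the name only after splitting off the hash. A hedged walk-through, with an invented 7-character hash:

# Hypothetical path component for the "extra database" DB (hash invented):
db_name = "extra-20database-abc1234"
# dash_decode(db_name) is not a registered database and "-" is present,
# so execution falls through to the name-hash branch:
name_bit, hash_bit = db_name.rsplit("-", 1)  # ("extra-20database", "abc1234")
# dash_decode(name_bit) == "extra database" is a registered database, so:
#   name = "extra database", hash = "abc1234"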
@@ -942,7 +942,7 @@ def test_cors(app_client_with_cors, path, status_code):
 )
 def test_database_with_space_in_name(app_client_two_attached_databases, path):
     response = app_client_two_attached_databases.get(
-        "/extra database" + path, follow_redirects=True
+        "/extra-20database" + path, follow_redirects=True
     )
     assert response.status == 200
|
|
@ -953,7 +953,7 @@ def test_common_prefix_database_names(app_client_conflicting_database_names):
|
||||||
d["name"]
|
d["name"]
|
||||||
for d in app_client_conflicting_database_names.get("/-/databases.json").json
|
for d in app_client_conflicting_database_names.get("/-/databases.json").json
|
||||||
]
|
]
|
||||||
for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-bar.json")):
|
for db_name, path in (("foo", "/foo.json"), ("foo-bar", "/foo-2Dbar.json")):
|
||||||
data = app_client_conflicting_database_names.get(path).json
|
data = app_client_conflicting_database_names.get(path).json
|
||||||
assert db_name == data["database"]
|
assert db_name == data["database"]
|
||||||
|
|
||||||
|
|
@@ -992,3 +992,16 @@ async def test_hidden_sqlite_stat1_table():
     data = (await ds.client.get("/db.json?_show_hidden=1")).json()
     tables = [(t["name"], t["hidden"]) for t in data["tables"]]
     assert tables == [("normal", False), ("sqlite_stat1", True)]
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("db_name", ("foo", r"fo%o", "f~/c.d"))
+async def test_dash_encoded_database_names(db_name):
+    ds = Datasette()
+    ds.add_memory_database(db_name)
+    response = await ds.client.get("/.json")
+    assert db_name in response.json().keys()
+    path = response.json()[db_name]["path"]
+    # And the JSON for that database
+    response2 = await ds.client.get(path + ".json")
+    assert response2.status_code == 200
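Under the assumed safe set from the sketch above, the three parametrized names would encode as follows (the real paths depend on the actual safe set in datasette.utils):

# dash_encode("foo")    == "foo"           -- all characters safe
# dash_encode("fo%o")   == "fo-25o"        -- "%" is byte 0x25
# dash_encode("f~/c.d") == "f-7E-2Fc-2Ed"  -- "~" 0x7E, "/" 0x2F, "." 0x2E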
@@ -9,6 +9,7 @@ from datasette.app import SETTINGS
 from datasette.plugins import DEFAULT_PLUGINS
 from datasette.cli import cli, serve
 from datasette.version import __version__
+from datasette.utils import dash_encode
 from datasette.utils.sqlite import sqlite3
 from click.testing import CliRunner
 import io
@@ -294,12 +295,12 @@ def test_weird_database_names(ensure_eventloop, tmpdir, filename):
     assert result1.exit_code == 0, result1.output
     filename_no_stem = filename.rsplit(".", 1)[0]
     expected_link = '<a href="/{}">{}</a>'.format(
-        urllib.parse.quote(filename_no_stem), filename_no_stem
+        dash_encode(filename_no_stem), filename_no_stem
     )
     assert expected_link in result1.output
     # Now try hitting that database page
     result2 = runner.invoke(
-        cli, [db_path, "--get", "/{}".format(urllib.parse.quote(filename_no_stem))]
+        cli, [db_path, "--get", "/{}".format(dash_encode(filename_no_stem))]
     )
     assert result2.exit_code == 0, result2.output
@@ -29,7 +29,7 @@ def test_homepage(app_client_two_attached_databases):
     )
     # Should be two attached databases
     assert [
-        {"href": r"/extra%20database", "text": "extra database"},
+        {"href": r"/extra-20database", "text": "extra database"},
         {"href": "/fixtures", "text": "fixtures"},
     ] == [{"href": a["href"], "text": a.text.strip()} for a in soup.select("h2 a")]
     # Database should show count text and attached tables
|
|
@ -44,8 +44,8 @@ def test_homepage(app_client_two_attached_databases):
|
||||||
{"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
|
{"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
|
||||||
]
|
]
|
||||||
assert [
|
assert [
|
||||||
{"href": r"/extra%20database/searchable", "text": "searchable"},
|
{"href": r"/extra-20database/searchable", "text": "searchable"},
|
||||||
{"href": r"/extra%20database/searchable_view", "text": "searchable_view"},
|
{"href": r"/extra-20database/searchable_view", "text": "searchable_view"},
|
||||||
] == table_links
|
] == table_links
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||