Mirror of https://github.com/simonw/datasette.git, synced 2025-12-10 16:51:24 +01:00
render_cell(value) plugin hook
Still needs performance testing before I merge this into master
commit 510e01f224 (parent 295d005ca4)
5 changed files with 111 additions and 7 deletions
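The hookspec for render_cell() and the code that calls it live in the other changed files, which are not reproduced in this extract. For orientation, here is a minimal, self-contained sketch of the pluggy wiring a hook like this relies on; the Hookspecs class, the module registration and the first-non-None fallback below are illustrative assumptions, not Datasette's exact internals.

# Sketch only: pluggy plumbing for a render_cell(value)-style hook.
import json
import sys

import pluggy

hookspec = pluggy.HookspecMarker("datasette")
hookimpl = pluggy.HookimplMarker("datasette")


class Hookspecs:
    @hookspec
    def render_cell(self, value):
        "Customize rendering of HTML table cell values"


@hookimpl
def render_cell(value):
    # Turn '{"href": ..., "label": ...}' JSON strings into a link
    try:
        data = json.loads(value)
    except (TypeError, ValueError):
        return None
    if not isinstance(data, dict) or set(data) != {"href", "label"}:
        return None
    return '<a href="{href}">{label}</a>'.format(**data)


pm = pluggy.PluginManager("datasette")
pm.add_hookspecs(Hookspecs)
pm.register(sys.modules[__name__])  # this module acts as the plugin

# Every registered plugin is asked in turn; the first non-None result is
# used as the cell's HTML, otherwise the value is rendered the default way.
value = '{"href": "http://example.com/", "label": "Example"}'
results = pm.hook.render_cell(value=value)
print(next((r for r in results if r is not None), value))

Returning None from render_cell means "no opinion", which is why the fixture plugin in the diff below bails out early for any value it does not recognise.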
@@ -208,6 +208,8 @@ def extra_js_urls():
PLUGIN2 = '''
from datasette import hookimpl
import jinja2
import json


@hookimpl
@@ -216,6 +218,30 @@ def extra_js_urls():
        'url': 'https://example.com/jquery.js',
        'sri': 'SRIHASH',
    }, 'https://example.com/plugin2.js']


@hookimpl
def render_cell(value):
    # Render {"href": "...", "label": "..."} as link
    stripped = value.strip()
    if not (stripped.startswith("{") and stripped.endswith("}")):
        return None
    try:
        data = json.loads(value)
    except ValueError:
        return None
    if set(data.keys()) != {"href", "label"}:
        return None
    href = data["href"]
    if not (
        href.startswith("/") or href.startswith("http://")
        or href.startswith("https://")
    ):
        return None
    return jinja2.Markup('<a href="{href}">{label}</a>'.format(
        href=jinja2.escape(data["href"]),
        label=jinja2.escape(data["label"] or "") or " "
    ))
'''

TABLES = '''
@@ -3,6 +3,7 @@ from .fixtures import ( # noqa
    app_client,
)
import pytest
import urllib


def test_plugins_dir_plugin(app_client):
@@ -67,3 +68,20 @@ def test_plugins_with_duplicate_js_urls(app_client):
    ) < srcs.index(
        'https://example.com/plugin2.js'
    )


def test_plugins_render_cell(app_client):
    sql = """
        select '{"href": "http://example.com/", "label":"Example"}'
    """.strip()
    path = "/fixtures?" + urllib.parse.urlencode({
        "sql": sql,
    })
    response = app_client.get(path)
    td = Soup(
        response.body, "html.parser"
    ).find("table").find("tbody").find("td")
    a = td.find("a")
    assert a is not None, str(a)
    assert a.attrs["href"] == "http://example.com/"
    assert a.text == "Example"
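The PLUGIN2 source above is a test fixture; outside the test suite, an implementation of this hook would normally ship as an installable package that registers itself under the "datasette" setuptools entry point group. A rough packaging sketch, with invented package and module names:

# setup.py for a hypothetical plugin package exposing a render_cell hook.
# The names datasette-json-links / datasette_json_links are made up; the
# module itself would contain an @hookimpl-decorated render_cell(value)
# along the lines of the one in PLUGIN2 above.
from setuptools import setup

setup(
    name="datasette-json-links",
    version="0.1",
    py_modules=["datasette_json_links"],
    install_requires=["datasette"],
    entry_points={
        "datasette": ["json_links = datasette_json_links"]
    },
)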