Renamed datasette.config() to .setting(), closes #1107

Simon Willison 2020-11-24 14:06:32 -08:00
commit f2e2bfcdd9
10 changed files with 86 additions and 60 deletions
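
For code that holds a Datasette instance — most commonly a plugin hook — the rename is a straight method swap: settings lookups that previously went through datasette.config() now go through datasette.setting(), and the backing attribute is _settings rather than _config. A minimal before/after sketch (the extra_template_vars hook name is real; the body is illustrative only and not part of this commit):

from datasette import hookimpl


@hookimpl
def extra_template_vars(datasette):
    # Before this commit: base_url = datasette.config("base_url")
    # After this commit:
    base_url = datasette.setting("base_url")
    return {"base_url": base_url}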


@@ -264,15 +264,15 @@ class Datasette:
             raise StartupError("config.json should be renamed to settings.json")
         if config_dir and (config_dir / "settings.json").exists() and not config:
             config = json.load((config_dir / "settings.json").open())
-        self._config = dict(DEFAULT_SETTINGS, **(config or {}))
+        self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions
         self.version_note = version_note
         self.executor = futures.ThreadPoolExecutor(
-            max_workers=self.config("num_sql_threads")
+            max_workers=self.setting("num_sql_threads")
         )
-        self.max_returned_rows = self.config("max_returned_rows")
-        self.sql_time_limit_ms = self.config("sql_time_limit_ms")
-        self.page_size = self.config("default_page_size")
+        self.max_returned_rows = self.setting("max_returned_rows")
+        self.sql_time_limit_ms = self.setting("sql_time_limit_ms")
+        self.page_size = self.setting("default_page_size")
         # Execute plugins in constructor, to ensure they are available
         # when the rest of `datasette inspect` executes
         if self.plugins_dir:
@@ -347,12 +347,12 @@ class Datasette:
     def remove_database(self, name):
         self.databases.pop(name)
 
-    def config(self, key):
-        return self._config.get(key, None)
+    def setting(self, key):
+        return self._settings.get(key, None)
 
     def config_dict(self):
         # Returns a fully resolved config dictionary, useful for templates
-        return {option.name: self.config(option.name) for option in SETTINGS}
+        return {option.name: self.setting(option.name) for option in SETTINGS}
 
     def metadata(self, key=None, database=None, table=None, fallback=True):
         """
@@ -454,8 +454,8 @@ class Datasette:
             conn.enable_load_extension(True)
             for extension in self.sqlite_extensions:
                 conn.execute(f"SELECT load_extension('{extension}')")
-        if self.config("cache_size_kb"):
-            conn.execute(f"PRAGMA cache_size=-{self.config('cache_size_kb')}")
+        if self.setting("cache_size_kb"):
+            conn.execute(f"PRAGMA cache_size=-{self.setting('cache_size_kb')}")
         # pylint: disable=no-member
         pm.hook.prepare_connection(conn=conn, database=database, datasette=self)
@@ -567,7 +567,7 @@ class Datasette:
     def absolute_url(self, request, path):
         url = urllib.parse.urljoin(request.url, path)
-        if url.startswith("http://") and self.config("force_https_urls"):
+        if url.startswith("http://") and self.setting("force_https_urls"):
            url = "https://" + url[len("http://") :]
         return url
@@ -781,12 +781,12 @@ class Datasette:
                 "extra_js_urls": await self._asset_urls(
                     "extra_js_urls", template, context, request, view_name
                 ),
-                "base_url": self.config("base_url"),
+                "base_url": self.setting("base_url"),
                 "csrftoken": request.scope["csrftoken"] if request else lambda: "",
             },
             **extra_template_vars,
         }
-        if request and request.args.get("_context") and self.config("template_debug"):
+        if request and request.args.get("_context") and self.setting("template_debug"):
             return "<pre>{}</pre>".format(
                 jinja2.escape(json.dumps(template_context, default=repr, indent=4))
             )
@@ -882,7 +882,7 @@ class Datasette:
             r"/-/plugins(?P<as_format>(\.json)?)$",
         )
         add_route(
-            JsonDataView.as_view(self, "settings.json", lambda: self._config),
+            JsonDataView.as_view(self, "settings.json", lambda: self._settings),
             r"/-/settings(?P<as_format>(\.json)?)$",
         )
         add_route(
@@ -1001,7 +1001,7 @@ class DatasetteRouter:
     async def route_path(self, scope, receive, send, path):
         # Strip off base_url if present before routing
-        base_url = self.ds.config("base_url")
+        base_url = self.ds.setting("base_url")
         if base_url != "/" and path.startswith(base_url):
             path = "/" + path[len(base_url) :]
         request = Request(scope, receive)
@@ -1016,7 +1016,7 @@ class DatasetteRouter:
         scope_modifications = {}
         # Apply force_https_urls, if set
         if (
-            self.ds.config("force_https_urls")
+            self.ds.setting("force_https_urls")
             and scope["type"] == "http"
             and scope.get("scheme") != "https"
         ):


@@ -136,7 +136,7 @@ class ColumnFacet(Facet):
     async def suggest(self):
         row_count = await self.get_row_count()
         columns = await self.get_columns(self.sql, self.params)
-        facet_size = self.ds.config("default_facet_size")
+        facet_size = self.ds.setting("default_facet_size")
         suggested_facets = []
         already_enabled = [c["config"]["simple"] for c in self.get_configs()]
         for column in columns:
@@ -158,7 +158,7 @@ class ColumnFacet(Facet):
                     suggested_facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"),
+                    custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"),
                 )
                 num_distinct_values = len(distinct_values)
                 if (
@@ -188,7 +188,7 @@ class ColumnFacet(Facet):
         qs_pairs = self.get_querystring_pairs()
-        facet_size = self.ds.config("default_facet_size")
+        facet_size = self.ds.setting("default_facet_size")
         for source_and_config in self.get_configs():
             config = source_and_config["config"]
             source = source_and_config["source"]
@@ -208,7 +208,7 @@ class ColumnFacet(Facet):
                     facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.config("facet_time_limit_ms"),
+                    custom_time_limit=self.ds.setting("facet_time_limit_ms"),
                 )
                 facet_results_values = []
                 facet_results[column] = {
@@ -290,7 +290,7 @@ class ArrayFacet(Facet):
                         suggested_facet_sql,
                         self.params,
                         truncate=False,
-                        custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"),
+                        custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"),
                         log_sql_errors=False,
                     )
                     types = tuple(r[0] for r in results.rows)
@@ -305,7 +305,7 @@ class ArrayFacet(Facet):
                             ),
                             self.params,
                             truncate=False,
-                            custom_time_limit=self.ds.config(
+                            custom_time_limit=self.ds.setting(
                                 "facet_suggest_time_limit_ms"
                             ),
                             log_sql_errors=False,
@@ -335,7 +335,7 @@ class ArrayFacet(Facet):
         facet_results = {}
         facets_timed_out = []
-        facet_size = self.ds.config("default_facet_size")
+        facet_size = self.ds.setting("default_facet_size")
         for source_and_config in self.get_configs():
             config = source_and_config["config"]
             source = source_and_config["source"]
@@ -354,7 +354,7 @@ class ArrayFacet(Facet):
                     facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.config("facet_time_limit_ms"),
+                    custom_time_limit=self.ds.setting("facet_time_limit_ms"),
                 )
                 facet_results_values = []
                 facet_results[column] = {
@@ -421,7 +421,7 @@ class DateFacet(Facet):
                     suggested_facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.config("facet_suggest_time_limit_ms"),
+                    custom_time_limit=self.ds.setting("facet_suggest_time_limit_ms"),
                     log_sql_errors=False,
                 )
                 values = tuple(r[0] for r in results.rows)
@@ -446,7 +446,7 @@ class DateFacet(Facet):
         facet_results = {}
         facets_timed_out = []
         args = dict(self.get_querystring_pairs())
-        facet_size = self.ds.config("default_facet_size")
+        facet_size = self.ds.setting("default_facet_size")
         for source_and_config in self.get_configs():
             config = source_and_config["config"]
             source = source_and_config["source"]
@@ -467,7 +467,7 @@ class DateFacet(Facet):
                     facet_sql,
                     self.params,
                     truncate=False,
-                    custom_time_limit=self.ds.config("facet_time_limit_ms"),
+                    custom_time_limit=self.ds.setting("facet_time_limit_ms"),
                 )
                 facet_results_values = []
                 facet_results[column] = {


@@ -10,7 +10,7 @@ class Urls:
         if not isinstance(path, PrefixedUrlString):
             if path.startswith("/"):
                 path = path[1:]
-            path = self.ds.config("base_url") + path
+            path = self.ds.setting("base_url") + path
         if format is not None:
             path = path_with_format(path=path, format=format)
         return PrefixedUrlString(path)
@@ -29,7 +29,7 @@ class Urls:
     def database(self, database, format=None):
         db = self.ds.databases[database]
-        if self.ds.config("hash_urls") and db.hash:
+        if self.ds.setting("hash_urls") and db.hash:
             path = self.path(f"{database}-{db.hash[:HASH_LENGTH]}", format=format)
         else:
             path = self.path(database, format=format)


@@ -230,7 +230,7 @@ class DataView(BaseView):
                 should_redirect += kwargs["as_db"]
 
         if (
-            (self.ds.config("hash_urls") or "_hash" in request.args)
+            (self.ds.setting("hash_urls") or "_hash" in request.args)
             and
             # Redirect only if database is immutable
             not self.ds.databases[name].is_mutable
@@ -260,7 +260,7 @@ class DataView(BaseView):
         stream = request.args.get("_stream")
         if stream:
             # Some quick sanity checks
-            if not self.ds.config("allow_csv_stream"):
+            if not self.ds.setting("allow_csv_stream"):
                 raise BadRequest("CSV streaming is disabled")
             if request.args.get("_next"):
                 raise BadRequest("_next not allowed for CSV streaming")
@@ -296,7 +296,7 @@ class DataView(BaseView):
         async def stream_fn(r):
             nonlocal data
-            writer = csv.writer(LimitedWriter(r, self.ds.config("max_csv_mb")))
+            writer = csv.writer(LimitedWriter(r, self.ds.setting("max_csv_mb")))
             first = True
             next = None
             while first or (next and stream):
@@ -566,9 +566,9 @@ class DataView(BaseView):
         ttl = request.args.get("_ttl", None)
         if ttl is None or not ttl.isdigit():
             if correct_hash_provided:
-                ttl = self.ds.config("default_cache_ttl_hashed")
+                ttl = self.ds.setting("default_cache_ttl_hashed")
             else:
-                ttl = self.ds.config("default_cache_ttl")
+                ttl = self.ds.setting("default_cache_ttl")
         return self.set_response_headers(r, ttl)


@@ -136,7 +136,7 @@ class DatabaseView(DataView):
                 "show_hidden": request.args.get("_show_hidden"),
                 "editable": True,
                 "metadata": metadata,
-                "allow_download": self.ds.config("allow_download")
+                "allow_download": self.ds.setting("allow_download")
                 and not db.is_mutable
                 and database != ":memory:",
             },
@@ -161,7 +161,7 @@ class DatabaseDownload(DataView):
         db = self.ds.databases[database]
         if db.is_memory:
             raise DatasetteError("Cannot download :memory: database", status=404)
-        if not self.ds.config("allow_download") or db.is_mutable:
+        if not self.ds.setting("allow_download") or db.is_mutable:
             raise Forbidden("Database download is forbidden")
         if not db.path:
             raise DatasetteError("Cannot download database", status=404)


@@ -121,7 +121,7 @@ class RowTableShared(DataView):
             }
 
         cell_rows = []
-        base_url = self.ds.config("base_url")
+        base_url = self.ds.setting("base_url")
         for row in rows:
             cells = []
             # Unless we are a view, the first column is a link - either to the rowid
@@ -654,7 +654,7 @@ class TableView(RowTableShared):
             pass
 
         # facets support
-        if not self.ds.config("allow_facet") and any(
+        if not self.ds.setting("allow_facet") and any(
             arg.startswith("_facet") for arg in request.args
         ):
             raise BadRequest("_facet= is not allowed")
@@ -772,8 +772,8 @@ class TableView(RowTableShared):
         suggested_facets = []
         if (
-            self.ds.config("suggest_facets")
-            and self.ds.config("allow_facet")
+            self.ds.setting("suggest_facets")
+            and self.ds.setting("allow_facet")
             and not _next
         ):
             for facet in facet_instances:
@@ -801,7 +801,7 @@ class TableView(RowTableShared):
                 results.description,
                 rows,
                 link_column=not is_view,
-                truncate_cells=self.ds.config("truncate_cells_html"),
+                truncate_cells=self.ds.setting("truncate_cells_html"),
             )
 
         metadata = (
             (self.ds.metadata("databases") or {})


@@ -350,7 +350,21 @@ Returns the absolute URL for the given path, including the protocol and host. For example:
     absolute_url = datasette.absolute_url(request, "/dbname/table.json")
     # Would return "http://localhost:8001/dbname/table.json"
 
-The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`settings_force_https_urls` configuration setting is taken into account.
+The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`setting_force_https_urls` configuration setting is taken into account.
+
+.setting(key)
+-------------
+
+``key`` - string
+    The name of the setting, e.g. ``base_url``.
+
+Returns the configured value for the specified :ref:`setting <settings>`. This can be a string, boolean or integer depending on the requested setting.
+
+For example:
+
+.. code-block:: python
+
+    downloads_are_allowed = datasette.setting("allow_download")
+
 .. _internals_datasette_client:


@@ -33,3 +33,15 @@ def test_sign_unsign(datasette, value, namespace):
     assert value == datasette.unsign(signed, *extra_args)
     with pytest.raises(BadSignature):
         datasette.unsign(signed[:-1] + ("!" if signed[-1] != "!" else ":"))
+
+
+@pytest.mark.parametrize(
+    "setting,expected",
+    (
+        ("base_url", "/"),
+        ("max_csv_mb", 100),
+        ("allow_csv_stream", True),
+    ),
+)
+def test_datasette_setting(datasette, setting, expected):
+    assert datasette.setting(setting) == expected


@@ -33,10 +33,10 @@ async def test_client_methods(datasette, method, path, expected_status):
 @pytest.mark.asyncio
 @pytest.mark.parametrize("prefix", [None, "/prefix/"])
 async def test_client_post(datasette, prefix):
-    original_base_url = datasette._config["base_url"]
+    original_base_url = datasette._settings["base_url"]
     try:
         if prefix is not None:
-            datasette._config["base_url"] = prefix
+            datasette._settings["base_url"] = prefix
         response = await datasette.client.post(
             "/-/messages",
             data={
@@ -48,7 +48,7 @@ async def test_client_post(datasette, prefix):
         assert response.status_code == 302
         assert "ds_messages" in response.cookies
     finally:
-        datasette._config["base_url"] = original_base_url
+        datasette._settings["base_url"] = original_base_url
 
 
 @pytest.mark.asyncio
@@ -56,12 +56,12 @@ async def test_client_post(datasette, prefix):
     "prefix,expected_path", [(None, "/asgi-scope"), ("/prefix/", "/prefix/asgi-scope")]
 )
 async def test_client_path(datasette, prefix, expected_path):
-    original_base_url = datasette._config["base_url"]
+    original_base_url = datasette._settings["base_url"]
     try:
         if prefix is not None:
-            datasette._config["base_url"] = prefix
+            datasette._settings["base_url"] = prefix
         response = await datasette.client.get("/asgi-scope")
         path = response.json()["path"]
         assert path == expected_path
     finally:
-        datasette._config["base_url"] = original_base_url
+        datasette._settings["base_url"] = original_base_url


@@ -20,14 +20,14 @@ def ds():
     ],
 )
 def test_path(ds, base_url, path, expected):
-    ds._config["base_url"] = base_url
+    ds._settings["base_url"] = base_url
     actual = ds.urls.path(path)
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)
 
 
 def test_path_applied_twice_does_not_double_prefix(ds):
-    ds._config["base_url"] = "/prefix/"
+    ds._settings["base_url"] = "/prefix/"
     path = ds.urls.path("/")
     assert path == "/prefix/"
     path = ds.urls.path(path)
@@ -42,7 +42,7 @@ def test_path_applied_twice_does_not_double_prefix(ds):
     ],
 )
 def test_instance(ds, base_url, expected):
-    ds._config["base_url"] = base_url
+    ds._settings["base_url"] = base_url
     actual = ds.urls.instance()
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)
@@ -56,7 +56,7 @@ def test_instance(ds, base_url, expected):
     ],
 )
 def test_static(ds, base_url, file, expected):
-    ds._config["base_url"] = base_url
+    ds._settings["base_url"] = base_url
     actual = ds.urls.static(file)
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)
@@ -80,7 +80,7 @@ def test_static(ds, base_url, file, expected):
     ],
 )
 def test_static_plugins(ds, base_url, plugin, file, expected):
-    ds._config["base_url"] = base_url
+    ds._settings["base_url"] = base_url
     actual = ds.urls.static_plugins(plugin, file)
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)
@@ -94,7 +94,7 @@ def test_static_plugins(ds, base_url, plugin, file, expected):
     ],
 )
 def test_logout(ds, base_url, expected):
-    ds._config["base_url"] = base_url
+    ds._settings["base_url"] = base_url
     actual = ds.urls.logout()
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)
@@ -109,7 +109,7 @@ def test_logout(ds, base_url, expected):
     ],
 )
 def test_database(ds, base_url, format, expected):
-    ds._config["base_url"] = base_url
+    ds._settings["base_url"] = base_url
     actual = ds.urls.database(":memory:", format=format)
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)
@@ -125,7 +125,7 @@ def test_database(ds, base_url, format, expected):
     ],
 )
 def test_table_and_query(ds, base_url, name, format, expected):
-    ds._config["base_url"] = base_url
+    ds._settings["base_url"] = base_url
     actual1 = ds.urls.table(":memory:", name, format=format)
     assert actual1 == expected
     assert isinstance(actual1, PrefixedUrlString)
@@ -143,7 +143,7 @@ def test_table_and_query(ds, base_url, name, format, expected):
     ],
 )
 def test_row(ds, base_url, format, expected):
-    ds._config["base_url"] = base_url
+    ds._settings["base_url"] = base_url
     actual = ds.urls.row(":memory:", "facetable", "1", format=format)
     assert actual == expected
     assert isinstance(actual, PrefixedUrlString)
@@ -152,9 +152,9 @@ def test_row(ds, base_url, format, expected):
 @pytest.mark.parametrize("base_url", ["/", "/prefix/"])
 def test_database_hashed(app_client_with_hash, base_url):
     ds = app_client_with_hash.ds
-    original_base_url = ds._config["base_url"]
+    original_base_url = ds._settings["base_url"]
     try:
-        ds._config["base_url"] = base_url
+        ds._settings["base_url"] = base_url
         db_hash = ds.get_database("fixtures").hash
         assert len(db_hash) == 64
         expected = f"{base_url}fixtures-{db_hash[:7]}"
@@ -163,4 +163,4 @@ def test_database_hashed(app_client_with_hash, base_url):
         assert ds.urls.query("fixtures", "name") == expected + "/name"
     finally:
         # Reset this since fixture is shared with other tests
-        ds._config["base_url"] = original_base_url
+        ds._settings["base_url"] = original_base_url