mirror of
https://github.com/simonw/datasette.git
synced 2025-12-10 16:51:24 +01:00
Renamed --limit to --config, added --help-config, closes #274
Removed the --page_size= argument to datasette serve in favour of:
datasette serve --config default_page_size:50 mydb.db
Added new help section:
$ datasette --help-config
Config options:
default_page_size Default page size for the table view
(default=100)
max_returned_rows Maximum rows that can be returned from a table
or custom query (default=1000)
sql_time_limit_ms Time limit for a SQL query in milliseconds
(default=1000)
default_facet_size Number of values to return for requested facets
(default=30)
facet_time_limit_ms Time limit for calculating a requested facet
(default=200)
facet_suggest_time_limit_ms Time limit for calculating a suggested facet
(default=50)
This commit is contained in:
parent
918de9403e
commit
f6183ff5fa
10 changed files with 142 additions and 95 deletions
13
README.md
13
README.md
|
|
@@ -113,7 +113,6 @@ http://localhost:8001/History/downloads.json?_shape=objects will return that dat
|
||||||
useful for development
|
useful for development
|
||||||
--cors Enable CORS by serving Access-Control-Allow-
|
--cors Enable CORS by serving Access-Control-Allow-
|
||||||
Origin: *
|
Origin: *
|
||||||
--page_size INTEGER Page size - default is 100
|
|
||||||
--load-extension PATH Path to a SQLite extension to load
|
--load-extension PATH Path to a SQLite extension to load
|
||||||
--inspect-file TEXT Path to JSON file created using "datasette
|
--inspect-file TEXT Path to JSON file created using "datasette
|
||||||
inspect"
|
inspect"
|
||||||
|
|
@@ -123,8 +122,9 @@ http://localhost:8001/History/downloads.json?_shape=objects will return that dat
|
||||||
--plugins-dir DIRECTORY Path to directory containing custom plugins
|
--plugins-dir DIRECTORY Path to directory containing custom plugins
|
||||||
--static STATIC MOUNT mountpoint:path-to-directory for serving static
|
--static STATIC MOUNT mountpoint:path-to-directory for serving static
|
||||||
files
|
files
|
||||||
--limit LIMIT Set a limit using limitname:integer
|
--config CONFIG Set config option using configname:value
|
||||||
datasette.readthedocs.io/en/latest/limits.html
|
datasette.readthedocs.io/en/latest/config.html
|
||||||
|
--help-config Show available config options
|
||||||
--help Show this message and exit.
|
--help Show this message and exit.
|
||||||
|
|
||||||
## metadata.json
|
## metadata.json
|
||||||
|
|
@@ -213,13 +213,14 @@ If you have docker installed you can use `datasette package` to create a new Doc
|
||||||
|
|
||||||
Both publish and package accept an `extra_options` argument option, which will affect how the resulting application is executed. For example, say you want to increase the SQL time limit for a particular container:
|
Both publish and package accept an `extra_options` argument option, which will affect how the resulting application is executed. For example, say you want to increase the SQL time limit for a particular container:
|
||||||
|
|
||||||
datasette package parlgov.db --extra-options="--limit sql_time_limit_ms:2500 --page_size=10"
|
datasette package parlgov.db \
|
||||||
|
--extra-options="--config sql_time_limit_ms:2500 --config default_page_size:10"
|
||||||
|
|
||||||
The resulting container will run the application with those options.
|
The resulting container will run the application with those options.
|
||||||
|
|
||||||
Here's example output for the package command:
|
Here's example output for the package command:
|
||||||
|
|
||||||
$ datasette package parlgov.db --extra-options="--limit sql_time_limit_ms:2500 --page_size=10"
|
$ datasette package parlgov.db --extra-options="--config sql_time_limit_ms:2500"
|
||||||
Sending build context to Docker daemon 4.459MB
|
Sending build context to Docker daemon 4.459MB
|
||||||
Step 1/7 : FROM python:3
|
Step 1/7 : FROM python:3
|
||||||
---> 79e1dc9af1c1
|
---> 79e1dc9af1c1
|
||||||
|
|
@@ -238,7 +239,7 @@ Here's example output for the package command:
|
||||||
Step 6/7 : EXPOSE 8001
|
Step 6/7 : EXPOSE 8001
|
||||||
---> Using cache
|
---> Using cache
|
||||||
---> 8e83844b0fed
|
---> 8e83844b0fed
|
||||||
Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --limit sql_time_limit_ms:2500 --page_size=10
|
Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --config sql_time_limit_ms:2500
|
||||||
---> Using cache
|
---> Using cache
|
||||||
---> 1bd380ea8af3
|
---> 1bd380ea8af3
|
||||||
Successfully built 1bd380ea8af3
|
Successfully built 1bd380ea8af3
|
||||||
|
|
|
||||||
|
|
@@ -1,3 +1,4 @@
|
||||||
|
import collections
|
||||||
import hashlib
|
import hashlib
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
|
|
@@ -45,12 +46,32 @@ pm.add_hookspecs(hookspecs)
|
||||||
pm.load_setuptools_entrypoints("datasette")
|
pm.load_setuptools_entrypoints("datasette")
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_LIMITS = {
|
ConfigOption = collections.namedtuple(
|
||||||
"max_returned_rows": 1000,
|
"ConfigOption", ("name", "default", "help")
|
||||||
"sql_time_limit_ms": 1000,
|
)
|
||||||
"default_facet_size": 30,
|
CONFIG_OPTIONS = (
|
||||||
"facet_time_limit_ms": 200,
|
ConfigOption("default_page_size", 100, """
|
||||||
"facet_suggest_time_limit_ms": 50,
|
Default page size for the table view
|
||||||
|
""".strip()),
|
||||||
|
ConfigOption("max_returned_rows", 1000, """
|
||||||
|
Maximum rows that can be returned from a table or custom query
|
||||||
|
""".strip()),
|
||||||
|
ConfigOption("sql_time_limit_ms", 1000, """
|
||||||
|
Time limit for a SQL query in milliseconds
|
||||||
|
""".strip()),
|
||||||
|
ConfigOption("default_facet_size", 30, """
|
||||||
|
Number of values to return for requested facets
|
||||||
|
""".strip()),
|
||||||
|
ConfigOption("facet_time_limit_ms", 200, """
|
||||||
|
Time limit for calculating a requested facet
|
||||||
|
""".strip()),
|
||||||
|
ConfigOption("facet_suggest_time_limit_ms", 50, """
|
||||||
|
Time limit for calculating a suggested facet
|
||||||
|
""".strip()),
|
||||||
|
)
|
||||||
|
DEFAULT_CONFIG = {
|
||||||
|
option.name: option.default
|
||||||
|
for option in CONFIG_OPTIONS
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@@ -87,7 +108,6 @@ class Datasette:
|
||||||
files,
|
files,
|
||||||
num_threads=3,
|
num_threads=3,
|
||||||
cache_headers=True,
|
cache_headers=True,
|
||||||
page_size=100,
|
|
||||||
cors=False,
|
cors=False,
|
||||||
inspect_data=None,
|
inspect_data=None,
|
||||||
metadata=None,
|
metadata=None,
|
||||||
|
|
@@ -95,13 +115,12 @@ class Datasette:
|
||||||
template_dir=None,
|
template_dir=None,
|
||||||
plugins_dir=None,
|
plugins_dir=None,
|
||||||
static_mounts=None,
|
static_mounts=None,
|
||||||
limits=None,
|
config=None,
|
||||||
):
|
):
|
||||||
self.files = files
|
self.files = files
|
||||||
self.num_threads = num_threads
|
self.num_threads = num_threads
|
||||||
self.executor = futures.ThreadPoolExecutor(max_workers=num_threads)
|
self.executor = futures.ThreadPoolExecutor(max_workers=num_threads)
|
||||||
self.cache_headers = cache_headers
|
self.cache_headers = cache_headers
|
||||||
self.page_size = page_size
|
|
||||||
self.cors = cors
|
self.cors = cors
|
||||||
self._inspect = inspect_data
|
self._inspect = inspect_data
|
||||||
self.metadata = metadata or {}
|
self.metadata = metadata or {}
|
||||||
|
|
@@ -110,9 +129,10 @@ class Datasette:
|
||||||
self.template_dir = template_dir
|
self.template_dir = template_dir
|
||||||
self.plugins_dir = plugins_dir
|
self.plugins_dir = plugins_dir
|
||||||
self.static_mounts = static_mounts or []
|
self.static_mounts = static_mounts or []
|
||||||
self.limits = dict(DEFAULT_LIMITS, **(limits or {}))
|
self.config = dict(DEFAULT_CONFIG, **(config or {}))
|
||||||
self.max_returned_rows = self.limits["max_returned_rows"]
|
self.max_returned_rows = self.config["max_returned_rows"]
|
||||||
self.sql_time_limit_ms = self.limits["sql_time_limit_ms"]
|
self.sql_time_limit_ms = self.config["sql_time_limit_ms"]
|
||||||
|
self.page_size = self.config["default_page_size"]
|
||||||
# Execute plugins in constructor, to ensure they are available
|
# Execute plugins in constructor, to ensure they are available
|
||||||
# when the rest of `datasette inspect` executes
|
# when the rest of `datasette inspect` executes
|
||||||
if self.plugins_dir:
|
if self.plugins_dir:
|
||||||
|
|
@@ -443,8 +463,8 @@ class Datasette:
|
||||||
"/-/plugins<as_json:(\.json)?$>",
|
"/-/plugins<as_json:(\.json)?$>",
|
||||||
)
|
)
|
||||||
app.add_route(
|
app.add_route(
|
||||||
JsonDataView.as_view(self, "limits.json", lambda: self.limits),
|
JsonDataView.as_view(self, "config.json", lambda: self.config),
|
||||||
"/-/limits<as_json:(\.json)?$>",
|
"/-/config<as_json:(\.json)?$>",
|
||||||
)
|
)
|
||||||
app.add_route(
|
app.add_route(
|
||||||
DatabaseView.as_view(self), "/<db_name:[^/\.]+?><as_json:(\.jsono?)?$>"
|
DatabaseView.as_view(self), "/<db_name:[^/\.]+?><as_json:(\.jsono?)?$>"
|
||||||
|
|
|
||||||
|
|
@@ -1,11 +1,12 @@
|
||||||
import click
|
import click
|
||||||
|
from click import formatting
|
||||||
from click_default_group import DefaultGroup
|
from click_default_group import DefaultGroup
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
from subprocess import call, check_output
|
from subprocess import call, check_output
|
||||||
import sys
|
import sys
|
||||||
from .app import Datasette, DEFAULT_LIMITS
|
from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS
|
||||||
from .utils import temporary_docker_directory, temporary_heroku_directory
|
from .utils import temporary_docker_directory, temporary_heroku_directory
|
||||||
|
|
||||||
|
|
||||||
|
|
@@ -24,8 +25,8 @@ class StaticMount(click.ParamType):
|
||||||
return path, dirpath
|
return path, dirpath
|
||||||
|
|
||||||
|
|
||||||
class Limit(click.ParamType):
|
class Config(click.ParamType):
|
||||||
name = "limit"
|
name = "config"
|
||||||
|
|
||||||
def convert(self, value, param, ctx):
|
def convert(self, value, param, ctx):
|
||||||
ok = True
|
ok = True
|
||||||
|
|
@@ -39,7 +40,7 @@ class Limit(click.ParamType):
|
||||||
'"{}" should be of format name:integer'.format(value),
|
'"{}" should be of format name:integer'.format(value),
|
||||||
param, ctx
|
param, ctx
|
||||||
)
|
)
|
||||||
if name not in DEFAULT_LIMITS:
|
if name not in DEFAULT_CONFIG:
|
||||||
self.fail("{} is not a valid limit".format(name), param, ctx)
|
self.fail("{} is not a valid limit".format(name), param, ctx)
|
||||||
return name, int(intvalue)
|
return name, int(intvalue)
|
||||||
|
|
||||||
|
|
@@ -384,7 +385,6 @@ def package(
|
||||||
@click.option(
|
@click.option(
|
||||||
"--cors", is_flag=True, help="Enable CORS by serving Access-Control-Allow-Origin: *"
|
"--cors", is_flag=True, help="Enable CORS by serving Access-Control-Allow-Origin: *"
|
||||||
)
|
)
|
||||||
@click.option("--page_size", default=100, help="Page size - default is 100")
|
|
||||||
@click.option(
|
@click.option(
|
||||||
"sqlite_extensions",
|
"sqlite_extensions",
|
||||||
"--load-extension",
|
"--load-extension",
|
||||||
|
|
@@ -419,11 +419,16 @@ def package(
|
||||||
multiple=True,
|
multiple=True,
|
||||||
)
|
)
|
||||||
@click.option(
|
@click.option(
|
||||||
"--limit",
|
"--config",
|
||||||
type=Limit(),
|
type=Config(),
|
||||||
help="Set a limit using limitname:integer datasette.readthedocs.io/en/latest/limits.html",
|
help="Set config option using configname:value datasette.readthedocs.io/en/latest/config.html",
|
||||||
multiple=True,
|
multiple=True,
|
||||||
)
|
)
|
||||||
|
@click.option(
|
||||||
|
"--help-config",
|
||||||
|
is_flag=True,
|
||||||
|
help="Show available config options",
|
||||||
|
)
|
||||||
def serve(
|
def serve(
|
||||||
files,
|
files,
|
||||||
host,
|
host,
|
||||||
|
|
@@ -431,16 +436,27 @@ def serve(
|
||||||
debug,
|
debug,
|
||||||
reload,
|
reload,
|
||||||
cors,
|
cors,
|
||||||
page_size,
|
|
||||||
sqlite_extensions,
|
sqlite_extensions,
|
||||||
inspect_file,
|
inspect_file,
|
||||||
metadata,
|
metadata,
|
||||||
template_dir,
|
template_dir,
|
||||||
plugins_dir,
|
plugins_dir,
|
||||||
static,
|
static,
|
||||||
limit,
|
config,
|
||||||
|
help_config,
|
||||||
):
|
):
|
||||||
"""Serve up specified SQLite database files with a web UI"""
|
"""Serve up specified SQLite database files with a web UI"""
|
||||||
|
if help_config:
|
||||||
|
formatter = formatting.HelpFormatter()
|
||||||
|
with formatter.section("Config options"):
|
||||||
|
formatter.write_dl([
|
||||||
|
(option.name, '{} (default={})'.format(
|
||||||
|
option.help, option.default
|
||||||
|
))
|
||||||
|
for option in CONFIG_OPTIONS
|
||||||
|
])
|
||||||
|
click.echo(formatter.getvalue())
|
||||||
|
sys.exit(0)
|
||||||
if reload:
|
if reload:
|
||||||
import hupper
|
import hupper
|
||||||
|
|
||||||
|
|
@@ -461,14 +477,13 @@ def serve(
|
||||||
files,
|
files,
|
||||||
cache_headers=not debug and not reload,
|
cache_headers=not debug and not reload,
|
||||||
cors=cors,
|
cors=cors,
|
||||||
page_size=page_size,
|
|
||||||
inspect_data=inspect_data,
|
inspect_data=inspect_data,
|
||||||
metadata=metadata_data,
|
metadata=metadata_data,
|
||||||
sqlite_extensions=sqlite_extensions,
|
sqlite_extensions=sqlite_extensions,
|
||||||
template_dir=template_dir,
|
template_dir=template_dir,
|
||||||
plugins_dir=plugins_dir,
|
plugins_dir=plugins_dir,
|
||||||
static_mounts=static,
|
static_mounts=static,
|
||||||
limits=dict(limit),
|
config=dict(config),
|
||||||
)
|
)
|
||||||
# Force initial hashing/table counting
|
# Force initial hashing/table counting
|
||||||
ds.inspect()
|
ds.inspect()
|
||||||
|
|
|
||||||
|
|
@@ -500,7 +500,7 @@ class TableView(RowTableShared):
|
||||||
return await self.custom_sql(request, name, hash, sql, editable=True)
|
return await self.custom_sql(request, name, hash, sql, editable=True)
|
||||||
|
|
||||||
extra_args = {}
|
extra_args = {}
|
||||||
# Handle ?_page_size=500
|
# Handle ?_size=500
|
||||||
page_size = request.raw_args.get("_size")
|
page_size = request.raw_args.get("_size")
|
||||||
if page_size:
|
if page_size:
|
||||||
if page_size == "max":
|
if page_size == "max":
|
||||||
|
|
@@ -539,7 +539,7 @@ class TableView(RowTableShared):
|
||||||
)
|
)
|
||||||
|
|
||||||
# facets support
|
# facets support
|
||||||
facet_size = self.ds.limits["default_facet_size"]
|
facet_size = self.ds.config["default_facet_size"]
|
||||||
metadata_facets = table_metadata.get("facets", [])
|
metadata_facets = table_metadata.get("facets", [])
|
||||||
facets = metadata_facets[:]
|
facets = metadata_facets[:]
|
||||||
try:
|
try:
|
||||||
|
|
@@ -563,7 +563,7 @@ class TableView(RowTableShared):
|
||||||
facet_rows = await self.execute(
|
facet_rows = await self.execute(
|
||||||
name, facet_sql, params,
|
name, facet_sql, params,
|
||||||
truncate=False,
|
truncate=False,
|
||||||
custom_time_limit=self.ds.limits["facet_time_limit_ms"],
|
custom_time_limit=self.ds.config["facet_time_limit_ms"],
|
||||||
)
|
)
|
||||||
facet_results_values = []
|
facet_results_values = []
|
||||||
facet_results[column] = {
|
facet_results[column] = {
|
||||||
|
|
@@ -668,7 +668,7 @@ class TableView(RowTableShared):
|
||||||
distinct_values = await self.execute(
|
distinct_values = await self.execute(
|
||||||
name, suggested_facet_sql, from_sql_params,
|
name, suggested_facet_sql, from_sql_params,
|
||||||
truncate=False,
|
truncate=False,
|
||||||
custom_time_limit=self.ds.limits["facet_suggest_time_limit_ms"],
|
custom_time_limit=self.ds.config["facet_suggest_time_limit_ms"],
|
||||||
)
|
)
|
||||||
num_distinct_values = len(distinct_values)
|
num_distinct_values = len(distinct_values)
|
||||||
if (
|
if (
|
||||||
|
|
|
||||||
|
|
@@ -1,7 +1,17 @@
|
||||||
Limits
|
Config
|
||||||
======
|
======
|
||||||
|
|
||||||
To prevent rogue, long-running queries from making a Datasette instance inaccessible to other users, Datasette imposes some limits on the SQL that you can execute.
|
Datasette provides a number of configuration options. These can be set using the ``--config name:value`` option to ``datasette serve``.
|
||||||
|
|
||||||
|
To prevent rogue, long-running queries from making a Datasette instance inaccessible to other users, Datasette imposes some limits on the SQL that you can execute. These are exposed as config options which you can over-ride.
|
||||||
|
|
||||||
|
default_page_size
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
The default number of rows returned by the table page. You can over-ride this on a per-page basis using the ``?_size=80`` querystring parameter, provided you do not specify a value higher than the ``max_returned_rows`` setting. You can set this default using ``--config`` like so::
|
||||||
|
|
||||||
|
datasette mydatabase.db --config default_page_size:50
|
||||||
|
|
||||||
|
|
||||||
sql_time_limit_ms
|
sql_time_limit_ms
|
||||||
-----------------
|
-----------------
|
||||||
|
|
@@ -10,9 +20,9 @@ By default, queries have a time limit of one second. If a query takes longer tha
|
||||||
|
|
||||||
If this time limit is too short for you, you can customize it using the ``sql_time_limit_ms`` limit - for example, to increase it to 3.5 seconds::
|
If this time limit is too short for you, you can customize it using the ``sql_time_limit_ms`` limit - for example, to increase it to 3.5 seconds::
|
||||||
|
|
||||||
datasette mydatabase.db --limit sql_time_limit_ms:3500
|
datasette mydatabase.db --config sql_time_limit_ms:3500
|
||||||
|
|
||||||
You can optionally set a lower time limit for an individual query using the ``_timelimit`` query string argument::
|
You can optionally set a lower time limit for an individual query using the ``?_timelimit=100`` query string argument::
|
||||||
|
|
||||||
/my-database/my-table?qSpecies=44&_timelimit=100
|
/my-database/my-table?qSpecies=44&_timelimit=100
|
||||||
|
|
||||||
|
|
@@ -25,21 +35,21 @@ Datasette returns a maximum of 1,000 rows of data at a time. If you execute a qu
|
||||||
|
|
||||||
You can increase or decrease this limit like so::
|
You can increase or decrease this limit like so::
|
||||||
|
|
||||||
datasette mydatabase.db --limit max_returned_rows:2000
|
datasette mydatabase.db --config max_returned_rows:2000
|
||||||
|
|
||||||
default_facet_size
|
default_facet_size
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
The default number of unique rows returned by :ref:`facets` is 30. You can customize it like this::
|
The default number of unique rows returned by :ref:`facets` is 30. You can customize it like this::
|
||||||
|
|
||||||
datasette mydatabase.db --limit default_facet_size:50
|
datasette mydatabase.db --config default_facet_size:50
|
||||||
|
|
||||||
facet_time_limit_ms
|
facet_time_limit_ms
|
||||||
-------------------
|
-------------------
|
||||||
|
|
||||||
This is the time limit Datasette allows for calculating a facet, which defaults to 200ms::
|
This is the time limit Datasette allows for calculating a facet, which defaults to 200ms::
|
||||||
|
|
||||||
datasette mydatabase.db --limit facet_time_limit_ms:1000
|
datasette mydatabase.db --config facet_time_limit_ms:1000
|
||||||
|
|
||||||
facet_suggest_time_limit_ms
|
facet_suggest_time_limit_ms
|
||||||
---------------------------
|
---------------------------
|
||||||
|
|
@@ -48,4 +58,4 @@ When Datasette calculates suggested facets it needs to run a SQL query for every
|
||||||
|
|
||||||
You can increase this time limit like so::
|
You can increase this time limit like so::
|
||||||
|
|
||||||
datasette mydatabase.db --limit facet_suggest_time_limit_ms:500
|
datasette mydatabase.db --config facet_suggest_time_limit_ms:500
|
||||||
|
|
@@ -100,7 +100,6 @@ datasette serve options
|
||||||
useful for development
|
useful for development
|
||||||
--cors Enable CORS by serving Access-Control-Allow-
|
--cors Enable CORS by serving Access-Control-Allow-
|
||||||
Origin: *
|
Origin: *
|
||||||
--page_size INTEGER Page size - default is 100
|
|
||||||
--load-extension PATH Path to a SQLite extension to load
|
--load-extension PATH Path to a SQLite extension to load
|
||||||
--inspect-file TEXT Path to JSON file created using "datasette
|
--inspect-file TEXT Path to JSON file created using "datasette
|
||||||
inspect"
|
inspect"
|
||||||
|
|
@@ -110,6 +109,7 @@ datasette serve options
|
||||||
--plugins-dir DIRECTORY Path to directory containing custom plugins
|
--plugins-dir DIRECTORY Path to directory containing custom plugins
|
||||||
--static STATIC MOUNT mountpoint:path-to-directory for serving static
|
--static STATIC MOUNT mountpoint:path-to-directory for serving static
|
||||||
files
|
files
|
||||||
--limit LIMIT Set a limit using limitname:integer
|
--config CONFIG Set config option using configname:value
|
||||||
datasette.readthedocs.io/en/latest/limits.html
|
datasette.readthedocs.io/en/latest/config.html
|
||||||
|
--help-config Show available config options
|
||||||
--help Show this message and exit.
|
--help Show this message and exit.
|
||||||
|
|
|
||||||
|
|
@@ -22,7 +22,7 @@ Contents
|
||||||
facets
|
facets
|
||||||
full_text_search
|
full_text_search
|
||||||
metadata
|
metadata
|
||||||
limits
|
config
|
||||||
custom_templates
|
custom_templates
|
||||||
plugins
|
plugins
|
||||||
changelog
|
changelog
|
||||||
|
|
|
||||||
|
|
@@ -20,10 +20,10 @@ def app_client(sql_time_limit_ms=None, max_returned_rows=None):
|
||||||
open(os.path.join(plugins_dir, 'my_plugin.py'), 'w').write(PLUGIN)
|
open(os.path.join(plugins_dir, 'my_plugin.py'), 'w').write(PLUGIN)
|
||||||
ds = Datasette(
|
ds = Datasette(
|
||||||
[filepath],
|
[filepath],
|
||||||
page_size=50,
|
|
||||||
metadata=METADATA,
|
metadata=METADATA,
|
||||||
plugins_dir=plugins_dir,
|
plugins_dir=plugins_dir,
|
||||||
limits={
|
config={
|
||||||
|
'default_page_size': 50,
|
||||||
'max_returned_rows': max_returned_rows or 100,
|
'max_returned_rows': max_returned_rows or 100,
|
||||||
'sql_time_limit_ms': sql_time_limit_ms or 200,
|
'sql_time_limit_ms': sql_time_limit_ms or 200,
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -897,12 +897,13 @@ def test_versions_json(app_client):
|
||||||
assert 'fts_versions' in response.json['sqlite']
|
assert 'fts_versions' in response.json['sqlite']
|
||||||
|
|
||||||
|
|
||||||
def test_limits_json(app_client):
|
def test_config_json(app_client):
|
||||||
response = app_client.get(
|
response = app_client.get(
|
||||||
"/-/limits.json",
|
"/-/config.json",
|
||||||
gather_request=False
|
gather_request=False
|
||||||
)
|
)
|
||||||
assert {
|
assert {
|
||||||
|
"default_page_size": 50,
|
||||||
"default_facet_size": 30,
|
"default_facet_size": 30,
|
||||||
"facet_suggest_time_limit_ms": 50,
|
"facet_suggest_time_limit_ms": 50,
|
||||||
"facet_time_limit_ms": 200,
|
"facet_time_limit_ms": 200,
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue