Ran black against datasette/cli.py

https://pypi.org/project/black/

cli.py was getting a bit untidy due to all of the heavily annotated click command functions - ran black to clean it up and make it easier to read.
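The exact black invocation and version are not recorded in the commit, so the snippet below is only a sketch of how a formatting-only pass like this can be reproduced and verified:

    # Minimal sketch (not part of the commit): re-run black on cli.py and
    # confirm the change is formatting-only. Assumes black is installed
    # (pip install black); the black version used here is not recorded, so
    # output may differ slightly between releases.
    import subprocess

    # Reformat the file in place.
    subprocess.run(["black", "datasette/cli.py"], check=True)

    # Show the resulting change summary; there should be no behavioural edits,
    # only reflowed lines and normalized quotes.
    subprocess.run(["git", "diff", "--stat", "datasette/cli.py"], check=True)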
This commit is contained in:
parent f8f818711b
commit 2b344f6a34

1 changed file with 305 additions and 135 deletions:

datasette/cli.py | 440 lines changed (+305 −135)
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -6,24 +6,24 @@ import shutil
 from subprocess import call, check_output
 import sys
 from .app import Datasette
-from .utils import (
-    temporary_docker_directory, temporary_heroku_directory
-)
+from .utils import temporary_docker_directory, temporary_heroku_directory


 class StaticMount(click.ParamType):
-    name = 'static mount'
+    name = "static mount"

     def convert(self, value, param, ctx):
-        if ':' not in value:
-            self.fail('"%s" should be of format mountpoint:directory' % value, param, ctx)
-        path, dirpath = value.split(':')
+        if ":" not in value:
+            self.fail(
+                '"%s" should be of format mountpoint:directory' % value, param, ctx
+            )
+        path, dirpath = value.split(":")
         if not os.path.exists(dirpath) or not os.path.isdir(dirpath):
-            self.fail('%s is not a valid directory path' % value, param, ctx)
+            self.fail("%s is not a valid directory path" % value, param, ctx)
         return path, dirpath


-@click.group(cls=DefaultGroup, default='serve', default_if_no_args=True)
+@click.group(cls=DefaultGroup, default="serve", default_if_no_args=True)
 @click.version_option()
 def cli():
     """
@@ -32,40 +32,73 @@ def cli():


 @cli.command()
-@click.argument('files', type=click.Path(exists=True), nargs=-1)
-@click.option('--inspect-file', default='inspect-data.json')
+@click.argument("files", type=click.Path(exists=True), nargs=-1)
+@click.option("--inspect-file", default="inspect-data.json")
 @click.option(
-    'sqlite_extensions', '--load-extension', envvar='SQLITE_EXTENSIONS', multiple=True,
-    type=click.Path(exists=True, resolve_path=True), help='Path to a SQLite extension to load'
+    "sqlite_extensions",
+    "--load-extension",
+    envvar="SQLITE_EXTENSIONS",
+    multiple=True,
+    type=click.Path(exists=True, resolve_path=True),
+    help="Path to a SQLite extension to load",
 )
 def inspect(files, inspect_file, sqlite_extensions):
     app = Datasette(files, sqlite_extensions=sqlite_extensions)
-    open(inspect_file, 'w').write(json.dumps(app.inspect(), indent=2))
+    open(inspect_file, "w").write(json.dumps(app.inspect(), indent=2))


 @cli.command()
-@click.argument('publisher', type=click.Choice(['now', 'heroku']))
-@click.argument('files', type=click.Path(exists=True), nargs=-1)
+@click.argument("publisher", type=click.Choice(["now", "heroku"]))
+@click.argument("files", type=click.Path(exists=True), nargs=-1)
 @click.option(
-    '-n', '--name', default='datasette',
-    help='Application name to use when deploying to Now (ignored for Heroku)'
+    "-n",
+    "--name",
+    default="datasette",
+    help="Application name to use when deploying to Now (ignored for Heroku)",
 )
 @click.option(
-    '-m', '--metadata', type=click.File(mode='r'),
-    help='Path to JSON file containing metadata to publish'
+    "-m",
+    "--metadata",
+    type=click.File(mode="r"),
+    help="Path to JSON file containing metadata to publish",
 )
-@click.option('--extra-options', help='Extra options to pass to datasette serve')
-@click.option('--force', is_flag=True, help='Pass --force option to now')
-@click.option('--branch', help='Install datasette from a GitHub branch e.g. master')
-@click.option('--template-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom templates')
-@click.option('--plugins-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom plugins')
-@click.option('--static', type=StaticMount(), help='mountpoint:path-to-directory for serving static files', multiple=True)
-@click.option('--title', help='Title for metadata')
-@click.option('--license', help='License label for metadata')
-@click.option('--license_url', help='License URL for metadata')
-@click.option('--source', help='Source label for metadata')
-@click.option('--source_url', help='Source URL for metadata')
-def publish(publisher, files, name, metadata, extra_options, force, branch, template_dir, plugins_dir, static, **extra_metadata):
+@click.option("--extra-options", help="Extra options to pass to datasette serve")
+@click.option("--force", is_flag=True, help="Pass --force option to now")
+@click.option("--branch", help="Install datasette from a GitHub branch e.g. master")
+@click.option(
+    "--template-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    help="Path to directory containing custom templates",
+)
+@click.option(
+    "--plugins-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    help="Path to directory containing custom plugins",
+)
+@click.option(
+    "--static",
+    type=StaticMount(),
+    help="mountpoint:path-to-directory for serving static files",
+    multiple=True,
+)
+@click.option("--title", help="Title for metadata")
+@click.option("--license", help="License label for metadata")
+@click.option("--license_url", help="License URL for metadata")
+@click.option("--source", help="Source label for metadata")
+@click.option("--source_url", help="Source URL for metadata")
+def publish(
+    publisher,
+    files,
+    name,
+    metadata,
+    extra_options,
+    force,
+    branch,
+    template_dir,
+    plugins_dir,
+    static,
+    **extra_metadata
+):
     """
     Publish specified SQLite database files to the internet along with a datasette API.

@@ -75,67 +108,105 @@ def publish(publisher, files, name, metadata, extra_options, force, branch, temp

     Example usage: datasette publish now my-database.db
     """

     def _fail_if_publish_binary_not_installed(binary, publish_target, install_link):
         """Exit (with error message) if ``binary` isn't installed"""
         if not shutil.which(binary):
             click.secho(
                 "Publishing to {publish_target} requires {binary} to be installed and configured".format(
-                    publish_target=publish_target,
-                    binary=binary,
+                    publish_target=publish_target, binary=binary
                 ),
-                bg='red',
-                fg='white',
+                bg="red",
+                fg="white",
                 bold=True,
-                err=True
+                err=True,
+            )
+            click.echo(
+                "Follow the instructions at {install_link}".format(
+                    install_link=install_link
+                ),
+                err=True,
             )
-            click.echo("Follow the instructions at {install_link}".format(
-                install_link=install_link,
-            ), err=True)
             sys.exit(1)

-    if publisher == 'now':
-        _fail_if_publish_binary_not_installed('now', 'Zeit Now', 'https://zeit.co/now')
-        with temporary_docker_directory(files, name, metadata, extra_options, branch, template_dir, plugins_dir, static, extra_metadata):
+    if publisher == "now":
+        _fail_if_publish_binary_not_installed("now", "Zeit Now", "https://zeit.co/now")
+        with temporary_docker_directory(
+            files,
+            name,
+            metadata,
+            extra_options,
+            branch,
+            template_dir,
+            plugins_dir,
+            static,
+            extra_metadata,
+        ):
             if force:
-                call(['now', '--force'])
+                call(["now", "--force"])
             else:
-                call('now')
+                call("now")

-    elif publisher == 'heroku':
-        _fail_if_publish_binary_not_installed('heroku', 'Heroku', 'https://cli.heroku.com')
+    elif publisher == "heroku":
+        _fail_if_publish_binary_not_installed(
+            "heroku", "Heroku", "https://cli.heroku.com"
+        )

         # Check for heroku-builds plugin
-        plugins = [line.split()[0] for line in check_output(['heroku', 'plugins']).splitlines()]
-        if b'heroku-builds' not in plugins:
-            click.echo('Publishing to Heroku requires the heroku-builds plugin to be installed.')
-            click.confirm('Install it? (this will run `heroku plugins:install heroku-builds`)', abort=True)
+        plugins = [
+            line.split()[0] for line in check_output(["heroku", "plugins"]).splitlines()
+        ]
+        if b"heroku-builds" not in plugins:
+            click.echo(
+                "Publishing to Heroku requires the heroku-builds plugin to be installed."
+            )
+            click.confirm(
+                "Install it? (this will run `heroku plugins:install heroku-builds`)",
+                abort=True,
+            )
             call(["heroku", "plugins:install", "heroku-builds"])

-        with temporary_heroku_directory(files, name, metadata, extra_options, branch, template_dir, plugins_dir, static, extra_metadata):
-            create_output = check_output(
-                ['heroku', 'apps:create', '--json']
-            ).decode('utf8')
+        with temporary_heroku_directory(
+            files,
+            name,
+            metadata,
+            extra_options,
+            branch,
+            template_dir,
+            plugins_dir,
+            static,
+            extra_metadata,
+        ):
+            create_output = check_output(["heroku", "apps:create", "--json"]).decode(
+                "utf8"
+            )
             app_name = json.loads(create_output)["name"]
             call(["heroku", "builds:create", "-a", app_name])


 @cli.command()
-@click.argument('files', type=click.Path(exists=True), nargs=-1, required=True)
+@click.argument("files", type=click.Path(exists=True), nargs=-1, required=True)
 @click.option(
-    '-m', '--metadata', default='metadata.json',
-    help='Name of metadata file to generate'
+    "-m",
+    "--metadata",
+    default="metadata.json",
+    help="Name of metadata file to generate",
 )
 @click.option(
-    'sqlite_extensions', '--load-extension', envvar='SQLITE_EXTENSIONS', multiple=True,
-    type=click.Path(exists=True, resolve_path=True), help='Path to a SQLite extension to load'
+    "sqlite_extensions",
+    "--load-extension",
+    envvar="SQLITE_EXTENSIONS",
+    multiple=True,
+    type=click.Path(exists=True, resolve_path=True),
+    help="Path to a SQLite extension to load",
 )
 def skeleton(files, metadata, sqlite_extensions):
     "Generate a skeleton metadata.json file for specified SQLite databases"
     if os.path.exists(metadata):
         click.secho(
-            'File {} already exists, will not over-write'.format(metadata),
-            bg='red',
-            fg='white',
+            "File {} already exists, will not over-write".format(metadata),
+            bg="red",
+            fg="white",
             bold=True,
             err=True,
         )
@@ -144,104 +215,203 @@ def skeleton(files, metadata, sqlite_extensions):
     databases = {}
     for database_name, info in app.inspect().items():
         databases[database_name] = {
-            'title': None,
-            'description': None,
-            'description_html': None,
-            'license': None,
-            'license_url': None,
-            'source': None,
-            'source_url': None,
-            'queries': {},
-            'tables': {
+            "title": None,
+            "description": None,
+            "description_html": None,
+            "license": None,
+            "license_url": None,
+            "source": None,
+            "source_url": None,
+            "queries": {},
+            "tables": {
                 table_name: {
-                    'title': None,
-                    'description': None,
-                    'description_html': None,
-                    'license': None,
-                    'license_url': None,
-                    'source': None,
-                    'source_url': None,
-                    'units': {}
-                } for table_name in (info.get('tables') or {})
-            }
+                    "title": None,
+                    "description": None,
+                    "description_html": None,
+                    "license": None,
+                    "license_url": None,
+                    "source": None,
+                    "source_url": None,
+                    "units": {},
+                }
+                for table_name in (info.get("tables") or {})
+            },
         }
-    open(metadata, 'w').write(json.dumps({
-        'title': None,
-        'description': None,
-        'description_html': None,
-        'license': None,
-        'license_url': None,
-        'source': None,
-        'source_url': None,
-        'databases': databases
-    }, indent=4))
-    click.echo('Wrote skeleton to {}'.format(metadata))
+    open(metadata, "w").write(
+        json.dumps(
+            {
+                "title": None,
+                "description": None,
+                "description_html": None,
+                "license": None,
+                "license_url": None,
+                "source": None,
+                "source_url": None,
+                "databases": databases,
+            },
+            indent=4,
+        )
+    )
+    click.echo("Wrote skeleton to {}".format(metadata))


 @cli.command()
-@click.argument('files', type=click.Path(exists=True), nargs=-1, required=True)
+@click.argument("files", type=click.Path(exists=True), nargs=-1, required=True)
 @click.option(
-    '-t', '--tag',
-    help='Name for the resulting Docker container, can optionally use name:tag format'
+    "-t",
+    "--tag",
+    help="Name for the resulting Docker container, can optionally use name:tag format",
 )
 @click.option(
-    '-m', '--metadata', type=click.File(mode='r'),
-    help='Path to JSON file containing metadata to publish'
+    "-m",
+    "--metadata",
+    type=click.File(mode="r"),
+    help="Path to JSON file containing metadata to publish",
 )
-@click.option('--extra-options', help='Extra options to pass to datasette serve')
-@click.option('--branch', help='Install datasette from a GitHub branch e.g. master')
-@click.option('--template-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom templates')
-@click.option('--plugins-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom plugins')
-@click.option('--static', type=StaticMount(), help='mountpoint:path-to-directory for serving static files', multiple=True)
-@click.option('--title', help='Title for metadata')
-@click.option('--license', help='License label for metadata')
-@click.option('--license_url', help='License URL for metadata')
-@click.option('--source', help='Source label for metadata')
-@click.option('--source_url', help='Source URL for metadata')
-def package(files, tag, metadata, extra_options, branch, template_dir, plugins_dir, static, **extra_metadata):
+@click.option("--extra-options", help="Extra options to pass to datasette serve")
+@click.option("--branch", help="Install datasette from a GitHub branch e.g. master")
+@click.option(
+    "--template-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    help="Path to directory containing custom templates",
+)
+@click.option(
+    "--plugins-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    help="Path to directory containing custom plugins",
+)
+@click.option(
+    "--static",
+    type=StaticMount(),
+    help="mountpoint:path-to-directory for serving static files",
+    multiple=True,
+)
+@click.option("--title", help="Title for metadata")
+@click.option("--license", help="License label for metadata")
+@click.option("--license_url", help="License URL for metadata")
+@click.option("--source", help="Source label for metadata")
+@click.option("--source_url", help="Source URL for metadata")
+def package(
+    files,
+    tag,
+    metadata,
+    extra_options,
+    branch,
+    template_dir,
+    plugins_dir,
+    static,
+    **extra_metadata
+):
     "Package specified SQLite files into a new datasette Docker container"
-    if not shutil.which('docker'):
+    if not shutil.which("docker"):
         click.secho(
             ' The package command requires "docker" to be installed and configured ',
-            bg='red',
-            fg='white',
+            bg="red",
+            fg="white",
             bold=True,
             err=True,
         )
         sys.exit(1)
-    with temporary_docker_directory(files, 'datasette', metadata, extra_options, branch, template_dir, plugins_dir, static, extra_metadata):
-        args = ['docker', 'build']
+    with temporary_docker_directory(
+        files,
+        "datasette",
+        metadata,
+        extra_options,
+        branch,
+        template_dir,
+        plugins_dir,
+        static,
+        extra_metadata,
+    ):
+        args = ["docker", "build"]
         if tag:
-            args.append('-t')
+            args.append("-t")
             args.append(tag)
-        args.append('.')
+        args.append(".")
         call(args)


 @cli.command()
-@click.argument('files', type=click.Path(exists=True), nargs=-1)
-@click.option('-h', '--host', default='127.0.0.1', help='host for server, defaults to 127.0.0.1')
-@click.option('-p', '--port', default=8001, help='port for server, defaults to 8001')
-@click.option('--debug', is_flag=True, help='Enable debug mode - useful for development')
-@click.option('--reload', is_flag=True, help='Automatically reload if code change detected - useful for development')
-@click.option('--cors', is_flag=True, help='Enable CORS by serving Access-Control-Allow-Origin: *')
-@click.option('--page_size', default=100, help='Page size - default is 100')
-@click.option('--max_returned_rows', default=1000, help='Max allowed rows to return at once - default is 1000. Set to 0 to disable check entirely.')
-@click.option('--sql_time_limit_ms', default=1000, help='Max time allowed for SQL queries in ms')
+@click.argument("files", type=click.Path(exists=True), nargs=-1)
 @click.option(
-    'sqlite_extensions', '--load-extension', envvar='SQLITE_EXTENSIONS', multiple=True,
-    type=click.Path(exists=True, resolve_path=True), help='Path to a SQLite extension to load'
+    "-h", "--host", default="127.0.0.1", help="host for server, defaults to 127.0.0.1"
 )
-@click.option('--inspect-file', help='Path to JSON file created using "datasette inspect"')
-@click.option('-m', '--metadata', type=click.File(mode='r'), help='Path to JSON file containing license/source metadata')
-@click.option('--template-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom templates')
-@click.option('--plugins-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom plugins')
-@click.option('--static', type=StaticMount(), help='mountpoint:path-to-directory for serving static files', multiple=True)
-def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows, sql_time_limit_ms, sqlite_extensions, inspect_file, metadata, template_dir, plugins_dir, static):
+@click.option("-p", "--port", default=8001, help="port for server, defaults to 8001")
+@click.option(
+    "--debug", is_flag=True, help="Enable debug mode - useful for development"
+)
+@click.option(
+    "--reload",
+    is_flag=True,
+    help="Automatically reload if code change detected - useful for development",
+)
+@click.option(
+    "--cors", is_flag=True, help="Enable CORS by serving Access-Control-Allow-Origin: *"
+)
+@click.option("--page_size", default=100, help="Page size - default is 100")
+@click.option(
+    "--max_returned_rows",
+    default=1000,
+    help="Max allowed rows to return at once - default is 1000. Set to 0 to disable check entirely.",
+)
+@click.option(
+    "--sql_time_limit_ms", default=1000, help="Max time allowed for SQL queries in ms"
+)
+@click.option(
+    "sqlite_extensions",
+    "--load-extension",
+    envvar="SQLITE_EXTENSIONS",
+    multiple=True,
+    type=click.Path(exists=True, resolve_path=True),
+    help="Path to a SQLite extension to load",
+)
+@click.option(
+    "--inspect-file", help='Path to JSON file created using "datasette inspect"'
+)
+@click.option(
+    "-m",
+    "--metadata",
+    type=click.File(mode="r"),
+    help="Path to JSON file containing license/source metadata",
+)
+@click.option(
+    "--template-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    help="Path to directory containing custom templates",
+)
+@click.option(
+    "--plugins-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    help="Path to directory containing custom plugins",
+)
+@click.option(
+    "--static",
+    type=StaticMount(),
+    help="mountpoint:path-to-directory for serving static files",
+    multiple=True,
+)
+def serve(
+    files,
+    host,
+    port,
+    debug,
+    reload,
+    cors,
+    page_size,
+    max_returned_rows,
+    sql_time_limit_ms,
+    sqlite_extensions,
+    inspect_file,
+    metadata,
+    template_dir,
+    plugins_dir,
+    static,
+):
     """Serve up specified SQLite database files with a web UI"""
     if reload:
         import hupper
-        reloader = hupper.start_reloader('datasette.cli.serve')
+
+        reloader = hupper.start_reloader("datasette.cli.serve")
         if metadata:
             reloader.watch_files([metadata.name])

@@ -253,7 +423,7 @@ def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows,
     if metadata:
         metadata_data = json.loads(metadata.read())

-    click.echo('Serve! files={} on port {}'.format(files, port))
+    click.echo("Serve! files={} on port {}".format(files, port))
     ds = Datasette(
         files,
         cache_headers=not debug and not reload,