diff --git a/datasette/app.py b/datasette/app.py
index c20b8275..3adb8925 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -31,6 +31,7 @@ from .utils import (
     get_all_foreign_keys,
     is_url,
     InvalidSql,
+    module_from_path,
     path_from_row_pks,
     path_with_added_args,
     path_with_ext,
@@ -1032,7 +1033,7 @@ class Datasette:
             self, files, num_threads=3, cache_headers=True, page_size=100,
             max_returned_rows=1000, sql_time_limit_ms=1000, cors=False,
             inspect_data=None, metadata=None, sqlite_extensions=None,
-            template_dir=None, static_mounts=None):
+            template_dir=None, plugins_dir=None, static_mounts=None):
         self.files = files
         self.num_threads = num_threads
         self.executor = futures.ThreadPoolExecutor(
@@ -1048,7 +1049,20 @@ class Datasette:
         self.sqlite_functions = []
         self.sqlite_extensions = sqlite_extensions or []
         self.template_dir = template_dir
+        self.plugins_dir = plugins_dir
         self.static_mounts = static_mounts or []
+        # Execute plugins in constructor, to ensure they are available
+        # when the rest of `datasette inspect` executes
+        if self.plugins_dir:
+            for filename in os.listdir(self.plugins_dir):
+                filepath = os.path.join(self.plugins_dir, filename)
+                with open(filepath) as f:
+                    mod = module_from_path(filepath, name=filename)
+                    try:
+                        pm.register(mod)
+                    except ValueError:
+                        # Plugin already registered
+                        pass
 
     def app_css_hash(self):
         if not hasattr(self, '_app_css_hash'):
diff --git a/datasette/cli.py b/datasette/cli.py
index 0fde53cf..8880c079 100644
--- a/datasette/cli.py
+++ b/datasette/cli.py
@@ -58,13 +58,14 @@ def inspect(files, inspect_file, sqlite_extensions):
 @click.option('--force', is_flag=True, help='Pass --force option to now')
 @click.option('--branch', help='Install datasette from a GitHub branch e.g. master')
 @click.option('--template-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom templates')
+@click.option('--plugins-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom plugins')
 @click.option('--static', type=StaticMount(), help='mountpoint:path-to-directory for serving static files', multiple=True)
 @click.option('--title', help='Title for metadata')
 @click.option('--license', help='License label for metadata')
 @click.option('--license_url', help='License URL for metadata')
 @click.option('--source', help='Source label for metadata')
 @click.option('--source_url', help='Source URL for metadata')
-def publish(publisher, files, name, metadata, extra_options, force, branch, template_dir, static, **extra_metadata):
+def publish(publisher, files, name, metadata, extra_options, force, branch, template_dir, plugins_dir, static, **extra_metadata):
     """
     Publish specified SQLite database files to the internet along with a datasette API.
 
@@ -94,7 +95,7 @@ def publish(publisher, files, name, metadata, extra_options, force, branch, temp
 
     if publisher == 'now':
         _fail_if_publish_binary_not_installed('now', 'Zeit Now', 'https://zeit.co/now')
-        with temporary_docker_directory(files, name, metadata, extra_options, branch, template_dir, static, extra_metadata):
+        with temporary_docker_directory(files, name, metadata, extra_options, branch, template_dir, plugins_dir, static, extra_metadata):
             if force:
                 call(['now', '--force'])
             else:
@@ -110,7 +111,7 @@ def publish(publisher, files, name, metadata, extra_options, force, branch, temp
             click.confirm('Install it? (this will run `heroku plugins:install heroku-builds`)', abort=True)
             call(["heroku", "plugins:install", "heroku-builds"])
 
-        with temporary_heroku_directory(files, name, metadata, extra_options, branch, template_dir, static, extra_metadata):
+        with temporary_heroku_directory(files, name, metadata, extra_options, branch, template_dir, plugins_dir, static, extra_metadata):
             create_output = check_output(
                 ['heroku', 'apps:create', '--json']
             ).decode('utf8')
@@ -190,13 +191,14 @@ def skeleton(files, metadata, sqlite_extensions):
 @click.option('--extra-options', help='Extra options to pass to datasette serve')
 @click.option('--branch', help='Install datasette from a GitHub branch e.g. master')
 @click.option('--template-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom templates')
+@click.option('--plugins-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom plugins')
 @click.option('--static', type=StaticMount(), help='mountpoint:path-to-directory for serving static files', multiple=True)
 @click.option('--title', help='Title for metadata')
 @click.option('--license', help='License label for metadata')
 @click.option('--license_url', help='License URL for metadata')
 @click.option('--source', help='Source label for metadata')
 @click.option('--source_url', help='Source URL for metadata')
-def package(files, tag, metadata, extra_options, branch, template_dir, static, **extra_metadata):
+def package(files, tag, metadata, extra_options, branch, template_dir, plugins_dir, static, **extra_metadata):
     "Package specified SQLite files into a new datasette Docker container"
     if not shutil.which('docker'):
         click.secho(
@@ -207,7 +209,7 @@ def package(files, tag, metadata, extra_options, branch, template_dir, static, *
             err=True,
         )
         sys.exit(1)
-    with temporary_docker_directory(files, 'datasette', metadata, extra_options, branch, template_dir, static, extra_metadata):
+    with temporary_docker_directory(files, 'datasette', metadata, extra_options, branch, template_dir, plugins_dir, static, extra_metadata):
        args = ['docker', 'build']
        if tag:
            args.append('-t')
@@ -233,8 +235,9 @@ def package(files, tag, metadata, extra_options, branch, template_dir, static, *
 @click.option('--inspect-file', help='Path to JSON file created using "datasette inspect"')
 @click.option('-m', '--metadata', type=click.File(mode='r'), help='Path to JSON file containing license/source metadata')
 @click.option('--template-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom templates')
+@click.option('--plugins-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom plugins')
 @click.option('--static', type=StaticMount(), help='mountpoint:path-to-directory for serving static files', multiple=True)
-def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows, sql_time_limit_ms, sqlite_extensions, inspect_file, metadata, template_dir, static):
+def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows, sql_time_limit_ms, sqlite_extensions, inspect_file, metadata, template_dir, plugins_dir, static):
     """Serve up specified SQLite database files with a web UI"""
     if reload:
         import hupper
@@ -262,6 +265,7 @@ def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows,
         metadata=metadata_data,
         sqlite_extensions=sqlite_extensions,
         template_dir=template_dir,
+        plugins_dir=plugins_dir,
         static_mounts=static,
     )
     # Force initial hashing/table counting
diff --git a/datasette/utils.py b/datasette/utils.py
index b5020be2..1f296a0b 100644
--- a/datasette/utils.py
+++ b/datasette/utils.py
@@ -1,6 +1,7 @@
 from contextlib import contextmanager
 import base64
 import hashlib
+import imp
 import json
 import os
 import re
@@ -182,7 +183,7 @@ def escape_sqlite(s):
         return '[{}]'.format(s)
 
 
-def make_dockerfile(files, metadata_file, extra_options, branch, template_dir, static):
+def make_dockerfile(files, metadata_file, extra_options, branch, template_dir, plugins_dir, static):
     cmd = ['"datasette"', '"serve"', '"--host"', '"0.0.0.0"']
     cmd.append('"' + '", "'.join(files) + '"')
     cmd.extend(['"--cors"', '"--port"', '"8001"', '"--inspect-file"', '"inspect-data.json"'])
@@ -190,6 +191,8 @@ def make_dockerfile(files, metadata_file, extra_options, branch, template_dir, s
         cmd.extend(['"--metadata"', '"{}"'.format(metadata_file)])
     if template_dir:
         cmd.extend(['"--template-dir"', '"templates/"'])
+    if plugins_dir:
+        cmd.extend(['"--plugins-dir"', '"plugins/"'])
     if static:
         for mount_point, _ in static:
             cmd.extend(['"--static"', '"{}:{}"'.format(mount_point, mount_point)])
@@ -216,7 +219,7 @@ CMD [{cmd}]'''.format(
 
 
 @contextmanager
-def temporary_docker_directory(files, name, metadata, extra_options, branch, template_dir, static, extra_metadata=None):
+def temporary_docker_directory(files, name, metadata, extra_options, branch, template_dir, plugins_dir, static, extra_metadata=None):
     extra_metadata = extra_metadata or {}
     tmp = tempfile.TemporaryDirectory()
     # We create a datasette folder in there to get a nicer now deploy name
@@ -242,6 +245,7 @@ def temporary_docker_directory(files, name, metadata, extra_options, branch, tem
             extra_options,
             branch,
             template_dir,
+            plugins_dir,
             static,
         )
         os.chdir(datasette_dir)
@@ -255,6 +259,11 @@ def temporary_docker_directory(files, name, metadata, extra_options, branch, tem
                 os.path.join(saved_cwd, template_dir),
                 os.path.join(datasette_dir, 'templates')
             )
+        if plugins_dir:
+            link_or_copy_directory(
+                os.path.join(saved_cwd, plugins_dir),
+                os.path.join(datasette_dir, 'plugins')
+            )
         for mount_point, path in static:
             link_or_copy_directory(
                 os.path.join(saved_cwd, path),
@@ -267,7 +276,7 @@ def temporary_docker_directory(files, name, metadata, extra_options, branch, tem
 
 
 @contextmanager
-def temporary_heroku_directory(files, name, metadata, extra_options, branch, template_dir, static, extra_metadata=None):
+def temporary_heroku_directory(files, name, metadata, extra_options, branch, template_dir, plugins_dir, static, extra_metadata=None):
     # FIXME: lots of duplicated code from above
     extra_metadata = extra_metadata or {}
 
@@ -314,6 +323,13 @@ def temporary_heroku_directory(files, name, metadata, extra_options, branch, tem
                 os.path.join(tmp.name, 'templates')
             )
             extras.extend(['--template-dir', 'templates/'])
+        if plugins_dir:
+            link_or_copy_directory(
+                os.path.join(saved_cwd, plugins_dir),
+                os.path.join(tmp.name, 'plugins')
+            )
+            extras.extend(['--plugins-dir', 'plugins/'])
+
         if metadata:
             extras.extend(['--metadata', 'metadata.json'])
         for mount_point, path in static:
@@ -625,3 +641,13 @@ def link_or_copy_directory(src, dst):
         shutil.copytree(src, dst, copy_function=os.link)
     except OSError:
         shutil.copytree(src, dst)
+
+
+def module_from_path(path, name):
+    # Adapted from http://sayspy.blogspot.com/2011/07/how-to-import-module-from-just-file.html
+    mod = imp.new_module(name)
+    mod.__file__ = path
+    with open(path, 'r') as file:
+        code = compile(file.read(), path, 'exec', dont_inherit=True)
+    exec(code, mod.__dict__)
+    return mod
diff --git a/tests/fixtures.py b/tests/fixtures.py
index 9889f2b4..38df8b00 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -15,12 +15,16 @@ def app_client():
         conn = sqlite3.connect(filepath)
         conn.executescript(TABLES)
         os.chdir(os.path.dirname(filepath))
+        plugins_dir = os.path.join(tmpdir, 'plugins')
+        os.mkdir(plugins_dir)
+        open(os.path.join(plugins_dir, 'my_plugin.py'), 'w').write(PLUGIN)
         ds = Datasette(
             [filepath],
             page_size=50,
             max_returned_rows=100,
             sql_time_limit_ms=20,
             metadata=METADATA,
+            plugins_dir=plugins_dir,
         )
         ds.sqlite_functions.append(
             ('sleep', 1, lambda n: time.sleep(float(n))),
@@ -90,6 +94,20 @@ METADATA = {
     }
 }
 
+PLUGIN = '''
+from datasette import hookimpl
+import pint
+
+ureg = pint.UnitRegistry()
+
+
+@hookimpl
+def prepare_connection(conn):
+    def convert_units(amount, from_, to_):
+        "select convert_units(100, 'm', 'ft');"
+        return (amount * ureg(from_)).to(to_).to_tuple()[0]
+    conn.create_function('convert_units', 3, convert_units)
+'''
 
 TABLES = '''
 CREATE TABLE simple_primary_key (
diff --git a/tests/test_api.py b/tests/test_api.py
index 050359d3..21fc34bf 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -588,8 +588,10 @@ def test_row_foreign_key_tables(app_client):
 
 
 def test_unit_filters(app_client):
-    response = app_client.get('/test_tables/units.json?distance__lt=75km&frequency__gt=1kHz',
-        gather_request=False)
+    response = app_client.get(
+        '/test_tables/units.json?distance__lt=75km&frequency__gt=1kHz',
+        gather_request=False
+    )
     assert response.status == 200
     data = response.json
 
@@ -598,3 +600,11 @@ def test_unit_filters(app_client):
     assert len(data['rows']) == 1
     assert data['rows'][0][0] == 2
+
+
+def test_plugins_dir_plugin(app_client):
+    response = app_client.get(
+        "/test_tables.json?sql=select+convert_units(100%2C+'m'%2C+'ft')",
+        gather_request=False
+    )
+    assert pytest.approx(328.0839) == response.json['rows'][0][0]
diff --git a/tests/test_utils.py b/tests/test_utils.py
index d71ecc60..3b3b8946 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -195,6 +195,7 @@ def test_temporary_docker_directory_uses_hard_link():
             extra_options=None,
             branch=None,
             template_dir=None,
+            plugins_dir=None,
             static=[],
         ) as temp_docker:
             hello = os.path.join(temp_docker, 'hello')
@@ -218,6 +219,7 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
             extra_options=None,
             branch=None,
             template_dir=None,
+            plugins_dir=None,
             static=[],
         ) as temp_docker:
             hello = os.path.join(temp_docker, 'hello')
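
The tests above exercise the new option end to end: the fixture writes a plugin into a temporary plugins/ directory and test_plugins_dir_plugin then calls the SQL function it registers. As a minimal sketch of the same pattern outside the test suite (the file name plugins/my_plugin.py, the database name mydb.db and the multiply() function are illustrative assumptions, not part of this diff), a one-file plugin only needs to implement the existing prepare_connection hook:

    # plugins/my_plugin.py
    # Picked up automatically from the directory passed to --plugins-dir;
    # app.py loads it with module_from_path() and registers it via pm.register().
    from datasette import hookimpl


    @hookimpl
    def prepare_connection(conn):
        # Add an extra SQL function to every SQLite connection,
        # e.g. `select multiply(3, 4)` returns 12.
        def multiply(a, b):
            return a * b
        conn.create_function('multiply', 2, multiply)

Such a plugin would be loaded locally with `datasette serve mydb.db --plugins-dir=plugins/`. The publish and package commands copy the same directory into the generated build and pass --plugins-dir plugins/ through make_dockerfile, so the plugin also ships with deployed instances.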