Sort imports using isort

Also added lint test to ensure they stay sorted.
This commit is contained in:
Simon Willison 2019-06-23 22:04:00 -07:00
commit ecae805d37
40 changed files with 208 additions and 141 deletions

View file

@ -1,3 +1,6 @@
[settings] [settings]
multi_line_output=3 multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0
use_parentheses=True
line_length=88

View file

@ -1,3 +1,4 @@
from datasette.version import __version_info__, __version__ # noqa from datasette.version import __version__, __version_info__ # noqa
from .hookspecs import hookimpl # noqa from .hookspecs import hookimpl # noqa
from .hookspecs import hookspec # noqa from .hookspecs import hookspec # noqa

View file

@ -10,17 +10,13 @@ from concurrent import futures
from pathlib import Path from pathlib import Path
import click import click
from markupsafe import Markup
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader
from markupsafe import Markup
from .views.base import DatasetteError, ureg, AsgiRouter
from .views.database import DatabaseDownload, DatabaseView
from .views.index import IndexView
from .views.special import JsonDataView
from .views.table import RowView, TableView
from .renderer import json_renderer
from .database import Database from .database import Database
from .plugins import DEFAULT_PLUGINS, pm
from .renderer import json_renderer
from .tracer import AsgiTracer, trace
from .utils import ( from .utils import (
QueryInterrupted, QueryInterrupted,
Results, Results,
@ -35,15 +31,18 @@ from .utils import (
from .utils.asgi import ( from .utils.asgi import (
AsgiLifespan, AsgiLifespan,
NotFound, NotFound,
asgi_static,
asgi_send, asgi_send,
asgi_send_html, asgi_send_html,
asgi_send_json, asgi_send_json,
asgi_send_redirect, asgi_send_redirect,
asgi_static,
) )
from .tracer import trace, AsgiTracer
from .plugins import pm, DEFAULT_PLUGINS
from .version import __version__ from .version import __version__
from .views.base import AsgiRouter, DatasetteError, ureg
from .views.database import DatabaseDownload, DatabaseView
from .views.index import IndexView
from .views.special import JsonDataView
from .views.table import RowView, TableView
app_root = Path(__file__).parent.parent app_root = Path(__file__).parent.parent

View file

@ -1,19 +1,21 @@
import asyncio import asyncio
import uvicorn
import click
from click import formatting
from click_default_group import DefaultGroup
import json import json
import os import os
import shutil import shutil
from subprocess import call
import sys import sys
from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS, pm from subprocess import call
import click
import uvicorn
from click import formatting
from click_default_group import DefaultGroup
from .app import CONFIG_OPTIONS, DEFAULT_CONFIG, Datasette, pm
from .utils import ( from .utils import (
temporary_docker_directory,
value_as_boolean,
StaticMount, StaticMount,
ValueAsBooleanError, ValueAsBooleanError,
temporary_docker_directory,
value_as_boolean,
) )

View file

@ -1,5 +1,6 @@
from pathlib import Path from pathlib import Path
from .inspect import inspect_hash
from .utils import ( from .utils import (
QueryInterrupted, QueryInterrupted,
detect_fts, detect_fts,
@ -10,7 +11,6 @@ from .utils import (
sqlite3, sqlite3,
table_columns, table_columns,
) )
from .inspect import inspect_hash
class Database: class Database:

View file

@ -1,14 +1,15 @@
import json import json
import urllib
import re import re
import urllib
from datasette import hookimpl from datasette import hookimpl
from datasette.utils import ( from datasette.utils import (
InvalidSql,
QueryInterrupted,
detect_json1,
escape_sqlite, escape_sqlite,
path_with_added_args, path_with_added_args,
path_with_removed_args, path_with_removed_args,
detect_json1,
QueryInterrupted,
InvalidSql,
sqlite3, sqlite3,
) )

View file

@ -1,5 +1,4 @@
from pluggy import HookimplMarker from pluggy import HookimplMarker, HookspecMarker
from pluggy import HookspecMarker
hookspec = HookspecMarker("datasette") hookspec = HookspecMarker("datasette")
hookimpl = HookimplMarker("datasette") hookimpl = HookimplMarker("datasette")

View file

@ -1,16 +1,15 @@
import hashlib import hashlib
from .utils import ( from .utils import (
detect_spatialite,
detect_fts, detect_fts,
detect_primary_keys, detect_primary_keys,
detect_spatialite,
escape_sqlite, escape_sqlite,
get_all_foreign_keys, get_all_foreign_keys,
table_columns,
sqlite3, sqlite3,
table_columns,
) )
HASH_BLOCK_SIZE = 1024 * 1024 HASH_BLOCK_SIZE = 1024 * 1024

View file

@ -1,6 +1,8 @@
import importlib import importlib
import pluggy
import sys import sys
import pluggy
from . import hookspecs from . import hookspecs
DEFAULT_PLUGINS = ( DEFAULT_PLUGINS = (

View file

@ -1,13 +1,15 @@
from datasette import hookimpl
import click
import json import json
from subprocess import check_call, check_output from subprocess import check_call, check_output
import click
from datasette import hookimpl
from ..utils import temporary_docker_directory
from .common import ( from .common import (
add_common_publish_arguments_and_options, add_common_publish_arguments_and_options,
fail_if_publish_binary_not_installed, fail_if_publish_binary_not_installed,
) )
from ..utils import temporary_docker_directory
@hookimpl @hookimpl

View file

@ -1,8 +1,10 @@
from ..utils import StaticMount
import click
import shutil import shutil
import sys import sys
import click
from ..utils import StaticMount
def add_common_publish_arguments_and_options(subcommand): def add_common_publish_arguments_and_options(subcommand):
for decorator in reversed( for decorator in reversed(

View file

@ -1,17 +1,19 @@
from contextlib import contextmanager
from datasette import hookimpl
import click
import json import json
import os import os
import shlex import shlex
from subprocess import call, check_output
import tempfile import tempfile
from contextlib import contextmanager
from subprocess import call, check_output
import click
from datasette import hookimpl
from datasette.utils import link_or_copy, link_or_copy_directory
from .common import ( from .common import (
add_common_publish_arguments_and_options, add_common_publish_arguments_and_options,
fail_if_publish_binary_not_installed, fail_if_publish_binary_not_installed,
) )
from datasette.utils import link_or_copy, link_or_copy_directory
@hookimpl @hookimpl

View file

@ -1,13 +1,15 @@
from datasette import hookimpl
import click
import json import json
from subprocess import run, PIPE from subprocess import PIPE, run
import click
from datasette import hookimpl
from ..utils import temporary_docker_directory
from .common import ( from .common import (
add_common_publish_arguments_and_options, add_common_publish_arguments_and_options,
fail_if_publish_binary_not_installed, fail_if_publish_binary_not_installed,
) )
from ..utils import temporary_docker_directory
@hookimpl @hookimpl

View file

@ -1,9 +1,10 @@
import json import json
from datasette.utils import ( from datasette.utils import (
value_as_boolean,
remove_infinites,
CustomJSONEncoder, CustomJSONEncoder,
path_from_row_pks, path_from_row_pks,
remove_infinites,
value_as_boolean,
) )

View file

@ -1,8 +1,8 @@
import asyncio import asyncio
from contextlib import contextmanager
import time
import json import json
import time
import traceback import traceback
from contextlib import contextmanager
tracers = {} tracers = {}

View file

@ -1,19 +1,20 @@
from contextlib import contextmanager
from collections import OrderedDict
import base64 import base64
import click
import hashlib import hashlib
import imp import imp
import json import json
import numbers
import os import os
import pkg_resources
import re import re
import shlex import shlex
import shutil
import tempfile import tempfile
import time import time
import shutil
import urllib import urllib
import numbers from collections import OrderedDict
from contextlib import contextmanager
import click
import pkg_resources
try: try:
import pysqlite3 as sqlite3 import pysqlite3 as sqlite3

View file

@ -1,12 +1,14 @@
import json import json
from datasette.utils import RequestParameters
from mimetypes import guess_type
from urllib.parse import parse_qs, urlunparse
from pathlib import Path
from html import escape
import re import re
from html import escape
from mimetypes import guess_type
from pathlib import Path
from urllib.parse import parse_qs, urlunparse
import aiofiles import aiofiles
from datasette.utils import RequestParameters
class NotFound(Exception): class NotFound(Exception):
pass pass

View file

@ -4,32 +4,31 @@ import itertools
import re import re
import time import time
import urllib import urllib
from html import escape
import jinja2 import jinja2
import pint import pint
from html import escape
from datasette import __version__ from datasette import __version__
from datasette.plugins import pm from datasette.plugins import pm
from datasette.utils import ( from datasette.utils import (
QueryInterrupted,
InvalidSql, InvalidSql,
LimitedWriter, LimitedWriter,
QueryInterrupted,
format_bytes, format_bytes,
is_url, is_url,
path_with_added_args, path_with_added_args,
path_with_removed_args,
path_with_format, path_with_format,
path_with_removed_args,
resolve_table_and_format, resolve_table_and_format,
sqlite3, sqlite3,
to_css_class, to_css_class,
) )
from datasette.utils.asgi import ( from datasette.utils.asgi import (
AsgiStream,
AsgiWriter,
AsgiRouter, AsgiRouter,
AsgiStream,
AsgiView, AsgiView,
AsgiWriter,
NotFound, NotFound,
Response, Response,
) )

View file

@ -7,7 +7,6 @@ from datasette.version import __version__
from .base import BaseView from .base import BaseView
# Truncate table list on homepage at: # Truncate table list on homepage at:
TRUNCATE_AT = 5 TRUNCATE_AT = 5

View file

@ -1,5 +1,7 @@
import json import json
from datasette.utils.asgi import Response from datasette.utils.asgi import Response
from .base import BaseView from .base import BaseView

View file

@ -1,9 +1,10 @@
import urllib
import itertools import itertools
import json import json
import urllib
import jinja2 import jinja2
from datasette.filters import Filters
from datasette.plugins import pm from datasette.plugins import pm
from datasette.utils import ( from datasette.utils import (
CustomRow, CustomRow,
@ -24,8 +25,8 @@ from datasette.utils import (
value_as_boolean, value_as_boolean,
) )
from datasette.utils.asgi import NotFound from datasette.utils.asgi import NotFound
from datasette.filters import Filters
from .base import DataView, DatasetteError, ureg from .base import DatasetteError, DataView, ureg
LINK_WITH_LABEL = ( LINK_WITH_LABEL = (
'<a href="/{database}/{table}/{link_id}">{label}</a>&nbsp;<em>{id}</em>' '<a href="/{database}/{table}/{link_id}">{label}</a>&nbsp;<em>{id}</em>'

View file

@ -164,6 +164,3 @@ texinfo_documents = [
author, 'Datasette', 'One line description of project.', author, 'Datasette', 'One line description of project.',
'Miscellaneous'), 'Miscellaneous'),
] ]

View file

@ -1,7 +1,8 @@
from setuptools import setup, find_packages
import os import os
import sys import sys
from setuptools import find_packages, setup
import versioneer import versioneer
@ -62,6 +63,7 @@ setup(
"aiohttp==3.5.3", "aiohttp==3.5.3",
"beautifulsoup4==4.6.1", "beautifulsoup4==4.6.1",
"asgiref==3.1.2", "asgiref==3.1.2",
"isort==4.3.20",
] ]
+ maybe_black + maybe_black
}, },

View file

@ -1,19 +1,21 @@
from datasette.app import Datasette
from datasette.utils import sqlite3
from asgiref.testing import ApplicationCommunicator
from asgiref.sync import async_to_sync
import itertools import itertools
import json import json
import os import os
import pathlib import pathlib
import pytest
import random import random
import sys
import string import string
import sys
import tempfile import tempfile
import time import time
from urllib.parse import unquote from urllib.parse import unquote
import pytest
from asgiref.sync import async_to_sync
from asgiref.testing import ApplicationCommunicator
from datasette.app import Datasette
from datasette.utils import sqlite3
class TestResponse: class TestResponse:
def __init__(self, status, headers, body): def __init__(self, status, headers, body):

View file

@ -1,22 +1,25 @@
import json
import urllib
import pytest
from datasette.utils import detect_json1 from datasette.utils import detect_json1
from .fixtures import ( # noqa from .fixtures import ( # noqa
METADATA,
app_client, app_client,
app_client_no_files,
app_client_with_hash,
app_client_shorter_time_limit,
app_client_larger_cache_size, app_client_larger_cache_size,
app_client_no_files,
app_client_returned_rows_matches_page_size, app_client_returned_rows_matches_page_size,
app_client_shorter_time_limit,
app_client_two_attached_databases_one_immutable, app_client_two_attached_databases_one_immutable,
app_client_with_cors, app_client_with_cors,
app_client_with_dot, app_client_with_dot,
app_client_with_hash,
generate_compound_rows, generate_compound_rows,
generate_sortable_rows, generate_sortable_rows,
make_app_client, make_app_client,
METADATA,
) )
import json
import pytest
import urllib
def test_homepage(app_client): def test_homepage(app_client):

View file

@ -1,8 +1,11 @@
from .fixtures import app_client, make_app_client
from datasette.cli import cli
from click.testing import CliRunner
import pathlib
import json import json
import pathlib
from click.testing import CliRunner
from datasette.cli import cli
from .fixtures import app_client, make_app_client
def test_inspect_cli(app_client): def test_inspect_cli(app_client):

View file

@ -1,6 +1,7 @@
from .fixtures import app_client
import pytest import pytest
from .fixtures import app_client
@pytest.mark.parametrize( @pytest.mark.parametrize(
"tables,exists", "tables,exists",

View file

@ -1,13 +1,15 @@
""" """
Tests to ensure certain things are documented. Tests to ensure certain things are documented.
""" """
import re
from pathlib import Path
import pytest
from click.testing import CliRunner from click.testing import CliRunner
from datasette import app from datasette import app
from datasette.cli import cli from datasette.cli import cli
from datasette.filters import Filters from datasette.filters import Filters
from pathlib import Path
import pytest
import re
docs_path = Path(__file__).parent.parent / "docs" docs_path = Path(__file__).parent.parent / "docs"
label_re = re.compile(r"\.\. _([^\s:]+):") label_re = re.compile(r"\.\. _([^\s:]+):")

View file

@ -1,9 +1,12 @@
from datasette.facets import ColumnFacet, ArrayFacet, DateFacet, ManyToManyFacet from collections import namedtuple
import pytest
from datasette.facets import ArrayFacet, ColumnFacet, DateFacet, ManyToManyFacet
from datasette.utils import detect_json1 from datasette.utils import detect_json1
from .fixtures import app_client # noqa from .fixtures import app_client # noqa
from .utils import MockRequest from .utils import MockRequest
from collections import namedtuple
import pytest
@pytest.mark.asyncio @pytest.mark.asyncio

View file

@ -1,6 +1,7 @@
from datasette.filters import Filters
import pytest import pytest
from datasette.filters import Filters
@pytest.mark.parametrize( @pytest.mark.parametrize(
"args,expected_where,expected_params", "args,expected_where,expected_params",

View file

@ -1,17 +1,19 @@
import json
import pathlib
import re
import urllib.parse
import pytest
from bs4 import BeautifulSoup as Soup from bs4 import BeautifulSoup as Soup
from .fixtures import ( # noqa from .fixtures import ( # noqa
METADATA,
app_client, app_client,
app_client_shorter_time_limit, app_client_shorter_time_limit,
app_client_two_attached_databases, app_client_two_attached_databases,
app_client_with_hash, app_client_with_hash,
make_app_client, make_app_client,
METADATA,
) )
import json
import pathlib
import pytest
import re
import urllib.parse
def test_homepage(app_client_two_attached_databases): def test_homepage(app_client_two_attached_databases):

View file

@ -1,7 +1,10 @@
from click.testing import CliRunner import io
from pathlib import Path
import pytest
import sys import sys
from pathlib import Path
import isort
import pytest
from click.testing import CliRunner
code_root = Path(__file__).parent.parent code_root = Path(__file__).parent.parent
@ -18,3 +21,18 @@ def test_black():
black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"] black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"]
) )
assert result.exit_code == 0, result.output assert result.exit_code == 0, result.output
@pytest.mark.parametrize(
"path",
list(code_root.glob("tests/**/*.py")) + list(code_root.glob("datasette/**/*.py")),
)
def test_isort(path):
# Have to capture stdout because isort uses print() directly
stdout = sys.stdout
sys.stdout = io.StringIO()
result = isort.SortImports(path, check=True)
assert (
not result.incorrectly_sorted
), "{} has incorrectly sorted imports, fix with 'isort -rc tests && isort -rc datasette && black tests datasette'"
sys.stdout = stdout

View file

@ -1,11 +1,13 @@
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client # noqa
import base64 import base64
import json import json
import re import re
import pytest
import urllib import urllib
import pytest
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client # noqa
def test_plugins_dir_plugin(app_client): def test_plugins_dir_plugin(app_client):
response = app_client.get( response = app_client.get(

View file

@ -1,7 +1,9 @@
from click.testing import CliRunner
from datasette import cli
from unittest import mock from unittest import mock
from click.testing import CliRunner
from datasette import cli
@mock.patch("shutil.which") @mock.patch("shutil.which")
def test_publish_cloudrun_requires_gcloud(mock_which): def test_publish_cloudrun_requires_gcloud(mock_which):

View file

@ -1,7 +1,9 @@
from click.testing import CliRunner
from datasette import cli
from unittest import mock from unittest import mock
from click.testing import CliRunner
from datasette import cli
@mock.patch("shutil.which") @mock.patch("shutil.which")
def test_publish_heroku_requires_heroku(mock_which): def test_publish_heroku_requires_heroku(mock_which):

View file

@ -1,7 +1,9 @@
from click.testing import CliRunner
from datasette import cli
from unittest import mock
import subprocess import subprocess
from unittest import mock
from click.testing import CliRunner
from datasette import cli
@mock.patch("shutil.which") @mock.patch("shutil.which")

View file

@ -2,16 +2,18 @@
Tests for various datasette helper functions. Tests for various datasette helper functions.
""" """
from datasette import utils
from datasette.utils.asgi import Request
from datasette.filters import Filters
import json import json
import os import os
import pytest
import sqlite3 import sqlite3
import tempfile import tempfile
from unittest.mock import patch from unittest.mock import patch
import pytest
from datasette import utils
from datasette.filters import Filters
from datasette.utils.asgi import Request
@pytest.mark.parametrize( @pytest.mark.parametrize(
"path,expected", "path,expected",

View file

@ -1,7 +1,9 @@
from click.testing import CliRunner
from datasette.cli import cli
from pathlib import Path from pathlib import Path
from click.testing import CliRunner
from datasette.cli import cli
docs_path = Path(__file__).parent / "docs" docs_path = Path(__file__).parent / "docs"
includes = ( includes = (

View file

@ -277,10 +277,7 @@ https://creativecommons.org/publicdomain/zero/1.0/ .
""" """
from __future__ import print_function from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno import errno
import json import json
import os import os
@ -288,6 +285,11 @@ import re
import subprocess import subprocess
import sys import sys
try:
import configparser
except ImportError:
import ConfigParser as configparser
class VersioneerConfig: class VersioneerConfig:
"""Container for Versioneer configuration parameters.""" """Container for Versioneer configuration parameters."""