Sort imports using isort

Also added a lint test to ensure they stay sorted.
Simon Willison 2019-06-23 22:04:00 -07:00
commit ecae805d37
40 changed files with 208 additions and 141 deletions

View file

@@ -1,3 +1,6 @@
[settings]
multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0
use_parentheses=True
line_length=88
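These settings make isort agree with Black's formatting (trailing commas, 88-character line length, vertical hanging indents). As a minimal sketch of exercising them from Python, assuming isort 4.3.x's SortImports API (the same API the new lint test later in this commit uses) and an illustrative file path:

import isort

# check=True only reports problems; isort picks up the [settings] block above
# automatically when invoked from the project root.
result = isort.SortImports("datasette/app.py", check=True)
if result.incorrectly_sorted:
    # Without check=True, SortImports rewrites the file in place.
    isort.SortImports("datasette/app.py")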

View file

@@ -1,3 +1,4 @@
from datasette.version import __version_info__, __version__ # noqa
from datasette.version import __version__, __version_info__ # noqa
from .hookspecs import hookimpl # noqa
from .hookspecs import hookspec # noqa

View file

@@ -10,17 +10,13 @@ from concurrent import futures
from pathlib import Path
import click
from markupsafe import Markup
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader
from markupsafe import Markup
from .views.base import DatasetteError, ureg, AsgiRouter
from .views.database import DatabaseDownload, DatabaseView
from .views.index import IndexView
from .views.special import JsonDataView
from .views.table import RowView, TableView
from .renderer import json_renderer
from .database import Database
from .plugins import DEFAULT_PLUGINS, pm
from .renderer import json_renderer
from .tracer import AsgiTracer, trace
from .utils import (
QueryInterrupted,
Results,
@@ -35,15 +31,18 @@ from .utils import (
from .utils.asgi import (
AsgiLifespan,
NotFound,
asgi_static,
asgi_send,
asgi_send_html,
asgi_send_json,
asgi_send_redirect,
asgi_static,
)
from .tracer import trace, AsgiTracer
from .plugins import pm, DEFAULT_PLUGINS
from .version import __version__
from .views.base import AsgiRouter, DatasetteError, ureg
from .views.database import DatabaseDownload, DatabaseView
from .views.index import IndexView
from .views.special import JsonDataView
from .views.table import RowView, TableView
app_root = Path(__file__).parent.parent

View file

@@ -1,19 +1,21 @@
import asyncio
import uvicorn
import click
from click import formatting
from click_default_group import DefaultGroup
import json
import os
import shutil
from subprocess import call
import sys
from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS, pm
from subprocess import call
import click
import uvicorn
from click import formatting
from click_default_group import DefaultGroup
from .app import CONFIG_OPTIONS, DEFAULT_CONFIG, Datasette, pm
from .utils import (
temporary_docker_directory,
value_as_boolean,
StaticMount,
ValueAsBooleanError,
temporary_docker_directory,
value_as_boolean,
)

View file

@@ -1,5 +1,6 @@
from pathlib import Path
from .inspect import inspect_hash
from .utils import (
QueryInterrupted,
detect_fts,
@@ -10,7 +11,6 @@ from .utils import (
sqlite3,
table_columns,
)
from .inspect import inspect_hash
class Database:

View file

@@ -1,14 +1,15 @@
import json
import urllib
import re
import urllib
from datasette import hookimpl
from datasette.utils import (
InvalidSql,
QueryInterrupted,
detect_json1,
escape_sqlite,
path_with_added_args,
path_with_removed_args,
detect_json1,
QueryInterrupted,
InvalidSql,
sqlite3,
)

View file

@@ -1,5 +1,4 @@
from pluggy import HookimplMarker
from pluggy import HookspecMarker
from pluggy import HookimplMarker, HookspecMarker
hookspec = HookspecMarker("datasette")
hookimpl = HookimplMarker("datasette")

View file

@@ -1,16 +1,15 @@
import hashlib
from .utils import (
detect_spatialite,
detect_fts,
detect_primary_keys,
detect_spatialite,
escape_sqlite,
get_all_foreign_keys,
table_columns,
sqlite3,
table_columns,
)
HASH_BLOCK_SIZE = 1024 * 1024

View file

@@ -1,6 +1,8 @@
import importlib
import pluggy
import sys
import pluggy
from . import hookspecs
DEFAULT_PLUGINS = (

View file

@@ -1,13 +1,15 @@
from datasette import hookimpl
import click
import json
from subprocess import check_call, check_output
import click
from datasette import hookimpl
from ..utils import temporary_docker_directory
from .common import (
add_common_publish_arguments_and_options,
fail_if_publish_binary_not_installed,
)
from ..utils import temporary_docker_directory
@hookimpl

View file

@@ -1,8 +1,10 @@
from ..utils import StaticMount
import click
import shutil
import sys
import click
from ..utils import StaticMount
def add_common_publish_arguments_and_options(subcommand):
for decorator in reversed(

View file

@@ -1,17 +1,19 @@
from contextlib import contextmanager
from datasette import hookimpl
import click
import json
import os
import shlex
from subprocess import call, check_output
import tempfile
from contextlib import contextmanager
from subprocess import call, check_output
import click
from datasette import hookimpl
from datasette.utils import link_or_copy, link_or_copy_directory
from .common import (
add_common_publish_arguments_and_options,
fail_if_publish_binary_not_installed,
)
from datasette.utils import link_or_copy, link_or_copy_directory
@hookimpl

View file

@@ -1,13 +1,15 @@
from datasette import hookimpl
import click
import json
from subprocess import run, PIPE
from subprocess import PIPE, run
import click
from datasette import hookimpl
from ..utils import temporary_docker_directory
from .common import (
add_common_publish_arguments_and_options,
fail_if_publish_binary_not_installed,
)
from ..utils import temporary_docker_directory
@hookimpl

View file

@@ -1,9 +1,10 @@
import json
from datasette.utils import (
value_as_boolean,
remove_infinites,
CustomJSONEncoder,
path_from_row_pks,
remove_infinites,
value_as_boolean,
)

View file

@@ -1,8 +1,8 @@
import asyncio
from contextlib import contextmanager
import time
import json
import time
import traceback
from contextlib import contextmanager
tracers = {}

View file

@@ -1,19 +1,20 @@
from contextlib import contextmanager
from collections import OrderedDict
import base64
import click
import hashlib
import imp
import json
import numbers
import os
import pkg_resources
import re
import shlex
import shutil
import tempfile
import time
import shutil
import urllib
import numbers
from collections import OrderedDict
from contextlib import contextmanager
import click
import pkg_resources
try:
import pysqlite3 as sqlite3

View file

@@ -1,12 +1,14 @@
import json
from datasette.utils import RequestParameters
from mimetypes import guess_type
from urllib.parse import parse_qs, urlunparse
from pathlib import Path
from html import escape
import re
from html import escape
from mimetypes import guess_type
from pathlib import Path
from urllib.parse import parse_qs, urlunparse
import aiofiles
from datasette.utils import RequestParameters
class NotFound(Exception):
pass

View file

@@ -4,32 +4,31 @@ import itertools
import re
import time
import urllib
from html import escape
import jinja2
import pint
from html import escape
from datasette import __version__
from datasette.plugins import pm
from datasette.utils import (
QueryInterrupted,
InvalidSql,
LimitedWriter,
QueryInterrupted,
format_bytes,
is_url,
path_with_added_args,
path_with_removed_args,
path_with_format,
path_with_removed_args,
resolve_table_and_format,
sqlite3,
to_css_class,
)
from datasette.utils.asgi import (
AsgiStream,
AsgiWriter,
AsgiRouter,
AsgiStream,
AsgiView,
AsgiWriter,
NotFound,
Response,
)

View file

@@ -7,7 +7,6 @@ from datasette.version import __version__
from .base import BaseView
# Truncate table list on homepage at:
TRUNCATE_AT = 5

View file

@@ -1,5 +1,7 @@
import json
from datasette.utils.asgi import Response
from .base import BaseView

View file

@@ -1,9 +1,10 @@
import urllib
import itertools
import json
import urllib
import jinja2
from datasette.filters import Filters
from datasette.plugins import pm
from datasette.utils import (
CustomRow,
@@ -24,8 +25,8 @@ from datasette.utils import (
value_as_boolean,
)
from datasette.utils.asgi import NotFound
from datasette.filters import Filters
from .base import DataView, DatasetteError, ureg
from .base import DatasetteError, DataView, ureg
LINK_WITH_LABEL = (
'<a href="/{database}/{table}/{link_id}">{label}</a>&nbsp;<em>{id}</em>'

View file

@@ -164,6 +164,3 @@ texinfo_documents = [
author, 'Datasette', 'One line description of project.',
'Miscellaneous'),
]

View file

@@ -1,7 +1,8 @@
from setuptools import setup, find_packages
import os
import sys
from setuptools import find_packages, setup
import versioneer
@@ -62,6 +63,7 @@ setup(
"aiohttp==3.5.3",
"beautifulsoup4==4.6.1",
"asgiref==3.1.2",
"isort==4.3.20",
]
+ maybe_black
},

View file

@@ -1,19 +1,21 @@
from datasette.app import Datasette
from datasette.utils import sqlite3
from asgiref.testing import ApplicationCommunicator
from asgiref.sync import async_to_sync
import itertools
import json
import os
import pathlib
import pytest
import random
import sys
import string
import sys
import tempfile
import time
from urllib.parse import unquote
import pytest
from asgiref.sync import async_to_sync
from asgiref.testing import ApplicationCommunicator
from datasette.app import Datasette
from datasette.utils import sqlite3
class TestResponse:
def __init__(self, status, headers, body):

View file

@@ -1,22 +1,25 @@
import json
import urllib
import pytest
from datasette.utils import detect_json1
from .fixtures import ( # noqa
METADATA,
app_client,
app_client_no_files,
app_client_with_hash,
app_client_shorter_time_limit,
app_client_larger_cache_size,
app_client_no_files,
app_client_returned_rows_matches_page_size,
app_client_shorter_time_limit,
app_client_two_attached_databases_one_immutable,
app_client_with_cors,
app_client_with_dot,
app_client_with_hash,
generate_compound_rows,
generate_sortable_rows,
make_app_client,
METADATA,
)
import json
import pytest
import urllib
def test_homepage(app_client):

View file

@@ -1,8 +1,11 @@
from .fixtures import app_client, make_app_client
from datasette.cli import cli
from click.testing import CliRunner
import pathlib
import json
import pathlib
from click.testing import CliRunner
from datasette.cli import cli
from .fixtures import app_client, make_app_client
def test_inspect_cli(app_client):

View file

@@ -1,6 +1,7 @@
from .fixtures import app_client
import pytest
from .fixtures import app_client
@pytest.mark.parametrize(
"tables,exists",

View file

@@ -1,13 +1,15 @@
"""
Tests to ensure certain things are documented.
"""
import re
from pathlib import Path
import pytest
from click.testing import CliRunner
from datasette import app
from datasette.cli import cli
from datasette.filters import Filters
from pathlib import Path
import pytest
import re
docs_path = Path(__file__).parent.parent / "docs"
label_re = re.compile(r"\.\. _([^\s:]+):")

View file

@@ -1,9 +1,12 @@
from datasette.facets import ColumnFacet, ArrayFacet, DateFacet, ManyToManyFacet
from collections import namedtuple
import pytest
from datasette.facets import ArrayFacet, ColumnFacet, DateFacet, ManyToManyFacet
from datasette.utils import detect_json1
from .fixtures import app_client # noqa
from .utils import MockRequest
from collections import namedtuple
import pytest
@pytest.mark.asyncio

View file

@@ -1,6 +1,7 @@
from datasette.filters import Filters
import pytest
from datasette.filters import Filters
@pytest.mark.parametrize(
"args,expected_where,expected_params",

View file

@@ -1,17 +1,19 @@
import json
import pathlib
import re
import urllib.parse
import pytest
from bs4 import BeautifulSoup as Soup
from .fixtures import ( # noqa
METADATA,
app_client,
app_client_shorter_time_limit,
app_client_two_attached_databases,
app_client_with_hash,
make_app_client,
METADATA,
)
import json
import pathlib
import pytest
import re
import urllib.parse
def test_homepage(app_client_two_attached_databases):

View file

@@ -1,7 +1,10 @@
from click.testing import CliRunner
from pathlib import Path
import pytest
import io
import sys
from pathlib import Path
import isort
import pytest
from click.testing import CliRunner
code_root = Path(__file__).parent.parent
@@ -18,3 +21,18 @@ def test_black():
black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"]
)
assert result.exit_code == 0, result.output
@pytest.mark.parametrize(
"path",
list(code_root.glob("tests/**/*.py")) + list(code_root.glob("datasette/**/*.py")),
)
def test_isort(path):
# Have to capture stdout because isort uses print() directly
stdout = sys.stdout
sys.stdout = io.StringIO()
result = isort.SortImports(path, check=True)
assert (
not result.incorrectly_sorted
), "{} has incorrectly sorted imports, fix with 'isort -rc tests && isort -rc datasette && black tests datasette'"
sys.stdout = stdout
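As a usage note (a sketch assuming a standard pytest setup for this repository): pytest -k isort runs just this parametrized check, and its failure message already spells out the fix, isort -rc tests && isort -rc datasette && black tests datasette.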

View file

@@ -1,11 +1,13 @@
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client # noqa
import base64
import json
import re
import pytest
import urllib
import pytest
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client # noqa
def test_plugins_dir_plugin(app_client):
response = app_client.get(

View file

@@ -1,7 +1,9 @@
from click.testing import CliRunner
from datasette import cli
from unittest import mock
from click.testing import CliRunner
from datasette import cli
@mock.patch("shutil.which")
def test_publish_cloudrun_requires_gcloud(mock_which):

View file

@@ -1,7 +1,9 @@
from click.testing import CliRunner
from datasette import cli
from unittest import mock
from click.testing import CliRunner
from datasette import cli
@mock.patch("shutil.which")
def test_publish_heroku_requires_heroku(mock_which):

View file

@@ -1,7 +1,9 @@
from click.testing import CliRunner
from datasette import cli
from unittest import mock
import subprocess
from unittest import mock
from click.testing import CliRunner
from datasette import cli
@mock.patch("shutil.which")

View file

@@ -2,16 +2,18 @@
Tests for various datasette helper functions.
"""
from datasette import utils
from datasette.utils.asgi import Request
from datasette.filters import Filters
import json
import os
import pytest
import sqlite3
import tempfile
from unittest.mock import patch
import pytest
from datasette import utils
from datasette.filters import Filters
from datasette.utils.asgi import Request
@pytest.mark.parametrize(
"path,expected",

View file

@@ -1,7 +1,9 @@
from click.testing import CliRunner
from datasette.cli import cli
from pathlib import Path
from click.testing import CliRunner
from datasette.cli import cli
docs_path = Path(__file__).parent / "docs"
includes = (

View file

@@ -277,10 +277,7 @@ https://creativecommons.org/publicdomain/zero/1.0/ .
"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
@@ -288,6 +285,11 @@ import re
import subprocess
import sys
try:
import configparser
except ImportError:
import ConfigParser as configparser
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""