Mirror of https://github.com/simonw/datasette.git
Sort imports using isort
Also added a lint test to ensure they stay sorted.
This commit is contained in:
parent cdd24f3eaa
commit ecae805d37
40 changed files with 208 additions and 141 deletions
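
For context, the lint test added in this commit (the test_isort hunk below) drives isort's check mode programmatically. The following is a minimal standalone sketch of the same check, not part of the commit itself: it assumes isort 4.x (whose SortImports API the new test relies on) is installed and that the script runs from the repository root, and its glob patterns simply mirror the ones in the test. The test itself can be selected with pytest -k test_isort.

# Sketch only: replicates the check performed by the new test_isort test below.
# Assumes isort 4.x (SortImports API) and execution from the repository root.
import io
import sys
from pathlib import Path

import isort

code_root = Path(".")  # assumption: current directory is the repository root
paths = list(code_root.glob("tests/**/*.py")) + list(code_root.glob("datasette/**/*.py"))

for path in paths:
    stdout = sys.stdout
    sys.stdout = io.StringIO()  # isort 4.x prints directly, so silence stdout
    result = isort.SortImports(str(path), check=True)
    sys.stdout = stdout
    if result.incorrectly_sorted:
        print("{} has incorrectly sorted imports".format(path))
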
@@ -1,19 +1,21 @@
-from datasette.app import Datasette
-from datasette.utils import sqlite3
-from asgiref.testing import ApplicationCommunicator
-from asgiref.sync import async_to_sync
 import itertools
 import json
 import os
 import pathlib
-import pytest
 import random
-import sys
 import string
+import sys
 import tempfile
 import time
 from urllib.parse import unquote
+
+import pytest
+from asgiref.sync import async_to_sync
+from asgiref.testing import ApplicationCommunicator
+
+from datasette.app import Datasette
+from datasette.utils import sqlite3
 
 
 class TestResponse:
     def __init__(self, status, headers, body):

@@ -1,22 +1,25 @@
+import json
+import urllib
+
+import pytest
+
 from datasette.utils import detect_json1
+
 from .fixtures import (  # noqa
+    METADATA,
     app_client,
-    app_client_no_files,
-    app_client_with_hash,
-    app_client_shorter_time_limit,
     app_client_larger_cache_size,
+    app_client_no_files,
     app_client_returned_rows_matches_page_size,
+    app_client_shorter_time_limit,
     app_client_two_attached_databases_one_immutable,
     app_client_with_cors,
     app_client_with_dot,
+    app_client_with_hash,
     generate_compound_rows,
     generate_sortable_rows,
     make_app_client,
-    METADATA,
 )
-import json
-import pytest
-import urllib
 
 
 def test_homepage(app_client):

@@ -1,8 +1,11 @@
-from .fixtures import app_client, make_app_client
-from datasette.cli import cli
-from click.testing import CliRunner
-import pathlib
 import json
+import pathlib
+
+from click.testing import CliRunner
+
+from datasette.cli import cli
+
+from .fixtures import app_client, make_app_client
 
 
 def test_inspect_cli(app_client):

@@ -1,6 +1,7 @@
-from .fixtures import app_client
 import pytest
+
+from .fixtures import app_client
 
 
 @pytest.mark.parametrize(
     "tables,exists",

@@ -1,13 +1,15 @@
 """
 Tests to ensure certain things are documented.
 """
+import re
+from pathlib import Path
+
+import pytest
 from click.testing import CliRunner
+
 from datasette import app
 from datasette.cli import cli
 from datasette.filters import Filters
-from pathlib import Path
-import pytest
-import re
 
 docs_path = Path(__file__).parent.parent / "docs"
 label_re = re.compile(r"\.\. _([^\s:]+):")

@@ -1,9 +1,12 @@
-from datasette.facets import ColumnFacet, ArrayFacet, DateFacet, ManyToManyFacet
+from collections import namedtuple
+
+import pytest
+
+from datasette.facets import ArrayFacet, ColumnFacet, DateFacet, ManyToManyFacet
 from datasette.utils import detect_json1
+
 from .fixtures import app_client  # noqa
 from .utils import MockRequest
-from collections import namedtuple
-import pytest
 
 
 @pytest.mark.asyncio

@@ -1,6 +1,7 @@
-from datasette.filters import Filters
 import pytest
+
+from datasette.filters import Filters
 
 
 @pytest.mark.parametrize(
     "args,expected_where,expected_params",

@@ -1,17 +1,19 @@
+import json
+import pathlib
+import re
+import urllib.parse
+
+import pytest
 from bs4 import BeautifulSoup as Soup
+
 from .fixtures import (  # noqa
+    METADATA,
     app_client,
     app_client_shorter_time_limit,
     app_client_two_attached_databases,
     app_client_with_hash,
     make_app_client,
-    METADATA,
 )
-import json
-import pathlib
-import pytest
-import re
-import urllib.parse
 
 
 def test_homepage(app_client_two_attached_databases):

@@ -1,7 +1,10 @@
-from click.testing import CliRunner
-from pathlib import Path
-import pytest
+import io
+import sys
+from pathlib import Path
 
+import isort
+import pytest
+from click.testing import CliRunner
 
 code_root = Path(__file__).parent.parent
 

@@ -18,3 +21,18 @@ def test_black():
         black.main, [str(code_root / "tests"), str(code_root / "datasette"), "--check"]
     )
     assert result.exit_code == 0, result.output
+
+
+@pytest.mark.parametrize(
+    "path",
+    list(code_root.glob("tests/**/*.py")) + list(code_root.glob("datasette/**/*.py")),
+)
+def test_isort(path):
+    # Have to capture stdout because isort uses print() directly
+    stdout = sys.stdout
+    sys.stdout = io.StringIO()
+    result = isort.SortImports(path, check=True)
+    assert (
+        not result.incorrectly_sorted
+    ), "{} has incorrectly sorted imports, fix with 'isort -rc tests && isort -rc datasette && black tests datasette'"
+    sys.stdout = stdout

@@ -1,11 +1,13 @@
-from bs4 import BeautifulSoup as Soup
-from .fixtures import app_client  # noqa
 import base64
 import json
 import re
-import pytest
 import urllib
+
+import pytest
+from bs4 import BeautifulSoup as Soup
+
+from .fixtures import app_client  # noqa
 
 
 def test_plugins_dir_plugin(app_client):
     response = app_client.get(

@@ -1,7 +1,9 @@
-from click.testing import CliRunner
-from datasette import cli
 from unittest import mock
+
+from click.testing import CliRunner
+
+from datasette import cli
 
 
 @mock.patch("shutil.which")
 def test_publish_cloudrun_requires_gcloud(mock_which):

@@ -1,7 +1,9 @@
-from click.testing import CliRunner
-from datasette import cli
 from unittest import mock
+
+from click.testing import CliRunner
+
+from datasette import cli
 
 
 @mock.patch("shutil.which")
 def test_publish_heroku_requires_heroku(mock_which):

@@ -1,7 +1,9 @@
-from click.testing import CliRunner
-from datasette import cli
-from unittest import mock
 import subprocess
+from unittest import mock
+
+from click.testing import CliRunner
+
+from datasette import cli
 
 
 @mock.patch("shutil.which")

@@ -2,16 +2,18 @@
 Tests for various datasette helper functions.
 """
 
-from datasette import utils
-from datasette.utils.asgi import Request
-from datasette.filters import Filters
 import json
 import os
-import pytest
 import sqlite3
 import tempfile
 from unittest.mock import patch
+
+import pytest
+
+from datasette import utils
+from datasette.filters import Filters
+from datasette.utils.asgi import Request
 
 
 @pytest.mark.parametrize(
     "path,expected",