Fix type annotation bugs and remove unused imports

This fixes issues introduced by the ruff commit e57f391a which converted
Optional[x] to x | None:

- Fixed datasette/app.py line 1024: Dict[id | str, Dict] -> Dict[int | str, Dict]
  (was using id built-in function instead of int type)
- Fixed datasette/app.py line 1074: Optional["Resource"] -> "Resource" | None
- Added 'from __future__ import annotations' for Python 3.10 compatibility
- Added TYPE_CHECKING blocks to avoid circular imports
- Removed dead code (unused variable assignments) from cli.py and views
- Removed unused imports flagged by ruff across multiple files
- Fixed test fixtures: moved app_client fixture imports to conftest.py
  (fixed 71 test errors caused by fixtures not being registered)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in: main
Author: Simon Willison
Date: 2025-10-26 15:52:36 -07:00
Commit: 5c537e0a3e
24 changed files with 45 additions and 31 deletions

View file

@@ -2,7 +2,10 @@ from __future__ import annotations
from asgi_csrf import Errors
import asyncio
from typing import Any, Dict, Iterable, List
from typing import TYPE_CHECKING, Any, Dict, Iterable, List
if TYPE_CHECKING:
from datasette.permissions import AllowedResource, Resource
import asgi_csrf
import collections
import dataclasses
@@ -117,8 +120,7 @@ from .tracer import AsgiTracer
from .plugins import pm, DEFAULT_PLUGINS, get_plugins
from .version import __version__
from .permissions import PermissionSQL
from .resources import InstanceResource, DatabaseResource, TableResource
from .resources import DatabaseResource, TableResource
app_root = Path(__file__).parent.parent
@@ -1176,7 +1178,6 @@ class Datasette:
if table.private:
print(f"{table.child} is private")
"""
from datasette.permissions import Resource
action_obj = self.actions.get(action)
if not action_obj:
@@ -1217,7 +1218,7 @@ class Datasette:
for allowed in debug_info:
print(f"{allowed.resource}: {allowed.reason}")
"""
from datasette.permissions import AllowedResource, Resource
from datasette.permissions import AllowedResource
action_obj = self.actions.get(action)
if not action_obj:
@@ -1277,7 +1278,6 @@ class Datasette:
"""
from datasette.utils.actions_sql import check_permission_for_resource
from datasette.resources import InstanceResource
import datetime
if resource is None:
resource = InstanceResource()

View file

@@ -146,7 +146,6 @@ def inspect(files, inspect_file, sqlite_extensions):
This can then be passed to "datasette --inspect-file" to speed up count
operations against immutable database files.
"""
app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
inspect_data = run_sync(lambda: inspect_(files, sqlite_extensions))
if inspect_file == "-":
sys.stdout.write(json.dumps(inspect_data, indent=2))

View file

@@ -1,3 +1,10 @@
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from datasette.app import Datasette
from datasette import hookimpl
from datasette.permissions import PermissionSQL
from datasette.utils import actor_matches_allow

View file

@@ -2,7 +2,6 @@ from abc import ABC, abstractproperty
from dataclasses import asdict, dataclass, field
from datasette.hookspecs import hookimpl
from datetime import datetime, timezone
from typing import Optional
@dataclass

View file

@@ -3,7 +3,6 @@ from datasette.resources import DatabaseResource
from datasette.views.base import DatasetteError
from datasette.utils.asgi import BadRequest
import json
import numbers
from .utils import detect_json1, escape_sqlite, path_with_removed_args

View file

@@ -1,6 +1,6 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Dict, Optional, NamedTuple
from typing import Any, Dict, NamedTuple
class Resource(ABC):

View file

@@ -50,7 +50,7 @@ def after(outcome, hook_name, hook_impls, kwargs):
results = outcome.get_result()
if not isinstance(results, list):
results = [results]
print(f"Results:", file=sys.stderr)
print("Results:", file=sys.stderr)
pprint(results, width=40, indent=4, stream=sys.stderr)

View file

@@ -20,7 +20,7 @@ def convert_specific_columns_to_json(rows, columns, json_cols):
if column in json_cols:
try:
value = json.loads(value)
except (TypeError, ValueError) as e:
except (TypeError, ValueError):
pass
new_row.append(value)
new_rows.append(new_row)

View file

@@ -1,4 +1,3 @@
import hashlib
import json
from datasette.utils import MultiParams, calculate_etag
from mimetypes import guess_type

View file

@@ -9,7 +9,6 @@ import os
import re
import sqlite_utils
import textwrap
from typing import List
from datasette.events import AlterTableEvent, CreateTableEvent, InsertRowsEvent
from datasette.database import QueryInterrupted
@@ -71,7 +70,6 @@ class DatabaseView(View):
metadata = await datasette.get_database_metadata(database)
# Get all tables/views this actor can see in bulk with private flag
from datasette.resources import TableResource
allowed_tables = await datasette.allowed_resources(
"view-table", request.actor, parent=database, include_is_private=True
@@ -344,7 +342,6 @@ async def get_tables(datasette, request, db, allowed_dict):
allowed_dict: Dict mapping table name -> Resource object with .private attribute
"""
tables = []
database = db.name
table_counts = await db.table_counts(100)
hidden_table_names = set(await db.hidden_table_names())
all_foreign_keys = await db.get_all_foreign_keys()
@@ -512,7 +509,6 @@ class QueryView(View):
database = db.name
# Get all tables/views this actor can see in bulk with private flag
from datasette.resources import TableResource
allowed_tables = await datasette.allowed_resources(
"view-table", request.actor, parent=database, include_is_private=True

View file

@@ -1,6 +1,5 @@
import json
from datasette import Forbidden
from datasette.plugins import pm
from datasette.utils import (
add_cors_headers,

View file

@@ -247,7 +247,7 @@ class RowUpdateView(BaseView):
if not isinstance(data, dict):
return _error(["JSON must be a dictionary"])
if not "update" in data or not isinstance(data["update"], dict):
if "update" not in data or not isinstance(data["update"], dict):
return _error(["JSON must contain an update dictionary"])
invalid_keys = set(data.keys()) - {"update", "return", "alter"}

View file

@@ -166,7 +166,6 @@ async def display_columns_and_rows(
column_details = {
col.name: col for col in await db.table_column_details(table_name)
}
table_config = await datasette.table_config(database_name, table_name)
pks = await db.primary_keys(table_name)
pks_for_display = pks
if not pks_for_display: