Mirror of https://github.com/simonw/datasette.git, synced 2025-12-10 16:51:24 +01:00
results.dicts() method, closes #2414
This commit is contained in:
parent dc288056b8
commit 92c4d41ca6
6 changed files with 28 additions and 17 deletions
@@ -677,6 +677,9 @@ class Results:
         else:
             raise MultipleValues
 
+    def dicts(self):
+        return [dict(row) for row in self.rows]
+
     def __iter__(self):
         return iter(self.rows)
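The new method works because ``sqlite3.Row`` supports the mapping protocol, so ``dict(row)`` yields a column-name-to-value dictionary. A minimal standalone sketch of the same conversion outside Datasette (the table and values here are invented for illustration):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row  # rows now support keys() and mapping-style access

conn.execute("create table docs (id integer primary key, title text)")
conn.execute("insert into docs (title) values ('Example')")

rows = conn.execute("select id, title from docs").fetchall()
# The same conversion Results.dicts() performs over self.rows:
print([dict(row) for row in rows])  # [{'id': 1, 'title': 'Example'}]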
@@ -277,8 +277,7 @@ class RowUpdateView(BaseView):
             results = await resolved.db.execute(
                 resolved.sql, resolved.params, truncate=True
             )
-            rows = list(results.rows)
-            result["row"] = dict(rows[0])
+            result["row"] = results.dicts()[0]
 
             await self.ds.track_event(
                 UpdateRowEvent(
@@ -558,7 +558,7 @@ class TableInsertView(BaseView):
                     ),
                     args,
                 )
-                result["rows"] = [dict(r) for r in fetched_rows.rows]
+                result["rows"] = fetched_rows.dicts()
             else:
                 result["rows"] = rows
         # We track the number of rows requested, but do not attempt to show which were actually
@@ -1093,6 +1093,9 @@ The ``Results`` object also has the following properties and methods:
 ``.rows`` - list of ``sqlite3.Row``
     This property provides direct access to the list of rows returned by the database. You can access specific rows by index using ``results.rows[0]``.
 
+``.dicts()`` - list of ``dict``
+    This method returns a list of Python dictionaries, one for each row.
+
 ``.first()`` - row or None
     Returns the first row in the results, or ``None`` if no rows were returned.
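Taken together, the documented accessors can be exercised from plugin code through the internals API. A hedged sketch, assuming an in-memory Datasette instance; the SQL and variable names are illustrative and not part of this commit:

from datasette.app import Datasette

async def demo():
    ds = Datasette(memory=True)       # single in-memory database, named "_memory"
    db = ds.get_database("_memory")
    results = await db.execute("select 1 as pk, 'demo' as name")

    first = results.first()           # sqlite3.Row, or None if no rows were returned
    raw = results.rows                # list of sqlite3.Row
    as_dicts = results.dicts()        # [{'pk': 1, 'name': 'demo'}]
    return first, raw, as_dicts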
@@ -58,8 +58,8 @@ async def test_insert_row(ds_write, content_type):
     assert response.status_code == 201
     assert response.json()["ok"] is True
     assert response.json()["rows"] == [expected_row]
-    rows = (await ds_write.get_database("data").execute("select * from docs")).rows
-    assert dict(rows[0]) == expected_row
+    rows = (await ds_write.get_database("data").execute("select * from docs")).dicts()
+    assert rows[0] == expected_row
     # Analytics event
     event = last_event(ds_write)
     assert event.name == "insert-rows"
@@ -118,12 +118,9 @@ async def test_insert_rows(ds_write, return_rows):
     assert not event.ignore
     assert not event.replace
 
-    actual_rows = [
-        dict(r)
-        for r in (
-            await ds_write.get_database("data").execute("select * from docs")
-        ).rows
-    ]
+    actual_rows = (
+        await ds_write.get_database("data").execute("select * from docs")
+    ).dicts()
     assert len(actual_rows) == 20
     assert actual_rows == [
         {"id": i + 1, "title": "Test {}".format(i), "score": 1.0, "age": 5}
@@ -469,12 +466,10 @@ async def test_insert_ignore_replace(
     assert event.ignore == ignore
     assert event.replace == replace
 
-    actual_rows = [
-        dict(r)
-        for r in (
-            await ds_write.get_database("data").execute("select * from docs")
-        ).rows
-    ]
+    actual_rows = (
+        await ds_write.get_database("data").execute("select * from docs")
+    ).dicts()
+
     assert actual_rows == expected_rows
     assert response.json()["ok"] is True
     if should_return:
@@ -40,6 +40,17 @@ async def test_results_bool(db, expected):
     assert bool(results) is expected
 
 
+@pytest.mark.asyncio
+async def test_results_dicts(db):
+    results = await db.execute("select pk, name from roadside_attractions")
+    assert results.dicts() == [
+        {"pk": 1, "name": "The Mystery Spot"},
+        {"pk": 2, "name": "Winchester Mystery House"},
+        {"pk": 3, "name": "Burlingame Museum of PEZ Memorabilia"},
+        {"pk": 4, "name": "Bigfoot Discovery Museum"},
+    ]
+
+
 @pytest.mark.parametrize(
     "query,expected",
     [