Mirror of https://github.com/simonw/datasette.git, synced 2025-12-10 16:51:24 +01:00
Compare commits
3 commits
| Author | SHA1 | Date |
|---|---|---|
| | bce6aee485 | |
| | aa5988cb63 | |
| | 3840070ddd | |
2 changed files with 28 additions and 28 deletions
@@ -63,6 +63,26 @@ class Row:
 
 
 class RowTableShared(DataView):
+    async def columns_to_select(self, db, table, request):
+        table_columns = await db.table_columns(table)
+        if "_col" in request.args and "_nocol" in request.args:
+            raise DatasetteError("Cannot use _col and _nocol at the same time")
+        if "_col" in request.args:
+            new_columns = []
+            for column in request.args["_col"]:
+                if column not in table_columns:
+                    raise DatasetteError("_col={} is an invalid column".format(column))
+                new_columns.append(column)
+            return new_columns
+        elif "_nocol" in request.args:
+            # Return all columns EXCEPT these
+            bad_columns = [column for column in request.args["_nocol"] if column not in table_columns]
+            if bad_columns:
+                raise DatasetteError("_nocol={} - invalid columns".format(", ".join(bad_columns)))
+            return [column for column in table_columns if column not in request.args["_nocol"]]
+        else:
+            return table_columns
+
     async def sortable_columns_for_table(self, database, table, use_rowid):
         db = self.ds.databases[database]
         table_metadata = self.ds.table_metadata(database, table)
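For readers skimming the diff, here is a self-contained sketch of the selection rules the new `columns_to_select` method implements. The table and column names below are made up for illustration and are not part of the commit; `args` stands in for `request.args`.

```python
# Standalone sketch of the _col= / _nocol= rules added in this hunk.
def columns_to_select(table_columns, args):
    if "_col" in args and "_nocol" in args:
        raise ValueError("Cannot use _col and _nocol at the same time")
    if "_col" in args:
        new_columns = []
        for column in args["_col"]:
            if column not in table_columns:
                raise ValueError("_col={} is an invalid column".format(column))
            new_columns.append(column)
        return new_columns
    if "_nocol" in args:
        bad = [c for c in args["_nocol"] if c not in table_columns]
        if bad:
            raise ValueError("_nocol={} - invalid columns".format(", ".join(bad)))
        return [c for c in table_columns if c not in args["_nocol"]]
    return table_columns


# Hypothetical table columns, exercising the three branches.
cols = ["pk", "name", "address", "latitude", "longitude"]
assert columns_to_select(cols, {"_col": ["name", "address"]}) == ["name", "address"]
assert columns_to_select(cols, {"_nocol": ["latitude", "longitude"]}) == ["pk", "name", "address"]
assert columns_to_select(cols, {}) == cols
```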
@@ -235,17 +255,18 @@ class TableView(RowTableShared):
             raise NotFound("Table not found: {}".format(table))
 
         pks = await db.primary_keys(table)
-        table_columns = await db.table_columns(table)
-
-        select_columns = ", ".join(escape_sqlite(t) for t in table_columns)
+
+        # Take _col= and _nocol= into account
+        table_columns = await self.columns_to_select(db, table, request)
+        select_clause = ", ".join(escape_sqlite(t) for t in table_columns)
 
         use_rowid = not pks and not is_view
         if use_rowid:
-            select = "rowid, {}".format(select_columns)
+            select = "rowid, {}".format(select_clause)
             order_by = "rowid"
             order_by_pks = "rowid"
         else:
-            select = select_columns
+            select = select_clause
             order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
             order_by = order_by_pks
 
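To make the renamed variable concrete, here is a rough illustration (column names made up, `escape_sqlite()` elided) of how the select clause is assembled once `columns_to_select()` has been applied:

```python
# Hypothetical example: ?_col=name&_col=address on a table with no primary key.
table_columns = ["name", "address"]           # returned by columns_to_select()
select_clause = ", ".join(table_columns)      # escape_sqlite() omitted in this sketch
select = "rowid, {}".format(select_clause)    # the use_rowid branch above
print(select)                                 # rowid, name, address
```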
@@ -261,12 +282,10 @@ class TableView(RowTableShared):
         # That's so if there is a column that starts with _
         # it can still be queried using ?_col__exact=blah
         special_args = {}
-        special_args_lists = {}
         other_args = []
         for key, value in args.items():
             if key.startswith("_") and "__" not in key:
                 special_args[key] = value[0]
-                special_args_lists[key] = value
             else:
                 for v in value:
                     other_args.append((key, v))
@@ -499,18 +518,6 @@ class TableView(RowTableShared):
         if order_by:
             order_by = "order by {} ".format(order_by)
 
-        # _group_count=col1&_group_count=col2
-        group_count = special_args_lists.get("_group_count") or []
-        if group_count:
-            sql = 'select {group_cols}, count(*) as "count" from {table_name} {where} group by {group_cols} order by "count" desc limit 100'.format(
-                group_cols=", ".join(
-                    '"{}"'.format(group_count_col) for group_count_col in group_count
-                ),
-                table_name=escape_sqlite(table),
-                where=where_clause,
-            )
-            return await self.custom_sql(request, database, hash, sql, editable=True)
-
         extra_args = {}
         # Handle ?_size=500
         page_size = _size or request.raw_args.get("_size")
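For reference, the branch deleted here turned `?_group_count=` parameters into a group-by query. A sketch of the SQL it generated, using a hypothetical `attractions` table, a `?_group_count=state` request, and an empty where clause (all made up for illustration):

```python
# Reconstruction of the SQL the removed _group_count branch produced.
group_count = ["state"]
where_clause = ""
sql = (
    'select {group_cols}, count(*) as "count" from {table_name} {where} '
    'group by {group_cols} order by "count" desc limit 100'
).format(
    group_cols=", ".join('"{}"'.format(col) for col in group_count),
    table_name="attractions",
    where=where_clause,
)
print(sql)
# select "state", count(*) as "count" from attractions  group by "state" order by "count" desc limit 100
```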
@@ -598,7 +605,7 @@ class TableView(RowTableShared):
         facets_timed_out.extend(instance_facets_timed_out)
 
         # Figure out columns and rows for the query
-        columns = [r[0] for r in results.description]
+        columns = table_columns
         rows = list(results.rows)
 
         filter_columns = columns[:]
@@ -626,6 +633,8 @@ class TableView(RowTableShared):
             column = fk["column"]
             if column not in columns_to_expand:
                 continue
+            if column not in columns:
+                continue
             expanded_columns.append(column)
             # Gather the values
             column_index = columns.index(column)
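The two added lines guard against a foreign-key column that the request excluded via `?_nocol=`; without them, `columns.index(column)` below would raise `ValueError`. A minimal sketch with made-up data:

```python
# Hypothetical data: "group_id" is a foreign-key column the user removed with ?_nocol=group_id.
columns = ["pk", "name"]            # now reflects columns_to_select(), not every table column
columns_to_expand = ["group_id"]
expanded_columns = []

for column in ["group_id", "name"]:
    if column not in columns_to_expand:
        continue
    if column not in columns:       # the new guard: skip columns that were not selected
        continue
    expanded_columns.append(column)
    column_index = columns.index(column)   # safe, never reached for excluded columns

assert expanded_columns == []
```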
@@ -321,15 +321,6 @@ Special table arguments
 
 Here's `an example <https://latest.datasette.io/fixtures/roadside_attractions?_through={%22table%22:%22roadside_attraction_characteristics%22,%22column%22:%22characteristic_id%22,%22value%22:%221%22}>`__.
-
-``?_group_count=COLUMN``
-    Executes a SQL query that returns a count of the number of rows matching
-    each unique value in that column, with the most common ordered first.
-
-``?_group_count=COLUMN1&_group_count=column2``
-    You can pass multiple ``_group_count`` columns to return counts against
-    unique combinations of those columns.
-
 ``?_next=TOKEN``
     Pagination by continuation token - pass the token that was returned in the
     ``"next"`` property by the previous page.
 
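The retained `?_next=TOKEN` documentation describes pagination by continuation token. As a usage sketch (the fixture URL is illustrative, not part of this change), a client can keep following the `"next"` property until it disappears:

```python
import json
import urllib.request

# Illustrative only: page through a table's JSON API by following the "next" token.
url = "https://latest.datasette.io/fixtures/roadside_attractions.json"
next_token = None
while True:
    page_url = url if next_token is None else "{}?_next={}".format(url, next_token)
    with urllib.request.urlopen(page_url) as response:
        data = json.load(response)
    print("fetched {} rows".format(len(data["rows"])))
    next_token = data.get("next")
    if not next_token:
        break
```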