Mirror of https://github.com/simonw/datasette.git, synced 2025-12-10 16:51:24 +01:00
Basic CSV export, refs #266
Tables and custom SQL query results can now be exported as CSV.

The easiest way to do this is to use the .csv extension, e.g.

    /test_tables/facet_cities.csv

By default this is served as Content-Type: text/plain so you can see it in your browser. If you want to download the file (using text/csv and with an appropriate Content-Disposition: attachment header) you can do so like this:

    /test_tables/facet_cities.csv?_dl=1

We link to the CSV and downloadable CSV URLs from the table and query pages. The links use ?_size=max and so by default will return 1,000 rows.

Also fixes #303 - table names ending in .json or .csv are now detected and URLs are generated that look like this instead:

    /test_tables/table%2Fwith%2Fslashes.csv?_format=csv

The ?_format= option is available for everything else too, but we link to the .csv / .json versions in most cases because they are aesthetically pleasing.
parent a246f476b4
commit 3a79ad98ea
12 changed files with 243 additions and 38 deletions
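A quick way to exercise the new endpoints described in the commit message is a small script like the one below. This is a sketch only: it assumes a Datasette instance serving the test_tables database at http://localhost:8001 and uses the third-party requests library, neither of which is part of this commit.

import requests  # third-party HTTP client, used here only for illustration

BASE = "http://localhost:8001"  # assumed local Datasette serving test_tables

# Plain CSV export: served as text/plain so it renders in the browser.
r = requests.get(BASE + "/test_tables/facet_cities.csv")
assert r.headers["Content-Type"].startswith("text/plain")

# Downloadable CSV: text/csv plus a Content-Disposition: attachment header.
r = requests.get(BASE + "/test_tables/facet_cities.csv", params={"_dl": 1})
assert r.headers["Content-Type"].startswith("text/csv")
assert r.headers["Content-Disposition"].startswith("attachment")

# Table names that already end in .csv or .json keep their name in the
# path and signal the output format with ?_format= instead.
r = requests.get(
    BASE + "/test_tables/table%2Fwith%2Fslashes.csv", params={"_format": "csv"}
)
assert r.status_code == 200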
@@ -507,7 +507,7 @@ def test_table_shape_object_compound_primary_Key(app_client):
 
 
 def test_table_with_slashes_in_name(app_client):
-    response = app_client.get('/test_tables/table%2Fwith%2Fslashes.csv.json?_shape=objects')
+    response = app_client.get('/test_tables/table%2Fwith%2Fslashes.csv?_shape=objects&_format=json')
     assert response.status == 200
     data = response.json
     assert data['rows'] == [{
tests/test_csv.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+from .fixtures import app_client  # noqa
+
+EXPECTED_TABLE_CSV = '''id,content
+1,hello
+2,world
+3,
+'''.replace('\n', '\r\n')
+
+EXPECTED_CUSTOM_CSV = '''content
+hello
+world
+""
+'''.replace('\n', '\r\n')
+
+
+def test_table_csv(app_client):
+    response = app_client.get('/test_tables/simple_primary_key.csv')
+    assert response.status == 200
+    assert 'text/plain; charset=utf-8' == response.headers['Content-Type']
+    assert EXPECTED_TABLE_CSV == response.text
+
+
+def test_custom_sql_csv(app_client):
+    response = app_client.get(
+        '/test_tables.csv?sql=select+content+from+simple_primary_key'
+    )
+    assert response.status == 200
+    assert 'text/plain; charset=utf-8' == response.headers['Content-Type']
+    assert EXPECTED_CUSTOM_CSV == response.text
+
+
+def test_table_csv_download(app_client):
+    response = app_client.get('/test_tables/simple_primary_key.csv?_dl=1')
+    assert response.status == 200
+    assert 'text/csv; charset=utf-8' == response.headers['Content-Type']
+    expected_disposition = 'attachment; filename="simple_primary_key.csv"'
+    assert expected_disposition == response.headers['Content-Disposition']
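The trailing .replace('\n', '\r\n') in the expected fixtures reflects the fact that Python's csv module emits \r\n line endings by default. A minimal sketch of that serialization step (not the actual Datasette view code) is:

import csv
import io


def rows_to_csv(headers, rows):
    # Minimal sketch, not Datasette's view code: csv.writer uses '\r\n'
    # as its default lineterminator, which is why the EXPECTED_* fixtures
    # above call .replace('\n', '\r\n').
    out = io.StringIO()
    writer = csv.writer(out)
    writer.writerow(headers)
    for row in rows:
        writer.writerow(row)
    return out.getvalue()


# For example, the simple_primary_key table serializes as:
# rows_to_csv(['id', 'content'], [[1, 'hello'], [2, 'world'], [3, '']])
# == 'id,content\r\n1,hello\r\n2,world\r\n3,\r\n'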
@@ -274,6 +274,21 @@ def test_table_html_simple_primary_key(app_client):
     ] == [[str(td) for td in tr.select('td')] for tr in table.select('tbody tr')]
 
 
+def test_table_csv_json_export_links(app_client):
+    response = app_client.get('/test_tables/simple_primary_key')
+    assert response.status == 200
+    links = Soup(response.body, "html.parser").find("p", {
+        "class": "export-links"
+    }).findAll("a")
+    actual = [l["href"].split("/")[-1] for l in links]
+    expected = [
+        "simple_primary_key.json",
+        "simple_primary_key.csv?_size=max",
+        "simple_primary_key.csv?_dl=1&_size=max"
+    ]
+    assert expected == actual
+
+
 def test_row_html_simple_primary_key(app_client):
     response = app_client.get('/test_tables/simple_primary_key/1')
     assert response.status == 200
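The three hrefs the test expects (.json, .csv?_size=max, .csv?_dl=1&_size=max) line up with the path_with_format() helper exercised in the utils tests further down. One way a view could assemble them, sketched with illustrative names rather than the actual Datasette template context:

from datasette import utils  # path_with_format() is exercised in the utils tests below


def export_links(request):
    # Illustrative only: produce the hrefs that the "export-links"
    # paragraph scraped by test_table_csv_json_export_links() contains.
    return [
        utils.path_with_format(request, "json", {}),
        utils.path_with_format(request, "csv", {"_size": "max"}),
        utils.path_with_format(request, "csv", {"_dl": "1", "_size": "max"}),
    ]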
@@ -299,3 +299,54 @@ def test_compound_keys_after_sql():
   or
 (a = :p0 and b = :p1 and c > :p2))
 '''.strip() == utils.compound_keys_after_sql(['a', 'b', 'c'])
+
+
+def table_exists(table):
+    return table == "exists.csv"
+
+
+@pytest.mark.parametrize(
+    "table_and_format,expected_table,expected_format",
+    [
+        ("blah", "blah", None),
+        ("blah.csv", "blah", "csv"),
+        ("blah.json", "blah", "json"),
+        ("blah.baz", "blah.baz", None),
+        ("exists.csv", "exists.csv", None),
+    ],
+)
+def test_resolve_table_and_format(
+    table_and_format, expected_table, expected_format
+):
+    actual_table, actual_format = utils.resolve_table_and_format(
+        table_and_format, table_exists
+    )
+    assert expected_table == actual_table
+    assert expected_format == actual_format
+
+
+@pytest.mark.parametrize(
+    "path,format,extra_qs,expected",
+    [
+        ("/foo?sql=select+1", "csv", {}, "/foo.csv?sql=select+1"),
+        ("/foo?sql=select+1", "json", {}, "/foo.json?sql=select+1"),
+        ("/foo/bar", "json", {}, "/foo/bar.json"),
+        ("/foo/bar", "csv", {}, "/foo/bar.csv"),
+        ("/foo/bar.csv", "json", {}, "/foo/bar.csv?_format=json"),
+        ("/foo/bar", "csv", {"_dl": 1}, "/foo/bar.csv?_dl=1"),
+        ("/foo/b.csv", "json", {"_dl": 1}, "/foo/b.csv?_dl=1&_format=json"),
+        (
+            "/sf-trees/Street_Tree_List?_search=cherry&_size=1000",
+            "csv",
+            {"_dl": 1},
+            "/sf-trees/Street_Tree_List.csv?_search=cherry&_size=1000&_dl=1",
+        ),
+    ],
+)
+def test_path_with_format(path, format, extra_qs, expected):
+    request = Request(
+        path.encode('utf8'),
+        {}, '1.1', 'GET', None
+    )
+    actual = utils.path_with_format(request, format, extra_qs)
+    assert expected == actual
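Neither helper's implementation appears in this diff view. The sketches below are merely consistent with the parametrized cases above, assuming a Sanic-style request object exposing .path and .query_string; they are not necessarily the code in datasette/utils.py.

from urllib.parse import urlencode


def resolve_table_and_format(table_and_format, table_exists):
    # Strip a trailing .csv/.json only when no table with the full,
    # dotted name actually exists (see the "exists.csv" case above).
    if table_and_format.endswith((".csv", ".json")):
        if not table_exists(table_and_format):
            table, _, format = table_and_format.rpartition(".")
            return table, format
    return table_and_format, None


def path_with_format(request, format, extra_qs=None):
    # Add a .csv/.json extension unless the last path component already
    # contains a dot, in which case fall back to ?_format=. The existing
    # query string is preserved ahead of any extra parameters.
    qs = dict(extra_qs or {})
    path = request.path
    if "." in path.split("/")[-1]:
        qs["_format"] = format
    else:
        path = "{}.{}".format(path, format)
    if qs:
        extra = urlencode(sorted(qs.items()))
        prefix = request.query_string + "&" if request.query_string else ""
        path = "{}?{}{}".format(path, prefix, extra)
    elif request.query_string:
        path = "{}?{}".format(path, request.query_string)
    return path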