New config option max_csv_mb limiting the size of CSV exports - refs #266

This commit is contained in:
Simon Willison 2018-06-17 20:01:30 -07:00
commit 9d00718250
No known key found for this signature in database
GPG key ID: 17E2DEA2588B7F52
8 changed files with 89 additions and 29 deletions

View file

@ -71,6 +71,13 @@ def app_client_larger_cache_size():
})
@pytest.fixture(scope='session')
def app_client_csv_max_mb_one():
    """Session-scoped test client with the CSV export limit capped at 1 MB."""
    config = {
        'max_csv_mb': 1,
    }
    yield from app_client(config=config)
def generate_compound_rows(num):
for a, b, c in itertools.islice(
itertools.product(string.ascii_lowercase, repeat=3), num

View file

@ -902,6 +902,7 @@ def test_config_json(app_client):
"num_sql_threads": 3,
"cache_size_kb": 0,
"allow_csv_stream": True,
"max_csv_mb": 100,
} == response.json

View file

@ -1,4 +1,4 @@
from .fixtures import app_client # noqa
from .fixtures import app_client, app_client_csv_max_mb_one # noqa
EXPECTED_TABLE_CSV = '''id,content
1,hello
@ -61,6 +61,18 @@ def test_table_csv_download(app_client):
assert expected_disposition == response.headers['Content-Disposition']
def test_max_csv_mb(app_client_csv_max_mb_one):
    """Streaming a CSV larger than max_csv_mb should end with an error line."""
    resp = app_client_csv_max_mb_one.get(
        "/fixtures.csv?sql=select+randomblob(10000)+"
        "from+compound_three_primary_keys&_stream=1&_size=max"
    )
    # Streaming had already begun before the limit was exceeded,
    # so the HTTP status is still 200.
    assert resp.status == 200
    # The final non-empty line of the body should carry the error message.
    final_line = next(
        line for line in reversed(resp.body.split(b"\r\n")) if line
    )
    assert final_line.startswith(b"CSV contains more than")
def test_table_csv_stream(app_client):
# Without _stream should return header + 100 rows:
response = app_client.get(