Compare commits

...

6 commits

Author SHA1 Message Date
Simon Willison
be018ce47f
Trying to get pytest-dist working on multiple cores 2018-06-23 17:55:00 -07:00
Simon Willison
7d7f5f61fd
Use extras_require so pip can install test dependencies
https://github.com/pypa/pip/issues/1197#issuecomment-228939212
2018-06-21 20:03:54 -07:00
Simon Willison
69b3a6c7dd
Run pytest manually
python setup.py test appeared to still download a bunch of stuff
https://travis-ci.org/simonw/datasette/jobs/395306188
2018-06-21 19:59:26 -07:00
Simon Willison
22831a1ac8
Don't use --download-cache argument 2018-06-21 19:53:22 -07:00
Simon Willison
b9046332c4
Tell pip to use explicit download cache 2018-06-21 19:51:10 -07:00
Simon Willison
749b6f0d74
Cache pip wheels between runs in Travis, refs #323 2018-06-21 16:57:49 -07:00
6 changed files with 58 additions and 29 deletions

View file

@@ -7,7 +7,13 @@ python:
# Executed for 3.5 AND 3.6 as the first "test" stage: # Executed for 3.5 AND 3.6 as the first "test" stage:
script: script:
- python setup.py test - pip install -U pip wheel
- pip install .[test]
- pytest
cache:
directories:
- $HOME/.cache/pip
# This defines further stages that execute after the tests # This defines further stages that execute after the tests
jobs: jobs:

View file

@@ -47,10 +47,15 @@ setup(
datasette=datasette.cli:cli datasette=datasette.cli:cli
''', ''',
setup_requires=['pytest-runner'], setup_requires=['pytest-runner'],
extras_require={
'test': [
'pytest==3.6.0',
'aiohttp==2.3.2',
'beautifulsoup4==4.6.0',
]
},
tests_require=[ tests_require=[
'pytest==3.6.0', 'datasette[test]',
'aiohttp==2.3.2',
'beautifulsoup4==4.6.0',
], ],
classifiers=[ classifiers=[
'Development Status :: 4 - Beta', 'Development Status :: 4 - Beta',

View file

@@ -1,9 +1,11 @@
from datasette.app import Datasette from datasette.app import Datasette
from sanic.testing import PORT as PORT_BASE, SanicTestClient
import itertools import itertools
import json import json
import os import os
import pytest import pytest
import random import random
import re
import sqlite3 import sqlite3
import sys import sys
import string import string
@@ -11,7 +13,7 @@ import tempfile
import time import time
class TestClient: class MyTestClient:
def __init__(self, sanic_test_client): def __init__(self, sanic_test_client):
self.sanic_test_client = sanic_test_client self.sanic_test_client = sanic_test_client
@@ -24,7 +26,7 @@ class TestClient:
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def app_client(sql_time_limit_ms=None, max_returned_rows=None, config=None, filename="fixtures.db"): def app_client(worker_id, sql_time_limit_ms=None, max_returned_rows=None, config=None, filename="fixtures.db"):
with tempfile.TemporaryDirectory() as tmpdir: with tempfile.TemporaryDirectory() as tmpdir:
filepath = os.path.join(tmpdir, filename) filepath = os.path.join(tmpdir, filename)
conn = sqlite3.connect(filepath) conn = sqlite3.connect(filepath)
@@ -49,38 +51,51 @@ def app_client(sql_time_limit_ms=None, max_returned_rows=None, config=None, file
ds.sqlite_functions.append( ds.sqlite_functions.append(
('sleep', 1, lambda n: time.sleep(float(n))), ('sleep', 1, lambda n: time.sleep(float(n))),
) )
client = TestClient(ds.app().test_client) m = re.search(r'[0-9]+', worker_id)
if m:
num_id = m.group(0)
else:
num_id = 0
port = PORT_BASE + int(num_id)
client = MyTestClient(SanicTestClient(ds.app(), port))
client.ds = ds client.ds = ds
yield client yield client
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def app_client_shorter_time_limit(): def app_client_factory(worker_id):
yield from app_client(20) def factory(**kwargs):
yield from app_client(worker_id, **kwargs)
return factory
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def app_client_returned_rows_matches_page_size(): def app_client_shorter_time_limit(worker_id):
yield from app_client(max_returned_rows=50) yield from app_client(worker_id, 20)
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def app_client_larger_cache_size(): def app_client_returned_rows_matches_page_size(worker_id):
yield from app_client(config={ yield from app_client(worker_id, max_returned_rows=50)
@pytest.fixture(scope='session')
def app_client_larger_cache_size(worker_id):
yield from app_client(worker_id, config={
'cache_size_kb': 2500, 'cache_size_kb': 2500,
}) })
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def app_client_csv_max_mb_one(): def app_client_csv_max_mb_one(worker_id):
yield from app_client(config={ yield from app_client(worker_id, config={
'max_csv_mb': 1, 'max_csv_mb': 1,
}) })
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def app_client_with_dot(): def app_client_with_dot(worker_id):
yield from app_client(filename="fixtures.dot.db") yield from app_client(worker_id, filename="fixtures.dot.db")
def generate_compound_rows(num): def generate_compound_rows(num):

View file

@@ -1,5 +1,6 @@
from .fixtures import ( # noqa from .fixtures import ( # noqa
app_client, app_client,
app_client_factory,
app_client_shorter_time_limit, app_client_shorter_time_limit,
app_client_larger_cache_size, app_client_larger_cache_size,
app_client_returned_rows_matches_page_size, app_client_returned_rows_matches_page_size,
@@ -424,8 +425,8 @@ def test_invalid_custom_sql(app_client):
assert 'Statement must be a SELECT' == response.json['error'] assert 'Statement must be a SELECT' == response.json['error']
def test_allow_sql_off(): def test_allow_sql_off(worker_id):
for client in app_client(config={ for client in app_client(worker_id, config={
'allow_sql': False, 'allow_sql': False,
}): }):
assert 400 == client.get( assert 400 == client.get(
@@ -971,11 +972,12 @@ def test_config_json(app_client):
} == response.json } == response.json
def test_page_size_matching_max_returned_rows(app_client_returned_rows_matches_page_size): def test_page_size_matching_max_returned_rows(app_client_factory):
app_client = app_client_factory(max_returned_rows=50)
fetched = [] fetched = []
path = '/fixtures/no_primary_key.json' path = '/fixtures/no_primary_key.json'
while path: while path:
response = app_client_returned_rows_matches_page_size.get(path) response = app_client.get(path)
fetched.extend(response.json['rows']) fetched.extend(response.json['rows'])
assert len(response.json['rows']) in (1, 50) assert len(response.json['rows']) in (1, 50)
path = response.json['next_url'] path = response.json['next_url']
@@ -1140,8 +1142,8 @@ def test_suggested_facets(app_client):
).json["suggested_facets"]) > 0 ).json["suggested_facets"]) > 0
def test_allow_facet_off(): def test_allow_facet_off(worker_id):
for client in app_client(config={ for client in app_client(worker_id, config={
'allow_facet': False, 'allow_facet': False,
}): }):
assert 400 == client.get( assert 400 == client.get(
@@ -1153,8 +1155,8 @@ def test_allow_facet_off():
).json["suggested_facets"] ).json["suggested_facets"]
def test_suggest_facets_off(): def test_suggest_facets_off(worker_id):
for client in app_client(config={ for client in app_client(worker_id, config={
'suggest_facets': False, 'suggest_facets': False,
}): }):
# Now suggested_facets should be [] # Now suggested_facets should be []

View file

@@ -30,6 +30,7 @@ pk,planet_int,on_earth,state,city_id,city_id_label,neighborhood
15,2,0,MC,4,Memnonia,Arcadia Planitia 15,2,0,MC,4,Memnonia,Arcadia Planitia
'''.lstrip().replace('\n', '\r\n') '''.lstrip().replace('\n', '\r\n')
def test_table_csv(app_client): def test_table_csv(app_client):
response = app_client.get('/fixtures/simple_primary_key.csv') response = app_client.get('/fixtures/simple_primary_key.csv')
assert response.status == 200 assert response.status == 200

View file

@@ -639,8 +639,8 @@ def test_allow_download_on(app_client):
assert len(soup.findAll('a', {'href': re.compile('\.db$')})) assert len(soup.findAll('a', {'href': re.compile('\.db$')}))
def test_allow_download_off(): def test_allow_download_off(worker_id):
for client in app_client(config={ for client in app_client(worker_id, config={
'allow_download': False, 'allow_download': False,
}): }):
response = client.get( response = client.get(
@@ -665,8 +665,8 @@ def test_allow_sql_on(app_client):
assert len(soup.findAll('textarea', {'name': 'sql'})) assert len(soup.findAll('textarea', {'name': 'sql'}))
def test_allow_sql_off(): def test_allow_sql_off(worker_id):
for client in app_client(config={ for client in app_client(worker_id, config={
'allow_sql': False, 'allow_sql': False,
}): }):
response = client.get( response = client.get(