Mirror of https://github.com/simonw/datasette.git, synced 2025-12-10 16:51:24 +01:00
Initial proof-of-concept .csv export, refs #266
This commit is contained in:
parent 8003a13331
commit 654fde5792

3 changed files with 57 additions and 18 deletions
@@ -53,9 +53,9 @@ class JsonDataView(RenderMixin):
         self.filename = filename
         self.data_callback = data_callback
 
-    async def get(self, request, as_json):
+    async def get(self, request, as_ext):
         data = self.data_callback()
-        if as_json:
+        if as_ext:
             headers = {}
             if self.ds.cors:
                 headers["Access-Control-Allow-Origin"] = "*"

@@ -406,7 +406,7 @@ class Datasette:
         self.jinja_env.filters["escape_sqlite"] = escape_sqlite
         self.jinja_env.filters["to_css_class"] = to_css_class
         pm.hook.prepare_jinja2_environment(env=self.jinja_env)
-        app.add_route(IndexView.as_view(self), "/<as_json:(\.jsono?)?$>")
+        app.add_route(IndexView.as_view(self), "/<as_ext:(\.jsono?|\.csv)?$>")
         # TODO: /favicon.ico and /-/static/ deserve far-future cache expires
         app.add_route(favicon, "/favicon.ico")
         app.static("/-/static/", str(app_root / "datasette" / "static"))

@@ -419,33 +419,33 @@ class Datasette:
             app.static(modpath, plugin["static_path"])
         app.add_route(
             JsonDataView.as_view(self, "inspect.json", self.inspect),
-            "/-/inspect<as_json:(\.json)?$>",
+            "/-/inspect<as_ext:(\.json)?$>",
         )
         app.add_route(
             JsonDataView.as_view(self, "metadata.json", lambda: self.metadata),
-            "/-/metadata<as_json:(\.json)?$>",
+            "/-/metadata<as_ext:(\.json)?$>",
         )
         app.add_route(
             JsonDataView.as_view(self, "versions.json", self.versions),
-            "/-/versions<as_json:(\.json)?$>",
+            "/-/versions<as_ext:(\.json)?$>",
         )
         app.add_route(
             JsonDataView.as_view(self, "plugins.json", self.plugins),
-            "/-/plugins<as_json:(\.json)?$>",
+            "/-/plugins<as_ext:(\.json)?$>",
         )
         app.add_route(
-            DatabaseView.as_view(self), "/<db_name:[^/\.]+?><as_json:(\.jsono?)?$>"
+            DatabaseView.as_view(self), "/<db_name:[^/\.]+?><as_ext:(\.jsono?|\.csv)?$>"
         )
         app.add_route(
             DatabaseDownload.as_view(self), "/<db_name:[^/]+?><as_db:(\.db)$>"
         )
         app.add_route(
             TableView.as_view(self),
-            "/<db_name:[^/]+>/<table:[^/]+?><as_json:(\.jsono?)?$>",
+            "/<db_name:[^/]+>/<table:[^/]+?><as_ext:(\.jsono?|\.csv)?$>",
         )
         app.add_route(
             RowView.as_view(self),
-            "/<db_name:[^/]+>/<table:[^/]+?>/<pk_path:[^/]+?><as_json:(\.jsono?)?$>",
+            "/<db_name:[^/]+>/<table:[^/]+?>/<pk_path:[^/]+?><as_ext:(\.jsono?|\.csv)?$>",
         )
 
         self.register_custom_units()
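The hunks above rename the route parameter as_json to as_ext; for the index, database, table and row routes the pattern also gains a .csv alternative, while the /-/inspect, /-/metadata, /-/versions and /-/plugins routes keep a .json-only suffix. A minimal sketch of what the widened pattern accepts, using only Python's re module (Sanic's routing machinery is not reproduced here, and the candidate suffixes are chosen purely for illustration):

import re

# Same regex body as the new <as_ext:(\.jsono?|\.csv)?$> route parameter.
AS_EXT = re.compile(r"(\.jsono?|\.csv)?$")

for suffix in ["", ".json", ".jsono", ".csv", ".tsv"]:
    # re.match anchors at the start, and the trailing $ forces the whole
    # suffix to be consumed, so a successful match means the URL extension
    # would be routed to the view.
    print(repr(suffix), "accepted" if AS_EXT.match(suffix) else "rejected")

The empty suffix (plain HTML), .json, .jsono and now .csv are accepted; anything else, such as .tsv, falls through.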
@@ -1,4 +1,5 @@
 import asyncio
+import csv
 import json
 import re
 import sqlite3

@@ -120,8 +121,8 @@ class BaseView(RenderMixin):
                 should_redirect += "/" + kwargs["table"]
             if "pk_path" in kwargs:
                 should_redirect += "/" + kwargs["pk_path"]
-            if "as_json" in kwargs:
-                should_redirect += kwargs["as_json"]
+            if "as_ext" in kwargs:
+                should_redirect += kwargs["as_ext"]
             if "as_db" in kwargs:
                 should_redirect += kwargs["as_db"]
             return name, expected, should_redirect

@@ -198,11 +199,49 @@ class BaseView(RenderMixin):
 
         return await self.view_get(request, name, hash, **kwargs)
 
+    async def as_csv(self, request, name, hash, **kwargs):
+        try:
+            response_or_template_contexts = await self.data(
+                request, name, hash, **kwargs
+            )
+            if isinstance(response_or_template_contexts, response.HTTPResponse):
+                return response_or_template_contexts
+
+            else:
+                data, extra_template_data, templates = response_or_template_contexts
+        except (sqlite3.OperationalError, InvalidSql) as e:
+            raise DatasetteError(str(e), title="Invalid SQL", status=400)
+
+        except (sqlite3.OperationalError) as e:
+            raise DatasetteError(str(e))
+
+        except DatasetteError:
+            raise
+        # Convert rows and columns to CSV
+        async def stream_fn(r):
+            writer = csv.writer(r)
+            writer.writerow(data["columns"])
+            for row in data["rows"]:
+                writer.writerow(row)
+
+        return response.stream(
+            stream_fn,
+            headers={
+                "Content-Disposition": 'attachment; filename="{}.csv"'.format(
+                    name
+                )
+            },
+            content_type="text/csv; charset=utf-8"
+        )
+
     async def view_get(self, request, name, hash, **kwargs):
         try:
-            as_json = kwargs.pop("as_json")
+            as_ext = kwargs.pop("as_ext")
         except KeyError:
-            as_json = False
+            as_ext = False
+        if as_ext == ".csv":
+            return await self.as_csv(request, name, hash, **kwargs)
+
         extra_template_data = {}
         start = time.time()
         status_code = 200

@@ -231,9 +270,9 @@ class BaseView(RenderMixin):
             value = self.ds.metadata.get(key)
             if value:
                 data[key] = value
-        if as_json:
+        if as_ext:
             # Special case for .jsono extension - redirect to _shape=objects
-            if as_json == ".jsono":
+            if as_ext == ".jsono":
                 return self.redirect(
                     request,
                     path_with_added_args(
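The new as_csv() method reuses the view's existing data() call, then hands a stream_fn callback to Sanic's response.stream(), writing a header row of column names followed by one CSV row per result row and setting a Content-Disposition attachment header. A standalone sketch of just that serialization step, writing to an in-memory buffer instead of Sanic's response object (the columns and rows below are made up for illustration):

import csv
import io

# Hypothetical query result in the shape stream_fn expects:
# a dict carrying "columns" and "rows".
data = {
    "columns": ["id", "name"],
    "rows": [(1, "apple"), (2, "banana, with a comma")],
}

buffer = io.StringIO()
writer = csv.writer(buffer)
writer.writerow(data["columns"])  # header row first
for row in data["rows"]:
    writer.writerow(row)  # csv.writer quotes values containing commas

print(buffer.getvalue())

The proof of concept leans on csv.writer for quoting and escaping, so values containing commas, quotes or newlines come out as valid CSV without any extra handling in the view.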
@@ -16,7 +16,7 @@ class IndexView(RenderMixin):
         self.jinja_env = datasette.jinja_env
         self.executor = datasette.executor
 
-    async def get(self, request, as_json):
+    async def get(self, request, as_ext):
         databases = []
         for key, info in sorted(self.ds.inspect().items()):
             tables = [t for t in info["tables"].values() if not t["hidden"]]

@@ -38,7 +38,7 @@ class IndexView(RenderMixin):
                 "views_count": len(info["views"]),
             }
             databases.append(database)
-        if as_json:
+        if as_ext:
             headers = {}
             if self.ds.cors:
                 headers["Access-Control-Allow-Origin"] = "*"
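With these routes in place, appending .csv to a database or table URL should return the query results as a CSV attachment served as text/csv; charset=utf-8. A hedged usage sketch; the host, port, database and table names are placeholders rather than anything taken from the commit:

from urllib.request import urlopen

# Assumes a Datasette instance is already running locally on port 8001,
# serving a database named "mydb" that contains a table named "mytable".
with urlopen("http://127.0.0.1:8001/mydb/mytable.csv") as resp:
    print(resp.headers.get("Content-Type"))  # expected: text/csv; charset=utf-8
    body = resp.read().decode("utf-8")
    print(body.splitlines()[0])  # header row of column names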