Mirror of https://github.com/simonw/datasette.git (synced 2025-12-10 16:51:24 +01:00)
Use f-strings in place of .format()

Code transformed like so:

    pip install flynt
    flynt .
    black .

commit 30e64c8d3b (parent 6fd35be64d)
35 changed files with 213 additions and 277 deletions
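The transformation is mechanical: flynt rewrites each `str.format()` call whose arguments it can inline into an equivalent f-string, and black then renormalizes the layout. A minimal before/after sketch (illustrative only, not a file from this commit):

    table = "sortable"

    # Before: explicit .format() call
    sql = "select count(*) from [{}]".format(table)

    # After: the equivalent f-string flynt emits
    sql_f = f"select count(*) from [{table}]"

    assert sql == sql_f == "select count(*) from [sortable]"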
@@ -231,7 +231,7 @@ class Datasette:
             is_mutable = path not in self.immutables
             db = Database(self, path, is_mutable=is_mutable, is_memory=is_memory)
             if db.name in self.databases:
-                raise Exception("Multiple files with same stem: {}".format(db.name))
+                raise Exception(f"Multiple files with same stem: {db.name}")
             self.add_database(db.name, db)
         self.cache_headers = cache_headers
         self.cors = cors

@@ -455,9 +455,9 @@ class Datasette:
         if self.sqlite_extensions:
             conn.enable_load_extension(True)
             for extension in self.sqlite_extensions:
-                conn.execute("SELECT load_extension('{}')".format(extension))
+                conn.execute(f"SELECT load_extension('{extension}')")
         if self.config("cache_size_kb"):
-            conn.execute("PRAGMA cache_size=-{}".format(self.config("cache_size_kb")))
+            conn.execute(f"PRAGMA cache_size=-{self.config('cache_size_kb')}")
         # pylint: disable=no-member
         pm.hook.prepare_connection(conn=conn, database=database, datasette=self)
 

@@ -860,7 +860,7 @@ class Datasette:
         if plugin["static_path"]:
             add_route(
                 asgi_static(plugin["static_path"]),
-                "/-/static-plugins/{}/(?P<path>.*)$".format(plugin["name"]),
+                f"/-/static-plugins/{plugin['name']}/(?P<path>.*)$",
             )
             # Support underscores in name in addition to hyphens, see https://github.com/simonw/datasette/issues/611
             add_route(

@@ -1156,7 +1156,7 @@ class DatasetteRouter:
         info = {}
         message = str(exception)
         traceback.print_exc()
-        templates = ["{}.html".format(status), "error.html"]
+        templates = [f"{status}.html", "error.html"]
         info.update(
             {
                 "ok": False,

@@ -1234,7 +1234,7 @@ def route_pattern_from_filepath(filepath):
     re_bits = ["/"]
     for bit in _curly_re.split(filepath):
         if _curly_re.match(bit):
-            re_bits.append("(?P<{}>[^/]*)".format(bit[1:-1]))
+            re_bits.append(f"(?P<{bit[1:-1]}>[^/]*)")
         else:
             re_bits.append(re.escape(bit))
     return re.compile("^" + "".join(re_bits) + "$")

@@ -1253,7 +1253,7 @@ class DatasetteClient:
         if not isinstance(path, PrefixedUrlString):
             path = self.ds.urls.path(path)
         if path.startswith("/"):
-            path = "http://localhost{}".format(path)
+            path = f"http://localhost{path}"
         return path
 
     async def get(self, path, **kwargs):
@@ -9,10 +9,10 @@ _BLOB_HASH = "_blob_hash"
 
 async def render_blob(datasette, database, rows, columns, request, table, view_name):
     if _BLOB_COLUMN not in request.args:
-        raise BadRequest("?{}= is required".format(_BLOB_COLUMN))
+        raise BadRequest(f"?{_BLOB_COLUMN}= is required")
     blob_column = request.args[_BLOB_COLUMN]
     if blob_column not in columns:
-        raise BadRequest("{} is not a valid column".format(blob_column))
+        raise BadRequest(f"{blob_column} is not a valid column")
 
     # If ?_blob_hash= provided, use that to select the row - otherwise use first row
     blob_hash = None

@@ -42,7 +42,7 @@ async def render_blob(datasette, database, rows, columns, request, table, view_name):
     filename = "-".join(filename_bits) + ".blob"
     headers = {
         "X-Content-Type-Options": "nosniff",
-        "Content-Disposition": 'attachment; filename="{}"'.format(filename),
+        "Content-Disposition": f'attachment; filename="{filename}"',
     }
     return Response(
         body=value or b"",
@@ -33,12 +33,12 @@ class Config(click.ParamType):
 
     def convert(self, config, param, ctx):
         if ":" not in config:
-            self.fail('"{}" should be name:value'.format(config), param, ctx)
+            self.fail(f'"{config}" should be name:value', param, ctx)
             return
         name, value = config.split(":", 1)
         if name not in DEFAULT_CONFIG:
             self.fail(
-                "{} is not a valid option (--help-config to see all)".format(name),
+                f"{name} is not a valid option (--help-config to see all)",
                 param,
                 ctx,
             )

@@ -49,13 +49,11 @@ class Config(click.ParamType):
             try:
                 return name, value_as_boolean(value)
             except ValueAsBooleanError:
-                self.fail(
-                    '"{}" should be on/off/true/false/1/0'.format(name), param, ctx
-                )
+                self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx)
                 return
         elif isinstance(default, int):
             if not value.isdigit():
-                self.fail('"{}" should be an integer'.format(name), param, ctx)
+                self.fail(f'"{name}" should be an integer', param, ctx)
                 return
             return name, int(value)
         elif isinstance(default, str):

@@ -203,7 +201,7 @@ def package(
     version_note,
     secret,
     port,
-    **extra_metadata
+    **extra_metadata,
 ):
     "Package specified SQLite files into a new datasette Docker container"
     if not shutil.which("docker"):

@@ -389,7 +387,7 @@ def serve(
         with formatter.section("Config options"):
             formatter.write_dl(
                 [
-                    (option.name, "{} (default={})".format(option.help, option.default))
+                    (option.name, f"{option.help} (default={option.default})")
                     for option in CONFIG_OPTIONS
                 ]
             )

@@ -470,7 +468,7 @@ def serve(
             path = asyncio.get_event_loop().run_until_complete(
                 initial_path_for_datasette(ds)
             )
-            url = "http://{}:{}{}".format(host, port, path)
+            url = f"http://{host}:{port}{path}"
         webbrowser.open(url)
     uvicorn.run(
         ds.app(), host=host, port=port, log_level="info", lifespan="on", workers=1

@@ -491,7 +489,5 @@ async def check_databases(ds):
             )
         except ConnectionProblem as e:
             raise click.UsageError(
-                "Connection to {} failed check: {}".format(
-                    database.path, str(e.args[0])
-                )
+                f"Connection to {database.path} failed check: {str(e.args[0])}"
             )
@@ -57,7 +57,7 @@ class Database:
         if write:
             qs = ""
         return sqlite3.connect(
-            "file:{}{}".format(self.path, qs), uri=True, check_same_thread=False
+            f"file:{self.path}{qs}", uri=True, check_same_thread=False
         )
 
     async def execute_write(self, sql, params=None, block=False):

@@ -191,7 +191,7 @@ class Database:
             try:
                 table_count = (
                     await self.execute(
-                        "select count(*) from [{}]".format(table),
+                        f"select count(*) from [{table}]",
                         custom_time_limit=limit,
                     )
                 ).rows[0][0]

@@ -362,13 +362,13 @@ class Database:
         if self.is_memory:
             tags.append("memory")
         if self.hash:
-            tags.append("hash={}".format(self.hash))
+            tags.append(f"hash={self.hash}")
         if self.size is not None:
-            tags.append("size={}".format(self.size))
+            tags.append(f"size={self.size}")
         tags_str = ""
         if tags:
-            tags_str = " ({})".format(", ".join(tags))
-        return "<Database: {}{}>".format(self.name, tags_str)
+            tags_str = f" ({', '.join(tags)})"
+        return f"<Database: {self.name}{tags_str}>"
 
 
 class WriteTask:
@@ -86,7 +86,7 @@ class Facet:
         self.database = database
         # For foreign key expansion. Can be None for e.g. canned SQL queries:
         self.table = table
-        self.sql = sql or "select * from [{}]".format(table)
+        self.sql = sql or f"select * from [{table}]"
         self.params = params or []
         self.metadata = metadata
         # row_count can be None, in which case we calculate it ourselves:

@@ -114,7 +114,7 @@ class Facet:
         # Detect column names using the "limit 0" trick
         return (
             await self.ds.execute(
-                self.database, "select * from ({}) limit 0".format(sql), params or []
+                self.database, f"select * from ({sql}) limit 0", params or []
             )
         ).columns
 

@@ -123,7 +123,7 @@ class Facet:
         self.row_count = (
             await self.ds.execute(
                 self.database,
-                "select count(*) from ({})".format(self.sql),
+                f"select count(*) from ({self.sql})",
                 self.params,
             )
         ).rows[0][0]

@@ -371,14 +371,14 @@ class ArrayFacet(Facet):
             pairs = self.get_querystring_pairs()
             for row in facet_rows:
                 value = str(row["value"])
-                selected = ("{}__arraycontains".format(column), value) in pairs
+                selected = (f"{column}__arraycontains", value) in pairs
                 if selected:
                     toggle_path = path_with_removed_args(
-                        self.request, {"{}__arraycontains".format(column): value}
+                        self.request, {f"{column}__arraycontains": value}
                     )
                 else:
                     toggle_path = path_with_added_args(
-                        self.request, {"{}__arraycontains".format(column): value}
+                        self.request, {f"{column}__arraycontains": value}
                     )
                 facet_results_values.append(
                     {

@@ -482,16 +482,14 @@ class DateFacet(Facet):
             }
             facet_rows = facet_rows_results.rows[:facet_size]
             for row in facet_rows:
-                selected = str(args.get("{}__date".format(column))) == str(
-                    row["value"]
-                )
+                selected = str(args.get(f"{column}__date")) == str(row["value"])
                 if selected:
                     toggle_path = path_with_removed_args(
-                        self.request, {"{}__date".format(column): str(row["value"])}
+                        self.request, {f"{column}__date": str(row["value"])}
                     )
                 else:
                     toggle_path = path_with_added_args(
-                        self.request, {"{}__date".format(column): row["value"]}
+                        self.request, {f"{column}__date": row["value"]}
                     )
                 facet_results_values.append(
                     {
@@ -43,7 +43,7 @@ class TemplatedFilter(Filter):
             kwargs = {"c": column}
             converted = None
         else:
-            kwargs = {"c": column, "p": "p{}".format(param_counter), "t": table}
+            kwargs = {"c": column, "p": f"p{param_counter}", "t": table}
         return self.sql_template.format(**kwargs), converted
 
     def human_clause(self, column, value):

@@ -69,12 +69,12 @@ class InFilter(Filter):
 
     def where_clause(self, table, column, value, param_counter):
         values = self.split_value(value)
-        params = [":p{}".format(param_counter + i) for i in range(len(values))]
-        sql = "{} in ({})".format(escape_sqlite(column), ", ".join(params))
+        params = [f":p{param_counter + i}" for i in range(len(values))]
+        sql = f"{escape_sqlite(column)} in ({', '.join(params)})"
         return sql, values
 
     def human_clause(self, column, value):
-        return "{} in {}".format(column, json.dumps(self.split_value(value)))
+        return f"{column} in {json.dumps(self.split_value(value))}"
 
 
 class NotInFilter(InFilter):

@@ -83,12 +83,12 @@ class NotInFilter(InFilter):
 
     def where_clause(self, table, column, value, param_counter):
         values = self.split_value(value)
-        params = [":p{}".format(param_counter + i) for i in range(len(values))]
-        sql = "{} not in ({})".format(escape_sqlite(column), ", ".join(params))
+        params = [f":p{param_counter + i}" for i in range(len(values))]
+        sql = f"{escape_sqlite(column)} not in ({', '.join(params)})"
        return sql, values
 
     def human_clause(self, column, value):
-        return "{} not in {}".format(column, json.dumps(self.split_value(value)))
+        return f"{column} not in {json.dumps(self.split_value(value))}"
 
 
 class Filters:

@@ -221,7 +221,7 @@ class Filters:
         s = " and ".join(and_bits)
         if not s:
             return ""
-        return "where {}".format(s)
+        return f"where {s}"
 
     def selections(self):
         "Yields (column, lookup, value) tuples"

@@ -265,7 +265,7 @@ class Filters:
             if not isinstance(param, list):
                 param = [param]
             for individual_param in param:
-                param_id = "p{}".format(i)
+                param_id = f"p{i}"
                 params[param_id] = individual_param
                 i += 1
         return sql_bits, params
@@ -47,7 +47,7 @@ def inspect_tables(conn, database_metadata):
 
         try:
            count = conn.execute(
-                "select count(*) from {}".format(escape_sqlite(table))
+                f"select count(*) from {escape_sqlite(table)}"
            ).fetchone()[0]
        except sqlite3.OperationalError:
            # This can happen when running against a FTS virtual table

@@ -100,9 +100,7 @@ def publish_subcommand(publish):
             extra_metadata["plugins"] = {}
             for plugin_name, plugin_setting, setting_value in plugin_secret:
                 environment_variable = (
-                    "{}_{}".format(plugin_name, plugin_setting)
-                    .upper()
-                    .replace("-", "_")
+                    f"{plugin_name}_{plugin_setting}".upper().replace("-", "_")
                 )
                 environment_variables[environment_variable] = setting_value
                 extra_metadata["plugins"].setdefault(plugin_name, {})[

@@ -133,8 +131,8 @@ def publish_subcommand(publish):
             print(open("Dockerfile").read())
             print("\n====================\n")
 
-        image_id = "gcr.io/{project}/{name}".format(project=project, name=name)
-        check_call("gcloud builds submit --tag {}".format(image_id), shell=True)
+        image_id = f"gcr.io/{project}/{name}"
+        check_call(f"gcloud builds submit --tag {image_id}", shell=True)
         check_call(
             "gcloud run deploy --allow-unauthenticated --platform=managed --image {} {}{}".format(
                 image_id, service, " --memory {}".format(memory) if memory else ""
@@ -85,9 +85,7 @@ def fail_if_publish_binary_not_installed(binary, publish_target, install_link):
             err=True,
         )
         click.echo(
-            "Follow the instructions at {install_link}".format(
-                install_link=install_link
-            ),
+            f"Follow the instructions at {install_link}",
             err=True,
         )
         sys.exit(1)

@@ -83,9 +83,7 @@ def publish_subcommand(publish):
             extra_metadata["plugins"] = {}
             for plugin_name, plugin_setting, setting_value in plugin_secret:
                 environment_variable = (
-                    "{}_{}".format(plugin_name, plugin_setting)
-                    .upper()
-                    .replace("-", "_")
+                    f"{plugin_name}_{plugin_setting}".upper().replace("-", "_")
                 )
                 environment_variables[environment_variable] = setting_value
                 extra_metadata["plugins"].setdefault(plugin_name, {})[

@@ -129,9 +127,7 @@ def publish_subcommand(publish):
         app_name = json.loads(create_output)["name"]
 
         for key, value in environment_variables.items():
-            call(
-                ["heroku", "config:set", "-a", app_name, "{}={}".format(key, value)]
-            )
+            call(["heroku", "config:set", "-a", app_name, f"{key}={value}"])
         tar_option = []
         if tar:
             tar_option = ["--tar", tar]

@@ -181,9 +177,7 @@ def temporary_heroku_directory(
 
         if branch:
             install = [
-                "https://github.com/simonw/datasette/archive/{branch}.zip".format(
-                    branch=branch
-                )
+                f"https://github.com/simonw/datasette/archive/{branch}.zip"
             ] + list(install)
         else:
             install = ["datasette"] + list(install)

@@ -216,7 +210,7 @@ def temporary_heroku_directory(
                 link_or_copy_directory(
                     os.path.join(saved_cwd, path), os.path.join(tmp.name, mount_point)
                 )
-                extras.extend(["--static", "{}:{}".format(mount_point, mount_point)])
+                extras.extend(["--static", f"{mount_point}:{mount_point}"])
 
         quoted_files = " ".join(
             ["-i {}".format(shlex.quote(file_name)) for file_name in file_names]
@@ -82,7 +82,7 @@ def json_renderer(args, data, view_name):
             status_code = 400
             data = {
                 "ok": False,
-                "error": "Invalid _shape: {}".format(shape),
+                "error": f"Invalid _shape: {shape}",
                 "status": 400,
                 "title": None,
             }

@@ -96,7 +96,7 @@ def json_renderer(args, data, view_name):
     content_type = "application/json; charset=utf-8"
     headers = {}
     if next_url:
-        headers["link"] = '<{}>; rel="next"'.format(next_url)
+        headers["link"] = f'<{next_url}>; rel="next"'
     return Response(
         body, status=status_code, headers=headers, content_type=content_type
     )

@@ -28,7 +28,7 @@ def get_task_id():
 def trace(type, **kwargs):
     assert not TRACE_RESERVED_KEYS.intersection(
         kwargs.keys()
-    ), ".trace() keyword parameters cannot include {}".format(TRACE_RESERVED_KEYS)
+    ), f".trace() keyword parameters cannot include {TRACE_RESERVED_KEYS}"
     task_id = get_task_id()
     if task_id is None:
         yield

@@ -124,7 +124,7 @@ class AsgiTracer:
                 content_type = ""
             if "text/html" in content_type and b"</body>" in accumulated_body:
                 extra = json.dumps(trace_info, indent=2)
-                extra_html = "<pre>{}</pre></body>".format(extra).encode("utf8")
+                extra_html = f"<pre>{extra}</pre></body>".encode("utf8")
                 accumulated_body = accumulated_body.replace(b"</body>", extra_html)
             elif "json" in content_type and accumulated_body.startswith(b"{"):
                 data = json.loads(accumulated_body.decode("utf8"))
@@ -19,10 +19,10 @@ class Urls:
         return self.path("", format=format)
 
     def static(self, path):
-        return self.path("-/static/{}".format(path))
+        return self.path(f"-/static/{path}")
 
     def static_plugins(self, plugin, path):
-        return self.path("-/static-plugins/{}/{}".format(plugin, path))
+        return self.path(f"-/static-plugins/{plugin}/{path}")
 
     def logout(self):
         return self.path("-/logout")

@@ -30,27 +30,25 @@ class Urls:
     def database(self, database, format=None):
         db = self.ds.databases[database]
         if self.ds.config("hash_urls") and db.hash:
-            path = self.path(
-                "{}-{}".format(database, db.hash[:HASH_LENGTH]), format=format
-            )
+            path = self.path(f"{database}-{db.hash[:HASH_LENGTH]}", format=format)
         else:
             path = self.path(database, format=format)
         return path
 
     def table(self, database, table, format=None):
-        path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(table))
+        path = f"{self.database(database)}/{urllib.parse.quote_plus(table)}"
         if format is not None:
             path = path_with_format(path=path, format=format)
         return PrefixedUrlString(path)
 
     def query(self, database, query, format=None):
-        path = "{}/{}".format(self.database(database), urllib.parse.quote_plus(query))
+        path = f"{self.database(database)}/{urllib.parse.quote_plus(query)}"
         if format is not None:
             path = path_with_format(path=path, format=format)
         return PrefixedUrlString(path)
 
     def row(self, database, table, row_path, format=None):
-        path = "{}/{}".format(self.table(database, table), row_path)
+        path = f"{self.table(database, table)}/{row_path}"
         if format is not None:
             path = path_with_format(path=path, format=format)
         return PrefixedUrlString(path)
@@ -115,13 +115,10 @@ def compound_keys_after_sql(pks, start_index=0):
         last = pks_left[-1]
         rest = pks_left[:-1]
         and_clauses = [
-            "{} = :p{}".format(escape_sqlite(pk), (i + start_index))
-            for i, pk in enumerate(rest)
+            f"{escape_sqlite(pk)} = :p{i + start_index}" for i, pk in enumerate(rest)
         ]
-        and_clauses.append(
-            "{} > :p{}".format(escape_sqlite(last), (len(rest) + start_index))
-        )
-        or_clauses.append("({})".format(" and ".join(and_clauses)))
+        and_clauses.append(f"{escape_sqlite(last)} > :p{len(rest) + start_index}")
+        or_clauses.append(f"({' and '.join(and_clauses)})")
         pks_left.pop()
     or_clauses.reverse()
     return "({})".format("\n or\n".join(or_clauses))

@@ -195,7 +192,7 @@ allowed_pragmas = (
 )
 disallawed_sql_res = [
     (
-        re.compile("pragma(?!_({}))".format("|".join(allowed_pragmas))),
+        re.compile(f"pragma(?!_({'|'.join(allowed_pragmas)}))"),
         "Statement may not contain PRAGMA",
     )
 ]

@@ -215,7 +212,7 @@ def validate_sql_select(sql):
 
 def append_querystring(url, querystring):
     op = "&" if ("?" in url) else "?"
-    return "{}{}{}".format(url, op, querystring)
+    return f"{url}{op}{querystring}"
 
 
 def path_with_added_args(request, args, path=None):

@@ -230,7 +227,7 @@ def path_with_added_args(request, args, path=None):
     current.extend([(key, value) for key, value in args if value is not None])
     query_string = urllib.parse.urlencode(current)
     if query_string:
-        query_string = "?{}".format(query_string)
+        query_string = f"?{query_string}"
     return path + query_string
 
 

@@ -259,7 +256,7 @@ def path_with_removed_args(request, args, path=None):
             current.append((key, value))
     query_string = urllib.parse.urlencode(current)
     if query_string:
-        query_string = "?{}".format(query_string)
+        query_string = f"?{query_string}"
     return path + query_string
 
 

@@ -275,7 +272,7 @@ def path_with_replaced_args(request, args, path=None):
     current.extend([p for p in args if p[1] is not None])
     query_string = urllib.parse.urlencode(current)
     if query_string:
-        query_string = "?{}".format(query_string)
+        query_string = f"?{query_string}"
     return path + query_string
 
 

@@ -285,7 +282,7 @@ _boring_keyword_re = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$")
 
 def escape_css_string(s):
     return _css_re.sub(
-        lambda m: "\\" + ("{:X}".format(ord(m.group())).zfill(6)),
+        lambda m: "\\" + (f"{ord(m.group()):X}".zfill(6)),
         s.replace("\r\n", "\n"),
     )
 

@@ -294,7 +291,7 @@ def escape_sqlite(s):
     if _boring_keyword_re.match(s) and (s.lower() not in reserved_words):
         return s
     else:
-        return "[{}]".format(s)
+        return f"[{s}]"
 
 
 def make_dockerfile(
@@ -319,27 +316,27 @@ def make_dockerfile(
         cmd.extend(["-i", filename])
     cmd.extend(["--cors", "--inspect-file", "inspect-data.json"])
     if metadata_file:
-        cmd.extend(["--metadata", "{}".format(metadata_file)])
+        cmd.extend(["--metadata", f"{metadata_file}"])
     if template_dir:
         cmd.extend(["--template-dir", "templates/"])
     if plugins_dir:
         cmd.extend(["--plugins-dir", "plugins/"])
     if version_note:
-        cmd.extend(["--version-note", "{}".format(version_note)])
+        cmd.extend(["--version-note", f"{version_note}"])
     if static:
         for mount_point, _ in static:
-            cmd.extend(["--static", "{}:{}".format(mount_point, mount_point)])
+            cmd.extend(["--static", f"{mount_point}:{mount_point}"])
     if extra_options:
         for opt in extra_options.split():
-            cmd.append("{}".format(opt))
+            cmd.append(f"{opt}")
     cmd = [shlex.quote(part) for part in cmd]
     # port attribute is a (fixed) env variable and should not be quoted
     cmd.extend(["--port", "$PORT"])
     cmd = " ".join(cmd)
     if branch:
-        install = [
-            "https://github.com/simonw/datasette/archive/{}.zip".format(branch)
-        ] + list(install)
+        install = [f"https://github.com/simonw/datasette/archive/{branch}.zip"] + list(
+            install
+        )
     else:
         install = ["datasette"] + list(install)
 

@@ -449,7 +446,7 @@ def detect_primary_keys(conn, table):
     " Figure out primary keys for a table. "
     table_info_rows = [
         row
-        for row in conn.execute('PRAGMA table_info("{}")'.format(table)).fetchall()
+        for row in conn.execute(f'PRAGMA table_info("{table}")').fetchall()
         if row[-1]
     ]
     table_info_rows.sort(key=lambda row: row[-1])

@@ -457,7 +454,7 @@ def detect_primary_keys(conn, table):
 
 
 def get_outbound_foreign_keys(conn, table):
-    infos = conn.execute("PRAGMA foreign_key_list([{}])".format(table)).fetchall()
+    infos = conn.execute(f"PRAGMA foreign_key_list([{table}])").fetchall()
     fks = []
     for info in infos:
         if info is not None:

@@ -476,7 +473,7 @@ def get_all_foreign_keys(conn):
     for table in tables:
         table_to_foreign_keys[table] = {"incoming": [], "outgoing": []}
     for table in tables:
-        infos = conn.execute("PRAGMA foreign_key_list([{}])".format(table)).fetchall()
+        infos = conn.execute(f"PRAGMA foreign_key_list([{table}])").fetchall()
         for info in infos:
             if info is not None:
                 id, seq, table_name, from_, to_, on_update, on_delete, match = info

@@ -544,9 +541,7 @@ def table_columns(conn, table):
 def table_column_details(conn, table):
     return [
         Column(*r)
-        for r in conn.execute(
-            "PRAGMA table_info({});".format(escape_sqlite(table))
-        ).fetchall()
+        for r in conn.execute(f"PRAGMA table_info({escape_sqlite(table)});").fetchall()
     ]
 
 

@@ -562,9 +557,7 @@ def filters_should_redirect(special_args):
     if "__" in filter_op:
         filter_op, filter_value = filter_op.split("__", 1)
     if filter_column:
-        redirect_params.append(
-            ("{}__{}".format(filter_column, filter_op), filter_value)
-        )
+        redirect_params.append((f"{filter_column}__{filter_op}", filter_value))
     for key in ("_filter_column", "_filter_op", "_filter_value"):
         if key in special_args:
             redirect_params.append((key, None))

@@ -573,17 +566,17 @@ def filters_should_redirect(special_args):
     for column_key in column_keys:
         number = column_key.split("_")[-1]
         column = special_args[column_key]
-        op = special_args.get("_filter_op_{}".format(number)) or "exact"
-        value = special_args.get("_filter_value_{}".format(number)) or ""
+        op = special_args.get(f"_filter_op_{number}") or "exact"
+        value = special_args.get(f"_filter_value_{number}") or ""
         if "__" in op:
             op, value = op.split("__", 1)
         if column:
-            redirect_params.append(("{}__{}".format(column, op), value))
+            redirect_params.append((f"{column}__{op}", value))
         redirect_params.extend(
             [
-                ("_filter_column_{}".format(number), None),
-                ("_filter_op_{}".format(number), None),
-                ("_filter_value_{}".format(number), None),
+                (f"_filter_column_{number}", None),
+                (f"_filter_op_{number}", None),
+                (f"_filter_value_{number}", None),
             ]
         )
     return redirect_params
@@ -672,7 +665,7 @@ async def resolve_table_and_format(
     # Check if table ends with a known format
     formats = list(allowed_formats) + ["csv", "jsono"]
     for _format in formats:
-        if table_and_format.endswith(".{}".format(_format)):
+        if table_and_format.endswith(f".{_format}"):
             table = table_and_format[: -(len(_format) + 1)]
             return table, _format
     return table_and_format, None

@@ -683,20 +676,20 @@ def path_with_format(
 ):
     qs = extra_qs or {}
     path = request.path if request else path
-    if replace_format and path.endswith(".{}".format(replace_format)):
+    if replace_format and path.endswith(f".{replace_format}"):
         path = path[: -(1 + len(replace_format))]
     if "." in path:
         qs["_format"] = format
     else:
-        path = "{}.{}".format(path, format)
+        path = f"{path}.{format}"
     if qs:
         extra = urllib.parse.urlencode(sorted(qs.items()))
         if request and request.query_string:
-            path = "{}?{}&{}".format(path, request.query_string, extra)
+            path = f"{path}?{request.query_string}&{extra}"
         else:
-            path = "{}?{}".format(path, extra)
+            path = f"{path}?{extra}"
     elif request and request.query_string:
-        path = "{}?{}".format(path, request.query_string)
+        path = f"{path}?{request.query_string}"
     return path
 
 

@@ -742,9 +735,7 @@ class LimitedWriter:
     async def write(self, bytes):
         self.bytes_count += len(bytes)
         if self.limit_bytes and (self.bytes_count > self.limit_bytes):
-            raise WriteLimitExceeded(
-                "CSV contains more than {} bytes".format(self.limit_bytes)
-            )
+            raise WriteLimitExceeded(f"CSV contains more than {self.limit_bytes} bytes")
         await self.writer.write(bytes)
 
 

@@ -763,14 +754,14 @@ class StaticMount(click.ParamType):
     def convert(self, value, param, ctx):
         if ":" not in value:
             self.fail(
-                '"{}" should be of format mountpoint:directory'.format(value),
+                f'"{value}" should be of format mountpoint:directory',
                 param,
                 ctx,
             )
         path, dirpath = value.split(":", 1)
         dirpath = os.path.abspath(dirpath)
         if not os.path.exists(dirpath) or not os.path.isdir(dirpath):
-            self.fail("%s is not a valid directory path" % value, param, ctx)
+            self.fail(f"{value} is not a valid directory path", param, ctx)
         return path, dirpath
 
 

@@ -781,9 +772,9 @@ def format_bytes(bytes):
             break
         current = current / 1024
     if unit == "bytes":
-        return "{} {}".format(int(current), unit)
+        return f"{int(current)} {unit}"
     else:
-        return "{:.1f} {}".format(current, unit)
+        return f"{current:.1f} {unit}"
 
 
 _escape_fts_re = re.compile(r'\s+|(".*?")')

@@ -820,7 +811,7 @@ class MultiParams:
         self._data = new_data
 
     def __repr__(self):
-        return "<MultiParams: {}>".format(self._data)
+        return f"<MultiParams: {self._data}>"
 
     def __contains__(self, key):
         return key in self._data

@@ -867,7 +858,7 @@ def check_connection(conn):
     for table in tables:
         try:
             conn.execute(
-                "PRAGMA table_info({});".format(escape_sqlite(table)),
+                f"PRAGMA table_info({escape_sqlite(table)});",
             )
         except sqlite3.OperationalError as e:
            if e.args[0] == "no such module: VirtualSpatialIndex":
@@ -260,7 +260,7 @@ async def asgi_send_file(
 ):
     headers = headers or {}
     if filename:
-        headers["content-disposition"] = 'attachment; filename="{}"'.format(filename)
+        headers["content-disposition"] = f'attachment; filename="{filename}"'
     first = True
     headers["content-length"] = str((await aiofiles.os.stat(str(filepath))).st_size)
     async with aiofiles.open(str(filepath), mode="rb") as fp:

@@ -32,7 +32,7 @@ class TestResponse:
         return any(
             h
             for h in self.httpx_response.headers.get_list("set-cookie")
-            if h.startswith('{}="";'.format(cookie))
+            if h.startswith(f'{cookie}="";')
         )
 
     @property

@@ -125,9 +125,7 @@ class TestClient:
         if allow_redirects and response.status in (301, 302):
             assert (
                 redirect_count < self.max_redirects
-            ), "Redirected {} times, max_redirects={}".format(
-                redirect_count, self.max_redirects
-            )
+            ), f"Redirected {redirect_count} times, max_redirects={self.max_redirects}"
             location = response.headers["Location"]
             return await self._request(
                 location, allow_redirects=True, redirect_count=redirect_count + 1
@@ -125,9 +125,7 @@ class BaseView:
                 **{
                     "database_color": self.database_color,
                     "select_templates": [
-                        "{}{}".format(
-                            "*" if template_name == template.name else "", template_name
-                        )
+                        f"{'*' if template_name == template.name else ''}{template_name}"
                         for template_name in templates
                     ],
                 },

@@ -165,11 +163,11 @@ class DataView(BaseView):
 
     def redirect(self, request, path, forward_querystring=True, remove_args=None):
         if request.query_string and "?" not in path and forward_querystring:
-            path = "{}?{}".format(path, request.query_string)
+            path = f"{path}?{request.query_string}"
         if remove_args:
             path = path_with_removed_args(request, remove_args, path=path)
         r = Response.redirect(path)
-        r.headers["Link"] = "<{}>; rel=preload".format(path)
+        r.headers["Link"] = f"<{path}>; rel=preload"
         if self.ds.cors:
             r.headers["Access-Control-Allow-Origin"] = "*"
         return r

@@ -184,7 +182,7 @@ class DataView(BaseView):
             # No matching DB found, maybe it's a name-hash?
             name_bit, hash_bit = db_name.rsplit("-", 1)
             if name_bit not in self.ds.databases:
-                raise NotFound("Database not found: {}".format(name))
+                raise NotFound(f"Database not found: {name}")
             else:
                 name = name_bit
                 hash = hash_bit

@@ -194,7 +192,7 @@ class DataView(BaseView):
         try:
             db = self.ds.databases[name]
         except KeyError:
-            raise NotFound("Database not found: {}".format(name))
+            raise NotFound(f"Database not found: {name}")
 
         # Verify the hash
         expected = "000"

@@ -217,11 +215,11 @@ class DataView(BaseView):
             )
             kwargs["table"] = table
             if _format:
-                kwargs["as_format"] = ".{}".format(_format)
+                kwargs["as_format"] = f".{_format}"
         elif kwargs.get("table"):
             kwargs["table"] = urllib.parse.unquote_plus(kwargs["table"])
 
-        should_redirect = self.ds.urls.path("{}-{}".format(name, expected))
+        should_redirect = self.ds.urls.path(f"{name}-{expected}")
         if kwargs.get("table"):
             should_redirect += "/" + urllib.parse.quote_plus(kwargs["table"])
         if kwargs.get("pk_path"):

@@ -294,7 +292,7 @@ class DataView(BaseView):
             for column in data["columns"]:
                 headings.append(column)
                 if column in expanded_columns:
-                    headings.append("{}_label".format(column))
+                    headings.append(f"{column}_label")
 
             async def stream_fn(r):
                 nonlocal data

@@ -505,7 +503,7 @@ class DataView(BaseView):
                 elif isinstance(result, Response):
                     r = result
                 else:
-                    assert False, "{} should be dict or Response".format(result)
+                    assert False, f"{result} should be dict or Response"
         else:
             extras = {}
             if callable(extra_template_data):

@@ -581,7 +579,7 @@ class DataView(BaseView):
             if ttl == 0:
                 ttl_header = "no-cache"
             else:
-                ttl_header = "max-age={}".format(ttl)
+                ttl_header = f"max-age={ttl}"
             response.headers["Cache-Control"] = ttl_header
         response.headers["Referrer-Policy"] = "no-referrer"
         if self.ds.cors:
@@ -140,7 +140,7 @@ class DatabaseView(DataView):
                 and not db.is_mutable
                 and database != ":memory:",
             },
-            ("database-{}.html".format(to_css_class(database)), "database.html"),
+            (f"database-{to_css_class(database)}.html", "database.html"),
         )
 
 

@@ -233,7 +233,7 @@ class QueryView(DataView):
         if _size:
             extra_args["page_size"] = _size
 
-        templates = ["query-{}.html".format(to_css_class(database)), "query.html"]
+        templates = [f"query-{to_css_class(database)}.html", "query.html"]
 
         # Execute query - as write or as read
         if write:

@@ -324,9 +324,7 @@ class QueryView(DataView):
         if canned_query:
             templates.insert(
                 0,
-                "query-{}-{}.html".format(
-                    to_css_class(database), to_css_class(canned_query)
-                ),
+                f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html",
             )
 
         allow_execute_sql = await self.ds.permission_allowed(
@@ -111,13 +111,13 @@ class AllowDebugView(BaseView):
             actor = json.loads(actor_input)
             actor_input = json.dumps(actor, indent=4)
         except json.decoder.JSONDecodeError as ex:
-            errors.append("Actor JSON error: {}".format(ex))
+            errors.append(f"Actor JSON error: {ex}")
         allow_input = request.args.get("allow") or '{"id": "*"}'
         try:
             allow = json.loads(allow_input)
             allow_input = json.dumps(allow, indent=4)
         except json.decoder.JSONDecodeError as ex:
-            errors.append("Allow JSON error: {}".format(ex))
+            errors.append(f"Allow JSON error: {ex}")
 
         result = None
         if not errors:
@@ -212,13 +212,11 @@ class RowTableShared(DataView):
                     # representation, which we have to round off to avoid ugliness. In the vast
                     # majority of cases this rounding will be inconsequential. I hope.
                     value = round(value.to_compact(), 6)
-                    display_value = jinja2.Markup(
-                        "{:~P}".format(value).replace(" ", "&nbsp;")
-                    )
+                    display_value = jinja2.Markup(f"{value:~P}".replace(" ", "&nbsp;"))
                 else:
                     display_value = str(value)
                     if truncate_cells and len(display_value) > truncate_cells:
-                        display_value = display_value[:truncate_cells] + u"\u2026"
+                        display_value = display_value[:truncate_cells] + "\u2026"
 
                 cells.append(
                     {

@@ -307,7 +305,7 @@ class TableView(RowTableShared):
         is_view = bool(await db.get_view_definition(table))
         table_exists = bool(await db.table_exists(table))
         if not is_view and not table_exists:
-            raise NotFound("Table not found: {}".format(table))
+            raise NotFound(f"Table not found: {table}")
 
         await self.check_permissions(
             request,

@@ -330,7 +328,7 @@ class TableView(RowTableShared):
 
         use_rowid = not pks and not is_view
         if use_rowid:
-            select = "rowid, {}".format(select_columns)
+            select = f"rowid, {select_columns}"
             order_by = "rowid"
             order_by_pks = "rowid"
         else:

@@ -424,7 +422,7 @@ class TableView(RowTableShared):
                 raise DatasetteError(
                     "Invalid _through - could not find corresponding foreign key"
                 )
-            param = "p{}".format(len(params))
+            param = f"p{len(params)}"
             where_clauses.append(
                 "{our_pk} in (select {our_column} from {through_table} where {other_column} = :{param})".format(
                     through_table=escape_sqlite(through_table),

@@ -436,7 +434,7 @@ class TableView(RowTableShared):
             )
             params[param] = value
             extra_human_descriptions.append(
-                '{}.{} = "{}"'.format(through_table, other_column, value)
+                f'{through_table}.{other_column} = "{value}"'
             )
 
         # _search support:

@@ -462,7 +460,7 @@ class TableView(RowTableShared):
                         else "escape_fts(:search)",
                     )
                 )
-                extra_human_descriptions.append('search matches "{}"'.format(search))
+                extra_human_descriptions.append(f'search matches "{search}"')
                 params["search"] = search
             else:
                 # More complex: search against specific columns

@@ -481,11 +479,9 @@ class TableView(RowTableShared):
                         )
                     )
                     extra_human_descriptions.append(
-                        'search column "{}" matches "{}"'.format(
-                            search_col, search_text
-                        )
+                        f'search column "{search_col}" matches "{search_text}"'
                     )
-                    params["search_{}".format(i)] = search_text
+                    params[f"search_{i}"] = search_text
 
         sortable_columns = set()
 
@@ -506,15 +502,15 @@ class TableView(RowTableShared):
 
         if sort:
             if sort not in sortable_columns:
-                raise DatasetteError("Cannot sort table by {}".format(sort))
+                raise DatasetteError(f"Cannot sort table by {sort}")
 
             order_by = escape_sqlite(sort)
 
         if sort_desc:
             if sort_desc not in sortable_columns:
-                raise DatasetteError("Cannot sort table by {}".format(sort_desc))
+                raise DatasetteError(f"Cannot sort table by {sort_desc}")
 
-            order_by = "{} desc".format(escape_sqlite(sort_desc))
+            order_by = f"{escape_sqlite(sort_desc)} desc"
 
         from_sql = "from {table_name} {where}".format(
             table_name=escape_sqlite(table),

@@ -525,14 +521,14 @@ class TableView(RowTableShared):
         # Copy of params so we can mutate them later:
         from_sql_params = dict(**params)
 
-        count_sql = "select count(*) {}".format(from_sql)
+        count_sql = f"select count(*) {from_sql}"
 
         _next = _next or special_args.get("_next")
         offset = ""
         if _next:
             if is_view:
                 # _next is an offset
-                offset = " offset {}".format(int(_next))
+                offset = f" offset {int(_next)}"
             else:
                 components = urlsafe_components(_next)
                 # If a sort order is applied, the first of these is the sort value

@@ -546,8 +542,8 @@ class TableView(RowTableShared):
                 # Figure out the SQL for next-based-on-primary-key first
                 next_by_pk_clauses = []
                 if use_rowid:
-                    next_by_pk_clauses.append("rowid > :p{}".format(len(params)))
-                    params["p{}".format(len(params))] = components[0]
+                    next_by_pk_clauses.append(f"rowid > :p{len(params)}")
+                    params[f"p{len(params)}"] = components[0]
                 else:
                     # Apply the tie-breaker based on primary keys
                     if len(components) == len(pks):

@@ -556,7 +552,7 @@ class TableView(RowTableShared):
                             compound_keys_after_sql(pks, param_len)
                         )
                         for i, pk_value in enumerate(components):
-                            params["p{}".format(param_len + i)] = pk_value
+                            params[f"p{param_len + i}"] = pk_value
 
                 # Now add the sort SQL, which may incorporate next_by_pk_clauses
                 if sort or sort_desc:

@@ -590,17 +586,17 @@ class TableView(RowTableShared):
                             next_clauses=" and ".join(next_by_pk_clauses),
                         )
                     )
-                    params["p{}".format(len(params))] = sort_value
-                    order_by = "{}, {}".format(order_by, order_by_pks)
+                    params[f"p{len(params)}"] = sort_value
+                    order_by = f"{order_by}, {order_by_pks}"
                 else:
                     where_clauses.extend(next_by_pk_clauses)
 
         where_clause = ""
         if where_clauses:
-            where_clause = "where {} ".format(" and ".join(where_clauses))
+            where_clause = f"where {' and '.join(where_clauses)} "
 
         if order_by:
-            order_by = "order by {} ".format(order_by)
+            order_by = f"order by {order_by} "
 
         extra_args = {}
         # Handle ?_size=500

@@ -617,9 +613,7 @@ class TableView(RowTableShared):
                 raise BadRequest("_size must be a positive integer")
 
             if page_size > self.ds.max_returned_rows:
-                raise BadRequest(
-                    "_size must be <= {}".format(self.ds.max_returned_rows)
-                )
+                raise BadRequest(f"_size must be <= {self.ds.max_returned_rows}")
 
             extra_args["page_size"] = page_size
         else:

@@ -631,9 +625,7 @@ class TableView(RowTableShared):
             where=where_clause,
             order_by=order_by,
         )
-        sql = "{sql_no_limit} limit {limit}{offset}".format(
-            sql_no_limit=sql_no_limit.rstrip(), limit=page_size + 1, offset=offset
-        )
+        sql = f"{sql_no_limit.rstrip()} limit {page_size + 1}{offset}"
 
         if request.args.get("_timelimit"):
             extra_args["custom_time_limit"] = int(request.args.get("_timelimit"))
@@ -645,7 +637,7 @@ class TableView(RowTableShared):
         if (
             not db.is_mutable
             and self.ds.inspect_data
-            and count_sql == "select count(*) from {} ".format(table)
+            and count_sql == f"select count(*) from {table} "
         ):
             try:
                 filtered_table_rows_count = self.ds.inspect_data[database]["tables"][

@@ -763,7 +755,7 @@ class TableView(RowTableShared):
                 prefix = "$null"
             else:
                 prefix = urllib.parse.quote_plus(str(prefix))
-            next_value = "{},{}".format(prefix, next_value)
+            next_value = f"{prefix},{next_value}"
             added_args = {"_next": next_value}
             if sort:
                 added_args["_sort"] = sort

@@ -879,12 +871,8 @@ class TableView(RowTableShared):
                 "sort_desc": sort_desc,
                 "disable_sort": is_view,
                 "custom_table_templates": [
-                    "_table-{}-{}.html".format(
-                        to_css_class(database), to_css_class(table)
-                    ),
-                    "_table-table-{}-{}.html".format(
-                        to_css_class(database), to_css_class(table)
-                    ),
+                    f"_table-{to_css_class(database)}-{to_css_class(table)}.html",
+                    f"_table-table-{to_css_class(database)}-{to_css_class(table)}.html",
                     "_table.html",
                 ],
                 "metadata": metadata,

@@ -918,7 +906,7 @@ class TableView(RowTableShared):
             },
             extra_template,
             (
-                "table-{}-{}.html".format(to_css_class(database), to_css_class(table)),
+                f"table-{to_css_class(database)}-{to_css_class(table)}.html",
                 "table.html",
             ),
         )

@@ -931,13 +919,11 @@ async def _sql_params_pks(db, table, pk_values):
     if use_rowid:
         select = "rowid, *"
         pks = ["rowid"]
-    wheres = ['"{}"=:p{}'.format(pk, i) for i, pk in enumerate(pks)]
-    sql = "select {} from {} where {}".format(
-        select, escape_sqlite(table), " AND ".join(wheres)
-    )
+    wheres = [f'"{pk}"=:p{i}' for i, pk in enumerate(pks)]
+    sql = f"select {select} from {escape_sqlite(table)} where {' AND '.join(wheres)}"
     params = {}
     for i, pk_value in enumerate(pk_values):
-        params["p{}".format(i)] = pk_value
+        params[f"p{i}"] = pk_value
     return sql, params, pks
 
 

@@ -960,7 +946,7 @@ class RowView(RowTableShared):
         columns = [r[0] for r in results.description]
         rows = list(results.rows)
         if not rows:
-            raise NotFound("Record not found: {}".format(pk_values))
+            raise NotFound(f"Record not found: {pk_values}")
 
         async def template_data():
             display_columns, display_rows = await self.display_columns_and_rows(

@@ -981,12 +967,8 @@ class RowView(RowTableShared):
                 "display_columns": display_columns,
                 "display_rows": display_rows,
                 "custom_table_templates": [
-                    "_table-{}-{}.html".format(
-                        to_css_class(database), to_css_class(table)
-                    ),
-                    "_table-row-{}-{}.html".format(
-                        to_css_class(database), to_css_class(table)
-                    ),
+                    f"_table-{to_css_class(database)}-{to_css_class(table)}.html",
+                    f"_table-row-{to_css_class(database)}-{to_css_class(table)}.html",
                     "_table.html",
                 ],
                 "metadata": (self.ds.metadata("databases") or {})

@@ -1014,7 +996,7 @@ class RowView(RowTableShared):
             data,
             template_data,
             (
-                "row-{}-{}.html".format(to_css_class(database), to_css_class(table)),
+                f"row-{to_css_class(database)}-{to_css_class(table)}.html",
                 "row.html",
             ),
         )
@@ -247,7 +247,7 @@ def generate_compound_rows(num):
     for a, b, c in itertools.islice(
         itertools.product(string.ascii_lowercase, repeat=3), num
     ):
-        yield a, b, c, "{}-{}-{}".format(a, b, c)
+        yield a, b, c, f"{a}-{b}-{c}"
 
 
 def generate_sortable_rows(num):

@@ -258,7 +258,7 @@ def generate_sortable_rows(num):
         yield {
             "pk1": a,
             "pk2": b,
-            "content": "{}-{}".format(a, b),
+            "content": f"{a}-{b}",
             "sortable": rand.randint(-100, 100),
             "sortable_with_nulls": rand.choice([None, rand.random(), rand.random()]),
             "sortable_with_nulls_2": rand.choice([None, rand.random(), rand.random()]),

@@ -742,7 +742,7 @@ def cli(db_filename, metadata, plugins_path, recreate):
     if pathlib.Path(db_filename).exists():
         if not recreate:
             raise click.ClickException(
-                "{} already exists, use --recreate to reset it".format(db_filename)
+                f"{db_filename} already exists, use --recreate to reset it"
             )
         else:
             pathlib.Path(db_filename).unlink()

@@ -751,10 +751,10 @@ def cli(db_filename, metadata, plugins_path, recreate):
     for sql, params in TABLE_PARAMETERIZED_SQL:
         with conn:
             conn.execute(sql, params)
-    print("Test tables written to {}".format(db_filename))
+    print(f"Test tables written to {db_filename}")
     if metadata:
         open(metadata, "w").write(json.dumps(METADATA, indent=4))
-        print("- metadata written to {}".format(metadata))
+        print(f"- metadata written to {metadata}")
     if plugins_path:
         path = pathlib.Path(plugins_path)
         if not path.exists():

@@ -763,7 +763,7 @@ def cli(db_filename, metadata, plugins_path, recreate):
         for filepath in test_plugins.glob("*.py"):
             newpath = path / filepath.name
             newpath.write_text(filepath.open().read())
-            print(" Wrote plugin: {}".format(newpath))
+            print(f" Wrote plugin: {newpath}")
 
 
 if __name__ == "__main__":
@@ -130,7 +130,7 @@ def extra_template_vars(
 
 @hookimpl
 def prepare_jinja2_environment(env):
-    env.filters["format_numeric"] = lambda s: "{:,.0f}".format(float(s))
+    env.filters["format_numeric"] = lambda s: f"{float(s):,.0f}"
 
 
 @hookimpl

@@ -207,7 +207,7 @@ def register_routes():
     async def two(request):
         name = request.url_vars["name"]
         greeting = request.args.get("greeting")
-        return Response.text("{} {}".format(greeting, name))
+        return Response.text(f"{greeting} {name}")
 
     async def three(scope, send):
         await asgi_send_json(

@@ -281,11 +281,7 @@ def startup(datasette):
 
 @hookimpl
 def canned_queries(datasette, database, actor):
-    return {
-        "from_hook": "select 1, '{}' as actor_id".format(
-            actor["id"] if actor else "null"
-        )
-    }
+    return {"from_hook": f"select 1, '{actor['id'] if actor else 'null'}' as actor_id"}
 
 
 @hookimpl

@@ -329,9 +325,9 @@ def table_actions(datasette, database, table, actor):
     return [
         {
             "href": datasette.urls.instance(),
-            "label": "Database: {}".format(database),
+            "label": f"Database: {database}",
         },
-        {"href": datasette.urls.instance(), "label": "Table: {}".format(table)},
+        {"href": datasette.urls.instance(), "label": f"Table: {table}"},
     ]
 
 

@@ -341,6 +337,6 @@ def database_actions(datasette, database, actor):
     return [
         {
             "href": datasette.urls.instance(),
-            "label": "Database: {}".format(database),
+            "label": f"Database: {database}",
         }
     ]
@@ -918,7 +918,7 @@ def test_paginate_compound_keys_with_extra_filters(app_client):
     ],
 )
 def test_sortable(app_client, query_string, sort_key, human_description_en):
-    path = "/fixtures/sortable.json?_shape=objects&{}".format(query_string)
+    path = f"/fixtures/sortable.json?_shape=objects&{query_string}"
     fetched = []
     page = 0
     while path:

@@ -969,8 +969,8 @@ def test_sortable_columns_metadata(app_client):
     assert "Cannot sort table by content" == response.json["error"]
     # no_primary_key has ALL sort options disabled
     for column in ("content", "a", "b", "c"):
-        response = app_client.get("/fixtures/sortable.json?_sort={}".format(column))
-        assert "Cannot sort table by {}".format(column) == response.json["error"]
+        response = app_client.get(f"/fixtures/sortable.json?_sort={column}")
+        assert f"Cannot sort table by {column}" == response.json["error"]
 
 
 @pytest.mark.parametrize(

@@ -1877,7 +1877,7 @@ def test_binary_data_in_json(app_client, path, expected_json, expected_text):
     ],
 )
 def test_paginate_using_link_header(app_client, qs):
-    path = "/fixtures/compound_three_primary_keys.json{}".format(qs)
+    path = f"/fixtures/compound_three_primary_keys.json{qs}"
     num_pages = 0
     while path:
         response = app_client.get(path)
@@ -7,7 +7,7 @@ import time
 def test_auth_token(app_client):
     "The /-/auth-token endpoint sets the correct cookie"
     assert app_client.ds._root_token is not None
-    path = "/-/auth-token?token={}".format(app_client.ds._root_token)
+    path = f"/-/auth-token?token={app_client.ds._root_token}"
     response = app_client.get(
         path,
         allow_redirects=False,

@@ -82,7 +82,7 @@ def test_insert(canned_write_client):
 def test_canned_query_form_csrf_hidden_field(
     canned_write_client, query_name, expect_csrf_hidden_field
 ):
-    response = canned_write_client.get("/data/{}".format(query_name))
+    response = canned_write_client.get(f"/data/{query_name}")
     html = response.text
     fragment = '<input type="hidden" name="csrftoken" value="'
     if expect_csrf_hidden_field:

@@ -284,10 +284,10 @@ def magic_parameters_client():
 def test_magic_parameters(magic_parameters_client, magic_parameter, expected_re):
     magic_parameters_client.ds._metadata["databases"]["data"]["queries"]["runme_post"][
         "sql"
-    ] = "insert into logs (line) values (:{})".format(magic_parameter)
+    ] = f"insert into logs (line) values (:{magic_parameter})"
     magic_parameters_client.ds._metadata["databases"]["data"]["queries"]["runme_get"][
         "sql"
-    ] = "select :{} as result".format(magic_parameter)
+    ] = f"select :{magic_parameter} as result"
     cookies = {
         "ds_actor": magic_parameters_client.actor_cookie({"id": "root"}),
         "foo": "bar",

@@ -328,7 +328,7 @@ def test_magic_parameters_csrf_json(magic_parameters_client, use_csrf, return_json):
     if return_json:
         qs = "?_json=1"
     response = magic_parameters_client.post(
-        "/data/runme_post{}".format(qs),
+        f"/data/runme_post{qs}",
         {},
         csrftoken_from=use_csrf or None,
         allow_redirects=False,
@@ -162,4 +162,4 @@ def test_uninstall(run_module):
 def test_version():
     runner = CliRunner()
     result = runner.invoke(cli, ["--version"])
-    assert result.output == "cli, version {}\n".format(__version__)
+    assert result.output == f"cli, version {__version__}\n"

@@ -46,7 +46,7 @@ def test_help_includes(name, filename):
     expected = open(str(docs_path / filename)).read()
     runner = CliRunner()
     result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88)
-    actual = "$ datasette {} --help\n\n{}".format(name, result.output)
+    actual = f"$ datasette {name} --help\n\n{result.output}"
     # actual has "Usage: cli package [OPTIONS] FILES"
     # because it doesn't know that cli will be aliased to datasette
     expected = expected.replace("Usage: datasette", "Usage: cli")

@@ -67,10 +67,10 @@ def test_plugin_hooks_are_documented(plugin, plugin_hooks_content):
     hook_caller = getattr(app.pm.hook, plugin)
     arg_names = [a for a in hook_caller.spec.argnames if a != "__multicall__"]
     # Check for plugin_name(arg1, arg2, arg3)
-    expected = "{}({})".format(plugin, ", ".join(arg_names))
+    expected = f"{plugin}({', '.join(arg_names)})"
     assert (
         expected in plugin_hooks_content
-    ), "Missing from plugin hook documentation: {}".format(expected)
+    ), f"Missing from plugin hook documentation: {expected}"
 
 
 @pytest.fixture(scope="session")

@@ -62,6 +62,4 @@ def test_build_where(args, expected_where, expected_params):
     f = Filters(sorted(args))
     sql_bits, actual_params = f.build_where_clauses("table")
     assert expected_where == sql_bits
-    assert {
-        "p{}".format(i): param for i, param in enumerate(expected_params)
-    } == actual_params
+    assert {f"p{i}": param for i, param in enumerate(expected_params)} == actual_params
@@ -592,9 +592,7 @@ def test_css_classes_on_body(app_client, path, expected_classes):
 def test_templates_considered(app_client, path, expected_considered):
     response = app_client.get(path)
     assert response.status == 200
-    assert (
-        "<!-- Templates considered: {} -->".format(expected_considered) in response.text
-    )
+    assert f"<!-- Templates considered: {expected_considered} -->" in response.text


 def test_table_html_simple_primary_key(app_client):

@@ -607,9 +605,7 @@ def test_table_html_simple_primary_key(app_client):
     for expected_col, th in zip(("content",), ths[1:]):
         a = th.find("a")
         assert expected_col == a.string
-        assert a["href"].endswith(
-            "/simple_primary_key?_size=3&_sort={}".format(expected_col)
-        )
+        assert a["href"].endswith(f"/simple_primary_key?_size=3&_sort={expected_col}")
         assert ["nofollow"] == a["rel"]
     assert [
         [

@@ -730,11 +726,11 @@ def test_table_html_no_primary_key(app_client):
             '<td class="col-Link type-pk"><a href="/fixtures/no_primary_key/{}">{}</a></td>'.format(
                 i, i
             ),
-            '<td class="col-rowid type-int">{}</td>'.format(i),
-            '<td class="col-content type-str">{}</td>'.format(i),
-            '<td class="col-a type-str">a{}</td>'.format(i),
-            '<td class="col-b type-str">b{}</td>'.format(i),
-            '<td class="col-c type-str">c{}</td>'.format(i),
+            f'<td class="col-rowid type-int">{i}</td>',
+            f'<td class="col-content type-str">{i}</td>',
+            f'<td class="col-a type-str">a{i}</td>',
+            f'<td class="col-b type-str">b{i}</td>',
+            f'<td class="col-c type-str">c{i}</td>',
         ]
         for i in range(1, 51)
     ]

@@ -782,8 +778,8 @@ def test_table_html_compound_primary_key(app_client):
     for expected_col, th in zip(("pk1", "pk2", "content"), ths[1:]):
         a = th.find("a")
         assert expected_col == a.string
-        assert th["class"] == ["col-{}".format(expected_col)]
-        assert a["href"].endswith("/compound_primary_key?_sort={}".format(expected_col))
+        assert th["class"] == [f"col-{expected_col}"]
+        assert a["href"].endswith(f"/compound_primary_key?_sort={expected_col}")
     expected = [
         [
             '<td class="col-Link type-pk"><a href="/fixtures/compound_primary_key/a,b">a,b</a></td>',

@@ -1100,9 +1096,7 @@ def test_404(app_client, path):
     response = app_client.get(path)
     assert 404 == response.status
     assert (
-        '<link rel="stylesheet" href="/-/static/app.css?{}'.format(
-            app_client.ds.app_css_hash()
-        )
+        f'<link rel="stylesheet" href="/-/static/app.css?{app_client.ds.app_css_hash()}'
         in response.text
     )

@@ -1293,9 +1287,10 @@ def test_blob_download(app_client, path, expected_filename):
     assert response.status == 200
     assert response.body == b"\x15\x1c\x02\xc7\xad\x05\xfe"
     assert response.headers["x-content-type-options"] == "nosniff"
-    assert response.headers[
-        "content-disposition"
-    ] == 'attachment; filename="{}"'.format(expected_filename)
+    assert (
+        response.headers["content-disposition"]
+        == f'attachment; filename="{expected_filename}"'
+    )
     assert response.headers["content-type"] == "application/binary"

@@ -1502,9 +1497,7 @@ def test_base_url_affects_metadata_extra_css_urls(app_client_base_url_prefix):
 )
 def test_edit_sql_link_on_canned_queries(app_client, path, expected):
     response = app_client.get(path)
-    expected_link = '<a href="{}" class="canned-query-edit-sql">Edit SQL</a>'.format(
-        expected
-    )
+    expected_link = f'<a href="{expected}" class="canned-query-edit-sql">Edit SQL</a>'
     if expected:
         assert expected_link in response.text
     else:

@@ -1555,10 +1548,10 @@ def test_navigation_menu_links(
         for link in should_have_links:
             assert (
                 details.find("a", {"href": link}) is not None
-            ), "{} expected but missing from nav menu".format(link)
+            ), f"{link} expected but missing from nav menu"

     if should_not_have_links:
         for link in should_not_have_links:
             assert (
                 details.find("a", {"href": link}) is None
-            ), "{} found but should not have been in nav menu".format(link)
+            ), f"{link} found but should not have been in nav menu"

@@ -157,7 +157,7 @@ def test_database_hashed(app_client_with_hash, base_url):
     ds._config["base_url"] = base_url
     db_hash = ds.get_database("fixtures").hash
     assert len(db_hash) == 64
-    expected = "{}fixtures-{}".format(base_url, db_hash[:7])
+    expected = f"{base_url}fixtures-{db_hash[:7]}"
     assert ds.urls.database("fixtures") == expected
     assert ds.urls.table("fixtures", "name") == expected + "/name"
     assert ds.urls.query("fixtures", "name") == expected + "/name"

@@ -11,7 +11,7 @@ import pytest
     ],
 )
 def test_add_message_sets_cookie(app_client, qs, expected):
-    response = app_client.get("/fixtures.message?{}".format(qs))
+    response = app_client.get(f"/fixtures.message?{qs}")
     signed = response.cookies["ds_messages"]
     decoded = app_client.ds.unsign(signed, "messages")
     assert expected == decoded

@@ -34,7 +34,7 @@ def test_plugin_hooks_have_tests(plugin_hook):
     for test in tests_in_this_module:
         if plugin_hook in test:
             ok = True
-    assert ok, "Plugin hook is missing tests: {}".format(plugin_hook)
+    assert ok, f"Plugin hook is missing tests: {plugin_hook}"


 def test_hook_plugins_dir_plugin_prepare_connection(app_client):

@@ -398,7 +398,7 @@ def view_names_client(tmp_path_factory):
 def test_view_names(view_names_client, path, view_name):
     response = view_names_client.get(path)
     assert response.status == 200
-    assert "view_name:{}".format(view_name) == response.text
+    assert f"view_name:{view_name}" == response.text


 def test_hook_register_output_renderer_no_parameters(app_client):

@@ -659,7 +659,7 @@ def test_hook_register_routes_csrftoken(restore_working_directory, tmpdir_factory):
     with make_app_client(template_dir=templates) as client:
         response = client.get("/csrftoken-form/")
         expected_token = client.ds._last_request.scope["csrftoken"]()
-        assert "CSRFTOKEN: {}".format(expected_token) == response.text
+        assert f"CSRFTOKEN: {expected_token}" == response.text


 def test_hook_register_routes_asgi(app_client):

@@ -793,14 +793,14 @@ def test_hook_table_actions(app_client, table_or_view):
             return []
         return [{"label": a.text, "href": a["href"]} for a in details.select("a")]

-    response = app_client.get("/fixtures/{}".format(table_or_view))
+    response = app_client.get(f"/fixtures/{table_or_view}")
     assert get_table_actions_links(response.text) == []

-    response_2 = app_client.get("/fixtures/{}?_bot=1".format(table_or_view))
+    response_2 = app_client.get(f"/fixtures/{table_or_view}?_bot=1")
     assert get_table_actions_links(response_2.text) == [
         {"label": "From async", "href": "/"},
         {"label": "Database: fixtures", "href": "/"},
-        {"label": "Table: {}".format(table_or_view), "href": "/"},
+        {"label": f"Table: {table_or_view}", "href": "/"},
     ]

@@ -62,7 +62,7 @@ Service name: input-service
     tag = "gcr.io/myproject/datasette"
     mock_call.assert_has_calls(
         [
-            mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
+            mock.call(f"gcloud builds submit --tag {tag}", shell=True),
             mock.call(
                 "gcloud run deploy --allow-unauthenticated --platform=managed --image {} input-service".format(
                     tag

@@ -86,10 +86,10 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
         cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
     )
     assert 0 == result.exit_code
-    tag = "gcr.io/{}/datasette".format(mock_output.return_value)
+    tag = f"gcr.io/{mock_output.return_value}/datasette"
     mock_call.assert_has_calls(
         [
-            mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
+            mock.call(f"gcloud builds submit --tag {tag}", shell=True),
             mock.call(
                 "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test".format(
                     tag

@@ -129,10 +129,10 @@ def test_publish_cloudrun_memory(
         assert 2 == result.exit_code
         return
     assert 0 == result.exit_code
-    tag = "gcr.io/{}/datasette".format(mock_output.return_value)
+    tag = f"gcr.io/{mock_output.return_value}/datasette"
     mock_call.assert_has_calls(
         [
-            mock.call("gcloud builds submit --tag {}".format(tag), shell=True),
+            mock.call(f"gcloud builds submit --tag {tag}", shell=True),
             mock.call(
                 "gcloud run deploy --allow-unauthenticated --platform=managed --image {} test --memory {}".format(
                     tag, memory

@@ -457,7 +457,7 @@ def test_check_connection_passes():

 def test_call_with_supported_arguments():
     def foo(a, b):
-        return "{}+{}".format(a, b)
+        return f"{a}+{b}"

     assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2)
     assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2, c=3)

@@ -16,7 +16,7 @@ def update_help_includes():
     for name, filename in includes:
         runner = CliRunner()
         result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88)
-        actual = "$ datasette {} --help\n\n{}".format(name, result.output)
+        actual = f"$ datasette {name} --help\n\n{result.output}"
         actual = actual.replace("Usage: cli ", "Usage: datasette ")
         open(docs_path / filename, "w").write(actual)