Coverage for datasette/views/table.py : 96%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1import urllib
2import itertools
3import json
5import jinja2
7from datasette.plugins import pm
8from datasette.database import QueryInterrupted
9from datasette.utils import (
10 CustomRow,
11 MultiParams,
12 append_querystring,
13 compound_keys_after_sql,
14 escape_sqlite,
15 filters_should_redirect,
16 is_url,
17 path_from_row_pks,
18 path_with_added_args,
19 path_with_removed_args,
20 path_with_replaced_args,
21 sqlite3,
22 to_css_class,
23 urlsafe_components,
24 value_as_boolean,
25)
26from datasette.utils.asgi import NotFound
27from datasette.filters import Filters
28from .base import DataView, DatasetteError, ureg
29from .database import QueryView
# HTML templates for rendering an expanded foreign-key cell as a link to the
# referenced row. LINK_WITH_LABEL is used when the label column's value
# differs from the key itself (label shown, raw id in <em>); LINK_WITH_VALUE
# is used when label == value, so only the id is shown.
LINK_WITH_LABEL = (
    '<a href="{base_url}{database}/{table}/{link_id}">{label}</a> <em>{id}</em>'
)
LINK_WITH_VALUE = '<a href="{base_url}{database}/{table}/{link_id}">{id}</a>'
class Row:
    """Wrapper around a list of cell dictionaries, for template rendering.

    Each cell dict carries at least "column", "raw" (the underlying value)
    and "value" (the display value); the synthetic link cell added for
    compound primary keys also sets "is_special_link_column".
    """

    def __init__(self, cells):
        self.cells = cells

    def __iter__(self):
        return iter(self.cells)

    def __getitem__(self, key):
        """Return the raw value for column ``key``.

        Raises KeyError if no cell matches. The key is included in the
        exception (the previous implementation raised a bare ``KeyError``
        with no message, which made template errors hard to diagnose).
        """
        for cell in self.cells:
            if cell["column"] == key:
                return cell["raw"]
        raise KeyError(key)

    def display(self, key):
        """Return the display value for column ``key``, or None if absent.

        Unlike __getitem__ this never raises.
        """
        for cell in self.cells:
            if cell["column"] == key:
                return cell["value"]
        return None

    def __str__(self):
        # JSON dump of raw values, skipping the synthetic link column that
        # is added for tables with compound primary keys. default=repr
        # stringifies values JSON cannot encode.
        d = {
            key: self[key]
            for key in [
                c["column"] for c in self.cells if not c.get("is_special_link_column")
            ]
        }
        return json.dumps(d, default=repr, indent=2)
class RowTableShared(DataView):
    """Logic shared between TableView and RowView: sortability, foreign-key
    expansion and converting raw SQL results into displayable cells."""

    async def sortable_columns_for_table(self, database, table, use_rowid):
        """Return the set of column names that may appear in ?_sort=.

        Metadata can restrict this via "sortable_columns"; otherwise every
        table column is sortable. "rowid" is added when use_rowid is true.
        """
        db = self.ds.databases[database]
        table_metadata = self.ds.table_metadata(database, table)
        if "sortable_columns" in table_metadata:
            sortable_columns = set(table_metadata["sortable_columns"])
        else:
            sortable_columns = set(await db.table_columns(table))
        if use_rowid:
            sortable_columns.add("rowid")
        return sortable_columns

    async def expandable_columns(self, database, table):
        # Returns list of (fk_dict, label_column-or-None) pairs for that table
        expandables = []
        db = self.ds.databases[database]
        for fk in await db.foreign_keys_for_table(table):
            label_column = await db.label_column_for_table(fk["other_table"])
            expandables.append((fk, label_column))
        return expandables

    async def display_columns_and_rows(
        self, database, table, description, rows, link_column=False, truncate_cells=0
    ):
        "Returns columns, rows for specified table - including fancy foreign key treatment"
        db = self.ds.databases[database]
        table_metadata = self.ds.table_metadata(database, table)
        sortable_columns = await self.sortable_columns_for_table(database, table, True)
        columns = [
            {"name": r[0], "sortable": r[0] in sortable_columns} for r in description
        ]
        pks = await db.primary_keys(table)
        column_to_foreign_key_table = {
            fk["column"]: fk["other_table"]
            for fk in await db.foreign_keys_for_table(table)
        }

        cell_rows = []
        base_url = self.ds.config("base_url")
        for row in rows:
            cells = []
            # Unless we are a view, the first column is a link - either to the rowid
            # or to the simple or compound primary key
            if link_column:
                is_special_link_column = len(pks) != 1
                pk_path = path_from_row_pks(row, pks, not pks, False)
                cells.append(
                    {
                        "column": pks[0] if len(pks) == 1 else "Link",
                        "value_type": "pk",
                        "is_special_link_column": is_special_link_column,
                        "raw": pk_path,
                        "value": jinja2.Markup(
                            '<a href="{base_url}{database}/{table}/{flat_pks_quoted}">{flat_pks}</a>'.format(
                                base_url=base_url,
                                database=database,
                                table=urllib.parse.quote_plus(table),
                                flat_pks=str(jinja2.escape(pk_path)),
                                flat_pks_quoted=path_from_row_pks(row, pks, not pks),
                            )
                        ),
                    }
                )

            for value, column_dict in zip(row, columns):
                column = column_dict["name"]
                if link_column and len(pks) == 1 and column == pks[0]:
                    # If there's a simple primary key, don't repeat the value as it's
                    # already shown in the link column.
                    continue

                # First let the plugins have a go
                # pylint: disable=no-member
                plugin_display_value = pm.hook.render_cell(
                    value=value,
                    column=column,
                    table=table,
                    database=database,
                    datasette=self.ds,
                )
                if plugin_display_value is not None:
                    display_value = plugin_display_value
                elif isinstance(value, bytes):
                    # HTML entities are required here because the string is
                    # wrapped in Markup() and so will NOT be auto-escaped.
                    # (Fix: extraction had decoded these to raw <, > and
                    # spaces, which would inject literal angle brackets.)
                    display_value = jinja2.Markup(
                        "&lt;Binary&nbsp;data:&nbsp;{}&nbsp;byte{}&gt;".format(
                            len(value), "" if len(value) == 1 else "s"
                        )
                    )
                elif isinstance(value, dict):
                    # It's an expanded foreign key - display link to other row
                    label = value["label"]
                    value = value["value"]
                    # The table we link to depends on the column
                    other_table = column_to_foreign_key_table[column]
                    link_template = (
                        LINK_WITH_LABEL if (label != value) else LINK_WITH_VALUE
                    )
                    display_value = jinja2.Markup(
                        link_template.format(
                            database=database,
                            base_url=base_url,
                            table=urllib.parse.quote_plus(other_table),
                            link_id=urllib.parse.quote_plus(str(value)),
                            id=str(jinja2.escape(value)),
                            label=str(jinja2.escape(label)),
                        )
                    )
                elif value in ("", None):
                    # Non-breaking space so the table cell doesn't collapse
                    display_value = jinja2.Markup("&nbsp;")
                elif is_url(str(value).strip()):
                    display_value = jinja2.Markup(
                        '<a href="{url}">{url}</a>'.format(
                            url=jinja2.escape(value.strip())
                        )
                    )
                elif column in table_metadata.get("units", {}) and value != "":
                    # Interpret units using pint
                    value = value * ureg(table_metadata["units"][column])
                    # Pint uses floating point which sometimes introduces errors in the compact
                    # representation, which we have to round off to avoid ugliness. In the vast
                    # majority of cases this rounding will be inconsequential. I hope.
                    value = round(value.to_compact(), 6)
                    # Keep value+unit on one line with a non-breaking space
                    display_value = jinja2.Markup(
                        "{:~P}".format(value).replace(" ", "&nbsp;")
                    )
                else:
                    display_value = str(value)
                    if truncate_cells and len(display_value) > truncate_cells:
                        display_value = display_value[:truncate_cells] + u"\u2026"

                cells.append(
                    {
                        "column": column,
                        "value": display_value,
                        "raw": value,
                        "value_type": "none"
                        if value is None
                        else str(type(value).__name__),
                    }
                )
            cell_rows.append(Row(cells))

        if link_column:
            # Add the link column header.
            # If it's a simple primary key, we have to remove and re-add that column name at
            # the beginning of the header row.
            if len(pks) == 1:
                columns = [col for col in columns if col["name"] != pks[0]]

            columns = [
                {"name": pks[0] if len(pks) == 1 else "Link", "sortable": len(pks) == 1}
            ] + columns
        return columns, cell_rows
class TableView(RowTableShared):
    """View for /database-name/table-name - rows of a table or SQL view,
    with filtering, full-text search, sorting, keyset pagination, foreign-key
    label expansion and faceting. Also dispatches canned queries that share a
    name with the requested table."""

    name = "table"

    async def post(self, request, db_name, table_and_format):
        """POST handler - only valid when the target is a canned query.

        Delegates entirely to QueryView.data(); asserts otherwise.
        """
        # Handle POST to a canned query
        canned_query = self.ds.get_canned_query(db_name, table_and_format)
        assert canned_query, "You may only POST to a canned query"
        return await QueryView(self.ds).data(
            request,
            db_name,
            None,
            canned_query["sql"],
            metadata=canned_query,
            editable=False,
            canned_query=table_and_format,
            named_parameters=canned_query.get("params"),
            write=bool(canned_query.get("write")),
        )

    async def data(
        self,
        request,
        database,
        hash,
        table,
        default_labels=False,
        _next=None,
        _size=None,
    ):
        """Build the JSON-serializable data and template context for a table page.

        Returns a 3-tuple of (data dict, async extra_template callable,
        template name candidates) as expected by the DataView machinery.
        Raises NotFound for a missing table and DatasetteError for invalid
        query-string arguments.
        """
        # A canned query with the same name as the table takes precedence
        canned_query = self.ds.get_canned_query(database, table)
        if canned_query:
            return await QueryView(self.ds).data(
                request,
                database,
                hash,
                canned_query["sql"],
                metadata=canned_query,
                editable=False,
                canned_query=table,
                named_parameters=canned_query.get("params"),
                write=bool(canned_query.get("write")),
            )

        db = self.ds.databases[database]
        is_view = bool(await db.get_view_definition(table))
        table_exists = bool(await db.table_exists(table))
        if not is_view and not table_exists:
            raise NotFound("Table not found: {}".format(table))

        await self.check_permission(request, "view-instance")
        await self.check_permission(request, "view-database", database)
        await self.check_permission(request, "view-table", (database, table))

        # "private" means the anonymous actor would NOT be allowed to see this
        private = not await self.ds.permission_allowed(
            None, "view-table", (database, table), default=True
        )

        pks = await db.primary_keys(table)
        table_columns = await db.table_columns(table)

        select_columns = ", ".join(escape_sqlite(t) for t in table_columns)

        # Tables without a primary key are addressed by rowid (views have no rowid)
        use_rowid = not pks and not is_view
        if use_rowid:
            select = "rowid, {}".format(select_columns)
            order_by = "rowid"
            order_by_pks = "rowid"
        else:
            select = select_columns
            order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
            order_by = order_by_pks

        if is_view:
            order_by = ""

        # Ensure we don't drop anything with an empty value e.g. ?name__exact=
        args = MultiParams(
            urllib.parse.parse_qs(request.query_string, keep_blank_values=True)
        )

        # Special args start with _ and do not contain a __
        # That's so if there is a column that starts with _
        # it can still be queried using ?_col__exact=blah
        special_args = {}
        other_args = []
        for key in args:
            if key.startswith("_") and "__" not in key:
                special_args[key] = args[key]
            else:
                for v in args.getlist(key):
                    other_args.append((key, v))

        # Handle ?_filter_column and redirect, if present
        redirect_params = filters_should_redirect(special_args)
        if redirect_params:
            return self.redirect(
                request,
                path_with_added_args(request, redirect_params),
                forward_querystring=False,
            )

        # Spot ?_sort_by_desc and redirect to _sort_desc=(_sort)
        if "_sort_by_desc" in special_args:
            return self.redirect(
                request,
                path_with_added_args(
                    request,
                    {
                        "_sort_desc": special_args.get("_sort"),
                        "_sort_by_desc": None,
                        "_sort": None,
                    },
                ),
                forward_querystring=False,
            )

        table_metadata = self.ds.table_metadata(database, table)
        units = table_metadata.get("units", {})
        # Column filters (?col__op=value) become SQL where clauses + params
        filters = Filters(sorted(other_args), units, ureg)
        where_clauses, params = filters.build_where_clauses(table)

        extra_wheres_for_ui = []
        # Add _where= from querystring
        if "_where" in request.args:
            # Raw SQL fragments require the execute-sql permission
            if not await self.ds.permission_allowed(
                request.actor, "execute-sql", resource=database, default=True,
            ):
                raise DatasetteError("_where= is not allowed", status=403)
            else:
                where_clauses.extend(request.args.getlist("_where"))
                extra_wheres_for_ui = [
                    {
                        "text": text,
                        "remove_url": path_with_removed_args(request, {"_where": text}),
                    }
                    for text in request.args.getlist("_where")
                ]

        # Support for ?_through={table, column, value}
        extra_human_descriptions = []
        if "_through" in request.args:
            for through in request.args.getlist("_through"):
                # Each _through is a JSON object naming an intermediary table
                through_data = json.loads(through)
                through_table = through_data["table"]
                other_column = through_data["column"]
                value = through_data["value"]
                outgoing_foreign_keys = await db.foreign_keys_for_table(through_table)
                try:
                    fk_to_us = [
                        fk for fk in outgoing_foreign_keys if fk["other_table"] == table
                    ][0]
                except IndexError:
                    raise DatasetteError(
                        "Invalid _through - could not find corresponding foreign key"
                    )
                param = "p{}".format(len(params))
                where_clauses.append(
                    "{our_pk} in (select {our_column} from {through_table} where {other_column} = :{param})".format(
                        through_table=escape_sqlite(through_table),
                        our_pk=escape_sqlite(fk_to_us["other_column"]),
                        our_column=escape_sqlite(fk_to_us["column"]),
                        other_column=escape_sqlite(other_column),
                        param=param,
                    )
                )
                params[param] = value
                extra_human_descriptions.append(
                    '{}.{} = "{}"'.format(through_table, other_column, value)
                )

        # _search support:
        # FTS table can come from ?_fts_table=, metadata, or auto-detection
        fts_table = special_args.get("_fts_table")
        fts_table = fts_table or table_metadata.get("fts_table")
        fts_table = fts_table or await db.fts_table(table)
        fts_pk = special_args.get("_fts_pk", table_metadata.get("fts_pk", "rowid"))
        search_args = dict(
            pair for pair in special_args.items() if pair[0].startswith("_search")
        )
        search = ""
        # ?_searchmode=raw passes the query to FTS unescaped
        search_mode_raw = special_args.get("_searchmode") == "raw"
        if fts_table and search_args:
            if "_search" in search_args:
                # Simple ?_search=xxx
                search = search_args["_search"]
                where_clauses.append(
                    "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format(
                        fts_table=escape_sqlite(fts_table),
                        fts_pk=escape_sqlite(fts_pk),
                        match_clause=":search"
                        if search_mode_raw
                        else "escape_fts(:search)",
                    )
                )
                extra_human_descriptions.append('search matches "{}"'.format(search))
                params["search"] = search
            else:
                # More complex: search against specific columns
                for i, (key, search_text) in enumerate(search_args.items()):
                    search_col = key.split("_search_", 1)[1]
                    if search_col not in await db.table_columns(fts_table):
                        raise DatasetteError("Cannot search by that column", status=400)

                    where_clauses.append(
                        "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format(
                            fts_table=escape_sqlite(fts_table),
                            search_col=escape_sqlite(search_col),
                            match_clause=":search_{}".format(i)
                            if search_mode_raw
                            else "escape_fts(:search_{})".format(i),
                        )
                    )
                    extra_human_descriptions.append(
                        'search column "{}" matches "{}"'.format(
                            search_col, search_text
                        )
                    )
                    params["search_{}".format(i)] = search_text

        # NOTE(review): this empty set is immediately overwritten below -
        # the initializer appears redundant
        sortable_columns = set()

        sortable_columns = await self.sortable_columns_for_table(
            database, table, use_rowid
        )

        # Allow for custom sort order
        sort = special_args.get("_sort")
        sort_desc = special_args.get("_sort_desc")

        if not sort and not sort_desc:
            # Fall back to a default sort declared in metadata
            sort = table_metadata.get("sort")
            sort_desc = table_metadata.get("sort_desc")

        if sort and sort_desc:
            raise DatasetteError("Cannot use _sort and _sort_desc at the same time")

        if sort:
            if sort not in sortable_columns:
                raise DatasetteError("Cannot sort table by {}".format(sort))

            order_by = escape_sqlite(sort)

        if sort_desc:
            if sort_desc not in sortable_columns:
                raise DatasetteError("Cannot sort table by {}".format(sort_desc))

            order_by = "{} desc".format(escape_sqlite(sort_desc))

        from_sql = "from {table_name} {where}".format(
            table_name=escape_sqlite(table),
            where=("where {} ".format(" and ".join(where_clauses)))
            if where_clauses
            else "",
        )
        # Copy of params so we can mutate them later:
        from_sql_params = dict(**params)

        count_sql = "select count(*) {}".format(from_sql)

        # Keyset pagination: _next encodes the last-seen sort value and/or
        # primary key(s); for views it is a plain integer offset
        _next = _next or special_args.get("_next")
        offset = ""
        if _next:
            if is_view:
                # _next is an offset
                offset = " offset {}".format(int(_next))
            else:
                components = urlsafe_components(_next)
                # If a sort order is applied, the first of these is the sort value
                if sort or sort_desc:
                    sort_value = components[0]
                    # Special case for if non-urlencoded first token was $null
                    if _next.split(",")[0] == "$null":
                        sort_value = None
                    components = components[1:]

                # Figure out the SQL for next-based-on-primary-key first
                next_by_pk_clauses = []
                if use_rowid:
                    next_by_pk_clauses.append("rowid > :p{}".format(len(params)))
                    params["p{}".format(len(params))] = components[0]
                else:
                    # Apply the tie-breaker based on primary keys
                    if len(components) == len(pks):
                        param_len = len(params)
                        next_by_pk_clauses.append(
                            compound_keys_after_sql(pks, param_len)
                        )
                        for i, pk_value in enumerate(components):
                            params["p{}".format(param_len + i)] = pk_value

                # Now add the sort SQL, which may incorporate next_by_pk_clauses
                if sort or sort_desc:
                    if sort_value is None:
                        if sort_desc:
                            # Just items where column is null ordered by pk
                            where_clauses.append(
                                "({column} is null and {next_clauses})".format(
                                    column=escape_sqlite(sort_desc),
                                    next_clauses=" and ".join(next_by_pk_clauses),
                                )
                            )
                        else:
                            where_clauses.append(
                                "({column} is not null or ({column} is null and {next_clauses}))".format(
                                    column=escape_sqlite(sort),
                                    next_clauses=" and ".join(next_by_pk_clauses),
                                )
                            )
                    else:
                        # Rows strictly past the sort value, or equal to it
                        # and past the primary-key tie-breaker; descending
                        # sorts also pick up trailing nulls
                        where_clauses.append(
                            "({column} {op} :p{p}{extra_desc_only} or ({column} = :p{p} and {next_clauses}))".format(
                                column=escape_sqlite(sort or sort_desc),
                                op=">" if sort else "<",
                                p=len(params),
                                extra_desc_only=""
                                if sort
                                else " or {column2} is null".format(
                                    column2=escape_sqlite(sort or sort_desc)
                                ),
                                next_clauses=" and ".join(next_by_pk_clauses),
                            )
                        )
                        params["p{}".format(len(params))] = sort_value
                    order_by = "{}, {}".format(order_by, order_by_pks)
                else:
                    where_clauses.extend(next_by_pk_clauses)

        where_clause = ""
        if where_clauses:
            where_clause = "where {} ".format(" and ".join(where_clauses))

        if order_by:
            order_by = "order by {} ".format(order_by)

        extra_args = {}
        # Handle ?_size=500
        page_size = _size or request.args.get("_size") or table_metadata.get("size")
        if page_size:
            if page_size == "max":
                page_size = self.ds.max_returned_rows
            try:
                page_size = int(page_size)
                if page_size < 0:
                    raise ValueError

            except ValueError:
                raise DatasetteError("_size must be a positive integer", status=400)

            if page_size > self.ds.max_returned_rows:
                raise DatasetteError(
                    "_size must be <= {}".format(self.ds.max_returned_rows), status=400
                )

            extra_args["page_size"] = page_size
        else:
            page_size = self.ds.page_size

        sql_no_limit = "select {select} from {table_name} {where}{order_by}".format(
            select=select,
            table_name=escape_sqlite(table),
            where=where_clause,
            order_by=order_by,
        )
        # Fetch one extra row so we can tell whether a next page exists
        sql = "{sql_no_limit} limit {limit}{offset}".format(
            sql_no_limit=sql_no_limit.rstrip(), limit=page_size + 1, offset=offset
        )

        if request.args.get("_timelimit"):
            extra_args["custom_time_limit"] = int(request.args.get("_timelimit"))

        results = await db.execute(sql, params, truncate=True, **extra_args)

        # Number of filtered rows in whole set:
        filtered_table_rows_count = None
        # For immutable databases with precomputed inspect data we can reuse
        # the cached count, but only for an unfiltered query
        if (
            not db.is_mutable
            and self.ds.inspect_data
            and count_sql == "select count(*) from {} ".format(table)
        ):
            try:
                filtered_table_rows_count = self.ds.inspect_data[database]["tables"][
                    table
                ]["count"]
            except KeyError:
                pass

        if count_sql and filtered_table_rows_count is None:
            try:
                count_rows = list(await db.execute(count_sql, from_sql_params))
                filtered_table_rows_count = count_rows[0][0]
            except QueryInterrupted:
                # Count timed out - leave it as None (best-effort)
                pass

        # facets support
        if not self.ds.config("allow_facet") and any(
            arg.startswith("_facet") for arg in request.args
        ):
            raise DatasetteError("_facet= is not allowed", status=400)

        # pylint: disable=no-member
        facet_classes = list(
            itertools.chain.from_iterable(pm.hook.register_facet_classes())
        )
        facet_results = {}
        facets_timed_out = []
        facet_instances = []
        for klass in facet_classes:
            facet_instances.append(
                klass(
                    self.ds,
                    request,
                    database,
                    sql=sql_no_limit,
                    params=params,
                    table=table,
                    metadata=table_metadata,
                    row_count=filtered_table_rows_count,
                )
            )

        for facet in facet_instances:
            (
                instance_facet_results,
                instance_facets_timed_out,
            ) = await facet.facet_results()
            facet_results.update(instance_facet_results)
            facets_timed_out.extend(instance_facets_timed_out)

        # Figure out columns and rows for the query
        columns = [r[0] for r in results.description]
        rows = list(results.rows)

        # Expand labeled columns if requested
        expanded_columns = []
        expandable_columns = await self.expandable_columns(database, table)
        columns_to_expand = None
        try:
            all_labels = value_as_boolean(special_args.get("_labels", ""))
        except ValueError:
            all_labels = default_labels
        # Check for explicit _label=
        if "_label" in request.args:
            columns_to_expand = request.args.getlist("_label")
        if columns_to_expand is None and all_labels:
            # expand all columns with foreign keys
            columns_to_expand = [fk["column"] for fk, _ in expandable_columns]

        if columns_to_expand:
            expanded_labels = {}
            for fk, _ in expandable_columns:
                column = fk["column"]
                if column not in columns_to_expand:
                    continue
                expanded_columns.append(column)
                # Gather the values
                column_index = columns.index(column)
                values = [row[column_index] for row in rows]
                # Expand them
                expanded_labels.update(
                    await self.ds.expand_foreign_keys(database, table, column, values)
                )
            if expanded_labels:
                # Rewrite the rows - replace expandable values with
                # {"value": ..., "label": ...} dicts
                new_rows = []
                for row in rows:
                    new_row = CustomRow(columns)
                    for column in row.keys():
                        value = row[column]
                        if (column, value) in expanded_labels and value is not None:
                            new_row[column] = {
                                "value": value,
                                "label": expanded_labels[(column, value)],
                            }
                        else:
                            new_row[column] = value
                    new_rows.append(new_row)
                rows = new_rows

        # Pagination next link
        next_value = None
        next_url = None
        # We fetched page_size + 1 rows, so an extra row means there is a
        # next page; rows[-2] is then the last row actually displayed
        if len(rows) > page_size and page_size > 0:
            if is_view:
                next_value = int(_next or 0) + page_size
            else:
                next_value = path_from_row_pks(rows[-2], pks, use_rowid)
            # If there's a sort or sort_desc, add that value as a prefix
            if (sort or sort_desc) and not is_view:
                prefix = rows[-2][sort or sort_desc]
                if isinstance(prefix, dict) and "value" in prefix:
                    # Expanded foreign key - use the underlying value
                    prefix = prefix["value"]
                if prefix is None:
                    prefix = "$null"
                else:
                    prefix = urllib.parse.quote_plus(str(prefix))
                next_value = "{},{}".format(prefix, next_value)
                added_args = {"_next": next_value}
                if sort:
                    added_args["_sort"] = sort
                else:
                    added_args["_sort_desc"] = sort_desc
            else:
                added_args = {"_next": next_value}
            next_url = self.ds.absolute_url(
                request, path_with_replaced_args(request, added_args)
            )
            # Drop the extra look-ahead row
            rows = rows[:page_size]

        # Detect suggested facets
        suggested_facets = []

        # Only suggest on the first page, and only when faceting is enabled
        if (
            self.ds.config("suggest_facets")
            and self.ds.config("allow_facet")
            and not _next
        ):
            for facet in facet_instances:
                suggested_facets.extend(await facet.suggest())

        # human_description_en combines filters AND search, if provided
        human_description_en = filters.human_description_en(
            extra=extra_human_descriptions
        )

        if sort or sort_desc:
            sorted_by = "sorted by {}{}".format(
                (sort or sort_desc), " descending" if sort_desc else ""
            )
            human_description_en = " ".join(
                [b for b in [human_description_en, sorted_by] if b]
            )

        async def extra_template():
            # Extra context computed lazily, only for HTML rendering
            # (nonlocal because the default-sort logic below rebinds sort)
            nonlocal sort

            display_columns, display_rows = await self.display_columns_and_rows(
                database,
                table,
                results.description,
                rows,
                link_column=not is_view,
                truncate_cells=self.ds.config("truncate_cells_html"),
            )
            metadata = (
                (self.ds.metadata("databases") or {})
                .get(database, {})
                .get("tables", {})
                .get(table, {})
            )
            self.ds.update_with_inherited_metadata(metadata)
            form_hidden_args = []
            for arg in ("_fts_table", "_fts_pk"):
                if arg in special_args:
                    form_hidden_args.append((arg, special_args[arg]))
            if request.args.get("_where"):
                for where_text in request.args.getlist("_where"):
                    form_hidden_args.append(("_where", where_text))

            # if no sort specified AND table has a single primary key,
            # set sort to that so arrow is displayed
            if not sort and not sort_desc:
                if 1 == len(pks):
                    sort = pks[0]
                elif use_rowid:
                    sort = "rowid"

            return {
                "supports_search": bool(fts_table),
                "search": search or "",
                "use_rowid": use_rowid,
                "filters": filters,
                "display_columns": display_columns,
                "filter_columns": columns,
                "display_rows": display_rows,
                "facets_timed_out": facets_timed_out,
                "sorted_facet_results": sorted(
                    facet_results.values(),
                    key=lambda f: (len(f["results"]), f["name"]),
                    reverse=True,
                ),
                "extra_wheres_for_ui": extra_wheres_for_ui,
                "form_hidden_args": form_hidden_args,
                "is_sortable": any(c["sortable"] for c in display_columns),
                "path_with_replaced_args": path_with_replaced_args,
                "path_with_removed_args": path_with_removed_args,
                "append_querystring": append_querystring,
                "request": request,
                "sort": sort,
                "sort_desc": sort_desc,
                "disable_sort": is_view,
                "custom_table_templates": [
                    "_table-{}-{}.html".format(
                        to_css_class(database), to_css_class(table)
                    ),
                    "_table-table-{}-{}.html".format(
                        to_css_class(database), to_css_class(table)
                    ),
                    "_table.html",
                ],
                "metadata": metadata,
                "view_definition": await db.get_view_definition(table),
                "table_definition": await db.get_table_definition(table),
            }

        return (
            {
                "database": database,
                "table": table,
                "is_view": is_view,
                "human_description_en": human_description_en,
                "rows": rows[:page_size],
                "truncated": results.truncated,
                "filtered_table_rows_count": filtered_table_rows_count,
                "expanded_columns": expanded_columns,
                "expandable_columns": expandable_columns,
                "columns": columns,
                "primary_keys": pks,
                "units": units,
                "query": {"sql": sql, "params": params},
                "facet_results": facet_results,
                "suggested_facets": suggested_facets,
                "next": next_value and str(next_value) or None,
                "next_url": next_url,
                "private": private,
                "allow_execute_sql": await self.ds.permission_allowed(
                    request.actor, "execute-sql", database, default=True
                ),
            },
            extra_template,
            (
                "table-{}-{}.html".format(to_css_class(database), to_css_class(table)),
                "table.html",
            ),
        )
class RowView(RowTableShared):
    """View for /database-name/table-name/pk-path - a single row addressed
    by its primary key value(s), plus counts of rows in other tables that
    reference it via foreign keys."""

    name = "row"

    async def data(self, request, database, hash, table, pk_path, default_labels=False):
        """Build the data/template-context tuple for a single-row page.

        ``pk_path`` is the URL-encoded primary key path; it is decoded into
        one component per primary key column. Raises NotFound when no row
        matches.
        """
        pk_values = urlsafe_components(pk_path)
        await self.check_permission(request, "view-instance")
        await self.check_permission(request, "view-database", database)
        await self.check_permission(request, "view-table", (database, table))
        db = self.ds.databases[database]
        pks = await db.primary_keys(table)
        use_rowid = not pks
        select = "*"
        if use_rowid:
            # No declared primary key - address the row by rowid instead
            select = "rowid, *"
            pks = ["rowid"]
        # One equality clause per primary key column, bound as :p0, :p1, ...
        wheres = ['"{}"=:p{}'.format(pk, i) for i, pk in enumerate(pks)]
        sql = "select {} from {} where {}".format(
            select, escape_sqlite(table), " AND ".join(wheres)
        )
        params = {}
        for i, pk_value in enumerate(pk_values):
            params["p{}".format(i)] = pk_value
        results = await db.execute(sql, params, truncate=True)
        columns = [r[0] for r in results.description]
        rows = list(results.rows)
        if not rows:
            raise NotFound("Record not found: {}".format(pk_values))

        async def template_data():
            # Extra context computed lazily, only for HTML rendering
            display_columns, display_rows = await self.display_columns_and_rows(
                database,
                table,
                results.description,
                rows,
                link_column=False,
                truncate_cells=0,
            )
            # Sorting makes no sense on a single-row page
            for column in display_columns:
                column["sortable"] = False
            return {
                "foreign_key_tables": await self.foreign_key_tables(
                    database, table, pk_values
                ),
                "display_columns": display_columns,
                "display_rows": display_rows,
                "custom_table_templates": [
                    "_table-{}-{}.html".format(
                        to_css_class(database), to_css_class(table)
                    ),
                    "_table-row-{}-{}.html".format(
                        to_css_class(database), to_css_class(table)
                    ),
                    "_table.html",
                ],
                "metadata": (self.ds.metadata("databases") or {})
                .get(database, {})
                .get("tables", {})
                .get(table, {}),
            }

        data = {
            "database": database,
            "table": table,
            "rows": rows,
            "columns": columns,
            "primary_keys": pks,
            "primary_key_values": pk_values,
            "units": self.ds.table_metadata(database, table).get("units", {}),
        }

        # JSON callers can opt in to foreign key counts via ?_extras=
        if "foreign_key_tables" in (request.args.get("_extras") or "").split(","):
            data["foreign_key_tables"] = await self.foreign_key_tables(
                database, table, pk_values
            )

        return (
            data,
            template_data,
            (
                "row-{}-{}.html".format(to_css_class(database), to_css_class(table)),
                "row.html",
            ),
        )

    async def foreign_key_tables(self, database, table, pk_values):
        """Return counts of rows in other tables referencing this row.

        Only supported for single-column primary keys; returns a list of
        foreign-key dicts each augmented with a "count" key. Best-effort:
        returns [] on timeout.
        """
        if len(pk_values) != 1:
            return []
        db = self.ds.databases[database]
        all_foreign_keys = await db.get_all_foreign_keys()
        foreign_keys = all_foreign_keys[table]["incoming"]
        if len(foreign_keys) == 0:
            return []

        # One scalar subselect per incoming foreign key, all sharing :id
        sql = "select " + ", ".join(
            [
                "(select count(*) from {table} where {column}=:id)".format(
                    table=escape_sqlite(fk["other_table"]),
                    column=escape_sqlite(fk["other_column"]),
                )
                for fk in foreign_keys
            ]
        )
        try:
            rows = list(await db.execute(sql, {"id": pk_values[0]}))
        except sqlite3.OperationalError:
            # Almost certainly hit the timeout
            return []

        # Counts come back in the same order as foreign_keys
        foreign_table_counts = dict(
            zip(
                [(fk["other_table"], fk["other_column"]) for fk in foreign_keys],
                list(rows[0]),
            )
        )
        foreign_key_tables = []
        for fk in foreign_keys:
            count = (
                foreign_table_counts.get((fk["other_table"], fk["other_column"])) or 0
            )
            foreign_key_tables.append({**fk, **{"count": count}})
        return foreign_key_tables