import asyncio
import sqlite3
from pathlib import Path

import datasette
from datasette.app import connections
from datasette.inspect import inspect_hash
from datasette.utils import Results

from . import connectors
def patch_datasette():
    """
    Monkey patching for original Datasette

    Replaces ``Datasette.inspect`` and ``Datasette.execute`` with wrappers
    that fall back to the pluggable ``connectors`` machinery whenever a file
    is not a valid SQLite database.  The stock implementations stay reachable
    as ``Datasette.original_inspect`` / ``Datasette.original_execute``.

    NOTE(review): this module arrived as a corrupted extract (missing lines,
    stripped indentation); the gap-filled statements below follow the control
    flow implied by the surviving lines — confirm against upstream.
    """

    def inspect(self):
        " Inspect the database and return a dictionary of table metadata "
        # Serve the cached result if inspection already ran.
        if self._inspect:
            return self._inspect

        _inspect = {}
        files = self.files

        for filename in files:
            # Narrow self.files to a single entry so original_inspect()
            # only examines this file; the full tuple is restored below.
            self.files = (filename,)
            path = Path(filename)
            name = path.stem
            if name in _inspect:
                raise Exception("Multiple files with the same stem %s" % name)
            try:
                # First try Datasette's own (SQLite-based) inspection.
                _inspect[name] = self.original_inspect()[name]
            except sqlite3.DatabaseError:
                # Not a SQLite file: delegate to the external connectors.
                tables, views, dbtype = connectors.inspect(path)
                # NOTE(review): only the "hash" key survived the extract;
                # the remaining keys mirror what execute() reads
                # (info['file'], info['dbtype']) — verify against upstream.
                _inspect[name] = {
                    "hash": inspect_hash(path),
                    "file": str(path),
                    "dbtype": dbtype,
                    "tables": tables,
                    "views": views,
                }

        # Restore the original files tuple and cache the result.
        self.files = files
        self._inspect = _inspect
        return self._inspect

    datasette.app.Datasette.original_inspect = datasette.app.Datasette.inspect
    datasette.app.Datasette.inspect = inspect

    async def execute(self, db_name, sql, params=None, truncate=False, custom_time_limit=None, page_size=None):
        """Executes sql against db_name in a thread"""
        page_size = page_size or self.page_size

        def is_sqlite3_conn():
            # Decide whether this database is served by plain SQLite.
            conn = getattr(connections, db_name, None)
            if not conn:
                # No cached connection yet: consult inspect() metadata,
                # defaulting to sqlite3 when no dbtype was recorded.
                info = self.inspect()[db_name]
                return info.get('dbtype', 'sqlite3') == 'sqlite3'
            return isinstance(conn, sqlite3.Connection)

        def sql_operation_in_thread():
            conn = getattr(connections, db_name, None)
            if not conn:
                # Lazily open a connector-backed connection and cache it
                # on the shared `connections` registry for reuse.
                info = self.inspect()[db_name]
                conn = connectors.connect(info['file'], info['dbtype'])
                setattr(connections, db_name, conn)

            # NOTE(review): positional/keyword arguments between `sql` and
            # `max_returned_rows` were lost in the extract — confirm the
            # connector's execute() signature.
            rows, truncated, description = conn.execute(
                sql,
                params or {},
                truncate=truncate,
                page_size=page_size,
                max_returned_rows=self.max_returned_rows,
            )
            return Results(rows, truncated, description)

        if is_sqlite3_conn():
            # SQLite databases keep the stock Datasette code path.
            return await self.original_execute(db_name, sql, params=params, truncate=truncate, custom_time_limit=custom_time_limit, page_size=page_size)
        # Connector-backed databases run their (blocking) query in the
        # executor thread pool so the event loop stays responsive.
        return await asyncio.get_event_loop().run_in_executor(
            self.executor, sql_operation_in_thread
        )

    datasette.app.Datasette.original_execute = datasette.app.Datasette.execute
    datasette.app.Datasette.execute = execute