3 from datasette.app import connections
4 from datasette.cli import cli
5 from datasette.inspect import inspect_hash
6 from datasette.utils import Results
7 from pathlib import Path
11 # Monkey patching for the original Datasette
# NOTE(review): this excerpt is fragmentary — the fused leading numbers are
# original-file line numbers, and the gaps (12, 14-19, 22-24, 26, 30, 32-38, 40-41)
# show that the enclosing `def inspect(self):` header and parts of its body are
# not visible here. Comments below describe only what the visible lines show.
13 " Inspect the database and return a dictionary of table metadata "
20 for filename in files:
21 self.files = (filename,)
# Two database files resolving to the same stem is treated as a hard error,
# not a silent overwrite.
25 raise Exception("Multiple files with the same stem %s" % name)
# First attempt: delegate to Datasette's own (saved) inspect for this name.
27 _inspect[name] = self.original_inspect()[name]
# Fallback: if sqlite3 cannot read the file, ask the external connectors
# to inspect it instead (returns tables, views and a dbtype tag).
28 except sqlite3.DatabaseError:
29 tables, views, dbtype = connectors.inspect(path)
31 "hash": inspect_hash(path),
# Cache the combined inspection results on the instance.
39 self._inspect = _inspect
# Keep a reference to the original method, then install the patched version
# in its place on the Datasette class.
42 datasette.app.Datasette.original_inspect = datasette.app.Datasette.inspect
43 datasette.app.Datasette.inspect = inspect
# NOTE(review): fragmentary excerpt — the fused leading numbers are original-file
# line numbers; lines 49, 52, 55, 57, 60, 64, 66-69, 71, 73-74, 76 and 79-80
# (guards such as the likely `if conn is None:` checks, the full argument list of
# conn.execute, and the branch structure) are missing from this view.
46 async def execute(self, db_name, sql, params=None, truncate=False, custom_time_limit=None, page_size=None):
47 """Executes sql against db_name in a thread"""
48 page_size = page_size or self.page_size
# Decide whether db_name is a native sqlite3 database or connector-backed.
50 def is_sqlite3_conn():
51 conn = getattr(connections, db_name, None)
# When no connection is cached yet, fall back to the inspect metadata;
# 'sqlite3' is the default dbtype when none was recorded.
53 info = self.inspect()[db_name]
54 return info.get('dbtype', 'sqlite3') == 'sqlite3'
56 return isinstance(conn, sqlite3.Connection)
# Runs on an executor thread: lazily open a connector connection for this
# database, cache it, and execute the SQL through it.
58 def sql_operation_in_thread():
59 conn = getattr(connections, db_name, None)
61 info = self.inspect()[db_name]
62 conn = connectors.connect(info['file'], info['dbtype'])
# Cache the opened connection under the database name
# (presumably `connections` is thread-local storage — TODO confirm).
63 setattr(connections, db_name, conn)
65 rows, truncated, description = conn.execute(
70 max_returned_rows=self.max_returned_rows,
72 return Results(rows, truncated, description)
# Native sqlite3 databases are delegated to the unpatched implementation.
75 return await self.original_execute(db_name, sql, params=params, truncate=truncate, custom_time_limit=custom_time_limit, page_size=page_size)
# Connector-backed databases run their query on the executor thread pool.
77 return await asyncio.get_event_loop().run_in_executor(
78 self.executor, sql_operation_in_thread
# Preserve the original Datasette.execute, then replace it with the patched
# coroutine defined above so connector-backed databases are supported.
81 datasette.app.Datasette.original_execute = datasette.app.Datasette.execute
82 datasette.app.Datasette.execute = execute
85 # Read external database connectors
# Import the package-local connectors module and trigger connector discovery
# (presumably registering installed connector plugins — TODO confirm against
# the connectors module, which is not visible in this excerpt).
86 from . import connectors
87 connectors.load_connectors()