2 import datasette_connectors as dc
3 from .utils import parse_sql
class PyTablesConnection(dc.Connection):
    """datasette-connectors Connection that holds an open PyTables file.

    The base class records ``path`` and ``connector``; on top of that this
    class opens the HDF5 file at ``path`` and keeps the handle on
    ``self.h5file`` for the connector's query methods to use.
    """

    def __init__(self, path, connector):
        super().__init__(path, connector)
        # Handle stays open for the connection's lifetime; PyTables
        # serves all node lookups and row iteration through it.
        self.h5file = tables.open_file(path)
# Datasette connector implementation backed by PyTables/HDF5 files.
# NOTE(review): original lines 15-28 are missing from this extract —
# presumably the SQL->PyTables ``operators`` mapping that execute()
# reads via ``self.operators``, plus any other class attributes; confirm
# against the full file.
12 class PyTablesConnector(dc.Connector):
13 connector_type = 'pytables'
14 connection_class = PyTablesConnection
29 def _serialize_table_name(self, table_name):
30 return table_name.replace('/', '%')
32 def _deserialize_table_name(self, table_name):
33 return table_name.replace('%', '/')
# List every non-Group node in the HDF5 file as a serialized table name
# (Groups are containers, not tabular data).
# NOTE(review): the enclosing ``return [`` / ``]`` lines (original 36 and
# 40) are missing from this extract; only the comprehension interior is
# visible.
35 def table_names(self):
37 self._serialize_table_name(node._v_pathname)
38 for node in self.conn.h5file
39 if not(isinstance(node, tables.group.Group))
42 def table_count(self, table_name):
43 table = self.conn.h5file.get_node(self._deserialize_table_name(table_name))
44 return int(table.nrows)
# Describe the node's columns in sqlite ``PRAGMA table_info`` style:
# a plain array node exposes one pseudo-column with the array dtype,
# while a Table node yields one entry per named column with its coltype.
# NOTE(review): the list/dict scaffolding (original lines 48-50, 52-53,
# 55-57, 59, 61-65, 68, 70-71, 73-74) is missing from this extract, so
# the exact shape of ``columns`` and the returned structure cannot be
# confirmed here.
46 def table_info(self, table_name):
47 table = self.conn.h5file.get_node(self._deserialize_table_name(table_name))
51 'type': table.dtype.name,
54 if isinstance(table, tables.table.Table):
58 'type': table.coltypes[colname],
60 for colname in table.colnames
66 'name': column['name'],
67 'type': column['type'],
69 'default_value': None,
72 for cid, column in enumerate(columns)
# Connector-interface stubs: HDF5 has no hidden tables, SpatiaLite, FTS
# or foreign keys, so these presumably return empty/False defaults.
# NOTE(review): the method bodies (original lines 76, 79-82, 85, 88) are
# missing from this extract — only the signatures survive; confirm the
# return values against the full file.
75 def hidden_table_names(self):
78 def detect_spatialite(self):
84 def detect_fts(self, table_name):
87 def foreign_keys(self, table_name):
90 def table_exists(self, table_name):
92 self.conn.h5file.get_node(self._deserialize_table_name(table_name))
# Build a synthetic ``CREATE TABLE`` statement for the node so Datasette
# can display a schema; Table nodes list their real column names.
# NOTE(review): original lines 100, 103 and 105-108 (the non-Table
# branch and the format arguments) are missing from this extract.
97 def table_definition(self, table_type, table_name):
98 table_name = self._deserialize_table_name(table_name)
99 table = self.conn.h5file.get_node(table_name)
101 if isinstance(table, tables.table.Table):
102 colnames = table.colnames
104 return 'CREATE TABLE {} ({})'.format(
# NOTE(review): only the signature survives in this extract (body at
# original line 110+ is missing) — HDF5 exposes no SQL indices, so an
# empty result is plausible; confirm against the full file.
109 def indices_definition(self, table_name):
# NOTE(review): this is the connector's ``execute`` implementation, but
# it is heavily fragmented in this extract — the ``def`` line itself and
# most of the signature (original lines ~111-116), plus many interior
# statements, are missing (the embedded numbering jumps 117 -> 125 ->
# 127 ...).  Comments below describe only what the visible code shows.
117 custom_time_limit=None,
# Early bail-out: PyTables has no glob operator, so such queries return
# the (missing) empty defaults prepared above.
125 # Some Datasette queries use the glob operand, which PyTables does not support
127 return results, truncated, description
# Parse the SQL text into a dict structure (project helper from .utils).
129 parsed_sql = parse_sql(sql, params)
131 while isinstance(parsed_sql['from'], dict):
132 # PyTables does not support subqueries: flatten to the innermost FROM
133 parsed_sql['from'] = parsed_sql['from']['value']['from']
# Resolve the serialized table name back to an HDF5 node.
135 table = self.conn.h5file.get_node(self._deserialize_table_name(parsed_sql['from']))
137 fields = parsed_sql['select']
139 if type(table) is tables.table.Table:
140 colnames = table.colnames
# Helper: dtype name for a field — the whole-array dtype, unless the
# node is a Table with per-column types.
146 def _get_field_type(field):
147 coltype = table.dtype.name
148 if type(table) is tables.table.Table:
149 coltype = table.coltypes[field]
152 # Use 'where' statement or get all the rows
# Helper: coerce a bound parameter to the referenced column's type so
# the in-kernel query can compare it.  NOTE(review): the branches that
# choose ``fcast`` (original lines 156, 158, 160, 162-163) are missing.
153 def _cast_param(field, pname):
154 # Cast value to the column type
155 coltype = _get_field_type(field)
157 if coltype == 'string':
159 elif coltype.startswith('int'):
161 elif coltype.startswith('float'):
164 params[pname] = fcast(params[pname])
# Helper: recursively translate the parsed WHERE tree into a PyTables
# condition string via ``self.operators`` (map defined outside this
# extract — confirm).
166 def _translate_where(where):
167 # Translate SQL to PyTables expression
170 operator = list(where)[0]
172 if operator in ['and', 'or']:
173 subexpr = [_translate_where(e) for e in where[operator]]
# Drop empty sub-expressions (e.g. rowid shortcuts handled below).
174 subexpr = filter(lambda e: e, subexpr)
175 subexpr = ["({})".format(e) for e in subexpr]
176 expr = " {} ".format(self.operators[operator]).join(subexpr)
177 elif operator == 'exists':
# rowid comparisons are not real columns: they become a start offset for
# the row iteration instead of a condition string.
179 elif where == {'eq': ['rowid', 'p0']}:
180 start = int(params['p0'])
182 elif where == {'gt': ['rowid', 'p0']}:
183 start = int(params['p0']) + 1
185 left, right = where[operator]
187 if isinstance(left, dict):
188 left = "(" + _translate_where(left) + ")"
190 _cast_param(right, left)
192 if isinstance(right, dict):
193 right = "(" + _translate_where(right) + ")"
194 elif right in params:
195 _cast_param(left, right)
197 expr = "{left} {operator} {right}".format(
# Map the SQL operator token to its PyTables spelling, falling back to
# the SQL token when no mapping exists.
199 operator=self.operators.get(operator, operator),
205 if 'where' in parsed_sql:
206 if type(parsed_sql['where']) is dict:
207 query = _translate_where(parsed_sql['where'])
209 query = parsed_sql['where']
# ORDER BY handling; 'rowid' appears to mean natural iteration order.
# NOTE(review): surrounding lines (210-212, 216, 219-220) are missing.
213 if 'orderby' in parsed_sql:
214 orderby = parsed_sql['orderby']
215 if type(orderby) is list:
217 orderby = orderby['value']
218 if orderby == 'rowid':
221 # Limit number of rows
223 if 'limit' in parsed_sql:
224 limit = int(parsed_sql['limit'])
228 if 'offset' in parsed_sql:
229 offset = int(parsed_sql['offset'])
# Datasette truncation convention: fetch one row beyond the page size so
# the caller can detect that the result set was cut short.
232 if page_size and max_returned_rows and truncate:
233 if max_returned_rows == page_size:
234 max_returned_rows += 1
# Choose an iterator: in-kernel condition query, index-sorted scan, or a
# plain range scan (the guarding conditions at 237/239/241 are missing).
238 table_rows = table.where(query, params, start, end)
240 table_rows = table.itersorted(orderby, start=start, stop=end)
242 table_rows = table.iterrows(start, end)
# SQLite-compatible value coercion: bytes decode as UTF-8; any other
# non-numeric value is converted (line 249 missing — presumably str()).
245 def normalize_field_value(value):
246 if type(value) is bytes:
247 return value.decode('utf-8')
248 elif not type(value) in (int, float, complex):
# Factory helpers that pick rowid/cell accessors suited to the node type
# (Table rows vs. plain array elements); bodies are largely missing
# (original 255-264, 268-274).
253 def make_get_rowid():
254 if type(table) is tables.table.Table:
265 def make_get_row_value():
266 if type(table) is tables.table.Table:
267 def get_row_value(row, field):
270 def get_row_value(row, field):
275 get_rowid = make_get_rowid()
276 get_row_value = make_get_row_value()
# OFFSET is applied by slicing the iterator/result; the guard around
# this line (original 277, 279) is missing — confirm when it applies.
278 table_rows = table_rows[offset:]
# Assemble result rows, honouring LIMIT and the truncation sentinel.
# NOTE(review): the counter updates and break statements (281, 283,
# 285-289, 297, 305, 307, 309-310) are missing from this extract.
280 for table_row in table_rows:
282 if limit is not None and count > limit:
284 if truncate and max_returned_rows and count > max_returned_rows:
290 if isinstance(field, dict):
291 field_name = field['value']
292 if isinstance(field_name, dict) and 'distinct' in field_name:
293 field_name = field_name['distinct']
294 if field_name == 'rowid':
295 row['rowid'] = get_rowid(table_row)
296 elif field_name == '*':
298 row[col] = normalize_field_value(get_row_value(table_row, col))
299 elif isinstance(field_name, dict):
# Only the count(*) and json_type(...) pseudo-functions are emulated;
# anything else is rejected.
300 if field_name.get('count') == '*':
301 row['count(*)'] = int(table.nrows)
302 elif field_name.get('json_type'):
303 field_name = field_name.get('json_type')
304 row['json_type(' + field_name + ')'] = _get_field_type(field_name)
306 raise Exception("Function not recognized")
308 row[field_name] = normalize_field_value(get_row_value(table_row, field_name))
311 # Prepare query description
# Description mimics sqlite3 ``cursor.description``: a tuple of
# one-tuples of column names; '*' expands to every column.
312 for field in [f['value'] if isinstance(f, dict) else f for f in fields]:
315 description += ((col,),)
317 description += ((field,),)
319 return results, truncated, description