from moz_sql_parser import parse
import re
import tables
_connector_type = 'pytables'
8 "Open file and return tables info"
11 h5file = tables.open_file(path)
13 for table in filter(lambda node: not(isinstance(node, tables.group.Group)), h5file):
15 if isinstance(table, tables.table.Table):
16 colnames = table.colnames
18 h5tables[table._v_pathname] = {
19 'name': table._v_pathname,
22 'count': int(table.nrows),
26 'foreign_keys': {'incoming': [], 'outgoing': []},
30 return h5tables, views, _connector_type
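
# Illustrative sketch (not part of the original file): for a hypothetical
# file with a single table, inspect() returns something along the lines of
#   ({'/mytable': {'name': '/mytable', 'columns': ['name', 'value'],
#                  'count': 100,
#                  'foreign_keys': {'incoming': [], 'outgoing': []}}},
#    [], 'pytables')
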
def _parse_sql(sql, params):
    # Datasette quotes table names as [name]; rewrite to "name" so that
    # moz_sql_parser accepts the HDF5 node path
    sql = re.sub(r'(?i)from \[(.*)]', r'from "\g<1>"', sql)
    # Replace :param placeholders with plain names
    for param in params:
        sql = sql.replace(":" + param, param)
    try:
        parsed = parse(sql)
    except Exception:
        # Probably it's a PyTables expression in the where clause, which
        # moz_sql_parser cannot parse: strip it, parse the rest and keep
        # the raw expression
        for token in ['group by', 'order by', 'limit', '']:
            res = re.search('(?i)where (.*)' + token, sql)
            if res:
                modified_sql = re.sub('(?i)where (.*)(' + token + ')', r'\g<2>', sql)
                parsed = parse(modified_sql)
                parsed['where'] = res.group(1).strip()
                break
    # Always a list of fields
    if type(parsed['select']) is not list:
        parsed['select'] = [parsed['select']]

    return parsed
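
# Illustrative sketch (not part of the original file): for a query such as
#   select name, value from "/mytable" where value > 5 limit 10
# moz_sql_parser returns roughly
#   {'select': [{'value': 'name'}, {'value': 'value'}],
#    'from': '/mytable', 'where': {'gt': ['value', 5]}, 'limit': 10}
# and _parse_sql() only guarantees that 'select' is a list.
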
# Map moz_sql_parser operator names to PyTables (numexpr) expression operators
_operators = {
    'eq': '==', 'neq': '!=', 'gt': '>', 'gte': '>=', 'lt': '<', 'lte': '<=',
    'and': '&', 'or': '|',
}

class Connection:
    def __init__(self, path):
        self.h5file = tables.open_file(path)
    def execute(self, sql, params=None, truncate=False, page_size=None, max_returned_rows=None):
        if params is None:
            params = {}
        rows = []
        truncated = False
        description = []

        parsed_sql = _parse_sql(sql, params)
        # Datasette introspection queries go to sqlite_master; handle them apart
        if parsed_sql['from'] == 'sqlite_master':
            rows = self._execute_datasette_query(sql, params)
            description = (('value',),)
            return rows, truncated, description
        table = self.h5file.get_node(parsed_sql['from'])
        table_rows = []
        fields = parsed_sql['select']

        # Array nodes expose a single 'value' column
        colnames = ['value']
        if type(table) is tables.table.Table:
            colnames = table.colnames

        query = ''
        start = 0
        end = table.nrows
        # Use 'where' statement or get all the rows
        def _cast_param(field, pname):
            # Cast the parameter value to the column type
            coltype = table.dtype.name
            if type(table) is tables.table.Table:
                coltype = table.coltypes[field]
            fcast = None
            if coltype == 'string':
                fcast = str
            elif coltype.startswith('int'):
                fcast = int
            elif coltype.startswith('float'):
                fcast = float
            if fcast:
                params[pname] = fcast(params[pname])
        def _translate_where(where):
            # Translate the parsed SQL condition into a PyTables expression
            nonlocal start, end
            expr = ''
            operator = list(where)[0]

            if operator in ['and', 'or']:
                subexpr = [_translate_where(e) for e in where[operator]]
                subexpr = filter(lambda e: e, subexpr)
                subexpr = ["({})".format(e) for e in subexpr]
                expr = " {} ".format(_operators[operator]).join(subexpr)
            elif operator == 'exists':
                pass
            elif where == {'eq': ['rowid', 'p0']}:
                # Filtering by rowid is done with start/end bounds, not an expression
                start = int(params['p0'])
                end = start + 1
            elif where == {'gt': ['rowid', 'p0']}:
                start = int(params['p0']) + 1
            else:
                left, right = where[operator]
                if left in params:
                    _cast_param(right, left)
                elif right in params:
                    _cast_param(left, right)

                expr = "{left} {operator} {right}".format(
                    left=left, operator=_operators.get(operator, operator), right=right)

            return expr
        if 'where' in parsed_sql:
            if type(parsed_sql['where']) is dict:
                query = _translate_where(parsed_sql['where'])
            else:
                # Raw PyTables expression kept verbatim by _parse_sql()
                query = parsed_sql['where']
        # Sort by a column, or keep the natural (rowid) order
        orderby = ''
        if 'orderby' in parsed_sql:
            orderby = parsed_sql['orderby']
            if type(orderby) is list:
                orderby = orderby[0]
            orderby = orderby['value']
            if orderby == 'rowid':
                orderby = ''
        # Limit number of rows
        limit = None
        if 'limit' in parsed_sql:
            limit = int(parsed_sql['limit'])
        # Fetch one extra row so that truncation can be detected
        if page_size and max_returned_rows and truncate:
            if max_returned_rows == page_size:
                max_returned_rows += 1
        # Execute query: in-kernel search, sorted iteration or a plain scan
        if query:
            table_rows = table.where(query, params, start, end)
        elif orderby:
            table_rows = table.itersorted(orderby, start=start, stop=end)
        else:
            table_rows = table.iterrows(start, end)
        # Prepare rows
        def normalize_field_value(value):
            if type(value) is bytes:
                return value.decode('utf-8')
            elif not type(value) in (int, float, complex):
                return str(value)
            else:
                return value
        def make_get_rowid():
            if type(table) is tables.table.Table:
                # Table rows know their own position
                def get_rowid(row):
                    return int(row.nrow)
            else:
                # Array rows do not, so keep a running counter
                rowid = start - 1
                def get_rowid(row):
                    nonlocal rowid
                    rowid += 1
                    return rowid
            return get_rowid
        def make_get_row_value():
            if type(table) is tables.table.Table:
                def get_row_value(row, field):
                    return row[field]
            else:
                def get_row_value(row, field):
                    return row
            return get_row_value
        # count(*) is answered from the table metadata without iterating
        if len(fields) == 1 and type(fields[0]['value']) is dict and \
           fields[0]['value'].get('count') == '*':
            rows.append(Row({'count(*)': int(table.nrows)}))
        else:
            get_rowid = make_get_rowid()
            get_row_value = make_get_row_value()
            count = 0
            for table_row in table_rows:
                count += 1
                if limit and count > limit:
                    break
                if truncate and max_returned_rows and count > max_returned_rows:
                    truncated = True
                    break
                row = Row()
                for field in fields:
                    field_name = field['value']
                    if type(field_name) is dict and 'distinct' in field_name:
                        field_name = field_name['distinct']
                    if field_name == 'rowid':
                        row['rowid'] = get_rowid(table_row)
                    elif field_name == '*':
                        for col in colnames:
                            row[col] = normalize_field_value(get_row_value(table_row, col))
                    else:
                        row[field_name] = normalize_field_value(get_row_value(table_row, field_name))
                rows.append(row)
        # Prepare query description
        for field in [f['value'] for f in fields]:
            if field == '*':
                for col in colnames:
                    description.append((col,))
            else:
                description.append((field,))

        return rows, truncated, tuple(description)
    def _execute_datasette_query(self, sql, params):
        "Datasette special queries for getting tables info"
        if sql == 'select sql from sqlite_master where name = :n and type=:t':
            if params['t'] == 'view':
                # There are no views in an HDF5 file
                return []
            else:
                table = self.h5file.get_node(params['n'])
                colnames = ['value']
                if type(table) is tables.table.Table:
                    colnames = table.colnames
                row = Row()
                row['sql'] = 'CREATE TABLE {} ({})'.format(params['n'], ", ".join(colnames))
                return [row]
        else:
            raise Exception("SQLite queries cannot be executed with this connector: %s, %s" % (sql, params))
class Row(list):
    "List-like row that can also be indexed by column label"
    def __init__(self, values=None):
        self.labels = []
        self.values = []
        if values:
            for idx in values:
                self.__setitem__(idx, values[idx])

    def __setitem__(self, idx, value):
        if type(idx) is str:
            if idx in self.labels:
                self.values[self.labels.index(idx)] = value
            else:
                self.labels.append(idx)
                self.values.append(value)
        else:
            self.values[idx] = value

    def __getitem__(self, idx):
        if type(idx) is str:
            return self.values[self.labels.index(idx)]
        else:
            return self.values[idx]

    def __iter__(self):
        return self.values.__iter__()
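
# A minimal usage sketch, assuming an HDF5 file named 'example.h5' that
# contains a Table node at '/mytable' (both names are hypothetical):
if __name__ == '__main__':
    h5tables, views, connector = inspect('example.h5')
    print(connector, list(h5tables))

    conn = Connection('example.h5')
    rows, truncated, description = conn.execute(
        'select rowid, * from [/mytable] limit 5',
        params={}, truncate=True, page_size=5, max_returned_rows=100)
    print([col[0] for col in description])
    for row in rows:
        print(list(row))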