Use datasette-connectors 2.0 api (wip)
diff --git a/datasette_pytables/__init__.py b/datasette_pytables/__init__.py
index d9530ae9a7edb969a64628c018d2b2c83d29c051..396cd0dbb93a09dbcd41da70b1d505e6a72d822c 100644
--- a/datasette_pytables/__init__.py
+++ b/datasette_pytables/__init__.py
@@ -1,9 +1,58 @@
-from collections import OrderedDict
 from moz_sql_parser import parse
 import re
+
 import tables
+import datasette_connectors as dc
+
+
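+# Connection wrapper that opens the HDF5 file once and keeps the handle around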
+class PyTablesConnection(dc.Connection):
+    def __init__(self, path, connector):
+        super().__init__(path, connector)
+        self.h5file = tables.open_file(path)
+
+
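+# Connector exposing the nodes of a PyTables/HDF5 file through the datasette-connectors hooks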
+class PyTablesConnector(dc.Connector):
+    connector_type = 'pytables'
+    connection_class = PyTablesConnection
+
+    def table_names(self):
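+        # every leaf node (anything that is not a Group) is exposed as a table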
+        return [
+            node._v_pathname
+            for node in self.conn.h5file
+            if not(isinstance(node, tables.group.Group))
+        ]
+
+    def table_count(self, table_name):
+        table = self.conn.h5file.get_node(table_name)
+        return int(table.nrows)
+
+    def table_info(self, table_name):
+        table = self.conn.h5file.get_node(table_name)
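+        # plain arrays have no columns of their own, so expose a single 'value' column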
+        colnames = ['value']
+        if isinstance(table, tables.table.Table):
+            colnames = table.colnames
+
+        return [
+            {
+                'idx': idx,
+                'name': colname,
+                'primary_key': False,
+            }
+            for idx, colname in enumerate(colnames)
+        ]
+
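+    # HDF5 files have no hidden tables, views, SpatiaLite metadata or FTS indexes,
+    # so the remaining hooks return empty or negative results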
+    def hidden_table_names(self):
+        return []
+
+    def detect_spatialite(self):
+        return False
+
+    def view_names(self):
+        return []
+
+    def detect_fts(self, table_name):
+        return False
 
-_connector_type = 'pytables'
 
 def inspect(path):
     "Open file and return tables info"
@@ -12,7 +61,7 @@ def inspect(path):
     h5file = tables.open_file(path)
 
     for table in filter(lambda node: not(isinstance(node, tables.group.Group)), h5file):
-        colnames = []
+        colnames = ['value']
         if isinstance(table, tables.table.Table):
             colnames = table.colnames
 
@@ -32,7 +81,7 @@ def inspect(path):
 
 def _parse_sql(sql, params):
     # Table name
-    sql = re.sub('(?i)from \[(.*)]', 'from "\g<1>"', sql)
+    sql = re.sub(r'(?i)from \[(.*)]', r'from "\g<1>"', sql)
     # Params
     for param in params:
         sql = sql.replace(":" + param, param)
@@ -44,7 +93,7 @@ def _parse_sql(sql, params):
         for token in ['group by', 'order by', 'limit', '']:
             res = re.search('(?i)where (.*)' + token, sql)
             if res:
-                modified_sql = re.sub('(?i)where (.*)(' + token + ')', '\g<2>', sql)
+                modified_sql = re.sub('(?i)where (.*)(' + token + ')', r'\g<2>', sql)
                 parsed = parse(modified_sql)
                 parsed['where'] = res.group(1).strip()
                 break
@@ -71,7 +120,7 @@ class Connection:
         self.path = path
         self.h5file = tables.open_file(path)
 
-    def execute(self, sql, params=None, truncate=False, page_size=None):
+    def execute(self, sql, params=None, truncate=False, page_size=None, max_returned_rows=None):
         if params is None:
             params = {}
         rows = []
@@ -81,11 +130,16 @@ class Connection:
         parsed_sql = _parse_sql(sql, params)
 
         if parsed_sql['from'] == 'sqlite_master':
-            return self._execute_datasette_query(sql, params)
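+            # Datasette's introspection queries against sqlite_master are answered synthetically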
+            rows = self._execute_datasette_query(sql, params)
+            description = (('value',),)
+            return rows, truncated, description
 
         table = self.h5file.get_node(parsed_sql['from'])
         table_rows = []
         fields = parsed_sql['select']
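+        # non-Table leaves (plain arrays) are exposed as a single 'value' column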
+        colnames = ['value']
+        if type(table) is tables.table.Table:
+            colnames = table.colnames
 
         query = ''
         start = 0
@@ -94,7 +148,9 @@ class Connection:
         # Use 'where' statement or get all the rows
         def _cast_param(field, pname):
             # Cast value to the column type
-            coltype = table.coltypes[field]
+            coltype = table.dtype.name
+            if type(table) is tables.table.Table:
+                coltype = table.coltypes[field]
             fcast = None
             if coltype == 'string':
                 fcast = str
@@ -107,6 +163,7 @@ class Connection:
 
         def _translate_where(where):
             # Translate SQL to PyTables expression
+            nonlocal start, end
             expr = ''
             operator = list(where)[0]
 
@@ -118,9 +175,10 @@ class Connection:
             elif operator == 'exists':
                 pass
             elif where == {'eq': ['rowid', 'p0']}:
-                nonlocal start, end
                 start = int(params['p0'])
                 end = start + 1
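+            # 'rowid > :p0': resume iteration right after the given row (used for pagination)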
+            elif where == {'gt': ['rowid', 'p0']}:
+                start = int(params['p0']) + 1
             else:
                 left, right = where[operator]
                 if left in params:
@@ -138,99 +196,122 @@ class Connection:
             else:
                 query = parsed_sql['where']
 
+        # Sort by column
+        orderby = ''
+        if 'orderby' in parsed_sql:
+            orderby = parsed_sql['orderby']
+            if type(orderby) is list:
+                orderby = orderby[0]
+            orderby = orderby['value']
+            if orderby == 'rowid':
+                orderby = ''
+
         # Limit number of rows
+        limit = None
         if 'limit' in parsed_sql:
-            max_rows = int(parsed_sql['limit'])
-            if end - start > max_rows:
-                end = start + max_rows
+            limit = int(parsed_sql['limit'])
 
         # Truncate if needed
-        if page_size and truncate:
-            if end - start > page_size:
-                end = start + page_size
-                truncated = True
+        if page_size and max_returned_rows and truncate:
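+            # allow one extra row so a completely full page can be told apart from a truncated result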
+            if max_returned_rows == page_size:
+                max_returned_rows += 1
 
         # Execute query
         if query:
             table_rows = table.where(query, params, start, end)
+        elif orderby:
+            table_rows = table.itersorted(orderby, start=start, stop=end)
         else:
             table_rows = table.iterrows(start, end)
 
         # Prepare rows
-        if len(fields) == 1 and type(fields[0]['value']) is dict and \
-           fields[0]['value'].get('count') == '*':
-            rows.append(Row({'count(*)': int(table.nrows)}))
-        else:
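+        # decode bytes to text and stringify anything that is not a plain number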
+        def normalize_field_value(value):
+            if type(value) is bytes:
+                return value.decode('utf-8')
+            elif not type(value) in (int, float, complex):
+                return str(value)
+            else:
+                return value
+
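+        # Table rows know their own row number; array rows get a synthetic, incrementing rowid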
+        def make_get_rowid():
             if type(table) is tables.table.Table:
-                for table_row in table_rows:
-                    row = Row()
-                    for field in fields:
-                        field_name = field['value']
-                        if type(field_name) is dict and 'distinct' in field_name:
-                            field_name = field_name['distinct']
-                        if field_name == 'rowid':
-                            row['rowid'] = int(table_row.nrow)
-                        elif field_name == '*':
-                            for col in table.colnames:
-                                value = table_row[col]
-                                if type(value) is bytes:
-                                    value = value.decode('utf-8')
-                                row[col] = value
-                        else:
-                            row[field_name] = table_row[field_name]
-                    rows.append(row)
+                def get_rowid(row):
+                    return int(row.nrow)
             else:
-                # Any kind of array
                 rowid = start - 1
-                for table_row in table_rows:
-                    row = Row()
+                def get_rowid(row):
+                    nonlocal rowid
                     rowid += 1
-                    for field in fields:
-                        field_name = field['value']
-                        if type(field_name) is dict and 'distinct' in field_name:
-                            field_name = field_name['distinct']
-                        if field_name == 'rowid':
-                            row['rowid'] = rowid
-                        else:
-                            value = table_row
-                            if type(value) is bytes:
-                                value = value.decode('utf-8')
-                            row['value'] = value
-                    rows.append(row)
+                    return rowid
+            return get_rowid
+
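+        # Table rows are indexed by column name; for plain arrays the row itself is the value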
+        def make_get_row_value():
+            if type(table) is tables.table.Table:
+                def get_row_value(row, field):
+                    return row[field]
+            else:
+                def get_row_value(row, field):
+                    return row
+            return get_row_value
+
+        if len(fields) == 1 and type(fields[0]['value']) is dict and \
+           fields[0]['value'].get('count') == '*':
+            rows.append(Row({'count(*)': int(table.nrows)}))
+        else:
+            get_rowid = make_get_rowid()
+            get_row_value = make_get_row_value()
+            count = 0
+            for table_row in table_rows:
+                count += 1
+                if limit and count > limit:
+                    break
+                if truncate and max_returned_rows and count > max_returned_rows:
+                    truncated = True
+                    break
+                row = Row()
+                for field in fields:
+                    field_name = field['value']
+                    if type(field_name) is dict and 'distinct' in field_name:
+                        field_name = field_name['distinct']
+                    if field_name == 'rowid':
+                        row['rowid'] = get_rowid(table_row)
+                    elif field_name == '*':
+                        for col in colnames:
+                            row[col] = normalize_field_value(get_row_value(table_row, col))
+                    else:
+                        row[field_name] = normalize_field_value(get_row_value(table_row, field_name))
+                rows.append(row)
 
         # Prepare query description
         for field in [f['value'] for f in fields]:
             if field == '*':
-                if type(table) is tables.table.Table:
-                    for col in table.colnames:
-                        description.append((col,))
-                else:
-                    description.append(('value',))
+                for col in colnames:
+                    description.append((col,))
             else:
                 description.append((field,))
 
         # Return the rows
-        if truncate:
-            return rows, truncated, tuple(description)
-        else:
-            return rows
+        return rows, truncated, tuple(description)
 
     def _execute_datasette_query(self, sql, params):
         "Datasette special queries for getting tables info"
-        if sql == "SELECT count(*) from sqlite_master WHERE type = 'view' and name=:n":
-            row = Row()
-            row['count(*)'] = 0
-            return [row]
-        elif sql == 'select sql from sqlite_master where name = :n and type="table"':
-            try:
-                table = self.h5file.get_node(params['n'])
-                row = Row()
-                row['sql'] = 'CREATE TABLE {} ()'.format(params['n'])
-                return [row]
-            except:
+        if sql == 'select sql from sqlite_master where name = :n and type=:t':
+            if params['t'] == 'view':
                 return []
+            else:
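+                # synthesize a minimal CREATE TABLE statement so Datasette can see the columns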
+                try:
+                    table = self.h5file.get_node(params['n'])
+                    colnames = ['value']
+                    if type(table) is tables.table.Table:
+                        colnames = table.colnames
+                    row = Row()
+                    row['sql'] = 'CREATE TABLE {} ({})'.format(params['n'], ", ".join(colnames))
+                    return [row]
+                except:
+                    return []
         else:
-            raise Exception("SQLite queries cannot be executed with this connector")
+            raise Exception("SQLite queries cannot be executed with this connector: %s, %s" % (sql, params))
+
 
 class Row(list):
     def __init__(self, values=None):