X-Git-Url: https://git.jsancho.org/?p=datasette-pytables.git;a=blobdiff_plain;f=datasette_pytables%2F__init__.py;h=2e6920eceab3f96c3f28e17a566962023534fb44;hp=80349a6f918cb5eb58728785bca258a69a42a9ce;hb=ca01a7a7a6320ba50a2374fab68d69dc5aacec4e;hpb=7574220c89f66e62f93aed8fbe12e45e0db27f46

diff --git a/datasette_pytables/__init__.py b/datasette_pytables/__init__.py
index 80349a6..2e6920e 100644
--- a/datasette_pytables/__init__.py
+++ b/datasette_pytables/__init__.py
@@ -11,7 +11,7 @@ def inspect(path):
     h5file = tables.open_file(path)
 
     for table in filter(lambda node: not(isinstance(node, tables.group.Group)), h5file):
-        colnames = []
+        colnames = ['value']
         if isinstance(table, tables.table.Table):
             colnames = table.colnames
 
@@ -87,6 +87,9 @@ class Connection:
         table = self.h5file.get_node(parsed_sql['from'])
         table_rows = []
         fields = parsed_sql['select']
+        colnames = ['value']
+        if type(table) is tables.table.Table:
+            colnames = table.colnames
 
         query = ''
         start = 0
@@ -95,10 +98,9 @@ class Connection:
         # Use 'where' statement or get all the rows
         def _cast_param(field, pname):
             # Cast value to the column type
+            coltype = table.dtype.name
             if type(table) is tables.table.Table:
                 coltype = table.coltypes[field]
-            else:
-                coltype = table.dtype.name
             fcast = None
             if coltype == 'string':
                 fcast = str
@@ -144,6 +146,16 @@ class Connection:
             else:
                 query = parsed_sql['where']
 
+        # Sort by column
+        orderby = ''
+        if 'orderby' in parsed_sql:
+            orderby = parsed_sql['orderby']
+            if type(orderby) is list:
+                orderby = orderby[0]
+            orderby = orderby['value']
+            if orderby == 'rowid':
+                orderby = ''
+
         # Limit number of rows
         limit = None
         if 'limit' in parsed_sql:
@@ -157,75 +169,74 @@ class Connection:
         # Execute query
         if query:
             table_rows = table.where(query, params, start, end)
+        elif orderby:
+            table_rows = table.itersorted(orderby, start=start, stop=end)
         else:
             table_rows = table.iterrows(start, end)
 
         # Prepare rows
-        if len(fields) == 1 and type(fields[0]['value']) is dict and \
-           fields[0]['value'].get('count') == '*':
-            rows.append(Row({'count(*)': int(table.nrows)}))
-        else:
+        def normalize_field_value(value):
+            if type(value) is bytes:
+                return value.decode('utf-8')
+            elif not type(value) in (int, float, complex):
+                return str(value)
+            else:
+                return value
+
+        def make_get_rowid():
             if type(table) is tables.table.Table:
-                count = 0
-                for table_row in table_rows:
-                    count += 1
-                    if limit and count > limit:
-                        break
-                    if truncate and max_returned_rows and count > max_returned_rows:
-                        truncated = True
-                        break
-                    row = Row()
-                    for field in fields:
-                        field_name = field['value']
-                        if type(field_name) is dict and 'distinct' in field_name:
-                            field_name = field_name['distinct']
-                        if field_name == 'rowid':
-                            row['rowid'] = int(table_row.nrow)
-                        elif field_name == '*':
-                            for col in table.colnames:
-                                value = table_row[col]
-                                if type(value) is bytes:
-                                    value = value.decode('utf-8')
-                                row[col] = value
-                        else:
-                            row[field_name] = table_row[field_name]
-                    rows.append(row)
+                def get_rowid(row):
+                    return int(row.nrow)
             else:
-                # Any kind of array
                 rowid = start - 1
-                count = 0
-                for table_row in table_rows:
-                    count += 1
-                    if limit and count > limit:
-                        break
-                    if truncate and max_returned_rows and count > max_returned_rows:
-                        truncated = True
-                        break
-                    row = Row()
+                def get_rowid(row):
+                    nonlocal rowid
                     rowid += 1
-                    for field in fields:
-                        field_name = field['value']
-                        if type(field_name) is dict and 'distinct' in field_name:
-                            field_name = field_name['distinct']
-                        if field_name == 'rowid':
-                            row['rowid'] = rowid
-                        else:
-                            value = table_row
-                            if type(value) is bytes:
-                                value = value.decode('utf-8')
-                            elif not type(value) in (int, float, complex):
-                                value = str(value)
-                            row['value'] = value
-                    rows.append(row)
+                    return rowid
+            return get_rowid
+
+        def make_get_row_value():
+            if type(table) is tables.table.Table:
+                def get_row_value(row, field):
+                    return row[field]
+            else:
+                def get_row_value(row, field):
+                    return row
+            return get_row_value
+
+        if len(fields) == 1 and type(fields[0]['value']) is dict and \
+           fields[0]['value'].get('count') == '*':
+            rows.append(Row({'count(*)': int(table.nrows)}))
+        else:
+            get_rowid = make_get_rowid()
+            get_row_value = make_get_row_value()
+            count = 0
+            for table_row in table_rows:
+                count += 1
+                if limit and count > limit:
+                    break
+                if truncate and max_returned_rows and count > max_returned_rows:
+                    truncated = True
+                    break
+                row = Row()
+                for field in fields:
+                    field_name = field['value']
+                    if type(field_name) is dict and 'distinct' in field_name:
+                        field_name = field_name['distinct']
+                    if field_name == 'rowid':
+                        row['rowid'] = get_rowid(table_row)
+                    elif field_name == '*':
+                        for col in colnames:
+                            row[col] = normalize_field_value(get_row_value(table_row, col))
+                    else:
+                        row[field_name] = normalize_field_value(get_row_value(table_row, field_name))
+                rows.append(row)
 
         # Prepare query description
         for field in [f['value'] for f in fields]:
             if field == '*':
-                if type(table) is tables.table.Table:
-                    for col in table.colnames:
-                        description.append((col,))
-                else:
-                    description.append(('value',))
+                for col in colnames:
+                    description.append((col,))
             else:
                 description.append((field,))
 
@@ -241,14 +252,18 @@ class Connection:
         elif sql == 'select sql from sqlite_master where name = :n and type="table"':
             try:
                 table = self.h5file.get_node(params['n'])
+                colnames = ['value']
+                if type(table) is tables.table.Table:
+                    colnames = table.colnames
                 row = Row()
-                row['sql'] = 'CREATE TABLE {} ()'.format(params['n'])
+                row['sql'] = 'CREATE TABLE {} ({})'.format(params['n'], ", ".join(colnames))
                 return [row]
             except:
                 return []
        else:
            raise Exception("SQLite queries cannot be executed with this connector")
 
+
 class Row(list):
     def __init__(self, values=None):
        self.labels = []
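
Below is a minimal, illustrative sketch (not part of the patch) of the three PyTables iteration calls that the reworked execute() path now chooses between: Table.where() when a where clause was parsed, Table.itersorted() for the new orderby branch, and iterrows() as the fallback for plain arrays. The file, table, and column names (demo.h5, readings, pressure) are invented for the example; note that itersorted() requires a completely sorted index (CSI) on the sort column.

# Illustrative sketch only, not part of the patch above; all names are made up.
import tables

class Reading(tables.IsDescription):
    pressure = tables.Float64Col()

with tables.open_file("demo.h5", "w") as h5file:
    table = h5file.create_table("/", "readings", Reading)
    table.append([(30.0,), (10.0,), (20.0,)])
    table.flush()
    # itersorted() needs a completely sorted index (CSI) on the sort column
    table.cols.pressure.create_csindex()

    # 'where' branch: condition string plus condition variables, mirroring
    # table.where(query, params, start, end) in the connector
    filtered = [r["pressure"] for r in table.where("pressure > p0", {"p0": 15.0})]

    # new 'orderby' branch: table.itersorted(orderby, start=start, stop=end)
    ordered = [r["pressure"] for r in table.itersorted("pressure")]

    # fallback branch for plain arrays, exposed as a single 'value' column
    arr = h5file.create_array("/", "plain", [1, 2, 3])
    values = [int(v) for v in arr.iterrows(0, 3)]

print(filtered, ordered, values)   # [30.0, 20.0] [10.0, 20.0, 30.0] [1, 2, 3]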