X-Git-Url: https://git.jsancho.org/?p=datasette-pytables.git;a=blobdiff_plain;f=datasette_pytables%2F__init__.py;h=93cc1f082fd6eef31e0250ef644080cf4474303c;hp=e8d0a6ccb4519f72d2e0680e0c3b4b8f44dc2574;hb=6e8ac54ac9cc83d82ae2482dcec518a1f09991fa;hpb=2cf7538022a4290be1cdbe6f8cf13e44f8190bee

diff --git a/datasette_pytables/__init__.py b/datasette_pytables/__init__.py
index e8d0a6c..93cc1f0 100644
--- a/datasette_pytables/__init__.py
+++ b/datasette_pytables/__init__.py
@@ -37,7 +37,19 @@ def _parse_sql(sql, params):
     for param in params:
         sql = sql.replace(":" + param, param)
 
-    parsed = parse(sql)
+    try:
+        parsed = parse(sql)
+    except:
+        # Probably it's a PyTables expression
+        for token in ['group by', 'order by', 'limit', '']:
+            res = re.search('(?i)where (.*)' + token, sql)
+            if res:
+                modified_sql = re.sub('(?i)where (.*)(' + token + ')', '\g<2>', sql)
+                parsed = parse(modified_sql)
+                parsed['where'] = res.group(1).strip()
+                break
+
+    # Always a list of fields
     if type(parsed['select']) is not list:
         parsed['select'] = [parsed['select']]
 
@@ -59,7 +71,7 @@ class Connection:
         self.path = path
         self.h5file = tables.open_file(path)
 
-    def execute(self, sql, params=None, truncate=False):
+    def execute(self, sql, params=None, truncate=False, page_size=None, max_returned_rows=None):
         if params is None:
             params = {}
         rows = []
@@ -67,6 +79,10 @@
         description = []
 
         parsed_sql = _parse_sql(sql, params)
+
+        if parsed_sql['from'] == 'sqlite_master':
+            return self._execute_datasette_query(sql, params)
+
         table = self.h5file.get_node(parsed_sql['from'])
         table_rows = []
         fields = parsed_sql['select']
@@ -95,8 +111,12 @@
             operator = list(where)[0]
 
             if operator in ['and', 'or']:
-                subexpr = ["({})".format(_translate_where(q)) for q in where[operator]]
+                subexpr = [_translate_where(e) for e in where[operator]]
+                subexpr = filter(lambda e: e, subexpr)
+                subexpr = ["({})".format(e) for e in subexpr]
                 expr = " {} ".format(_operators[operator]).join(subexpr)
+            elif operator == 'exists':
+                pass
             elif where == {'eq': ['rowid', 'p0']}:
                 nonlocal start, end
                 start = int(params['p0'])
@@ -113,11 +133,10 @@
             return expr
 
         if 'where' in parsed_sql:
-            try:
+            if type(parsed_sql['where']) is dict:
                 query = _translate_where(parsed_sql['where'])
-            except:
-                # Probably it's a PyTables query
-                query = str(parsed_sql['where'])[6:]  # without where keyword
+            else:
+                query = parsed_sql['where']
 
         # Limit number of rows
         if 'limit' in parsed_sql:
@@ -125,6 +144,14 @@
             if end - start > max_rows:
                 end = start + max_rows
 
+        # Truncate if needed
+        if page_size and max_returned_rows and truncate:
+            if max_returned_rows == page_size:
+                max_returned_rows += 1
+            if end - start > max_returned_rows:
+                end = start + max_returned_rows
+                truncated = True
+
         # Execute query
         if query:
             table_rows = table.where(query, params, start, end)
@@ -136,26 +163,51 @@
            fields[0]['value'].get('count') == '*':
             rows.append(Row({'count(*)': int(table.nrows)}))
         else:
-            for table_row in table_rows:
-                row = Row()
-                for field in fields:
-                    if field['value'] == 'rowid':
-                        row['rowid'] = int(table_row.nrow)
-                    elif field['value'] == '*':
-                        for col in table.colnames:
-                            value = table_row[col]
+            if type(table) is tables.table.Table:
+                for table_row in table_rows:
+                    row = Row()
+                    for field in fields:
+                        field_name = field['value']
+                        if type(field_name) is dict and 'distinct' in field_name:
+                            field_name = field_name['distinct']
+                        if field_name == 'rowid':
+                            row['rowid'] = int(table_row.nrow)
+                        elif field_name == '*':
+                            for col in table.colnames:
+                                value = table_row[col]
+                                if type(value) is bytes:
+                                    value = value.decode('utf-8')
+                                row[col] = value
+                        else:
+                            row[field_name] = table_row[field_name]
+                    rows.append(row)
+            else:
+                # Any kind of array
+                rowid = start - 1
+                for table_row in table_rows:
+                    row = Row()
+                    rowid += 1
+                    for field in fields:
+                        field_name = field['value']
+                        if type(field_name) is dict and 'distinct' in field_name:
+                            field_name = field_name['distinct']
+                        if field_name == 'rowid':
+                            row['rowid'] = rowid
+                        else:
+                            value = table_row
                             if type(value) is bytes:
                                 value = value.decode('utf-8')
-                            row[col] = value
-                    else:
-                        row[field['value']] = table_row[field['value']]
-                rows.append(row)
+                            row['value'] = value
+                    rows.append(row)
 
 
         # Prepare query description
         for field in [f['value'] for f in fields]:
             if field == '*':
-                for col in table.colnames:
-                    description.append((col,))
+                if type(table) is tables.table.Table:
+                    for col in table.colnames:
+                        description.append((col,))
+                else:
+                    description.append(('value',))
             else:
                 description.append((field,))
@@ -165,12 +217,46 @@
         else:
             return rows
 
-class Row(OrderedDict):
-    def __getitem__(self, label):
-        if type(label) is int:
-            return super(OrderedDict, self).__getitem__(list(self.keys())[label])
+    def _execute_datasette_query(self, sql, params):
+        "Datasette special queries for getting tables info"
+        if sql == "SELECT count(*) from sqlite_master WHERE type = 'view' and name=:n":
+            row = Row()
+            row['count(*)'] = 0
+            return [row]
+        elif sql == 'select sql from sqlite_master where name = :n and type="table"':
+            try:
+                table = self.h5file.get_node(params['n'])
+                row = Row()
+                row['sql'] = 'CREATE TABLE {} ()'.format(params['n'])
+                return [row]
+            except:
+                return []
+        else:
+            raise Exception("SQLite queries cannot be executed with this connector")
+
+class Row(list):
+    def __init__(self, values=None):
+        self.labels = []
+        self.values = []
+        if values:
+            for idx in values:
+                self.__setitem__(idx, values[idx])
+
+    def __setitem__(self, idx, value):
+        if type(idx) is str:
+            if idx in self.labels:
+                self.values[self.labels.index(idx)] = value
+            else:
+                self.labels.append(idx)
+                self.values.append(value)
+        else:
+            self.values[idx] = value
+
+    def __getitem__(self, idx):
+        if type(idx) is str:
+            return self.values[self.labels.index(idx)]
         else:
-            return super(OrderedDict, self).__getitem__(label)
+            return self.values[idx]
 
     def __iter__(self):
-        return self.values().__iter__()
+        return self.values.__iter__()
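
A quick way to see what the new fallback in _parse_sql does: when parse() rejects a PyTables-style condition, the except branch pulls the condition out of the WHERE clause with a regex and re-parses the rest. The sketch below replays just those re calls on a made-up query; the /mytable path and column names are placeholders, not part of this change.

import re

sql = 'select * from "/mytable" where (pressure > 20) & (energy < 3) limit 10'

# Mirror of the loop in the except branch: find the condition, then cut it out.
for token in ['group by', 'order by', 'limit', '']:
    res = re.search('(?i)where (.*)' + token, sql)
    if res:
        modified_sql = re.sub('(?i)where (.*)(' + token + ')', r'\g<2>', sql)
        break

print(res.group(1).strip())   # -> (pressure > 20) & (energy < 3)
print(modified_sql)           # -> select * from "/mytable" limit 10

The condition string ends up in parsed['where'] and is later handed straight to table.where().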
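
With the new page_size and max_returned_rows arguments, execute() caps the row range and, when truncate is set, reports whether it did so. A hypothetical call is sketched below; example.h5 and /mytable are placeholders for a real HDF5 file and Table node, and the exact SQL Datasette sends may differ.

import datasette_pytables

conn = datasette_pytables.Connection('example.h5')        # placeholder HDF5 file
rows, truncated, description = conn.execute(
    'select * from [/mytable]',                           # placeholder Table node
    truncate=True, page_size=100, max_returned_rows=1000)

print(len(rows))                        # at most max_returned_rows (+1 when it equals page_size)
print(truncated)                        # True if the range had to be cut
print([col[0] for col in description])  # column names; plain arrays report ('value',)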
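
The OrderedDict-based Row is replaced by a small list-backed container that accepts both labels and integer positions and iterates over its values. A minimal sketch of that behaviour, assuming the package from this diff is importable; the labels and values are made up.

from datasette_pytables import Row

row = Row({'name': 'particle', 'energy': 3.2})   # placeholder labels and values
row['pressure'] = 1.5        # a new label appends a value
row['energy'] = 4.0          # an existing label overwrites in place

print(row['name'])           # label access      -> 'particle'
print(row[2])                # positional access -> 1.5
print(list(row))             # iteration yields the values: ['particle', 4.0, 1.5]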