[Data Sources] Add: MemSQL query runner #1746
Changes from 6 commits: fefcb92, 469b041, 79187cd, 805ea3c, b9f8b6c, 248808e, 6b7234c

@@ -0,0 +1,157 @@
import json
import logging
import sys

from redash.query_runner import *
from redash.utils import JSONEncoder

logger = logging.getLogger(__name__)

try:
    from memsql.common import database

    enabled = True
except ImportError, e:
    logger.warning(e)
    enabled = False

COLUMN_NAME = 0
COLUMN_TYPE = 1

types_map = {
    'BIGINT': TYPE_INTEGER,
    'TINYINT': TYPE_INTEGER,
    'SMALLINT': TYPE_INTEGER,
    'MEDIUMINT': TYPE_INTEGER,
    'INT': TYPE_INTEGER,
    'DOUBLE': TYPE_FLOAT,
    'DECIMAL': TYPE_FLOAT,
    'FLOAT': TYPE_FLOAT,
    'REAL': TYPE_FLOAT,
    'BOOL': TYPE_BOOLEAN,
    'BOOLEAN': TYPE_BOOLEAN,
    'TIMESTAMP': TYPE_DATETIME,
    'DATETIME': TYPE_DATETIME,
    'DATE': TYPE_DATETIME,
    'JSON': TYPE_STRING,
    'CHAR': TYPE_STRING,
    'VARCHAR': TYPE_STRING
}


class MemSQL(BaseSQLQueryRunner):
    noop_query = 'SELECT 1'

    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "host": {
                    "type": "string"
                },
                "port": {
                    "type": "number"
                },
                "user": {
                    "type": "string"
                },
                "password": {
                    "type": "string"
                }
            },
            "required": ["host", "port"]
        }

    @classmethod
    def annotate_query(cls):
        return False

    @classmethod
    def type(cls):
        return "memsql"

    @classmethod
    def enabled(cls):
        return enabled

    def __init__(self, configuration):
        super(MemSQL, self).__init__(configuration)

    def _get_tables(self, schema):
        try:
            schemas_query = "show schemas"

            tables_query = "show tables in %s"

            columns_query = "show columns in %s"

            for schema_name in filter(lambda a: len(a) > 0,
                                      map(lambda a: str(a['Database']), self._run_query_internal(schemas_query))):
                for table_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['Tables_in_%s' % schema_name]),
                                                                   self._run_query_internal(
                                                                       tables_query % schema_name))):
                    table_name = '.'.join((schema_name, table_name))
                    columns = filter(lambda a: len(a) > 0, map(lambda a: str(a['Field']),
                                                               self._run_query_internal(columns_query % table_name)))

                    schema[table_name] = {'name': table_name, 'columns': columns}
        except Exception, e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]
Review thread on the exception handling above: "This is unnecessary." / "the Exception handling?" / "Yes." (A short illustration of why the re-raise is redundant appears after this diff.)

        return schema.values()

    def run_query(self, query, user):

        cursor = None
        try:
            cursor = database.connect(**self.configuration.to_dict())

            res = cursor.query(query)
            # column_names = []
            # columns = []
            #
            # for column in cursor.description:
            #     column_name = column[COLUMN_NAME]
            #     column_names.append(column_name)
            #
            #     columns.append({
            #         'name': column_name,
            #         'friendly_name': column_name,
            #         'type': types_map.get(column[COLUMN_TYPE], None)
            #     })

            rows = [dict(zip(list(row.keys()), list(row.values()))) for row in res]

            # ====================================================================================================
            # temporary - until https://github.com/memsql/memsql-python/pull/8 gets merged
            # ====================================================================================================
            columns = []
            column_names = rows[0].keys() if rows else None

            if column_names:
                for column in column_names:
                    columns.append({
                        'name': column,
                        'friendly_name': column,
                        'type': None
Review comment on the 'type': None line above: "It's better to use ..."

                    })

            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
        except KeyboardInterrupt:
            cursor.close()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            logging.exception(e)
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            if cursor:
                cursor.close()

        return json_data, error


register(MemSQL)
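
Regarding the review thread on the exception handling in _get_tables: the except Exception block there only re-raises the active exception, and in Python 2 a bare raise inside an except block already does that, traceback included, so the explicit three-expression form adds nothing. A minimal standalone sketch of the equivalence (hypothetical function names, not part of the diff):

import sys


def might_fail():
    # Hypothetical helper used only for this sketch: it just raises.
    raise ValueError("boom")


def reraise_explicitly():
    try:
        might_fail()
    except Exception:
        # The pattern used in the diff: rebuild and re-raise the active exception.
        raise sys.exc_info()[1], None, sys.exc_info()[2]


def reraise_bare():
    try:
        might_fail()
    except Exception:
        # A bare raise re-raises the same exception with its original traceback.
        raise


if __name__ == '__main__':
    for fn in (reraise_explicitly, reraise_bare):
        try:
            fn()
        except ValueError, e:
            print '%s re-raised: %s' % (fn.__name__, e)

Either way the caller sees the same ValueError, which is presumably why the reviewer calls the wrapper unnecessary.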

@@ -42,4 +42,6 @@ xlsxwriter==0.9.3
pystache==0.5.4
parsedatetime==2.1
cryptography==1.4
oauthlib==2.0.0
WTForms==2.1
Review comment on oauthlib and WTForms: "These two aren't needed."
simplejson==3.10.0
Review comment on the configuration schema: "Please add a secret field too (look at other data sources for example)." A sketch of what that could look like follows.
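
Other Redash query runners mark sensitive fields by listing them under a top-level "secret" key in the dict returned by configuration_schema, so the UI treats them as secrets instead of echoing them back. Assuming the MemSQL runner follows the same convention, the change the reviewer asks for might look like this sketch (illustrative only, not code from this PR):

    # Inside class MemSQL(BaseSQLQueryRunner) -- amended configuration_schema:
    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "host": {"type": "string"},
                "port": {"type": "number"},
                "user": {"type": "string"},
                "password": {"type": "string"}
            },
            "required": ["host", "port"],
            # Assumed to match other data sources: marks the password field as
            # secret so it is masked in the data source settings UI.
            "secret": ["password"]
        }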