From 0acc16b4f4dd1de07cb590934609496e7ad06c96 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Wed, 12 Apr 2017 22:02:42 +0000 Subject: [PATCH 01/48] In docker-entrypoint ensure tables exist --- bin/docker-entrypoint | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bin/docker-entrypoint b/bin/docker-entrypoint index 0d45eb5482..1bed803efd 100755 --- a/bin/docker-entrypoint +++ b/bin/docker-entrypoint @@ -2,6 +2,7 @@ set -e worker() { + /app/manage.py db upgrade WORKERS_COUNT=${WORKERS_COUNT:-2} QUEUES=${QUEUES:-queries,scheduled_queries,celery} @@ -10,6 +11,7 @@ worker() { } scheduler() { + /app/manage.py db upgrade WORKERS_COUNT=${WORKERS_COUNT:-1} QUEUES=${QUEUES:-celery} @@ -19,6 +21,7 @@ scheduler() { } server() { + /app/manage.py db upgrade exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app } From 464e6347db8079584056fac07fcb2700ef2fdf67 Mon Sep 17 00:00:00 2001 From: Alison Date: Fri, 11 Aug 2017 09:34:35 -0500 Subject: [PATCH 02/48] upgrade node and npm in dockerfile --- Dockerfile | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/Dockerfile b/Dockerfile index e289bbde18..bbfaa77f40 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,6 +6,15 @@ COPY requirements.txt requirements_dev.txt requirements_all_ds.txt ./ RUN pip install -r requirements.txt -r requirements_dev.txt -r requirements_all_ds.txt COPY . 
./ + +# Upgrade node to LTS 6.11.2 +RUN cd ~ +RUN wget https://nodejs.org/download/release/v6.11.2/node-v6.11.2-linux-x64.tar.gz +RUN sudo tar --strip-components 1 -xzvf node-v* -C /usr/local + +# Upgrade npm +RUN npm upgrade npm + RUN npm install && npm run build && rm -rf node_modules RUN chown -R redash /app USER redash From 795f6c277e46fd18709f879815031ba26468e441 Mon Sep 17 00:00:00 2001 From: Blake Imsland Date: Wed, 19 Apr 2017 10:23:14 -0700 Subject: [PATCH 03/48] Update Circle CI for our workflow - Use new master / rc release release strategy (#440) - Migrate Circle CI 2.0 (#488, #502) --- .circleci/config.yml | 212 ++++++++++++++++++++++--------------------- bin/alias | 12 +++ bin/deploy | 19 ++++ 3 files changed, 140 insertions(+), 103 deletions(-) create mode 100755 bin/alias create mode 100755 bin/deploy diff --git a/.circleci/config.yml b/.circleci/config.yml index 92928eb86e..9c830220f1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,124 +1,130 @@ -version: 2.0 +# These environment variables must be set in CircleCI UI +# +# DOCKERHUB_REPO - docker hub repo, format: / +# DOCKER_USER +# DOCKER_PASS +# + +version: 2 jobs: - unit-tests: - environment: - COMPOSE_FILE: .circleci/docker-compose.circle.yml - COMPOSE_PROJECT_NAME: redash + build: docker: - - image: circleci/buildpack-deps:xenial + - image: docker:18.02.0-ce + working_directory: ~/mozilla/redash steps: - - setup_remote_docker - checkout + - setup_remote_docker - run: - name: Build Docker Images - command: | - set -x - docker-compose up -d - sleep 10 - - run: - name: Create Test Database - command: docker-compose run --rm postgres psql -h postgres -U postgres -c "create database tests;" - - run: - name: Run Tests - command: docker-compose run --name tests redash tests --junitxml=junit.xml tests/ - - run: - name: Copy Test Results - command: | - mkdir -p /tmp/test-results/unit-tests - docker cp tests:/app/coverage.xml ./coverage.xml - docker cp tests:/app/junit.xml 
/tmp/test-results/unit-tests/results.xml - - store_test_results: - path: /tmp/test-results - - store_artifacts: - path: coverage.xml - build-tarball: + command: docker build -t app:build . + no_output_timeout: 20m + + test: docker: - - image: circleci/node:8 + - image: circleci/node:6.14.3-stretch + - image: redis + - image: circleci/postgres:10-alpine-postgis + working_directory: ~/mozilla/redash steps: - checkout - - run: npm install - - run: npm run build - - run: .circleci/update_version - - run: .circleci/pack + - run: mkdir -p /tmp/test-reports/pytest + - run: sudo apt-get update + - run: sudo apt-get install -y python-pip python-dev + - run: sudo apt-get install -y redis-tools redis-server + - run: sudo pip install --upgrade setuptools + - run: sudo pip install -r requirements_dev.txt + - run: sudo pip install -r requirements.txt + - run: sudo npm install + - run: sudo npm run build + - run: + command: pytest --junitxml=/tmp/test-reports/pytest/junit.xml tests/ + environment: + REDASH_REDIS_URL: redis://localhost:6379/0 + REDASH_DATABASE_URL: "postgresql://postgres@localhost/postgres" - store_artifacts: - path: /tmp/artifacts/ - build-docker-image: - docker: - - image: circleci/buildpack-deps:xenial - steps: - - setup_remote_docker - - checkout - - run: .circleci/update_version - - run: docker login -u $DOCKER_USER -p $DOCKER_PASS - - run: docker build -t redash/redash:$(.circleci/docker_tag) . 
- - run: docker push redash/redash:$(.circleci/docker_tag) - integration-tests: - working_directory: ~/redash - machine: true - environment: - REDASH_SERVER_URL : "http://127.0.0.1:5000/" - DOCKER_IMAGE: mozilla/redash-ui-tests + path: /tmp/test-reports/ + destination: tr1 + - store_test_results: + path: /tmp/test-reports/ + + deploy-master: + machine: + enable: true + working_directory: ~/mozilla/redash steps: - checkout - run: - name: Install Docker Compose - command: | - set -x - pip install --upgrade pip - pip install docker-compose>=1.18 - docker-compose --version - - run: - name: Pull redash images + name: Deploy to Dockerhub + no_output_timeout: 20m command: | - set -x - docker-compose -f docker-compose.yml up --no-start - sleep 10 - - run: - name: Pull redash-ui-tests - command: docker pull "${DOCKER_IMAGE}":latest + ./bin/deploy "master" + + deploy-rc: + machine: + enable: true + working_directory: ~/mozilla/redash + steps: + - checkout - run: - name: Setup redash instance + name: Deploy to Dockerhub + no_output_timeout: 20m command: | - set -x - docker-compose run --rm --user root server create_db - docker-compose run --rm postgres psql -h postgres -U postgres -c "create database tests" - docker-compose run --rm --user root server /app/manage.py users create_root root@example.com "rootuser" --password "IAMROOT" --org default - docker-compose run --rm --user root server /app/manage.py ds new "ui-tests" --type "url" --options '{"title": "uitests"}' - docker-compose run -d -p 5000:5000 --user root server - docker-compose start postgres + ./bin/deploy "rc" + + deploy-milestone: + machine: + enable: true + working_directory: ~/mozilla/redash + steps: + - checkout - run: - name: Run tests + name: Deploy milestone to Dockerhub + no_output_timeout: 20m command: | - set -x - docker run --net="host" --env REDASH_SERVER_URL="${REDASH_SERVER_URL}" "${DOCKER_IMAGE}" - - store_artifacts: - path: report.html + ./bin/deploy "$CIRCLE_TAG" + ./bin/alias "$CIRCLE_TAG" 
"latest" + + workflows: version: 2 - integration_tests: + build-test-deploy: jobs: - - integration-tests: + - build: filters: - branches: - only: master - build: - jobs: - - unit-tests - - build-tarball: - requires: - - unit-tests - filters: - tags: - only: /v[0-9]+(\.[0-9\-a-z]+)*/ - branches: - only: - - master - - release - - build-docker-image: - requires: - - unit-tests - filters: - branches: - ignore: /.*/ - tags: - only: /v[0-9]+(\.[0-9\-a-z]+)*/ \ No newline at end of file + tags: + only: /.*/ + branches: + ignore: + - gh-pages + + - test: + filters: + tags: + only: /.*/ + branches: + ignore: + - gh-pages + + - deploy-master: + requires: + - test + filters: + branches: + only: + - master + + - deploy-rc: + requires: + - test + filters: + branches: + only: + - release + + - deploy-milestone: + requires: + - test + filters: + tags: + only: /^m[0-9]+(\.[0-9]+)?$/ + branches: + ignore: /.*/ diff --git a/bin/alias b/bin/alias new file mode 100755 index 0000000000..1d4a32a19b --- /dev/null +++ b/bin/alias @@ -0,0 +1,12 @@ +#!/bin/bash + +set -eo pipefail + +[ ! -z $DOCKERHUB_REPO ] && [ $# -eq 2 ] + +VERSION="$1" +ALIAS="$2" + +docker login -u $DOCKER_USER -p $DOCKER_PASS +docker tag $DOCKERHUB_REPO:$VERSION $DOCKERHUB_REPO:$ALIAS +docker push $DOCKERHUB_REPO:$ALIAS diff --git a/bin/deploy b/bin/deploy new file mode 100755 index 0000000000..2bdb54ed4d --- /dev/null +++ b/bin/deploy @@ -0,0 +1,19 @@ +#!/bin/bash + +set -eo pipefail + +[ ! -z $DOCKERHUB_REPO ] && [ $# -eq 1 ] + +VERSION="$1" + +printf '{"commit":"%s","version":"%s","source":"https://github.com/%s/%s","build":"%s"}\n' \ + "$CIRCLE_SHA1" \ + "$VERSION" \ + "$CIRCLE_PROJECT_USERNAME" \ + "$CIRCLE_PROJECT_REPONAME" \ + "$CIRCLE_BUILD_URL" \ +> version.json + +docker login -u $DOCKER_USER -p $DOCKER_PASS +docker build -t $DOCKERHUB_REPO:$VERSION . 
+docker push $DOCKERHUB_REPO:$VERSION From 839b0db8edcece10c47af8a9007f52be62ceaafd Mon Sep 17 00:00:00 2001 From: Alison Date: Sat, 8 Jul 2017 00:17:46 -0500 Subject: [PATCH 04/48] add pyup config document --- .pyup.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .pyup.yml diff --git a/.pyup.yml b/.pyup.yml new file mode 100644 index 0000000000..af68611aed --- /dev/null +++ b/.pyup.yml @@ -0,0 +1,6 @@ +schedule: "every day" +search: False +update: insecure +requirements: + - requirements.txt: + update: insecure From c8c89b6ef9d1275794e8d07c355647955f0a1e17 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Wed, 25 Oct 2017 19:32:37 +0200 Subject: [PATCH 05/48] Pin PyAthena dependency to 1.2.0. --- requirements_all_ds.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_all_ds.txt b/requirements_all_ds.txt index 9e60dfc53d..93d174ceee 100644 --- a/requirements_all_ds.txt +++ b/requirements_all_ds.txt @@ -19,7 +19,7 @@ cassandra-driver==3.11.0 memsql==2.16.0 atsd_client==2.0.12 simple_salesforce==0.72.2 -PyAthena>=1.0.0 +PyAthena>=1.2.0 pymapd>=0.2.1 qds-sdk>=1.9.6 # certifi is needed to support MongoDB and SSL: From 63535aa84d7e9430377c670377d70feb7b7964e1 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Thu, 14 Sep 2017 06:23:44 +0000 Subject: [PATCH 06/48] Switch to PyMySQL for MySQL 5.7 support --- redash/query_runner/mysql.py | 8 ++++---- requirements_all_ds.txt | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/redash/query_runner/mysql.py b/redash/query_runner/mysql.py index fafa5edf16..3679b694db 100644 --- a/redash/query_runner/mysql.py +++ b/redash/query_runner/mysql.py @@ -91,7 +91,7 @@ def name(cls): @classmethod def enabled(cls): try: - import MySQLdb + import pymysql except ImportError: return False @@ -127,11 +127,11 @@ def _get_tables(self, schema): return schema.values() def run_query(self, query, user): - import MySQLdb + import pymysql connection = None try: - connection = 
MySQLdb.connect(host=self.configuration.get('host', ''), + connection = pymysql.connect(host=self.configuration.get('host', ''), user=self.configuration.get('user', ''), passwd=self.configuration.get('passwd', ''), db=self.configuration['db'], @@ -161,7 +161,7 @@ def run_query(self, query, user): error = "No data was returned." cursor.close() - except MySQLdb.Error as e: + except pymysql.Error as e: json_data = None error = e.args[1] except KeyboardInterrupt: diff --git a/requirements_all_ds.txt b/requirements_all_ds.txt index 93d174ceee..e58418d55f 100644 --- a/requirements_all_ds.txt +++ b/requirements_all_ds.txt @@ -2,7 +2,7 @@ google-api-python-client==1.5.1 gspread==0.6.2 impyla==0.10.0 influxdb==2.7.1 -MySQL-python==1.2.5 +PyMySQL==0.7.11 oauth2client==3.0.0 pyhive==0.3.0 pymongo==3.6.1 From d8689e9637e97a82d8aeee2611f0c39d2938ba85 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Mon, 8 Jan 2018 22:16:14 +0000 Subject: [PATCH 07/48] upgrade to Celery 4.1.1 up celery version to fix kombu async error --- redash/metrics/celery.py | 4 ++-- redash/settings/__init__.py | 18 ++++++++++++++---- redash/worker.py | 12 ++++++------ requirements.txt | 2 +- 4 files changed, 23 insertions(+), 13 deletions(-) diff --git a/redash/metrics/celery.py b/redash/metrics/celery.py index 25b63af136..ef7ed68cf2 100644 --- a/redash/metrics/celery.py +++ b/redash/metrics/celery.py @@ -12,7 +12,7 @@ @task_prerun.connect -def task_prerun_handler(signal, sender, task_id, task, args, kwargs): +def task_prerun_handler(signal, sender, task_id, task, args, kwargs, **kw): try: tasks_start_time[task_id] = time.time() except Exception: @@ -30,7 +30,7 @@ def metric_name(name, tags): @task_postrun.connect -def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, state): +def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, state, **kw): try: run_time = 1000 * (time.time() - tasks_start_time.pop(task_id)) diff --git a/redash/settings/__init__.py 
b/redash/settings/__init__.py index 2115d8aa27..44cb71eef4 100644 --- a/redash/settings/__init__.py +++ b/redash/settings/__init__.py @@ -32,8 +32,12 @@ def all_settings(): # Celery related settings CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL) -CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", CELERY_BROKER) -CELERY_TASK_RESULT_EXPIRES = int(os.environ.get('REDASH_CELERY_TASK_RESULT_EXPIRES', 3600 * 4)) +CELERY_RESULT_BACKEND = os.environ.get( + "REDASH_CELERY_RESULT_BACKEND", + os.environ.get("REDASH_CELERY_BACKEND", CELERY_BROKER)) +CELERY_RESULT_EXPIRES = int(os.environ.get( + "REDASH_CELERY_RESULT_EXPIRES", + os.environ.get("REDASH_CELERY_TASK_RESULT_EXPIRES", 3600 * 4))) # The following enables periodic job (every 5 minutes) of removing unused query results. QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true")) @@ -107,8 +111,14 @@ def all_settings(): LOG_STDOUT = parse_boolean(os.environ.get('REDASH_LOG_STDOUT', 'false')) LOG_PREFIX = os.environ.get('REDASH_LOG_PREFIX', '') LOG_FORMAT = os.environ.get('REDASH_LOG_FORMAT', LOG_PREFIX + '[%(asctime)s][PID:%(process)d][%(levelname)s][%(name)s] %(message)s') -CELERYD_LOG_FORMAT = os.environ.get('REDASH_CELERYD_LOG_FORMAT', LOG_PREFIX + '[%(asctime)s][PID:%(process)d][%(levelname)s][%(processName)s] %(message)s') -CELERYD_TASK_LOG_FORMAT = os.environ.get('REDASH_CELERYD_TASK_LOG_FORMAT', LOG_PREFIX + '[%(asctime)s][PID:%(process)d][%(levelname)s][%(processName)s] task_name=%(task_name)s taks_id=%(task_id)s %(message)s') +CELERYD_WORKER_LOG_FORMAT = os.environ.get( + "REDASH_CELERYD_WORKER_LOG_FORMAT", + os.environ.get('REDASH_CELERYD_LOG_FORMAT', + LOG_PREFIX + '[%(asctime)s][PID:%(process)d][%(levelname)s][%(processName)s] %(message)s')) +CELERYD_WORKER_TASK_LOG_FORMAT = os.environ.get( + "REDASH_CELERYD_WORKER_TASK_LOG_FORMAT", + os.environ.get('REDASH_CELERYD_TASK_LOG_FORMAT', + LOG_PREFIX + 
'[%(asctime)s][PID:%(process)d][%(levelname)s][%(processName)s] task_name=%(task_name)s taks_id=%(task_id)s %(message)s')) # Mail settings: MAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost') diff --git a/redash/worker.py b/redash/worker.py index ecab48ec20..629180b1f1 100644 --- a/redash/worker.py +++ b/redash/worker.py @@ -44,12 +44,12 @@ 'schedule': timedelta(minutes=5) } -celery.conf.update(CELERY_RESULT_BACKEND=settings.CELERY_BACKEND, - CELERYBEAT_SCHEDULE=celery_schedule, - CELERY_TIMEZONE='UTC', - CELERY_TASK_RESULT_EXPIRES=settings.CELERY_TASK_RESULT_EXPIRES, - CELERYD_LOG_FORMAT=settings.CELERYD_LOG_FORMAT, - CELERYD_TASK_LOG_FORMAT=settings.CELERYD_TASK_LOG_FORMAT) +celery.conf.update(result_backend=settings.CELERY_RESULT_BACKEND, + beat_schedule=celery_schedule, + timezone='UTC', + result_expires=settings.CELERY_RESULT_EXPIRES, + worker_log_format=settings.CELERYD_WORKER_LOG_FORMAT, + worker_task_log_format=settings.CELERYD_WORKER_TASK_LOG_FORMAT) if settings.SENTRY_DSN: from raven import Client diff --git a/requirements.txt b/requirements.txt index 5beae129f3..314045fd1b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -33,7 +33,7 @@ wsgiref==0.1.2 honcho==0.5.0 statsd==2.1.2 gunicorn==19.7.1 -celery==3.1.25 +celery==4.1.1 jsonschema==2.4.0 RestrictedPython==3.6.0 pysaml2==4.5.0 From d9c59f4759042fcc217618b7d0c5b257e4107592 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Tue, 8 Nov 2016 16:15:35 -0600 Subject: [PATCH 08/48] Documentation links for data sources (re #6) --- client/app/assets/less/redash/query.less | 2 ++ client/app/pages/data-sources/list.html | 2 +- client/app/pages/data-sources/show.js | 21 ++++++++++++++++ .../pages/queries/get-data-source-version.js | 19 +++++++++++++++ client/app/pages/queries/query.html | 8 ++++++- client/app/pages/queries/view.js | 1 + client/app/services/data-source.js | 3 +++ redash/cli/data_sources.py | 24 +++++++++++++++++++ redash/handlers/api.py | 5 ++-- redash/handlers/data_sources.py | 
13 ++++++++++ redash/models.py | 11 +++++---- redash/query_runner/__init__.py | 24 +++++++++++++++++++ redash/query_runner/big_query.py | 6 +++++ redash/query_runner/cass.py | 6 +++++ redash/query_runner/dynamodb_sql.py | 8 +++++++ redash/query_runner/elasticsearch.py | 8 ++++++- redash/query_runner/google_spreadsheets.py | 8 +++++++ redash/query_runner/graphite.py | 5 ++++ redash/query_runner/hive_ds.py | 7 ++++++ redash/query_runner/impala_ds.py | 7 ++++++ redash/query_runner/influx_db.py | 7 ++++++ redash/query_runner/jql.py | 7 ++++++ redash/query_runner/mongodb.py | 8 +++++++ redash/query_runner/mssql.py | 6 +++++ redash/query_runner/mysql.py | 8 +++++++ redash/query_runner/oracle.py | 7 ++++++ redash/query_runner/pg.py | 18 ++++++++++++++ redash/query_runner/presto.py | 6 +++++ redash/query_runner/python.py | 8 +++++++ redash/query_runner/script.py | 8 +++++++ redash/query_runner/sqlite.py | 6 +++++ redash/query_runner/treasuredata.py | 6 +++++ redash/query_runner/url.py | 8 +++++++ redash/query_runner/vertica.py | 9 +++++++ tests/handlers/test_data_sources.py | 7 ++++-- tests/test_cli.py | 2 +- 36 files changed, 297 insertions(+), 12 deletions(-) create mode 100644 client/app/pages/queries/get-data-source-version.js diff --git a/client/app/assets/less/redash/query.less b/client/app/assets/less/redash/query.less index 30231788a3..c689f05b05 100644 --- a/client/app/assets/less/redash/query.less +++ b/client/app/assets/less/redash/query.less @@ -458,6 +458,7 @@ a.label-tag { .datasource-small { visibility: hidden; + display: none !important; } .query-fullscreen .query-metadata__mobile { @@ -576,6 +577,7 @@ nav .rg-bottom { .datasource-small { visibility: visible; + display: inline-block !important; } .query-fullscreen { diff --git a/client/app/pages/data-sources/list.html b/client/app/pages/data-sources/list.html index 56af90e071..fd23dfc516 100644 --- a/client/app/pages/data-sources/list.html +++ b/client/app/pages/data-sources/list.html @@ -9,7 +9,7 @@ diff 
--git a/client/app/pages/data-sources/show.js b/client/app/pages/data-sources/show.js index 435f41ab90..0c4b4ae15e 100644 --- a/client/app/pages/data-sources/show.js +++ b/client/app/pages/data-sources/show.js @@ -80,11 +80,32 @@ function DataSourceCtrl( }); } + function getDataSourceVersion(callback) { + Events.record('test', 'data_source_version', $scope.dataSource.id); + + DataSource.version({ id: $scope.dataSource.id }, (httpResponse) => { + if (httpResponse.ok) { + const versionNumber = httpResponse.message; + toastr.success(`Success. Version: ${versionNumber}`); + } else { + toastr.error(httpResponse.message, 'Version Test Failed:', { timeOut: 10000 }); + } + callback(); + }, (httpResponse) => { + logger('Failed to get data source version: ', httpResponse.status, httpResponse.statusText, httpResponse); + toastr.error('Unknown error occurred while performing data source version test. Please try again later.', 'Data Source Version Test Failed:', { timeOut: 10000 }); + callback(); + }); + } + $scope.actions = [ { name: 'Delete', class: 'btn-danger', callback: deleteDataSource }, { name: 'Test Connection', class: 'btn-default pull-right', callback: testConnection, disableWhenDirty: true, }, + { + name: 'Test Data Source Version', class: 'btn-default', callback: getDataSourceVersion, disableWhenDirty: true, + }, ]; } diff --git a/client/app/pages/queries/get-data-source-version.js b/client/app/pages/queries/get-data-source-version.js new file mode 100644 index 0000000000..90df5cb61c --- /dev/null +++ b/client/app/pages/queries/get-data-source-version.js @@ -0,0 +1,19 @@ +function GetDataSourceVersionCtrl(Events, toastr, $scope, DataSource, $route) { + 'ngInject'; + + this.getDataSourceVersion = DataSource.version({ + id: $route.current.locals.query.data_source_id, + }); +} + +const GetDataSourceVersionInfo = { + bindings: { + onRefresh: '&', + }, + controller: GetDataSourceVersionCtrl, + template: '{{ $ctrl.getDataSourceVersion.message }}', +}; + +export default 
function (ngModule) { + ngModule.component('getDataSourceVersion', GetDataSourceVersionInfo); +} diff --git a/client/app/pages/queries/query.html b/client/app/pages/queries/query.html index 4724cdb428..2961ecfed2 100644 --- a/client/app/pages/queries/query.html +++ b/client/app/pages/queries/query.html @@ -81,6 +81,9 @@

{{ds.name}} + {{dataSource.type_name}} documentation + {{ dataSource.type_name }} documentation +
@@ -160,6 +163,9 @@

+ {{dataSource.type_name}} documentation + {{dataSource.type_name}} +

- \ No newline at end of file + diff --git a/client/app/pages/queries/view.js b/client/app/pages/queries/view.js index 3072cda76c..16c11bc14e 100644 --- a/client/app/pages/queries/view.js +++ b/client/app/pages/queries/view.js @@ -322,6 +322,7 @@ function QueryViewCtrl( } $scope.dataSource = find($scope.dataSources, ds => ds.id === $scope.query.data_source_id); + document.getElementById('data-source-version').innerHTML = ''; getSchema(); $scope.executeQuery(); }; diff --git a/client/app/services/data-source.js b/client/app/services/data-source.js index 39d8a78ade..d26c37b947 100644 --- a/client/app/services/data-source.js +++ b/client/app/services/data-source.js @@ -21,6 +21,9 @@ function DataSource($q, $resource, $http) { isArray: false, url: 'api/data_sources/:id/test', }, + version: { + method: 'GET', cache: false, isArray: false, url: 'api/data_sources/:id/version', + }, }; const DataSourceResource = $resource('api/data_sources/:id', { id: '@id' }, actions); diff --git a/redash/cli/data_sources.py b/redash/cli/data_sources.py index 79827132a0..0b7b18ff5d 100644 --- a/redash/cli/data_sources.py +++ b/redash/cli/data_sources.py @@ -67,6 +67,30 @@ def test(name, organization='default'): print("Couldn't find data source named: {}".format(name)) exit(1) +@manager.command() +@click.argument('name') +@click.option('--org', 'organization', default='default', + help="The organization the user belongs to " + "(leave blank for 'default').") +def get_data_source_version(name, organization='default'): + """Get version of data source connection by issuing a trivial query.""" + try: + org = models.Organization.get_by_slug(organization) + data_source = models.DataSource.query.filter( + models.DataSource.name == name, + models.DataSource.org == org).one() + print("Testing get connection data source version: {} (id={})".format( + name, data_source.id)) + try: + info = data_source.query_runner.get_data_source_version() + except Exception as e: + print("Failure: {}".format(e)) + 
exit(1) + else: + print(info) + except NoResultFound: + print("Couldn't find data source named: {}".format(name)) + exit(1) @manager.command() @click.argument('name', default=None, required=False) diff --git a/redash/handlers/api.py b/redash/handlers/api.py index f8ef199857..fc23bc0031 100644 --- a/redash/handlers/api.py +++ b/redash/handlers/api.py @@ -6,8 +6,8 @@ from redash.handlers.base import org_scoped_rule from redash.handlers.permissions import ObjectPermissionsListResource, CheckPermissionResource from redash.handlers.alerts import AlertResource, AlertListResource, AlertSubscriptionListResource, AlertSubscriptionResource -from redash.handlers.dashboards import DashboardListResource, DashboardResource, DashboardShareResource, PublicDashboardResource -from redash.handlers.data_sources import DataSourceTypeListResource, DataSourceListResource, DataSourceSchemaResource, DataSourceResource, DataSourcePauseResource, DataSourceTestResource +from redash.handlers.dashboards import DashboardListResource, DashboardResource, DashboardShareResource, PublicDashboardResource +from redash.handlers.data_sources import DataSourceTypeListResource, DataSourceListResource, DataSourceSchemaResource, DataSourceResource, DataSourcePauseResource, DataSourceTestResource, DataSourceVersionResource from redash.handlers.events import EventsResource from redash.handlers.queries import QueryForkResource, QueryRefreshResource, QueryListResource, QueryRecentResource, QuerySearchResource, QueryResource, MyQueriesResource from redash.handlers.query_results import QueryResultListResource, QueryResultResource, JobResource @@ -58,6 +58,7 @@ def json_representation(data, code, headers=None): api.add_org_resource(DataSourceSchemaResource, '/api/data_sources//schema') api.add_org_resource(DataSourcePauseResource, '/api/data_sources//pause') api.add_org_resource(DataSourceTestResource, '/api/data_sources//test') +api.add_org_resource(DataSourceVersionResource, '/api/data_sources//version') 
api.add_org_resource(DataSourceResource, '/api/data_sources/', endpoint='data_source') api.add_org_resource(GroupListResource, '/api/groups', endpoint='groups') diff --git a/redash/handlers/data_sources.py b/redash/handlers/data_sources.py index be94b1d028..cfb7a03c24 100644 --- a/redash/handlers/data_sources.py +++ b/redash/handlers/data_sources.py @@ -192,3 +192,16 @@ def post(self, data_source_id): return {"message": unicode(e), "ok": False} else: return {"message": "success", "ok": True} + +class DataSourceVersionResource(BaseResource): + def get(self, data_source_id): + data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org) + require_access(data_source.groups, self.current_user, view_only) + try: + version_info = data_source.query_runner.get_data_source_version() + except Exception as e: + return {"message": unicode(e), "ok": False} + else: + return {"message": version_info, "ok": True} + + diff --git a/redash/models.py b/redash/models.py index 895ead6196..775187ea36 100644 --- a/redash/models.py +++ b/redash/models.py @@ -580,13 +580,14 @@ def to_dict(self, all=False, with_permissions_for=None): 'type': self.type, 'syntax': self.query_runner.syntax, 'paused': self.paused, - 'pause_reason': self.pause_reason + 'pause_reason': self.pause_reason, + 'type_name': self.query_runner.name(), } + schema = get_configuration_schema_for_query_runner_type(self.type) + self.options.set_schema(schema) + d['options'] = self.options.to_dict(mask_secrets=True) if all: - schema = get_configuration_schema_for_query_runner_type(self.type) - self.options.set_schema(schema) - d['options'] = self.options.to_dict(mask_secrets=True) d['queue_name'] = self.queue_name d['scheduled_queue_name'] = self.scheduled_queue_name d['groups'] = self.groups @@ -670,6 +671,8 @@ def add_group(self, group, view_only=False): db.session.add(dsg) return dsg + setattr(self, 'data_source_groups', dsg) + def remove_group(self, group): 
db.session.query(DataSourceGroup).filter( DataSourceGroup.group == group, diff --git a/redash/query_runner/__init__.py b/redash/query_runner/__init__.py index 1a5357dbad..60bcbe4005 100644 --- a/redash/query_runner/__init__.py +++ b/redash/query_runner/__init__.py @@ -51,6 +51,8 @@ class NotSupported(Exception): class BaseQueryRunner(object): noop_query = None + default_doc_url = None + data_source_version_query = None def __init__(self, configuration): self.syntax = 'sql' @@ -76,6 +78,28 @@ def annotate_query(cls): def configuration_schema(cls): return {} + def get_data_source_version(self): + if self.data_source_version_query is None: + raise NotImplementedError + data, error = self.run_query(self.data_source_version_query, None) + + if error is not None: + raise Exception(error) + + try: + version = json.loads(data)['rows'][0]['version'] + except KeyError as e: + raise Exception(e) + + if self.data_source_version_post_process == "split by space take second": + version = version.split(" ")[1] + elif self.data_source_version_post_process == "split by space take last": + version = version.split(" ")[-1] + elif self.data_source_version_post_process == "none": + version = version + + return version + def test_connection(self): if self.noop_query is None: raise NotImplementedError() diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py index 7e44661d2d..52f9e4cc99 100644 --- a/redash/query_runner/big_query.py +++ b/redash/query_runner/big_query.py @@ -83,6 +83,7 @@ def _get_query_results(jobs, project_id, location, job_id, start_index): class BigQuery(BaseQueryRunner): noop_query = "SELECT 1" + default_doc_url = "https://cloud.google.com/bigquery/docs/reference/legacy-sql" @classmethod def enabled(cls): @@ -125,6 +126,11 @@ def configuration_schema(cls): 'maximumBillingTier': { "type": "number", "title": "Maximum Billing Tier" + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 
'required': ['jsonKeyFile', 'projectId'], diff --git a/redash/query_runner/cass.py b/redash/query_runner/cass.py index 37bcc98cbb..4c5af888e5 100644 --- a/redash/query_runner/cass.py +++ b/redash/query_runner/cass.py @@ -27,6 +27,7 @@ def default(self, o): class Cassandra(BaseQueryRunner): noop_query = "SELECT dateof(now()) FROM system.local" + default_doc_url = "http://cassandra.apache.org/doc/latest/cql/index.html" @classmethod def enabled(cls): @@ -65,6 +66,11 @@ def configuration_schema(cls): 'type': 'number', 'title': 'Timeout', 'default': 10 + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 'required': ['keyspace', 'host'] diff --git a/redash/query_runner/dynamodb_sql.py b/redash/query_runner/dynamodb_sql.py index 6c7a980326..b29942c4f7 100644 --- a/redash/query_runner/dynamodb_sql.py +++ b/redash/query_runner/dynamodb_sql.py @@ -33,6 +33,9 @@ class DynamoDBSQL(BaseSQLQueryRunner): + noop_query = "SELECT 1" + default_doc_url = "https://dql.readthedocs.io/en/latest/" + @classmethod def configuration_schema(cls): return { @@ -47,6 +50,11 @@ def configuration_schema(cls): }, "secret_key": { "type": "string", + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, "required": ["access_key", "secret_key"], diff --git a/redash/query_runner/elasticsearch.py b/redash/query_runner/elasticsearch.py index 7b2a06ec37..b2404ffb7c 100644 --- a/redash/query_runner/elasticsearch.py +++ b/redash/query_runner/elasticsearch.py @@ -44,7 +44,8 @@ class BaseElasticSearch(BaseQueryRunner): - DEBUG_ENABLED = False + DEBUG_ENABLED = True + default_doc_url = "https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html" @classmethod def configuration_schema(cls): @@ -62,6 +63,11 @@ def configuration_schema(cls): 'basic_auth_password': { 'type': 'string', 'title': 'Basic Auth Password' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + 
"default": cls.default_doc_url } }, "secret": ["basic_auth_password"], diff --git a/redash/query_runner/google_spreadsheets.py b/redash/query_runner/google_spreadsheets.py index 61b7a62d5d..ccd2d8cbb2 100644 --- a/redash/query_runner/google_spreadsheets.py +++ b/redash/query_runner/google_spreadsheets.py @@ -148,6 +148,9 @@ def request(self, *args, **kwargs): class GoogleSpreadsheet(BaseQueryRunner): + default_doc_url = ("http://redash.readthedocs.io/en/latest/" + "datasources.html#google-spreadsheets") + @classmethod def annotate_query(cls): return False @@ -168,6 +171,11 @@ def configuration_schema(cls): 'jsonKeyFile': { "type": "string", 'title': 'JSON Key File' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 'required': ['jsonKeyFile'], diff --git a/redash/query_runner/graphite.py b/redash/query_runner/graphite.py index 023ec04940..e2aaff6643 100644 --- a/redash/query_runner/graphite.py +++ b/redash/query_runner/graphite.py @@ -42,6 +42,11 @@ def configuration_schema(cls): 'verify': { 'type': 'boolean', 'title': 'Verify SSL certificate' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 'required': ['url'], diff --git a/redash/query_runner/hive_ds.py b/redash/query_runner/hive_ds.py index 2d3aa4a303..042647e8ef 100644 --- a/redash/query_runner/hive_ds.py +++ b/redash/query_runner/hive_ds.py @@ -36,6 +36,8 @@ class Hive(BaseSQLQueryRunner): noop_query = "SELECT 1" + default_doc_url = ("https://cwiki.apache.org/confluence/display/Hive/" + "LanguageManual") @classmethod def configuration_schema(cls): @@ -53,6 +55,11 @@ def configuration_schema(cls): }, "username": { "type": "string" + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, "required": ["host"] diff --git a/redash/query_runner/impala_ds.py b/redash/query_runner/impala_ds.py index 0f412ffac6..8fa7db4593 100644 --- 
a/redash/query_runner/impala_ds.py +++ b/redash/query_runner/impala_ds.py @@ -36,6 +36,8 @@ class Impala(BaseSQLQueryRunner): noop_query = "show schemas" + default_doc_url = ("http://www.cloudera.com/documentation/enterprise/" + "latest/topics/impala_langref.html") @classmethod def configuration_schema(cls): @@ -66,6 +68,11 @@ def configuration_schema(cls): }, "timeout": { "type": "number" + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, "required": ["host"], diff --git a/redash/query_runner/influx_db.py b/redash/query_runner/influx_db.py index 5d830daa46..f512e140bc 100644 --- a/redash/query_runner/influx_db.py +++ b/redash/query_runner/influx_db.py @@ -50,6 +50,8 @@ def _transform_result(results): class InfluxDB(BaseQueryRunner): noop_query = "show measurements limit 1" + default_doc_url = ("https://docs.influxdata.com/influxdb/v1.0/" + "query_language/spec/") @classmethod def configuration_schema(cls): @@ -58,6 +60,11 @@ def configuration_schema(cls): 'properties': { 'url': { 'type': 'string' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 'required': ['url'] diff --git a/redash/query_runner/jql.py b/redash/query_runner/jql.py index 37b1f345c6..e3f9743547 100644 --- a/redash/query_runner/jql.py +++ b/redash/query_runner/jql.py @@ -139,6 +139,8 @@ def get_dict_output_field_name(cls,field_name, member_name): class JiraJQL(BaseQueryRunner): noop_query = '{"queryType": "count"}' + default_doc_url = ("https://confluence.atlassian.com/jirasoftwarecloud/" + "advanced-searching-764478330.html") @classmethod def configuration_schema(cls): @@ -154,6 +156,11 @@ def configuration_schema(cls): }, 'password': { 'type': 'string' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 'required': ['url', 'username', 'password'], diff --git a/redash/query_runner/mongodb.py b/redash/query_runner/mongodb.py 
index 91323a8a19..04b5ef8dac 100644 --- a/redash/query_runner/mongodb.py +++ b/redash/query_runner/mongodb.py @@ -118,6 +118,9 @@ def parse_results(results): class MongoDB(BaseQueryRunner): + default_doc_url = ("https://docs.mongodb.com/manual/reference/operator/" + "query/") + @classmethod def configuration_schema(cls): return { @@ -135,6 +138,11 @@ def configuration_schema(cls): 'type': 'string', 'title': 'Replica Set Name' }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url + } }, 'required': ['connectionString', 'dbName'] } diff --git a/redash/query_runner/mssql.py b/redash/query_runner/mssql.py index 57ee300a9c..4865df9794 100644 --- a/redash/query_runner/mssql.py +++ b/redash/query_runner/mssql.py @@ -35,6 +35,7 @@ def default(self, o): class SqlServer(BaseSQLQueryRunner): noop_query = "SELECT 1" + default_doc_url = "https://msdn.microsoft.com/en-us/library/bb510741.aspx" @classmethod def configuration_schema(cls): @@ -68,6 +69,11 @@ def configuration_schema(cls): "db": { "type": "string", "title": "Database Name" + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, "required": ["db"], diff --git a/redash/query_runner/mysql.py b/redash/query_runner/mysql.py index 3679b694db..525dd3d054 100644 --- a/redash/query_runner/mysql.py +++ b/redash/query_runner/mysql.py @@ -29,6 +29,9 @@ class Mysql(BaseSQLQueryRunner): noop_query = "SELECT 1" + default_doc_url = 'https://dev.mysql.com/doc/refman/5.7/en/' + data_source_version_query = "select version()" + data_source_version_post_process = "none" @classmethod def configuration_schema(cls): @@ -79,6 +82,11 @@ def configuration_schema(cls): 'ssl_key': { 'type': 'string', 'title': 'Path to private key file (SSL)' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }) diff --git a/redash/query_runner/oracle.py b/redash/query_runner/oracle.py index 
5bb8f70f2f..12448029d5 100644 --- a/redash/query_runner/oracle.py +++ b/redash/query_runner/oracle.py @@ -31,8 +31,10 @@ logger = logging.getLogger(__name__) + class Oracle(BaseSQLQueryRunner): noop_query = "SELECT 1 FROM dual" + default_doc_url = "http://docs.oracle.com/database/121/SQLRF/toc.htm" @classmethod def get_col_type(cls, col_type, scale): @@ -65,6 +67,11 @@ def configuration_schema(cls): "servicename": { "type": "string", "title": "DSN Service Name" + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, "required": ["servicename", "user", "password", "host", "port"], diff --git a/redash/query_runner/pg.py b/redash/query_runner/pg.py index 348f5c925b..3bd20af567 100644 --- a/redash/query_runner/pg.py +++ b/redash/query_runner/pg.py @@ -47,6 +47,9 @@ def _wait(conn, timeout=None): class PostgreSQL(BaseSQLQueryRunner): noop_query = "SELECT 1" + default_doc_url = "https://www.postgresql.org/docs/current/" + data_source_version_query = "select version()" + data_source_version_post_process = "split by space take second" @classmethod def configuration_schema(cls): @@ -75,6 +78,11 @@ def configuration_schema(cls): "type": "string", "title": "SSL Mode", "default": "prefer" + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, "order": ['host', 'port', 'user', 'password'], @@ -187,6 +195,11 @@ def run_query(self, query, user): class Redshift(PostgreSQL): + default_doc_url = ("http://docs.aws.amazon.com/redshift/latest/" + "dg/cm_chap_SQLCommandRef.html") + data_source_version_query = "select version()" + data_source_version_post_process = "split by space take last" + @classmethod def type(cls): return "redshift" @@ -231,6 +244,11 @@ def configuration_schema(cls): "type": "string", "title": "SSL Mode", "default": "prefer" + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, "order": ['host', 'port', 
'user', 'password'], diff --git a/redash/query_runner/presto.py b/redash/query_runner/presto.py index e915fa9a2e..7ed9823f41 100644 --- a/redash/query_runner/presto.py +++ b/redash/query_runner/presto.py @@ -33,6 +33,7 @@ class Presto(BaseQueryRunner): noop_query = 'SHOW TABLES' + default_doc_url = 'https://prestodb.io/docs/current/' @classmethod def configuration_schema(cls): @@ -53,6 +54,11 @@ def configuration_schema(cls): }, 'username': { 'type': 'string' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 'required': ['host'] diff --git a/redash/query_runner/python.py b/redash/query_runner/python.py index f8e69f96ce..275b44fa16 100644 --- a/redash/query_runner/python.py +++ b/redash/query_runner/python.py @@ -46,6 +46,9 @@ class Python(BaseQueryRunner): 'tuple', 'set', 'list', 'dict', 'bool', ) + default_doc_url = ("http://redash.readthedocs.io/en/latest/" + "datasources.html#python") + @classmethod def configuration_schema(cls): return { @@ -57,6 +60,11 @@ def configuration_schema(cls): }, 'additionalModulesPaths': { 'type': 'string' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, } diff --git a/redash/query_runner/script.py b/redash/query_runner/script.py index ea54362d57..dac69b66ec 100644 --- a/redash/query_runner/script.py +++ b/redash/query_runner/script.py @@ -29,6 +29,9 @@ def run_script(script, shell): class Script(BaseQueryRunner): + default_doc_url = ("http://redash.readthedocs.io/en/latest/" + "datasources.html#python") + @classmethod def annotate_query(cls): return False @@ -49,6 +52,11 @@ def configuration_schema(cls): 'shell': { 'type': 'boolean', 'title': 'Execute command through the shell' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 'required': ['path'] diff --git a/redash/query_runner/sqlite.py b/redash/query_runner/sqlite.py index 2bab1f27c4..18bef51816 
100644 --- a/redash/query_runner/sqlite.py +++ b/redash/query_runner/sqlite.py @@ -13,6 +13,7 @@ class Sqlite(BaseSQLQueryRunner): noop_query = "pragma quick_check" + default_doc_url = "http://sqlite.org/lang.html" @classmethod def configuration_schema(cls): @@ -22,6 +23,11 @@ def configuration_schema(cls): "dbpath": { "type": "string", "title": "Database Path" + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, "required": ["dbpath"], diff --git a/redash/query_runner/treasuredata.py b/redash/query_runner/treasuredata.py index 0ecfacd25d..1a05fbb0c5 100644 --- a/redash/query_runner/treasuredata.py +++ b/redash/query_runner/treasuredata.py @@ -36,6 +36,7 @@ class TreasureData(BaseQueryRunner): noop_query = "SELECT 1" + default_doc_url = "https://docs.treasuredata.com/categories/hive" @classmethod def configuration_schema(cls): @@ -59,6 +60,11 @@ def configuration_schema(cls): 'type': 'boolean', 'title': 'Auto Schema Retrieval', 'default': False + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } }, 'required': ['apikey','db'] diff --git a/redash/query_runner/url.py b/redash/query_runner/url.py index 8763b63ed2..5da7659390 100644 --- a/redash/query_runner/url.py +++ b/redash/query_runner/url.py @@ -3,6 +3,9 @@ class Url(BaseQueryRunner): + default_doc_url = ("http://redash.readthedocs.io/en/latest/" + "datasources.html#url") + @classmethod def configuration_schema(cls): return { @@ -11,6 +14,11 @@ def configuration_schema(cls): 'url': { 'type': 'string', 'title': 'URL base path' + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url } } } diff --git a/redash/query_runner/vertica.py b/redash/query_runner/vertica.py index 05dd3b1ea0..680fbe3cfb 100644 --- a/redash/query_runner/vertica.py +++ b/redash/query_runner/vertica.py @@ -30,6 +30,10 @@ class Vertica(BaseSQLQueryRunner): noop_query = "SELECT 1" + 
default_doc_url = ( + "https://my.vertica.com/docs/8.0.x/HTML/index.htm#Authoring/" + "ConceptsGuide/Other/SQLOverview.htm%3FTocPath%3DSQL" + "%2520Reference%2520Manual%7C_____1") @classmethod def configuration_schema(cls): @@ -57,6 +61,11 @@ def configuration_schema(cls): "type": "number", "title": "Read Timeout" }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url + } }, 'required': ['database'], 'secret': ['password'] diff --git a/tests/handlers/test_data_sources.py b/tests/handlers/test_data_sources.py index f07a2b3719..4590056fd4 100644 --- a/tests/handlers/test_data_sources.py +++ b/tests/handlers/test_data_sources.py @@ -60,7 +60,8 @@ def test_updates_data_source(self): new_name = 'New Name' new_options = {"dbname": "newdb"} rv = self.make_request('post', self.path, - data={'name': new_name, 'type': 'pg', 'options': new_options}, + data={'name': new_name, 'type': 'pg', 'options': new_options, + 'doc_url': None}, user=admin) self.assertEqual(rv.status_code, 200) @@ -101,7 +102,9 @@ def test_returns_400_when_configuration_invalid(self): def test_creates_data_source(self): admin = self.factory.create_admin() rv = self.make_request('post', '/api/data_sources', - data={'name': 'DS 1', 'type': 'pg', 'options': {"dbname": "redash"}}, user=admin) + data={'name': 'DS 1', 'type': 'pg', + 'options': {"dbname": "redash"}, + 'doc_url': None}, user=admin) self.assertEqual(rv.status_code, 200) diff --git a/tests/test_cli.py b/tests/test_cli.py index fc6d707025..7788521ef1 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -16,7 +16,7 @@ def test_interactive_new(self): result = runner.invoke( manager, ['ds', 'new'], - input="test\n%s\n\n\nexample.com\n\n\ntestdb\n" % (pg_i,)) + input="test\n%s\n\n\n\nexample.com\n\n\ntestdb\n" % (pg_i,)) self.assertFalse(result.exception) self.assertEqual(result.exit_code, 0) self.assertEqual(DataSource.query.count(), 1) From 9dd0160a5a56820def4d66dba8be1239bf610247 Mon Sep 17 
00:00:00 2001 From: Allen Short Date: Fri, 10 Feb 2017 00:49:30 -0600 Subject: [PATCH 09/48] Don't execute query when changing data sources (fixes #29) --- client/app/pages/queries/view.js | 1 - 1 file changed, 1 deletion(-) diff --git a/client/app/pages/queries/view.js b/client/app/pages/queries/view.js index 16c11bc14e..f3331771c9 100644 --- a/client/app/pages/queries/view.js +++ b/client/app/pages/queries/view.js @@ -324,7 +324,6 @@ function QueryViewCtrl( $scope.dataSource = find($scope.dataSources, ds => ds.id === $scope.query.data_source_id); document.getElementById('data-source-version').innerHTML = ''; getSchema(); - $scope.executeQuery(); }; $scope.setVisualizationTab = (visualization) => { From a212d1e25460564e4cb4d04a81c62b1d409511f8 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Thu, 16 Feb 2017 08:44:18 -0600 Subject: [PATCH 10/48] Retry fetching query result on failure (fixes #36) --- client/app/services/query-result.js | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/client/app/services/query-result.js b/client/app/services/query-result.js index 1533b2b948..c15734c4d4 100644 --- a/client/app/services/query-result.js +++ b/client/app/services/query-result.js @@ -496,8 +496,16 @@ function QueryResultService($resource, $timeout, $q, QueryResultError) { } }, (error) => { logger('Connection error', error); - // TODO: use QueryResultError, or better yet: exception/reject of promise. - this.update({ job: { error: 'failed communicating with server. Please check your Internet connection and try again.', status: 4 } }); + this.update({ + job: { + error: 'Failed communicating with server. 
Retrying...', + status: 4, + id: this.job.id, + }, + }); + $timeout(() => { + this.refreshStatus(query); + }, 3000); }); } From 64913b26f6f9041ca444b3422b113641c0f0b526 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Fri, 24 Feb 2017 13:06:36 -0600 Subject: [PATCH 11/48] Use saved values for parameters in scheduled queries (re #43) --- tests/tasks/test_refresh_queries.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/tasks/test_refresh_queries.py b/tests/tasks/test_refresh_queries.py index 90641ed1a3..5202559671 100644 --- a/tests/tasks/test_refresh_queries.py +++ b/tests/tasks/test_refresh_queries.py @@ -45,3 +45,23 @@ def test_doesnt_enqueue_outdated_queries_for_paused_data_source(self): add_job_mock.assert_called_with( query.query_text, query.data_source, query.user_id, scheduled_query=query, metadata=ANY) + + def test_enqueues_parameterized_queries(self): + """ + Scheduled queries with parameters use saved values. + """ + query = self.factory.create_query( + query_text="select {{n}}", + options={"parameters": [{ + "global": False, + "type": "text", + "name": "n", + "value": "42", + "title": "n"}]}) + oq = staticmethod(lambda: [query]) + with patch('redash.tasks.queries.enqueue_query') as add_job_mock, \ + patch.object(Query, 'outdated_queries', oq): + refresh_queries() + add_job_mock.assert_called_with( + "select 42", query.data_source, query.user_id, + scheduled_query=query, metadata=ANY) From 4e41acbece6ffd4fd3499547b7afee020a33a4a1 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Thu, 2 Mar 2017 16:43:57 -0600 Subject: [PATCH 12/48] Add `schedule_until` field to queries, to allow expiry (re #15) --- .../components/queries/schedule-dialog.html | 4 +++ .../app/components/queries/schedule-dialog.js | 12 ++++++++ client/app/services/query.js | 4 +++ migrations/versions/eb2f788f997e_.py | 27 ++++++++++++++++++ redash/handlers/queries.py | 2 ++ redash/models.py | 5 +++- redash/serializers.py | 1 + tests/test_models.py | 28 
+++++++++++++++++++ 8 files changed, 82 insertions(+), 1 deletion(-) create mode 100644 migrations/versions/eb2f788f997e_.py diff --git a/client/app/components/queries/schedule-dialog.html b/client/app/components/queries/schedule-dialog.html index 8f1ab21541..f9344238a1 100644 --- a/client/app/components/queries/schedule-dialog.html +++ b/client/app/components/queries/schedule-dialog.html @@ -15,4 +15,8 @@ + diff --git a/client/app/components/queries/schedule-dialog.js b/client/app/components/queries/schedule-dialog.js index 1fc60c3925..db6ebe0320 100644 --- a/client/app/components/queries/schedule-dialog.js +++ b/client/app/components/queries/schedule-dialog.js @@ -103,6 +103,17 @@ function queryRefreshSelect(clientConfig, Policy) { }; } +function scheduleUntil() { + return { + restrict: 'E', + scope: { + query: '=', + saveQuery: '=', + }, + template: '', + }; +} + const ScheduleForm = { controller() { this.query = this.resolve.query; @@ -125,5 +136,6 @@ const ScheduleForm = { export default function init(ngModule) { ngModule.directive('queryTimePicker', queryTimePicker); ngModule.directive('queryRefreshSelect', queryRefreshSelect); + ngModule.directive('scheduleUntil', scheduleUntil); ngModule.component('scheduleDialog', ScheduleForm); } diff --git a/client/app/services/query.js b/client/app/services/query.js index 9f525caff5..0b39a23a86 100644 --- a/client/app/services/query.js +++ b/client/app/services/query.js @@ -402,6 +402,10 @@ function QueryResource( .format('HH:mm'); }; + Query.prototype.hasScheduleExpiry = function hasScheduleExpiry() { + return (this.schedule && this.schedule_until); + }; + Query.prototype.hasResult = function hasResult() { return !!(this.latest_query_data || this.latest_query_data_id); }; diff --git a/migrations/versions/eb2f788f997e_.py b/migrations/versions/eb2f788f997e_.py new file mode 100644 index 0000000000..71fd2bd5b3 --- /dev/null +++ b/migrations/versions/eb2f788f997e_.py @@ -0,0 +1,27 @@ +"""Add 'schedule_until' column to 
queries. + +Revision ID: eb2f788f997e +Revises: d1eae8b9893e +Create Date: 2017-03-02 12:20:00.029066 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'eb2f788f997e' +down_revision = 'd1eae8b9893e' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + 'queries', + sa.Column('schedule_until', sa.DateTime(timezone=True), nullable=True)) + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('queries', 'schedule_until') diff --git a/redash/handlers/queries.py b/redash/handlers/queries.py index e2fd67a07b..673cb69682 100644 --- a/redash/handlers/queries.py +++ b/redash/handlers/queries.py @@ -103,6 +103,7 @@ def post(self): :json string query: Query text :>json string query_hash: Hash of query text :>json string schedule: Schedule interval, in seconds, for repeated execution of this query + :json string api_key: Key for public access to this query's results. :>json boolean is_archived: Whether this query is displayed in indexes and search results or not. 
:>json boolean is_draft: Whether this query is a draft or not diff --git a/redash/models.py b/redash/models.py index 775187ea36..4a8bec4eed 100644 --- a/redash/models.py +++ b/redash/models.py @@ -881,6 +881,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model): is_draft = Column(db.Boolean, default=True, index=True) schedule = Column(db.String(10), nullable=True) schedule_failures = Column(db.Integer, default=0) + schedule_until = Column(db.DateTime(True), nullable=True) visualizations = db.relationship("Visualization", cascade="all, delete-orphan") options = Column(MutableDict.as_mutable(PseudoJSON), default={}) search_vector = Column(TSVectorType('id', 'name', 'description', 'query', @@ -1001,7 +1002,9 @@ def by_user(cls, user): def outdated_queries(cls): queries = (db.session.query(Query) .options(joinedload(Query.latest_query_data).load_only('retrieved_at')) - .filter(Query.schedule != None) + .filter(Query.schedule != None, + (Query.schedule_until == None) | + (Query.schedule_until > db.func.now())) .order_by(Query.id)) now = utils.utcnow() diff --git a/redash/serializers.py b/redash/serializers.py index f1e40de803..641c39ce43 100644 --- a/redash/serializers.py +++ b/redash/serializers.py @@ -90,6 +90,7 @@ def serialize_query(query, with_stats=False, with_visualizations=False, with_use 'query': query.query_text, 'query_hash': query.query_hash, 'schedule': query.schedule, + 'schedule_until': query.schedule_until, 'api_key': query.api_key, 'is_archived': query.is_archived, 'is_draft': query.is_draft, diff --git a/tests/test_models.py b/tests/test_models.py index f08e28ec53..7e60e510ab 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -192,6 +192,34 @@ def test_failure_extends_schedule(self): query_result.retrieved_at = utcnow() - datetime.timedelta(minutes=17) self.assertEqual(list(models.Query.outdated_queries()), [query]) + def test_schedule_until_after(self): + """ + Queries with non-null ``schedule_until`` are not 
reported by + Query.outdated_queries() after the given time is past. + """ + three_hours_ago = utcnow() - datetime.timedelta(hours=3) + two_hours_ago = utcnow() - datetime.timedelta(hours=2) + query = self.factory.create_query(schedule="3600", schedule_until=three_hours_ago) + query_result = self.factory.create_query_result(query=query.query_text, retrieved_at=two_hours_ago) + query.latest_query_data = query_result + + queries = models.Query.outdated_queries() + self.assertNotIn(query, queries) + + def test_schedule_until_before(self): + """ + Queries with non-null ``schedule_until`` are reported by + Query.outdated_queries() before the given time is past. + """ + one_hour_from_now = utcnow() + datetime.timedelta(hours=1) + two_hours_ago = utcnow() - datetime.timedelta(hours=2) + query = self.factory.create_query(schedule="3600", schedule_until=one_hour_from_now) + query_result = self.factory.create_query_result(query=query.query_text, retrieved_at=two_hours_ago) + query.latest_query_data = query_result + + queries = models.Query.outdated_queries() + self.assertIn(query, queries) + class QueryArchiveTest(BaseTestCase): def setUp(self): From 1e4129f9888c8f3e8f04453eb8bff87899b436f6 Mon Sep 17 00:00:00 2001 From: Davor Spasovski Date: Mon, 6 Feb 2017 13:39:46 -0500 Subject: [PATCH 13/48] add compare query version support (re #7) --- .../pages/queries/compare-query-dialog.css | 54 ++++++++++++++++ .../pages/queries/compare-query-dialog.html | 33 ++++++++++ .../app/pages/queries/compare-query-dialog.js | 63 +++++++++++++++++++ client/app/pages/queries/query.html | 3 + client/app/pages/queries/view.js | 15 +++++ package.json | 1 + redash/handlers/api.py | 4 +- redash/handlers/dashboards.py | 1 + redash/handlers/queries.py | 14 +++++ redash/models.py | 40 +++++++----- tests/handlers/test_queries.py | 26 ++++++++ tests/models/test_changes.py | 13 +--- tests/test_models.py | 3 +- 13 files changed, 240 insertions(+), 30 deletions(-) create mode 100644 
client/app/pages/queries/compare-query-dialog.css create mode 100644 client/app/pages/queries/compare-query-dialog.html create mode 100644 client/app/pages/queries/compare-query-dialog.js diff --git a/client/app/pages/queries/compare-query-dialog.css b/client/app/pages/queries/compare-query-dialog.css new file mode 100644 index 0000000000..ce2d01370e --- /dev/null +++ b/client/app/pages/queries/compare-query-dialog.css @@ -0,0 +1,54 @@ +/* Compare Query Version container */ +/* Offers slight visual improvement (alignment) to modern UAs */ +.compare-query-version { + display: flex; + justify-content: space-between; + align-items: center; +} + +.diff-removed { + background-color: rgba(208, 2, 27, 0.3); +} + +.diff-added { + background-color: rgba(65, 117, 5, 0.3); +} + +.query-diff-container span { + display: inline-block; + border-radius: 3px; + line-height: 20px; + vertical-align: middle; + margin: 0 5px 0 0; +} + +.query-diff-container > div:not(.compare-query-version-controls) { + float: left; + width: calc(50% - 5px); + margin: 0 10px 0 0; +} + +.compare-query-version { + background-color: #f5f5f5; + padding: 5px; + border: 1px solid #ccc; + margin-right: 15px; + border-radius: 3px; +} + +.diff-content { + border: 1px solid #ccc; + background-color: #f5f5f5; + border-radius: 3px; + padding: 15px; +} + +.query-diff-container > div:last-child { + margin: 0; +} + +.compare-query-version-controls { + display: flex; + align-items: center; + margin-bottom: 25px; +} diff --git a/client/app/pages/queries/compare-query-dialog.html b/client/app/pages/queries/compare-query-dialog.html new file mode 100644 index 0000000000..5214046055 --- /dev/null +++ b/client/app/pages/queries/compare-query-dialog.html @@ -0,0 +1,33 @@ + + diff --git a/client/app/pages/queries/compare-query-dialog.js b/client/app/pages/queries/compare-query-dialog.js new file mode 100644 index 0000000000..fb4338971a --- /dev/null +++ b/client/app/pages/queries/compare-query-dialog.js @@ -0,0 +1,63 @@ 
+import * as jsDiff from 'diff'; +import template from './compare-query-dialog.html'; +import './compare-query-dialog.css'; + +const CompareQueryDialog = { + controller: ['clientConfig', '$http', function doCompare(clientConfig, $http) { + this.currentQuery = this.resolve.query; + + this.previousQuery = ''; + this.currentDiff = []; + this.previousDiff = []; + this.versions = []; + this.previousQueryVersion = this.currentQuery.version - 2; // due to 0-indexed versions[] + + this.compareQueries = (isInitialLoad) => { + if (!isInitialLoad) { + this.previousQueryVersion = document.getElementById('version-choice').value - 1; // due to 0-indexed versions[] + } + + this.previousQuery = this.versions[this.previousQueryVersion].change.query.current; + this.currentDiff = jsDiff.diffChars(this.previousQuery, this.currentQuery.query); + document.querySelector('.compare-query-revert-wrapper').classList.remove('hidden'); + }; + + this.revertQuery = () => { + this.resolve.query.query = this.previousQuery; + this.resolve.saveQuery(); + + // Close modal. + this.dismiss(); + }; + + $http.get(`/api/queries/${this.currentQuery.id}/version`).then((response) => { + this.versions = response.data; + + const compare = (a, b) => { + if (a.object_version < b.object_version) { + return -1; + } else if (a.object_version > b.object_version) { + return 1; + } + return 0; + }; + + this.versions.sort(compare); + this.compareQueries(true); + }); + }], + scope: { + query: '=', + saveQuery: '<', + }, + bindings: { + resolve: '<', + close: '&', + dismiss: '&', + }, + template, +}; + +export default function (ngModule) { + ngModule.component('compareQueryDialog', CompareQueryDialog); +} diff --git a/client/app/pages/queries/query.html b/client/app/pages/queries/query.html index 2961ecfed2..be12d3d6c6 100644 --- a/client/app/pages/queries/query.html +++ b/client/app/pages/queries/query.html @@ -67,6 +67,9 @@

  • Show API Key
  • +
  • + Query Versions +
  • diff --git a/client/app/pages/queries/view.js b/client/app/pages/queries/view.js index f3331771c9..2235ab853b 100644 --- a/client/app/pages/queries/view.js +++ b/client/app/pages/queries/view.js @@ -353,6 +353,21 @@ function QueryViewCtrl( }); }; + $scope.compareQueryVersion = () => { + if (!$scope.query.query) { + return; + } + + $uibModal.open({ + windowClass: 'modal-xl', + component: 'compareQueryDialog', + resolve: { + query: $scope.query, + saveQuery: () => $scope.saveQuery, + }, + }); + }; + $scope.$watch('query.name', () => { Title.set($scope.query.name); }); diff --git a/package.json b/package.json index f3899c9c44..d036f12353 100644 --- a/package.json +++ b/package.json @@ -51,6 +51,7 @@ "d3": "^3.5.17", "d3-cloud": "^1.2.4", "debug": "^3.1.0", + "diff": "^3.3.0", "font-awesome": "^4.7.0", "gridstack": "^0.3.0", "jquery": "^3.2.1", diff --git a/redash/handlers/api.py b/redash/handlers/api.py index fc23bc0031..fd6fe5fd15 100644 --- a/redash/handlers/api.py +++ b/redash/handlers/api.py @@ -9,7 +9,7 @@ from redash.handlers.dashboards import DashboardListResource, DashboardResource, DashboardShareResource, PublicDashboardResource from redash.handlers.data_sources import DataSourceTypeListResource, DataSourceListResource, DataSourceSchemaResource, DataSourceResource, DataSourcePauseResource, DataSourceTestResource, DataSourceVersionResource from redash.handlers.events import EventsResource -from redash.handlers.queries import QueryForkResource, QueryRefreshResource, QueryListResource, QueryRecentResource, QuerySearchResource, QueryResource, MyQueriesResource +from redash.handlers.queries import QueryForkResource, QueryRefreshResource, QueryListResource, QueryRecentResource, QuerySearchResource, QueryResource, MyQueriesResource, QueryVersionListResource, ChangeResource from redash.handlers.query_results import QueryResultListResource, QueryResultResource, JobResource from redash.handlers.users import UserResource, UserListResource, UserInviteResource, 
UserResetPasswordResource, UserDisableResource from redash.handlers.visualizations import VisualizationListResource @@ -85,6 +85,8 @@ def json_representation(data, code, headers=None): api.add_org_resource(QueryRefreshResource, '/api/queries//refresh', endpoint='query_refresh') api.add_org_resource(QueryResource, '/api/queries/', endpoint='query') api.add_org_resource(QueryForkResource, '/api/queries//fork', endpoint='query_fork') +api.add_org_resource(QueryVersionListResource, '/api/queries//version', endpoint='query_versions') +api.add_org_resource(ChangeResource, '/api/changes/', endpoint='changes') api.add_org_resource(ObjectPermissionsListResource, '/api///acl', endpoint='object_permissions') api.add_org_resource(CheckPermissionResource, '/api///acl/', endpoint='check_permissions') diff --git a/redash/handlers/dashboards.py b/redash/handlers/dashboards.py index da7c4e01f3..1d1a3e13ed 100644 --- a/redash/handlers/dashboards.py +++ b/redash/handlers/dashboards.py @@ -86,6 +86,7 @@ def post(self): user=self.current_user, is_draft=True, layout='[]') + dashboard.record_changes(changed_by=self.current_user) models.db.session.add(dashboard) models.db.session.commit() return serialize_dashboard(dashboard) diff --git a/redash/handlers/queries.py b/redash/handlers/queries.py index 673cb69682..1fc3e0af7c 100644 --- a/redash/handlers/queries.py +++ b/redash/handlers/queries.py @@ -142,6 +142,7 @@ def post(self): query_def['org'] = self.current_org query_def['is_draft'] = True query = models.Query.create(**query_def) + query.record_changes(changed_by=self.current_user) models.db.session.add(query) models.db.session.commit() @@ -275,6 +276,7 @@ def post(self, query_id): try: self.update_model(query, query_def) + query.record_changes(self.current_user) models.db.session.commit() except StaleDataError: abort(409) @@ -353,3 +355,15 @@ def post(self, query_id): class QueryTagsResource(BaseResource): def get(self): return {t[0]: t[1] for t in 
models.Query.all_tags(self.current_user, True)} + +class QueryVersionListResource(BaseResource): + @require_permission('view_query') + def get(self, query_id): + results = models.Change.list_versions(models.Query.get_by_id(query_id)) + return [q.to_dict() for q in results] + + +class ChangeResource(BaseResource): + @require_permission('view_query') + def get(self, change_id): + return models.Change.query.get(change_id).to_dict() diff --git a/redash/models.py b/redash/models.py index 4a8bec4eed..f62d1779fd 100644 --- a/redash/models.py +++ b/redash/models.py @@ -199,10 +199,6 @@ class ChangeTrackingMixin(object): skipped_fields = ('id', 'created_at', 'updated_at', 'version') _clean_values = None - def __init__(self, *a, **kw): - super(ChangeTrackingMixin, self).__init__(*a, **kw) - self.record_changes(self.user) - def prep_cleanvalues(self): self.__dict__['_clean_values'] = {} for attr in inspect(self.__class__).column_attrs: @@ -213,10 +209,10 @@ def prep_cleanvalues(self): def __setattr__(self, key, value): if self._clean_values is None: self.prep_cleanvalues() - for attr in inspect(self.__class__).column_attrs: - col, = attr.columns - previous = getattr(self, attr.key, None) - self._clean_values[col.name] = previous + + if key in inspect(self.__class__).column_attrs: + previous = getattr(self, key, None) + self._clean_values[key] = previous super(ChangeTrackingMixin, self).__setattr__(key, value) @@ -227,13 +223,19 @@ def record_changes(self, changed_by): for attr in inspect(self.__class__).column_attrs: col, = attr.columns if attr.key not in self.skipped_fields: - changes[col.name] = {'previous': self._clean_values[col.name], - 'current': getattr(self, attr.key)} + prev = self._clean_values[col.name] + current = getattr(self, attr.key) + if prev != current: + changes[col.name] = {'previous': prev, 'current': current} - db.session.add(Change(object=self, - object_version=self.version, - user=changed_by, - change=changes)) + if changes: + self.version = 
(self.version or 0) + 1 + change = Change(object=self, + object_version=self.version, + user=changed_by, + change=changes) + db.session.add(change) + return change class BelongsToOrgMixin(object): @@ -860,7 +862,7 @@ def should_schedule_next(previous_iteration, now, schedule, failures): class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model): id = Column(db.Integer, primary_key=True) - version = Column(db.Integer, default=1) + version = Column(db.Integer, default=0) org_id = Column(db.Integer, db.ForeignKey('organizations.id')) org = db.relationship(Organization, backref="queries") data_source_id = Column(db.Integer, db.ForeignKey("data_sources.id"), nullable=True) @@ -1069,6 +1071,7 @@ def fork(self, user): kwargs = {a: getattr(self, a) for a in forked_list} forked_query = Query.create(name=u'Copy of (#{}) {}'.format(self.id, self.name), user=user, **kwargs) + forked_query.record_changes(changed_by=user) for v in self.visualizations: if v.type == 'TABLE': @@ -1242,7 +1245,6 @@ def to_dict(self, full=True): 'id': self.id, 'object_id': self.object_id, 'object_type': self.object_type, - 'change_type': self.change_type, 'object_version': self.object_version, 'change': self.change, 'created_at': self.created_at @@ -1262,6 +1264,12 @@ def last_change(cls, obj): cls.object_type == obj.__class__.__tablename__).order_by( cls.object_version.desc()).first() + @classmethod + def list_versions(cls, query): + return cls.query.filter( + cls.object_id == query.id, + cls.object_type == 'queries') + class Alert(TimestampMixin, db.Model): UNKNOWN_STATE = 'unknown' diff --git a/tests/handlers/test_queries.py b/tests/handlers/test_queries.py index 8e2352553e..93dfae83d0 100644 --- a/tests/handlers/test_queries.py +++ b/tests/handlers/test_queries.py @@ -259,3 +259,29 @@ def test_format_sql_query(self): self.assertEqual(rv.json['query'], expected) + +class ChangeResourceTests(BaseTestCase): + def test_list(self): + query = self.factory.create_query() + query.name 
= 'version A' + query.record_changes(self.factory.user) + query.name = 'version B' + query.record_changes(self.factory.user) + rv = self.make_request('get', '/api/queries/{0}/version'.format(query.id)) + self.assertEquals(rv.status_code, 200) + self.assertEquals(len(rv.json), 2) + self.assertEquals(rv.json[0]['change']['name']['current'], 'version A') + self.assertEquals(rv.json[1]['change']['name']['current'], 'version B') + + def test_get(self): + query = self.factory.create_query() + query.name = 'version A' + ch1 = query.record_changes(self.factory.user) + query.name = 'version B' + ch2 = query.record_changes(self.factory.user) + rv1 = self.make_request('get', '/api/changes/' + str(ch1.id)) + self.assertEqual(rv1.status_code, 200) + self.assertEqual(rv1.json['change']['name']['current'], 'version A') + rv2 = self.make_request('get', '/api/changes/' + str(ch2.id)) + self.assertEqual(rv2.status_code, 200) + self.assertEqual(rv2.json['change']['name']['current'], 'version B') diff --git a/tests/models/test_changes.py b/tests/models/test_changes.py index 124e17a30d..3d7c7496e8 100644 --- a/tests/models/test_changes.py +++ b/tests/models/test_changes.py @@ -56,23 +56,12 @@ def test_properly_log_modification(self): obj.record_changes(changed_by=self.factory.user) obj.name = 'Query 2' obj.description = 'description' - db.session.flush() obj.record_changes(changed_by=self.factory.user) change = Change.last_change(obj) self.assertIsNotNone(change) - # TODO: https://github.com/getredash/redash/issues/1550 - # self.assertEqual(change.object_version, 2) + self.assertEqual(change.object_version, 2) self.assertEqual(change.object_version, obj.version) self.assertIn('name', change.change) self.assertIn('description', change.change) - - def test_logs_create_method(self): - q = Query(name='Query', description='', query_text='', - user=self.factory.user, data_source=self.factory.data_source, - org=self.factory.org) - change = Change.last_change(q) - - 
self.assertIsNotNone(change) - self.assertEqual(q.user, change.user) diff --git a/tests/test_models.py b/tests/test_models.py index 7e60e510ab..4d15eae932 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -180,7 +180,8 @@ def test_failure_extends_schedule(self): Execution failures recorded for a query result in exponential backoff for scheduling future execution. """ - query = self.factory.create_query(schedule="60", schedule_failures=4) + query = self.factory.create_query(schedule="60") + query.schedule_failures = 4 retrieved_at = utcnow() - datetime.timedelta(minutes=16) query_result = self.factory.create_query_result( retrieved_at=retrieved_at, query_text=query.query_text, From 58a5e3649460f1c7246e07173c3ed897f1fb12df Mon Sep 17 00:00:00 2001 From: Allen Short Date: Mon, 9 Jul 2018 13:20:36 -0500 Subject: [PATCH 14/48] Filter tables from schema browser (re #31) --- client/app/components/queries/schema-browser.html | 12 +++++++++++- client/app/components/queries/schema-browser.js | 14 ++++++++++++++ client/app/pages/queries/query.html | 2 +- redash/query_runner/athena.py | 11 +++++++++++ redash/query_runner/axibase_tsd.py | 6 ++++++ redash/query_runner/big_query.py | 6 ++++++ redash/query_runner/cass.py | 6 ++++++ redash/query_runner/clickhouse.py | 6 ++++++ redash/query_runner/dynamodb_sql.py | 6 ++++++ redash/query_runner/elasticsearch.py | 6 ++++++ redash/query_runner/google_analytics.py | 6 ++++++ redash/query_runner/google_spreadsheets.py | 6 ++++++ redash/query_runner/graphite.py | 6 ++++++ redash/query_runner/hive_ds.py | 6 ++++++ redash/query_runner/impala_ds.py | 6 ++++++ redash/query_runner/influx_db.py | 6 ++++++ redash/query_runner/jql.py | 6 ++++++ redash/query_runner/memsql_ds.py | 6 ++++++ redash/query_runner/mongodb.py | 6 ++++++ redash/query_runner/mssql.py | 6 ++++++ redash/query_runner/mysql.py | 6 ++++++ redash/query_runner/oracle.py | 6 ++++++ redash/query_runner/pg.py | 6 ++++++ redash/query_runner/presto.py | 6 ++++++ 
redash/query_runner/python.py | 6 ++++++ redash/query_runner/salesforce.py | 6 ++++++ redash/query_runner/script.py | 6 ++++++ redash/query_runner/snowflake.py | 6 ++++++ redash/query_runner/sqlite.py | 6 ++++++ redash/query_runner/treasuredata.py | 6 ++++++ redash/query_runner/url.py | 6 ++++++ redash/query_runner/vertica.py | 6 ++++++ tests/test_cli.py | 2 +- 33 files changed, 206 insertions(+), 3 deletions(-) diff --git a/client/app/components/queries/schema-browser.html b/client/app/components/queries/schema-browser.html index a98e27aec6..ed5c9c1d15 100644 --- a/client/app/components/queries/schema-browser.html +++ b/client/app/components/queries/schema-browser.html @@ -6,10 +6,20 @@ ng-click="$ctrl.onRefresh()"> + +
    -
    +
    diff --git a/client/app/components/queries/schema-browser.js b/client/app/components/queries/schema-browser.js index 499852be75..9c072051d7 100644 --- a/client/app/components/queries/schema-browser.js +++ b/client/app/components/queries/schema-browser.js @@ -3,6 +3,9 @@ import template from './schema-browser.html'; function SchemaBrowserCtrl($rootScope, $scope) { 'ngInject'; + this.versionToggle = false; + this.versionFilter = 'abcdefghijklmnop'; + this.showTable = (table) => { table.collapsed = !table.collapsed; $scope.$broadcast('vsRepeatTrigger'); @@ -21,6 +24,15 @@ function SchemaBrowserCtrl($rootScope, $scope) { this.isEmpty = function isEmpty() { return this.schema === undefined || this.schema.length === 0; }; + this.flipToggleVersionedTables = (versionToggle, toggleString) => { + if (versionToggle === false) { + this.versionToggle = true; + this.versionFilter = toggleString; + } else { + this.versionToggle = false; + this.versionFilter = 'abcdefghijklmnop'; + } + }; this.itemSelected = ($event, hierarchy) => { $rootScope.$broadcast('query-editor.command', 'paste', hierarchy.join('.')); @@ -32,7 +44,9 @@ function SchemaBrowserCtrl($rootScope, $scope) { const SchemaBrowser = { bindings: { schema: '<', + tabletogglestring: '<', onRefresh: '&', + flipToggleVersionedTables: '&', }, controller: SchemaBrowserCtrl, template, diff --git a/client/app/pages/queries/query.html b/client/app/pages/queries/query.html index be12d3d6c6..7f657f73a0 100644 --- a/client/app/pages/queries/query.html +++ b/client/app/pages/queries/query.html @@ -90,7 +90,7 @@

    - +
     
    diff --git a/redash/query_runner/athena.py b/redash/query_runner/athena.py index a07952934f..d529d3a01c 100644 --- a/redash/query_runner/athena.py +++ b/redash/query_runner/athena.py @@ -80,6 +80,17 @@ def configuration_schema(cls): 'type': 'boolean', 'title': 'Use Glue Data Catalog', }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." + } }, 'required': ['region', 's3_staging_dir'], 'order': ['region', 'aws_access_key', 'aws_secret_key', 's3_staging_dir', 'schema'], diff --git a/redash/query_runner/axibase_tsd.py b/redash/query_runner/axibase_tsd.py index 9737b6bc87..9d50cd3b26 100644 --- a/redash/query_runner/axibase_tsd.py +++ b/redash/query_runner/axibase_tsd.py @@ -133,6 +133,12 @@ def configuration_schema(cls): 'trust_certificate': { 'type': 'boolean', 'title': 'Trust SSL Certificate' + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, 'required': ['username', 'password', 'hostname', 'protocol', 'port'], diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py index 52f9e4cc99..bdc11b3585 100644 --- a/redash/query_runner/big_query.py +++ b/redash/query_runner/big_query.py @@ -131,6 +131,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['jsonKeyFile', 'projectId'], diff --git a/redash/query_runner/cass.py b/redash/query_runner/cass.py index 4c5af888e5..af4d159244 100644 --- a/redash/query_runner/cass.py +++ b/redash/query_runner/cass.py @@ -71,6 +71,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['keyspace', 'host'] diff --git a/redash/query_runner/clickhouse.py b/redash/query_runner/clickhouse.py index 00fc2b578b..93c7cd6423 100644 --- a/redash/query_runner/clickhouse.py +++ b/redash/query_runner/clickhouse.py @@ -29,6 +29,12 @@ def configuration_schema(cls): "dbname": { "type": "string", "title": "Database Name" + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, "required": ["dbname"], diff --git a/redash/query_runner/dynamodb_sql.py b/redash/query_runner/dynamodb_sql.py index b29942c4f7..37b0444f16 100644 --- a/redash/query_runner/dynamodb_sql.py +++ b/redash/query_runner/dynamodb_sql.py @@ -55,6 +55,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, "required": ["access_key", "secret_key"], diff --git a/redash/query_runner/elasticsearch.py b/redash/query_runner/elasticsearch.py index b2404ffb7c..bc8d083557 100644 --- a/redash/query_runner/elasticsearch.py +++ b/redash/query_runner/elasticsearch.py @@ -68,6 +68,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, "secret": ["basic_auth_password"], diff --git a/redash/query_runner/google_analytics.py b/redash/query_runner/google_analytics.py index cd14724b66..eb6457624f 100644 --- a/redash/query_runner/google_analytics.py +++ b/redash/query_runner/google_analytics.py @@ -103,6 +103,12 @@ def configuration_schema(cls): 'jsonKeyFile': { "type": "string", 'title': 'JSON Key File' + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, 'required': ['jsonKeyFile'], diff --git a/redash/query_runner/google_spreadsheets.py b/redash/query_runner/google_spreadsheets.py index ccd2d8cbb2..bfe7a4abbf 100644 --- a/redash/query_runner/google_spreadsheets.py +++ b/redash/query_runner/google_spreadsheets.py @@ -176,6 +176,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['jsonKeyFile'], diff --git a/redash/query_runner/graphite.py b/redash/query_runner/graphite.py index e2aaff6643..edb1c6449d 100644 --- a/redash/query_runner/graphite.py +++ b/redash/query_runner/graphite.py @@ -47,6 +47,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['url'], diff --git a/redash/query_runner/hive_ds.py b/redash/query_runner/hive_ds.py index 042647e8ef..f84cfbb4c8 100644 --- a/redash/query_runner/hive_ds.py +++ b/redash/query_runner/hive_ds.py @@ -60,6 +60,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, "required": ["host"] diff --git a/redash/query_runner/impala_ds.py b/redash/query_runner/impala_ds.py index 8fa7db4593..66085b3fba 100644 --- a/redash/query_runner/impala_ds.py +++ b/redash/query_runner/impala_ds.py @@ -73,6 +73,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, "required": ["host"], diff --git a/redash/query_runner/influx_db.py b/redash/query_runner/influx_db.py index f512e140bc..08db51c9cd 100644 --- a/redash/query_runner/influx_db.py +++ b/redash/query_runner/influx_db.py @@ -65,6 +65,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['url'] diff --git a/redash/query_runner/jql.py b/redash/query_runner/jql.py index e3f9743547..61d8fc6598 100644 --- a/redash/query_runner/jql.py +++ b/redash/query_runner/jql.py @@ -161,6 +161,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, 'required': ['url', 'username', 'password'], diff --git a/redash/query_runner/memsql_ds.py b/redash/query_runner/memsql_ds.py index 66e9eddc3d..d54211ab25 100644 --- a/redash/query_runner/memsql_ds.py +++ b/redash/query_runner/memsql_ds.py @@ -56,6 +56,12 @@ def configuration_schema(cls): }, "password": { "type": "string" + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, diff --git a/redash/query_runner/mongodb.py b/redash/query_runner/mongodb.py index 04b5ef8dac..1bdb15e933 100644 --- a/redash/query_runner/mongodb.py +++ b/redash/query_runner/mongodb.py @@ -142,6 +142,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['connectionString', 'dbName'] diff --git a/redash/query_runner/mssql.py b/redash/query_runner/mssql.py index 4865df9794..0708e21d52 100644 --- a/redash/query_runner/mssql.py +++ b/redash/query_runner/mssql.py @@ -74,6 +74,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, "required": ["db"], diff --git a/redash/query_runner/mysql.py b/redash/query_runner/mysql.py index 525dd3d054..a2187d20c8 100644 --- a/redash/query_runner/mysql.py +++ b/redash/query_runner/mysql.py @@ -58,6 +58,12 @@ def configuration_schema(cls): 'port': { 'type': 'number', 'default': 3306, + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, "order": ['host', 'port', 'user', 'passwd', 'db'], diff --git a/redash/query_runner/oracle.py b/redash/query_runner/oracle.py index 12448029d5..503b86ca39 100644 --- a/redash/query_runner/oracle.py +++ b/redash/query_runner/oracle.py @@ -72,6 +72,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, "required": ["servicename", "user", "password", "host", "port"], diff --git a/redash/query_runner/pg.py b/redash/query_runner/pg.py index 3bd20af567..8d5193f6d3 100644 --- a/redash/query_runner/pg.py +++ b/redash/query_runner/pg.py @@ -83,6 +83,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, "order": ['host', 'port', 'user', 'password'], diff --git a/redash/query_runner/presto.py b/redash/query_runner/presto.py index 7ed9823f41..c08ee21b83 100644 --- a/redash/query_runner/presto.py +++ b/redash/query_runner/presto.py @@ -59,6 +59,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['host'] diff --git a/redash/query_runner/python.py b/redash/query_runner/python.py index 275b44fa16..2c43201378 100644 --- a/redash/query_runner/python.py +++ b/redash/query_runner/python.py @@ -65,6 +65,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, } diff --git a/redash/query_runner/salesforce.py b/redash/query_runner/salesforce.py index 527f1e26ec..7222028fd0 100644 --- a/redash/query_runner/salesforce.py +++ b/redash/query_runner/salesforce.py @@ -81,6 +81,12 @@ def configuration_schema(cls): "type": "string", "title": "Salesforce API Version", "default": DEFAULT_API_VERSION + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, "required": ["username", "password", "token"], diff --git a/redash/query_runner/script.py b/redash/query_runner/script.py index dac69b66ec..9473a45d22 100644 --- a/redash/query_runner/script.py +++ b/redash/query_runner/script.py @@ -57,6 +57,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['path'] diff --git a/redash/query_runner/snowflake.py b/redash/query_runner/snowflake.py index a1a7ca447e..f223f54c9c 100644 --- a/redash/query_runner/snowflake.py +++ b/redash/query_runner/snowflake.py @@ -46,6 +46,12 @@ def configuration_schema(cls): }, "database": { "type": "string" + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, "required": ["user", "password", "account", "database", "warehouse"], diff --git a/redash/query_runner/sqlite.py b/redash/query_runner/sqlite.py index 18bef51816..1819dd38bb 100644 --- a/redash/query_runner/sqlite.py +++ b/redash/query_runner/sqlite.py @@ -28,6 +28,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, "required": ["dbpath"], diff --git a/redash/query_runner/treasuredata.py b/redash/query_runner/treasuredata.py index 1a05fbb0c5..3957cbab53 100644 --- a/redash/query_runner/treasuredata.py +++ b/redash/query_runner/treasuredata.py @@ -65,6 +65,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, 'required': ['apikey','db'] diff --git a/redash/query_runner/url.py b/redash/query_runner/url.py index 5da7659390..c99289cca4 100644 --- a/redash/query_runner/url.py +++ b/redash/query_runner/url.py @@ -19,6 +19,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } } } diff --git a/redash/query_runner/vertica.py b/redash/query_runner/vertica.py index 680fbe3cfb..4c84c1874e 100644 --- a/redash/query_runner/vertica.py +++ b/redash/query_runner/vertica.py @@ -65,6 +65,12 @@ def configuration_schema(cls): "type": "string", "title": "Documentation URL", "default": cls.default_doc_url + }, + "toggle_table_string": { + "type": "string", + "title": "Toggle Table String", + "default": "_v", + "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
} }, 'required': ['database'], diff --git a/tests/test_cli.py b/tests/test_cli.py index 7788521ef1..77e1a772d6 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -16,7 +16,7 @@ def test_interactive_new(self): result = runner.invoke( manager, ['ds', 'new'], - input="test\n%s\n\n\n\nexample.com\n\n\ntestdb\n" % (pg_i,)) + input="test\n%s\n\n\n\n\nexample.com\n\n\ntestdb\n" % (pg_i,)) self.assertFalse(result.exception) self.assertEqual(result.exit_code, 0) self.assertEqual(DataSource.query.count(), 1) From 5fa01cce484e63b007e62eaced56c6e01e58b7ba Mon Sep 17 00:00:00 2001 From: Alison Date: Thu, 22 Jun 2017 10:02:11 -0500 Subject: [PATCH 15/48] give warning/error msg on inaccurate graph config (re #57) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I tried to make `JSON.stringify(this.visualization.options.columnMapping)` a variable to avoid repeating it, but if I make it a `let` the linter throws an error and if I make it a `const` then it doesn’t change with the UI and the logic doesn’t work. 
:( updated based on PR comments --- .../edit-visualization-dialog.css | 5 +++++ .../edit-visualization-dialog.html | 10 ++++++++- .../edit-visualization-dialog.js | 21 +++++++++++++++++++ 3 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 client/app/visualizations/edit-visualization-dialog.css diff --git a/client/app/visualizations/edit-visualization-dialog.css b/client/app/visualizations/edit-visualization-dialog.css new file mode 100644 index 0000000000..3e84b755b2 --- /dev/null +++ b/client/app/visualizations/edit-visualization-dialog.css @@ -0,0 +1,5 @@ +/* Edit Visualization Dialog specific CSS */ + +.slight-padding { + padding: 5px; +} \ No newline at end of file diff --git a/client/app/visualizations/edit-visualization-dialog.html b/client/app/visualizations/edit-visualization-dialog.html index 28791ee2ca..4d9b531b5c 100644 --- a/client/app/visualizations/edit-visualization-dialog.html +++ b/client/app/visualizations/edit-visualization-dialog.html @@ -34,10 +34,18 @@
    +
    +
    +
    +
    +
    +
    +
    +
    diff --git a/client/app/visualizations/edit-visualization-dialog.js b/client/app/visualizations/edit-visualization-dialog.js index 7c855d20be..197edcf31d 100644 --- a/client/app/visualizations/edit-visualization-dialog.js +++ b/client/app/visualizations/edit-visualization-dialog.js @@ -1,6 +1,7 @@ import { map } from 'lodash'; import { copy } from 'angular'; import template from './edit-visualization-dialog.html'; +import './edit-visualization-dialog.css'; const EditVisualizationDialog = { template, @@ -21,6 +22,8 @@ const EditVisualizationDialog = { // Don't allow to change type after creating visualization this.canChangeType = !(this.visualization && this.visualization.id); + this.warning_three_column_groupby = 'You have more than 2 columns in your result set. To ensure the chart is accurate, please do one of the following:
    • Change the SQL query to give 2 result columns. You can CONCAT() columns together if you wish.
    • Select column(s) to group by.
    '; + this.warning_three_column_stacking = 'You have more than 2 columns in your result set. You may wish to make the Stacking option equal to `Enabled` or `Percent`.'; this.newVisualization = () => ({ @@ -48,6 +51,24 @@ const EditVisualizationDialog = { } }; + this.has3plusColumnsFunction = () => { + let has3plusColumns = false; + if ((JSON.stringify(this.visualization.options.columnMapping).match(/,/g) || []).length > 2) { + has3plusColumns = true; + } + return has3plusColumns; + }; + + this.disableSubmit = () => { + if (this.visualization.options.globalSeriesType === 'column' + && this.has3plusColumnsFunction() + && !JSON.stringify(this.visualization.options.columnMapping).includes('"":') + && JSON.stringify(this.visualization.options.columnMapping).includes('unused')) { + return true; + } + return false; + }; + this.submit = () => { if (this.visualization.id) { Events.record('update', 'visualization', this.visualization.id, { type: this.visualization.type }); From 883c36bf66a1a410e841bf35363439266cbe5a07 Mon Sep 17 00:00:00 2001 From: Alison Date: Fri, 28 Jul 2017 06:36:19 -0500 Subject: [PATCH 16/48] add column type info to query runners (re #152, #23) --- redash/query_runner/athena.py | 4 ++-- redash/query_runner/mysql.py | 5 +++-- redash/query_runner/pg.py | 5 ++++- redash/query_runner/presto.py | 4 ++-- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/redash/query_runner/athena.py b/redash/query_runner/athena.py index d529d3a01c..4352cf65d5 100644 --- a/redash/query_runner/athena.py +++ b/redash/query_runner/athena.py @@ -154,7 +154,7 @@ def get_schema(self, get_stats=False): schema = {} query = """ - SELECT table_schema, table_name, column_name + SELECT table_schema, table_name, column_name, data_type as column_type FROM information_schema.columns WHERE table_schema NOT IN ('information_schema') """ @@ -168,7 +168,7 @@ def get_schema(self, get_stats=False): table_name = '{0}.{1}'.format(row['table_schema'], row['table_name']) if table_name 
not in schema: schema[table_name] = {'name': table_name, 'columns': []} - schema[table_name]['columns'].append(row['column_name']) + schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')') return schema.values() diff --git a/redash/query_runner/mysql.py b/redash/query_runner/mysql.py index a2187d20c8..d4700d85c1 100644 --- a/redash/query_runner/mysql.py +++ b/redash/query_runner/mysql.py @@ -115,7 +115,8 @@ def _get_tables(self, schema): query = """ SELECT col.table_schema, col.table_name, - col.column_name + col.column_name, + col.column_type FROM `information_schema`.`columns` col WHERE col.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql'); """ @@ -136,7 +137,7 @@ def _get_tables(self, schema): if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} - schema[table_name]['columns'].append(row['column_name']) + schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')') return schema.values() diff --git a/redash/query_runner/pg.py b/redash/query_runner/pg.py index 8d5193f6d3..accaf6ca44 100644 --- a/redash/query_runner/pg.py +++ b/redash/query_runner/pg.py @@ -117,7 +117,7 @@ def _get_definitions(self, schema, query): if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} - schema[table_name]['columns'].append(row['column_name']) + schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')') def _get_tables(self, schema): ''' @@ -137,6 +137,7 @@ def _get_tables(self, schema): query = """ SELECT s.nspname as table_schema, c.relname as table_name, + t.typname as column_type, a.attname as column_name FROM pg_class c JOIN pg_namespace s @@ -146,6 +147,8 @@ def _get_tables(self, schema): ON a.attrelid = c.oid AND a.attnum > 0 AND NOT a.attisdropped + JOIN pg_type t + ON c.reltype = t.oid WHERE c.relkind IN ('r', 'v', 'm', 'f', 'p') """ diff --git a/redash/query_runner/presto.py 
b/redash/query_runner/presto.py index c08ee21b83..a3e3cd0c73 100644 --- a/redash/query_runner/presto.py +++ b/redash/query_runner/presto.py @@ -81,7 +81,7 @@ def type(cls): def get_schema(self, get_stats=False): schema = {} query = """ - SELECT table_schema, table_name, column_name + SELECT table_schema, table_name, column_name, data_type as column_type FROM information_schema.columns WHERE table_schema NOT IN ('pg_catalog', 'information_schema') """ @@ -99,7 +99,7 @@ def get_schema(self, get_stats=False): if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} - schema[table_name]['columns'].append(row['column_name']) + schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')') return schema.values() From 829b773b3ef7f0ea5a630515c6a70cf021105035 Mon Sep 17 00:00:00 2001 From: Alison Date: Fri, 28 Jul 2017 16:27:20 -0500 Subject: [PATCH 17/48] add hideParameters param (re #163) --- client/app/components/parameters.html | 2 +- client/app/components/parameters.js | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/client/app/components/parameters.html b/client/app/components/parameters.html index 9aa5a76c87..ac8bbe7aac 100644 --- a/client/app/components/parameters.html +++ b/client/app/components/parameters.html @@ -1,5 +1,5 @@
    diff --git a/client/app/components/parameters.js b/client/app/components/parameters.js index 56e3c7fdfe..2975d3aa30 100644 --- a/client/app/components/parameters.js +++ b/client/app/components/parameters.js @@ -152,6 +152,7 @@ function ParametersDirective($location, $uibModal) { }, }); }; + scope.hideParameters = $location.search().hideParameters; }, }; } From be0cd8425d61e8761cb345f3ba05fa4e419a71ee Mon Sep 17 00:00:00 2001 From: Alison Date: Tue, 8 Aug 2017 15:59:27 -0500 Subject: [PATCH 18/48] add ability to add query to dashboard from query page (re #154) --- .../app/pages/queries/add-to-dashboard.html | 23 ++++++ client/app/pages/queries/add-to-dashboard.js | 70 +++++++++++++++++++ client/app/pages/queries/query.html | 1 + client/app/pages/queries/view.js | 12 ++++ redash/handlers/dashboards.py | 1 + redash/models.py | 8 +-- tests/handlers/test_dashboards.py | 28 ++++++++ tests/handlers/test_widgets.py | 12 ++++ 8 files changed, 151 insertions(+), 4 deletions(-) create mode 100644 client/app/pages/queries/add-to-dashboard.html create mode 100644 client/app/pages/queries/add-to-dashboard.js diff --git a/client/app/pages/queries/add-to-dashboard.html b/client/app/pages/queries/add-to-dashboard.html new file mode 100644 index 0000000000..1f5e6f027a --- /dev/null +++ b/client/app/pages/queries/add-to-dashboard.html @@ -0,0 +1,23 @@ + + diff --git a/client/app/pages/queries/add-to-dashboard.js b/client/app/pages/queries/add-to-dashboard.js new file mode 100644 index 0000000000..292727c141 --- /dev/null +++ b/client/app/pages/queries/add-to-dashboard.js @@ -0,0 +1,70 @@ +import template from './add-to-dashboard.html'; + +const AddToDashboardForm = { + controller($sce, Dashboard, currentUser, toastr, Query, Widget) { + 'ngInject'; + + this.query = this.resolve.query; + this.vis = this.resolve.vis; + this.saveAddToDashbosard = this.resolve.saveAddToDashboard; + this.saveInProgress = false; + + this.trustAsHtml = html => $sce.trustAsHtml(html); + + 
this.onDashboardSelected = (dash) => { + // add widget to dashboard + this.saveInProgress = true; + this.widgetSize = 1; + this.selectedVis = null; + this.query = {}; + this.selected_query = this.query.id; + this.type = 'visualization'; + this.isVisualization = () => this.type === 'visualization'; + + const widget = new Widget({ + visualization_id: this.vis && this.vis.id, + dashboard_id: dash.id, + options: {}, + width: this.widgetSize, + type: this.type, + }); + + // (response) + widget.save().then(() => { + // (dashboard) + this.selectedDashboard = Dashboard.get({ slug: dash.slug }, () => {}); + this.close(); + }).catch(() => { + toastr.error('Widget can not be added'); + }).finally(() => { + this.saveInProgress = false; + }); + }; + + this.selectedDashboard = null; + + this.searchDashboards = (term) => { // , limitToUsersDashboards + if (!term || term.length < 3) { + return; + } + + Dashboard.get({ + q: term, + include_drafts: true, + }, (results) => { + this.dashboards = results.results; + }); + }; + }, + bindings: { + resolve: '<', + close: '&', + dismiss: '&', + vis: '<', + }, + template, +}; + +export default function (ngModule) { + ngModule.component('addToDashboardDialog', AddToDashboardForm); +} diff --git a/client/app/pages/queries/query.html b/client/app/pages/queries/query.html index 7f657f73a0..051f9ce18a 100644 --- a/client/app/pages/queries/query.html +++ b/client/app/pages/queries/query.html @@ -254,6 +254,7 @@

    + +
  • diff --git a/client/app/pages/queries/view.js b/client/app/pages/queries/view.js index 2235ab853b..b724906612 100644 --- a/client/app/pages/queries/view.js +++ b/client/app/pages/queries/view.js @@ -455,6 +455,18 @@ function QueryViewCtrl( }); }; + $scope.openAddToDashboardForm = (vis) => { + $uibModal.open({ + component: 'addToDashboardDialog', + size: 'sm', + resolve: { + query: $scope.query, + vis, + saveAddToDashboard: () => $scope.saveAddToDashboard, + }, + }); + }; + $scope.showEmbedDialog = (query, visId) => { const visualization = getVisualization(visId); $uibModal.open({ diff --git a/redash/handlers/dashboards.py b/redash/handlers/dashboards.py index 1d1a3e13ed..54afc986dd 100644 --- a/redash/handlers/dashboards.py +++ b/redash/handlers/dashboards.py @@ -46,6 +46,7 @@ def get(self): self.current_user.group_ids, self.current_user.id, search_term, + 'include_drafts' in request.args, ) else: results = models.Dashboard.all( diff --git a/redash/models.py b/redash/models.py index f62d1779fd..6fbf7baccb 100644 --- a/redash/models.py +++ b/redash/models.py @@ -1361,7 +1361,7 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model } @classmethod - def all(cls, org, group_ids, user_id): + def all(cls, org, group_ids, user_id, include_drafts=False): query = ( Dashboard.query .options(joinedload(Dashboard.user)) @@ -1377,14 +1377,14 @@ def all(cls, org, group_ids, user_id): Dashboard.org == org) .distinct()) - query = query.filter(or_(Dashboard.user_id == user_id, Dashboard.is_draft == False)) + query = query.filter(or_(Dashboard.user_id == user_id, Dashboard.is_draft == include_drafts)) return query @classmethod - def search(cls, org, groups_ids, user_id, search_term): + def search(cls, org, groups_ids, user_id, search_term, include_drafts): # TODO: switch to FTS - return cls.all(org, groups_ids, user_id).filter(cls.name.ilike(u'%{}%'.format(search_term))) + return cls.all(org, groups_ids, user_id, 
include_drafts).filter(cls.name.ilike(u'%{}%'.format(search_term))) @classmethod def all_tags(cls, org, user): diff --git a/tests/handlers/test_dashboards.py b/tests/handlers/test_dashboards.py index 0cd38a5fea..03d6f2ba42 100644 --- a/tests/handlers/test_dashboards.py +++ b/tests/handlers/test_dashboards.py @@ -182,3 +182,31 @@ def test_requires_admin_or_owner(self): res = self.make_request('delete', '/api/dashboards/{}/share'.format(dashboard.id), user=user) self.assertEqual(res.status_code, 200) + +class TestDashboardSearchResourceGet(BaseTestCase): + def create_dashboard_sequence(self): + d1 = self.factory.create_dashboard() + new_name = 'Analytics' + rv1 = self.make_request('post', '/api/dashboards/{0}'.format(d1.id), + data={'name': new_name, 'layout': '[]', 'is_draft': False}) + d2 = self.factory.create_dashboard() + rv2 = self.make_request('post', '/api/dashboards/{0}'.format(d2.id), + data={'name': 'Metrics', 'layout': '[]', 'is_draft': True}) + user = self.factory.create_user() + return d1, d2, user + + def test_get_dashboard_search_results_does_not_contain_deleted(self): + d1, d2, user = self.create_dashboard_sequence() + res = self.make_request('delete', '/api/dashboards/{}/share'.format(d2.id)) + dash_search_list = self.make_request('get','/api/dashboards/search?q=Metrics') + dash_search_list_json = json.loads(dash_search_list.data) + self.assertNotIn(d2.id, dash_search_list_json) + + def test_get_dashboard_search_results_obeys_draft_flag(self): + d1, d2, user = self.create_dashboard_sequence() + dash_search_list = self.make_request('get','/api/dashboards/search?q=Metrics&test=True&user_id={}'.format(user.id)) + dash_search_list_json = json.loads(dash_search_list.data) + self.assertNotIn(d2.id, dash_search_list_json) + #self.assertIn(d1.id, dash_search_list_json) + + diff --git a/tests/handlers/test_widgets.py b/tests/handlers/test_widgets.py index 702ef6f828..cb89caab47 100644 --- a/tests/handlers/test_widgets.py +++ b/tests/handlers/test_widgets.py 
@@ -64,3 +64,15 @@ def test_delete_widget(self): self.assertEquals(rv.status_code, 200) dashboard = models.Dashboard.get_by_slug_and_org(widget.dashboard.slug, widget.dashboard.org) self.assertEquals(dashboard.widgets.count(), 0) + + def test_updates_textbox_widget(self): + widget = self.factory.create_widget() + + rv = self.make_request('post', '/api/widgets/{0}'.format(widget.id), data={'width':2,'text':'sing and shine on', 'options': {}}) + + self.assertEquals(rv.status_code, 200) + dashboard = models.Dashboard.get_by_slug_and_org(widget.dashboard.slug, widget.dashboard.org) + self.assertEquals(dashboard.widgets.count(), 1) + self.assertEquals(dashboard.layout, '[]') + + From 173ceb58e289b530dac5af4716bf41ce92527600 Mon Sep 17 00:00:00 2001 From: Alison Date: Fri, 11 Aug 2017 20:36:18 -0500 Subject: [PATCH 19/48] Add last_active_at column to users page (re #155) --- client/app/pages/users/list.html | 9 ++++++++- redash/models.py | 2 ++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/client/app/pages/users/list.html b/client/app/pages/users/list.html index b2a62243ac..c5cfd05f6c 100644 --- a/client/app/pages/users/list.html +++ b/client/app/pages/users/list.html @@ -46,6 +46,10 @@ Joined + + Last Active At + + @@ -62,6 +66,9 @@ + + +
    @@ -74,4 +81,4 @@
  • - \ No newline at end of file + diff --git a/redash/models.py b/redash/models.py index 6fbf7baccb..59e639fd07 100644 --- a/redash/models.py +++ b/redash/models.py @@ -475,6 +475,8 @@ def to_dict(self, with_api_key=False): if with_api_key: d['api_key'] = self.api_key + d['last_active_at'] = Event.query.filter(Event.user_id == self.id).with_entities(Event.created_at).order_by(Event.created_at.desc()).first() + return d def is_api_user(self): From 07346c71b92ef83866584968a19a3423747e51ba Mon Sep 17 00:00:00 2001 From: Allen Short Date: Sat, 9 Dec 2017 05:48:56 +0000 Subject: [PATCH 20/48] add partition key marker to Athena and Presto columns (re #185) --- redash/query_runner/athena.py | 14 ++++++++++++-- redash/query_runner/presto.py | 16 ++++++++++++++-- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/redash/query_runner/athena.py b/redash/query_runner/athena.py index 4352cf65d5..26081066c0 100644 --- a/redash/query_runner/athena.py +++ b/redash/query_runner/athena.py @@ -154,9 +154,10 @@ def get_schema(self, get_stats=False): schema = {} query = """ - SELECT table_schema, table_name, column_name, data_type as column_type + SELECT table_schema, table_name, column_name, data_type as column_type, comment as extra_info FROM information_schema.columns WHERE table_schema NOT IN ('information_schema') + ORDER BY 1, 5 DESC """ results, error = self.run_query(query, None) @@ -168,7 +169,16 @@ def get_schema(self, get_stats=False): table_name = '{0}.{1}'.format(row['table_schema'], row['table_name']) if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} - schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')') + + if row['extra_info'] == 'Partition Key': + schema[table_name]['columns'].append('[P] ' + row['column_name'] + ' (' + row['column_type'] + ')') + elif row['column_type'] == 'integer' or row['column_type'] == 'varchar' or row['column_type'] == 'timestamp' or row['column_type'] == 
'boolean' or row['column_type'] == 'bigint': + schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')') + elif row['column_type'][0:2] == 'row' or row['column_type'][0:2] == 'map' or row['column_type'][0:2] == 'arr': + schema[table_name]['columns'].append(row['column_name'] + ' (row or map or array)') + else: + schema[table_name]['columns'].append(row['column_name']) + return schema.values() diff --git a/redash/query_runner/presto.py b/redash/query_runner/presto.py index a3e3cd0c73..1f20d17663 100644 --- a/redash/query_runner/presto.py +++ b/redash/query_runner/presto.py @@ -1,4 +1,5 @@ import json +from markupsafe import Markup, escape from redash.utils import JSONEncoder from redash.query_runner import * @@ -81,9 +82,10 @@ def type(cls): def get_schema(self, get_stats=False): schema = {} query = """ - SELECT table_schema, table_name, column_name, data_type as column_type + SELECT table_schema, table_name, column_name, data_type as column_type, extra_info FROM information_schema.columns WHERE table_schema NOT IN ('pg_catalog', 'information_schema') + ORDER BY 1, 5 DESC """ results, error = self.run_query(query, None) @@ -99,7 +101,14 @@ def get_schema(self, get_stats=False): if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} - schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')') + if row['extra_info'] == 'partition key': + schema[table_name]['columns'].append('[P] ' + row['column_name'] + ' (' + row['column_type'] + ')') + elif row['column_type'] == 'integer' or row['column_type'] == 'varchar' or row['column_type'] == 'timestamp' or row['column_type'] == 'boolean' or row['column_type'] == 'bigint': + schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')') + elif row['column_type'][0:2] == 'row' or row['column_type'][0:2] == 'map' or row['column_type'][0:2] == 'arr': + schema[table_name]['columns'].append(row['column_name'] + 
' (row or map or array)') + else: + schema[table_name]['columns'].append(row['column_name']) return schema.values() @@ -119,6 +128,9 @@ def run_query(self, query, user): column_tuples = [(i[0], PRESTO_TYPES_MAPPING.get(i[1], None)) for i in cursor.description] columns = self.fetch_columns(column_tuples) rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())] + for row in rows: + for field in row: + field = escape(field) data = {'columns': columns, 'rows': rows} json_data = json.dumps(data, cls=JSONEncoder) error = None From 48ddd18963fc06e01029c17d384a576245ec7236 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Sat, 9 Dec 2017 05:50:15 +0000 Subject: [PATCH 21/48] make autocomplete for large schemas faster (re #232) --- client/app/components/queries/query-editor.js | 41 +++++++++++++------ 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/client/app/components/queries/query-editor.js b/client/app/components/queries/query-editor.js index 3c7d9f6fe4..a37a29717e 100644 --- a/client/app/components/queries/query-editor.js +++ b/client/app/components/queries/query-editor.js @@ -93,7 +93,7 @@ function queryEditor(QuerySnippet, $timeout) { editor.getSession().setMode(newMode); }); - $scope.$watch('schema', (newSchema, oldSchema) => { + $scope.$watch('autoCompleteSchema', (newSchema, oldSchema) => { if (newSchema !== oldSchema) { if (newSchema === undefined) { return; @@ -103,8 +103,10 @@ function queryEditor(QuerySnippet, $timeout) { // as it makes typing slower. 
if (tokensCount > 5000) { editor.setOption('enableLiveAutocompletion', false); + editor.setOption('enableBasicAutocompletion', false); } else { editor.setOption('enableLiveAutocompletion', true); + editor.setOption('enableBasicAutocompletion', true); } } }); @@ -119,31 +121,44 @@ function queryEditor(QuerySnippet, $timeout) { const schemaCompleter = { getCompletions(state, session, pos, prefix, callback) { - if (prefix.length === 0 || !$scope.schema) { + // make a variable for the auto completion in the query editor + $scope.autoCompleteSchema = $scope.schema; // removeExtraSchemaInfo( + + if (prefix.length === 0 || !$scope.autoCompleteSchema) { callback(null, []); return; } - if (!$scope.schema.keywords) { + if (!$scope.autoCompleteSchema.keywords) { const keywords = {}; - $scope.schema.forEach((table) => { + $scope.autoCompleteSchema.forEach((table) => { keywords[table.name] = 'Table'; - table.columns.forEach((c) => { - keywords[c] = 'Column'; + table.columns.forEach((c) => { // autoCompleteColumns + if (c.charAt(c.length - 1) === ')') { + let parensStartAt = c.indexOf('(') - 1; + c = c.substring(0, parensStartAt); + parensStartAt = 1; // linter complains without this line + } + // remove '[P] ' for partition keys + if (c.charAt(0) === '[') { + c = c.substring(4, c.length); + } + // keywords[c] = 'Column'; // dups columns keywords[`${table.name}.${c}`] = 'Column'; }); }); - $scope.schema.keywords = map(keywords, (v, k) => ({ - name: k, - value: k, - score: 0, - meta: v, - })); + $scope.autoCompleteSchema.keywords = map(keywords, (v, k) => + ({ + name: k, + value: k, + score: 0, + meta: v, + })); } - callback(null, $scope.schema.keywords); + callback(null, $scope.autoCompleteSchema.keywords); }, }; From 77a92a70de36751a6720af562c31010a56de2f17 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Tue, 19 Dec 2017 22:09:13 +0000 Subject: [PATCH 22/48] Toggle for query editor autocomplete (re #282) --- client/app/components/queries/query-editor.js | 13 +++++++++---- 
client/app/pages/queries/query.html | 5 +++++ client/app/pages/queries/source-view.js | 5 +++++ 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/client/app/components/queries/query-editor.js b/client/app/components/queries/query-editor.js index a37a29717e..f0555be12a 100644 --- a/client/app/components/queries/query-editor.js +++ b/client/app/components/queries/query-editor.js @@ -98,10 +98,11 @@ function queryEditor(QuerySnippet, $timeout) { if (newSchema === undefined) { return; } - const tokensCount = newSchema.reduce((totalLength, table) => totalLength + table.columns.length, 0); - // If there are too many tokens we disable live autocomplete, - // as it makes typing slower. - if (tokensCount > 5000) { + const tokensCount = + newSchema.reduce((totalLength, table) => totalLength + table.columns.length, 0); + // If there are too many tokens or if it's requested via the UI + // we disable live autocomplete, as it makes typing slower. + if (tokensCount > 5000 || !$scope.$parent.autocompleteQuery) { editor.setOption('enableLiveAutocompletion', false); editor.setOption('enableBasicAutocompletion', false); } else { @@ -114,6 +115,10 @@ function queryEditor(QuerySnippet, $timeout) { $scope.$parent.$on('angular-resizable.resizing', () => { editor.resize(); }); + $scope.$parent.$watch('autocompleteQuery', () => { + editor.setOption('enableLiveAutocompletion', $scope.$parent.autocompleteQuery); + editor.setOption('enableBasicAutocompletion', $scope.$parent.autocompleteQuery); + }); editor.focus(); }, diff --git a/client/app/pages/queries/query.html b/client/app/pages/queries/query.html index 051f9ce18a..e1d34e1344 100644 --- a/client/app/pages/queries/query.html +++ b/client/app/pages/queries/query.html @@ -163,6 +163,11 @@

    + + + Autocomplete + + diff --git a/client/app/pages/queries/source-view.js b/client/app/pages/queries/source-view.js index dfcb779bfb..25bc8a52ce 100644 --- a/client/app/pages/queries/source-view.js +++ b/client/app/pages/queries/source-view.js @@ -102,6 +102,11 @@ function QuerySourceCtrl( }); }; + $scope.autocompleteQuery = true; + $scope.toggleAutocompleteQuery = () => { + $scope.autocompleteQuery = !$scope.autocompleteQuery; + }; + $scope.$watch('query.query', (newQueryText) => { $scope.isDirty = newQueryText !== queryText; }); From 231f7dffbbd177d8a6e504efb7c55ccece46963c Mon Sep 17 00:00:00 2001 From: Davor Spasovski Date: Thu, 17 Aug 2017 16:09:11 -0400 Subject: [PATCH 23/48] hide query more menu if empty (re #208) --- client/app/pages/queries/query.html | 2 +- client/app/pages/queries/view.js | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/client/app/pages/queries/query.html b/client/app/pages/queries/query.html index e1d34e1344..9bb586590b 100644 --- a/client/app/pages/queries/query.html +++ b/client/app/pages/queries/query.html @@ -45,7 +45,7 @@

    -
    +
    diff --git a/client/app/pages/queries/view.js b/client/app/pages/queries/view.js index b724906612..34e010f572 100644 --- a/client/app/pages/queries/view.js +++ b/client/app/pages/queries/view.js @@ -499,6 +499,17 @@ function QueryViewCtrl( }, }); }; + + $scope.moreMenuIsPopulated = () => { + const menuParent = document.getElementById('query-more-menu'); + + if (menuParent) { + if (menuParent.querySelectorAll('.dropdown-menu li').length) { + return true; + } + } + return false; + }; } export default function init(ngModule) { From 569cda6c1e136d930f5633e42dea3f04c9a22d7c Mon Sep 17 00:00:00 2001 From: Alison Date: Fri, 1 Sep 2017 17:13:23 -0500 Subject: [PATCH 24/48] Move events server side (re #245) --- client/app/components/dashboards/widget.js | 2 -- .../app/pages/admin/outdated-queries/index.js | 3 +- client/app/pages/admin/tasks/index.js | 3 +- client/app/pages/alert/index.js | 2 -- client/app/pages/alerts-list/index.js | 3 +- client/app/pages/dashboards/dashboard.js | 2 -- client/app/pages/data-sources/list.js | 4 +-- client/app/pages/data-sources/show.js | 9 +----- client/app/pages/destinations/list.js | 4 +-- client/app/pages/destinations/show.js | 5 +--- client/app/pages/groups/data-sources.js | 3 +- client/app/pages/groups/list.js | 3 +- client/app/pages/groups/show.js | 4 +-- client/app/pages/queries-list/index.js | 4 +-- client/app/pages/queries/view.js | 3 -- client/app/pages/query-snippets/edit.js | 1 - client/app/pages/query-snippets/list.js | 4 +-- client/app/pages/users/list.js | 4 +-- client/app/pages/users/show.js | 3 +- redash/handlers/admin.py | 19 ++++++++++-- redash/handlers/alerts.py | 7 +++++ redash/handlers/dashboards.py | 16 ++++++++++ redash/handlers/data_sources.py | 29 ++++++++++++++++++- redash/handlers/destinations.py | 20 ++++++++++++- redash/handlers/groups.py | 18 ++++++++++++ redash/handlers/queries.py | 16 ++++++++++ redash/handlers/query_snippets.py | 10 +++++++ redash/handlers/users.py | 17 ++++++++--- 
redash/handlers/visualizations.py | 5 ++++ redash/handlers/widgets.py | 5 ++++ 30 files changed, 168 insertions(+), 60 deletions(-) diff --git a/client/app/components/dashboards/widget.js b/client/app/components/dashboards/widget.js index ecd061464b..50eda3d21e 100644 --- a/client/app/components/dashboards/widget.js +++ b/client/app/components/dashboards/widget.js @@ -66,8 +66,6 @@ function DashboardWidgetCtrl($location, $uibModal, $window, Events, currentUser) return; } - Events.record('delete', 'widget', this.widget.id); - this.widget.delete().then(() => { if (this.deleted) { this.deleted({}); diff --git a/client/app/pages/admin/outdated-queries/index.js b/client/app/pages/admin/outdated-queries/index.js index aa5a54ad5d..e1dfd1e280 100644 --- a/client/app/pages/admin/outdated-queries/index.js +++ b/client/app/pages/admin/outdated-queries/index.js @@ -3,8 +3,7 @@ import moment from 'moment'; import { Paginator } from '@/lib/pagination'; import template from './outdated-queries.html'; -function OutdatedQueriesCtrl($scope, Events, $http, $timeout) { - Events.record('view', 'page', 'admin/outdated_queries'); +function OutdatedQueriesCtrl($scope, $http, $timeout) { $scope.autoUpdate = true; this.queries = new Paginator([], { itemsPerPage: 50 }); diff --git a/client/app/pages/admin/tasks/index.js b/client/app/pages/admin/tasks/index.js index 53d9007ea9..bceb11e53c 100644 --- a/client/app/pages/admin/tasks/index.js +++ b/client/app/pages/admin/tasks/index.js @@ -3,8 +3,7 @@ import moment from 'moment'; import { Paginator } from '@/lib/pagination'; import template from './tasks.html'; -function TasksCtrl($scope, $location, $http, $timeout, Events) { - Events.record('view', 'page', 'admin/tasks'); +function TasksCtrl($scope, $location, $http, $timeout) { $scope.autoUpdate = true; $scope.selectedTab = 'in_progress'; diff --git a/client/app/pages/alert/index.js b/client/app/pages/alert/index.js index f33f791574..e7f5f008e6 100644 --- a/client/app/pages/alert/index.js +++ 
b/client/app/pages/alert/index.js @@ -6,8 +6,6 @@ function AlertCtrl($routeParams, $location, $sce, toastr, currentUser, Query, Ev if (this.alertId === 'new') { Events.record('view', 'page', 'alerts/new'); - } else { - Events.record('view', 'alert', this.alertId); } this.trustAsHtml = html => $sce.trustAsHtml(html); diff --git a/client/app/pages/alerts-list/index.js b/client/app/pages/alerts-list/index.js index 25cf0689a0..082ec203c1 100644 --- a/client/app/pages/alerts-list/index.js +++ b/client/app/pages/alerts-list/index.js @@ -8,8 +8,7 @@ const stateClass = { }; class AlertsListCtrl { - constructor(Events, Alert) { - Events.record('view', 'page', 'alerts'); + constructor(Alert) { this.showEmptyState = false; this.showList = false; diff --git a/client/app/pages/dashboards/dashboard.js b/client/app/pages/dashboards/dashboard.js index 875c22a5cf..8035b2a41f 100644 --- a/client/app/pages/dashboards/dashboard.js +++ b/client/app/pages/dashboards/dashboard.js @@ -179,7 +179,6 @@ function DashboardCtrl( (dashboard) => { this.dashboard = dashboard; this.isDashboardOwner = currentUser.id === dashboard.user.id || currentUser.hasPermission('admin'); - Events.record('view', 'dashboard', dashboard.id); renderDashboard(dashboard, force); if ($location.search().edit === true) { @@ -229,7 +228,6 @@ function DashboardCtrl( this.archiveDashboard = () => { const archive = () => { - Events.record('archive', 'dashboard', this.dashboard.id); this.dashboard.$delete(); }; diff --git a/client/app/pages/data-sources/list.js b/client/app/pages/data-sources/list.js index 12a6b107c3..7ae3bff7aa 100644 --- a/client/app/pages/data-sources/list.js +++ b/client/app/pages/data-sources/list.js @@ -1,9 +1,7 @@ import settingsMenu from '@/lib/settings-menu'; import template from './list.html'; -function DataSourcesCtrl(Policy, Events, DataSource) { - Events.record('view', 'page', 'admin/data_sources'); - +function DataSourcesCtrl(Policy, DataSource) { this.policy = Policy; this.dataSources = 
DataSource.query(); } diff --git a/client/app/pages/data-sources/show.js b/client/app/pages/data-sources/show.js index 0c4b4ae15e..584aa76e96 100644 --- a/client/app/pages/data-sources/show.js +++ b/client/app/pages/data-sources/show.js @@ -6,9 +6,8 @@ const logger = debug('redash:http'); function DataSourceCtrl( $scope, $route, $routeParams, $http, $location, toastr, - currentUser, AlertDialog, Events, DataSource, + currentUser, AlertDialog, DataSource, ) { - Events.record('view', 'page', 'admin/data_source'); $scope.dataSource = $route.current.locals.dataSource; $scope.dataSourceId = $routeParams.dataSourceId; @@ -45,8 +44,6 @@ function DataSourceCtrl( function deleteDataSource(callback) { const doDelete = () => { - Events.record('delete', 'datasource', $scope.dataSource.id); - $scope.dataSource.$delete(() => { toastr.success('Data source deleted successfully.'); $location.path('/data_sources/'); @@ -64,8 +61,6 @@ function DataSourceCtrl( } function testConnection(callback) { - Events.record('test', 'datasource', $scope.dataSource.id); - DataSource.test({ id: $scope.dataSource.id }, (httpResponse) => { if (httpResponse.ok) { toastr.success('Success'); @@ -81,8 +76,6 @@ function DataSourceCtrl( } function getDataSourceVersion(callback) { - Events.record('test', 'data_source_version', $scope.dataSource.id); - DataSource.version({ id: $scope.dataSource.id }, (httpResponse) => { if (httpResponse.ok) { const versionNumber = httpResponse.message; diff --git a/client/app/pages/destinations/list.js b/client/app/pages/destinations/list.js index 5e96eb2be7..84a87327d3 100644 --- a/client/app/pages/destinations/list.js +++ b/client/app/pages/destinations/list.js @@ -1,9 +1,7 @@ import settingsMenu from '@/lib/settings-menu'; import template from './list.html'; -function DestinationsCtrl($scope, $location, toastr, currentUser, Events, Destination) { - Events.record('view', 'page', 'admin/destinations'); - +function DestinationsCtrl($scope, $location, toastr, currentUser, 
Destination) { $scope.destinations = Destination.query(); } diff --git a/client/app/pages/destinations/show.js b/client/app/pages/destinations/show.js index 795d8b77a5..bdba532ebb 100644 --- a/client/app/pages/destinations/show.js +++ b/client/app/pages/destinations/show.js @@ -6,9 +6,8 @@ const logger = debug('redash:http'); function DestinationCtrl( $scope, $route, $routeParams, $http, $location, toastr, - currentUser, AlertDialog, Events, Destination, + currentUser, AlertDialog, Destination, ) { - Events.record('view', 'page', 'admin/destination'); $scope.destination = $route.current.locals.destination; $scope.destinationId = $routeParams.destinationId; @@ -34,8 +33,6 @@ function DestinationCtrl( $scope.delete = () => { const doDelete = () => { - Events.record('delete', 'destination', $scope.destination.id); - $scope.destination.$delete(() => { toastr.success('Destination deleted successfully.'); $location.path('/destinations/'); diff --git a/client/app/pages/groups/data-sources.js b/client/app/pages/groups/data-sources.js index 1e0a94f9f3..b571981709 100644 --- a/client/app/pages/groups/data-sources.js +++ b/client/app/pages/groups/data-sources.js @@ -1,8 +1,7 @@ import { includes } from 'lodash'; import template from './data-sources.html'; -function GroupDataSourcesCtrl($scope, $routeParams, $http, Events, Group, DataSource) { - Events.record('view', 'group_data_sources', $scope.groupId); +function GroupDataSourcesCtrl($scope, $routeParams, $http, Group, DataSource) { $scope.group = Group.get({ id: $routeParams.groupId }); $scope.dataSources = Group.dataSources({ id: $routeParams.groupId }); $scope.newDataSource = {}; diff --git a/client/app/pages/groups/list.js b/client/app/pages/groups/list.js index 48b1480cfc..3ce9f60174 100644 --- a/client/app/pages/groups/list.js +++ b/client/app/pages/groups/list.js @@ -2,8 +2,7 @@ import settingsMenu from '@/lib/settings-menu'; import { Paginator } from '@/lib/pagination'; import template from './list.html'; -function 
GroupsCtrl($scope, $uibModal, currentUser, Events, Group) { - Events.record('view', 'page', 'groups'); +function GroupsCtrl($scope, $uibModal, currentUser, Group) { $scope.currentUser = currentUser; $scope.groups = new Paginator([], { itemsPerPage: 20 }); Group.query((groups) => { diff --git a/client/app/pages/groups/show.js b/client/app/pages/groups/show.js index 3a6eb01250..46978b53b5 100644 --- a/client/app/pages/groups/show.js +++ b/client/app/pages/groups/show.js @@ -1,9 +1,7 @@ import { includes } from 'lodash'; import template from './show.html'; -function GroupCtrl($scope, $routeParams, $http, currentUser, Events, Group, User) { - Events.record('view', 'group', $scope.groupId); - +function GroupCtrl($scope, $routeParams, $http, currentUser, Group, User) { $scope.currentUser = currentUser; $scope.group = Group.get({ id: $routeParams.groupId }); $scope.members = Group.members({ id: $routeParams.groupId }); diff --git a/client/app/pages/queries-list/index.js b/client/app/pages/queries-list/index.js index 257768652e..cc509a4aff 100644 --- a/client/app/pages/queries-list/index.js +++ b/client/app/pages/queries-list/index.js @@ -20,9 +20,7 @@ class QueriesListCtrl { this.pageSize = parseInt($location.search().page_size || 20, 10); this.pageSizeOptions = [5, 10, 20, 50, 100]; - if (isString(this.term) && this.term !== '') { - Events.record('search', 'query', '', { term: this.term }); - } else { + if (!isString(this.term)) { this.term = ''; } diff --git a/client/app/pages/queries/view.js b/client/app/pages/queries/view.js index 34e010f572..3336e21d56 100644 --- a/client/app/pages/queries/view.js +++ b/client/app/pages/queries/view.js @@ -136,7 +136,6 @@ function QueryViewCtrl( KeyboardShortcuts.unbind(shortcuts); }); - Events.record('view', 'query', $scope.query.id); if ($scope.query.hasResult() || $scope.query.paramsRequired()) { getQueryResult(); } @@ -178,8 +177,6 @@ function QueryViewCtrl( }; $scope.duplicateQuery = () => { - Events.record('fork', 'query', 
$scope.query.id); - Query.fork({ id: $scope.query.id }, (newQuery) => { $location.url(newQuery.getSourceLink()).replace(); }); diff --git a/client/app/pages/query-snippets/edit.js b/client/app/pages/query-snippets/edit.js index 9522a70c9a..4419478d3f 100644 --- a/client/app/pages/query-snippets/edit.js +++ b/client/app/pages/query-snippets/edit.js @@ -3,7 +3,6 @@ import template from './edit.html'; function SnippetCtrl($routeParams, $http, $location, toastr, currentUser, AlertDialog, Events, QuerySnippet) { this.snippetId = $routeParams.snippetId; - Events.record('view', 'query_snippet', this.snippetId); this.editorOptions = { mode: 'snippets', diff --git a/client/app/pages/query-snippets/list.js b/client/app/pages/query-snippets/list.js index ee0f268218..48d12c070e 100644 --- a/client/app/pages/query-snippets/list.js +++ b/client/app/pages/query-snippets/list.js @@ -2,9 +2,7 @@ import settingsMenu from '@/lib/settings-menu'; import { Paginator } from '@/lib/pagination'; import template from './list.html'; -function SnippetsCtrl($location, currentUser, Events, QuerySnippet) { - Events.record('view', 'page', 'query_snippets'); - +function SnippetsCtrl($location, currentUser, QuerySnippet) { this.snippets = new Paginator([], { itemsPerPage: 20 }); QuerySnippet.query((snippets) => { this.snippets.updateRows(snippets); diff --git a/client/app/pages/users/list.js b/client/app/pages/users/list.js index 2d6d5e4f3c..3e10805283 100644 --- a/client/app/pages/users/list.js +++ b/client/app/pages/users/list.js @@ -3,9 +3,7 @@ import settingsMenu from '@/lib/settings-menu'; import { LivePaginator } from '@/lib/pagination'; import template from './list.html'; -function UsersCtrl($location, currentUser, Policy, Events, User) { - Events.record('view', 'page', 'users'); - +function UsersCtrl($location, currentUser, Policy, User) { this.currentUser = currentUser; if ($location.path() === '/users/disabled') { this.currentPage = 'disabled_users'; diff --git 
a/client/app/pages/users/show.js b/client/app/pages/users/show.js index aec7abe39f..e6f17be3af 100644 --- a/client/app/pages/users/show.js +++ b/client/app/pages/users/show.js @@ -6,7 +6,7 @@ import './settings.less'; function UserCtrl( $scope, $routeParams, $http, $location, toastr, - clientConfig, currentUser, Events, User, + clientConfig, currentUser, User, ) { $scope.userId = $routeParams.userId; $scope.currentUser = currentUser; @@ -16,7 +16,6 @@ function UserCtrl( $scope.userId = currentUser.id; } - Events.record('view', 'user', $scope.userId); $scope.canEdit = currentUser.hasPermission('admin') || currentUser.id === parseInt($scope.userId, 10); $scope.showSettings = false; $scope.showPasswordSettings = false; diff --git a/redash/handlers/admin.py b/redash/handlers/admin.py index 919dc91924..51b0c0ca05 100644 --- a/redash/handlers/admin.py +++ b/redash/handlers/admin.py @@ -1,10 +1,12 @@ import json +import time from flask import request -from flask_login import login_required +from flask_login import current_user, login_required from redash import models, redis_connection +from redash.authentication import current_org from redash.handlers import routes -from redash.handlers.base import json_response +from redash.handlers.base import json_response, record_event from redash.permissions import require_super_admin from redash.tasks.queries import QueryTaskTracker @@ -23,6 +25,13 @@ def outdated_queries(): else: outdated_queries = [] + record_event(current_org, current_user, { + 'action': 'view', + 'object_type': 'api_call', + 'object_id': 'admin/outdated_queries', + 'timestamp': int(time.time()), + }) + return json_response( dict(queries=[q.to_dict(with_stats=True, with_last_modified_by=False) for q in outdated_queries], @@ -41,6 +50,12 @@ def queries_tasks(): waiting = QueryTaskTracker.all(QueryTaskTracker.WAITING_LIST, limit=waiting_limit) in_progress = QueryTaskTracker.all(QueryTaskTracker.IN_PROGRESS_LIST, limit=progress_limit) done = 
QueryTaskTracker.all(QueryTaskTracker.DONE_LIST, limit=done_limit) + record_event(current_org, current_user, { + 'action': 'view', + 'object_type': 'api_call', + 'object_id': 'admin/tasks', + 'timestamp': int(time.time()), + }) response = { 'waiting': [t.data for t in waiting if t is not None], diff --git a/redash/handlers/alerts.py b/redash/handlers/alerts.py index 569e5e2753..f5bfc0e13c 100644 --- a/redash/handlers/alerts.py +++ b/redash/handlers/alerts.py @@ -15,7 +15,14 @@ class AlertResource(BaseResource): def get(self, alert_id): alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org) require_access(alert.groups, self.current_user, view_only) + self.record_event({ + 'action': 'view', + 'timestamp': int(time.time()), + 'object_id': alert.id, + 'object_type': 'alert' + }) return serialize_alert(alert) + return alert.to_dict() def post(self, alert_id): req = request.get_json(True) diff --git a/redash/handlers/dashboards.py b/redash/handlers/dashboards.py index 54afc986dd..2e43b445a0 100644 --- a/redash/handlers/dashboards.py +++ b/redash/handlers/dashboards.py @@ -139,6 +139,12 @@ def get(self, dashboard_slug=None): response['can_edit'] = can_modify(dashboard, self.current_user) + self.record_event({ + 'action': 'view', + 'object_id': dashboard.id, + 'object_type': 'dashboard', + }) + return response @require_permission('edit_dashboard') @@ -178,6 +184,11 @@ def post(self, dashboard_slug): abort(409) result = serialize_dashboard(dashboard, with_widgets=True, user=self.current_user) + self.record_event({ + 'action': 'edit', + 'object_id': dashboard.id, + 'object_type': 'dashboard', + }) return result @require_permission('edit_dashboard') @@ -195,6 +206,11 @@ def delete(self, dashboard_slug): models.db.session.add(dashboard) d = serialize_dashboard(dashboard, with_widgets=True, user=self.current_user) models.db.session.commit() + self.record_event({ + 'action': 'archive', + 'object_id': dashboard.id, + 'object_type': 'dashboard', + }) 
return d diff --git a/redash/handlers/data_sources.py b/redash/handlers/data_sources.py index cfb7a03c24..3d7dbe1948 100644 --- a/redash/handlers/data_sources.py +++ b/redash/handlers/data_sources.py @@ -25,7 +25,13 @@ class DataSourceResource(BaseResource): @require_admin def get(self, data_source_id): data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org) - return data_source.to_dict(all=True) + ds = data_source.to_dict(all=True) + self.record_event({ + 'action': 'view', + 'object_id': data_source.id, + 'object_type': 'data_source', + }) + return ds @require_admin def post(self, data_source_id): @@ -59,6 +65,11 @@ def post(self, data_source_id): def delete(self, data_source_id): data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org) data_source.delete() + self.record_event({ + 'action': 'delete', + 'object_id': data_source_id, + 'object_type': 'datasource', + }) return make_response('', 204) @@ -83,6 +94,11 @@ def get(self): except AttributeError: logging.exception("Error with DataSource#to_dict (data source id: %d)", ds.id) + self.record_event({ + 'action': 'view', + 'object_id': 'admin/data_sources', + 'object_type': 'api_call', + }) return sorted(response.values(), key=lambda d: d['name'].lower()) @require_admin @@ -186,6 +202,12 @@ class DataSourceTestResource(BaseResource): def post(self, data_source_id): data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org) + self.record_event({ + 'action': 'test', + 'object_id': data_source_id, + 'object_type': 'datasource', + }) + try: data_source.query_runner.test_connection() except Exception as e: @@ -197,6 +219,11 @@ class DataSourceVersionResource(BaseResource): def get(self, data_source_id): data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org) require_access(data_source.groups, self.current_user, view_only) + self.record_event({ + 'action': 'test', + 
'object_id': data_source_id, + 'object_type': 'data_source_version', + }) try: version_info = data_source.query_runner.get_data_source_version() except Exception as e: diff --git a/redash/handlers/destinations.py b/redash/handlers/destinations.py index c1895b7321..254e51f078 100644 --- a/redash/handlers/destinations.py +++ b/redash/handlers/destinations.py @@ -19,7 +19,13 @@ class DestinationResource(BaseResource): @require_admin def get(self, destination_id): destination = models.NotificationDestination.get_by_id_and_org(destination_id, self.current_org) - return destination.to_dict(all=True) + d = destination.to_dict(all=True) + self.record_event({ + 'action': 'view', + 'object_id': destination_id, + 'object_type': 'destination' + }) + return d @require_admin def post(self, destination_id): @@ -48,6 +54,12 @@ def delete(self, destination_id): models.db.session.delete(destination) models.db.session.commit() + self.record_event({ + 'action': 'delete', + 'object_id': destination_id, + 'object_type': 'destination', + }) + return make_response('', 204) @@ -63,6 +75,12 @@ def get(self): d = ds.to_dict() response[ds.id] = d + self.record_event({ + 'action': 'view', + 'object_id': 'admin/destinations', + 'object_type': 'api_call', + }) + return response.values() @require_admin diff --git a/redash/handlers/groups.py b/redash/handlers/groups.py index 7790044468..ba72346b10 100644 --- a/redash/handlers/groups.py +++ b/redash/handlers/groups.py @@ -30,6 +30,12 @@ def get(self): groups = models.Group.query.filter( models.Group.id.in_(self.current_user.group_ids)) + self.record_event({ + 'action': 'view', + 'object_id': 'groups', + 'object_type': 'api_call', + }) + return [g.to_dict() for g in groups] @@ -59,6 +65,12 @@ def get(self, group_id): group = models.Group.get_by_id_and_org(group_id, self.current_org) + self.record_event({ + 'action': 'view', + 'object_id': group_id, + 'object_type': 'group', + }) + return group.to_dict() @require_admin @@ -154,6 +166,12 @@ def 
get(self, group_id): data_sources = (models.DataSource.query .join(models.DataSourceGroup) .filter(models.DataSourceGroup.group == group)) + + self.record_event({ + 'action': 'view', + 'object_id': group_id, + 'object_type': 'group_data_sources', + }) return [ds.to_dict(with_permissions_for=group) for ds in data_sources] diff --git a/redash/handlers/queries.py b/redash/handlers/queries.py index 1fc3e0af7c..cb344ba7f5 100644 --- a/redash/handlers/queries.py +++ b/redash/handlers/queries.py @@ -68,6 +68,11 @@ def get(self): return [] include_drafts = request.args.get('include_drafts') is not None + self.record_event({ + 'action': 'search', + 'object_id': term, + 'object_type': 'query', + }) # this redirects to the new query list API that is aware of search new_location = url_for( @@ -297,6 +302,12 @@ def get(self, query_id): result = QuerySerializer(q, with_visualizations=True).serialize() result['can_edit'] = can_modify(q, self.current_user) + + self.record_event({ + 'action': 'view', + 'object_id': query_id, + 'object_type': 'query', + }) return result # TODO: move to resource of its own? 
(POST /queries/{id}/archive) @@ -326,6 +337,11 @@ def post(self, query_id): require_access(query.data_source.groups, self.current_user, not_view_only) forked_query = query.fork(self.current_user) models.db.session.commit() + self.record_event({ + 'action': 'fork', + 'object_id': query_id, + 'object_type': 'query', + }) return QuerySerializer(forked_query, with_visualizations=True).serialize() diff --git a/redash/handlers/query_snippets.py b/redash/handlers/query_snippets.py index fc74865771..fbc6a2871c 100644 --- a/redash/handlers/query_snippets.py +++ b/redash/handlers/query_snippets.py @@ -11,6 +11,11 @@ class QuerySnippetResource(BaseResource): def get(self, snippet_id): snippet = get_object_or_404(models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org) + self.record_event({ + 'action': 'view', + 'object_id': snippet_id, + 'object_type': 'query_snippet', + }) return snippet.to_dict() def post(self, snippet_id): @@ -69,5 +74,10 @@ def post(self): return snippet.to_dict() def get(self): + self.record_event({ + 'action': 'view', + 'object_id': 'query_snippets', + 'object_type': 'api_call', + }) return [snippet.to_dict() for snippet in models.QuerySnippet.all(org=self.current_org)] diff --git a/redash/handlers/users.py b/redash/handlers/users.py index d0fa73a9ab..f06e28a228 100644 --- a/redash/handlers/users.py +++ b/redash/handlers/users.py @@ -49,13 +49,13 @@ def serialize_user(user): if group: user_groups.append({'id': group.id, 'name': group.name}) - + d['groups'] = user_groups return d search_term = request.args.get('q', '') - + if request.args.get('disabled', None) is not None: users = models.User.all_disabled(self.current_org) else: @@ -63,9 +63,14 @@ def serialize_user(user): if search_term: users = models.User.search(users, search_term) - + users = order_results(users) + self.record_event({ + 'action': 'view', + 'object_id': 'users', + 'object_type': 'api_call', + }) return paginate(users, page, page_size, serialize_user) @require_admin @@ 
-138,7 +143,11 @@ class UserResource(BaseResource): def get(self, user_id): require_permission_or_owner('list_users', user_id) user = get_object_or_404(models.User.get_by_id_and_org, user_id, self.current_org) - + self.record_event({ + 'action': 'view', + 'object_id': user_id, + 'object_type': 'user', + }) return user.to_dict(with_api_key=is_admin_or_owner(user_id)) def post(self, user_id): diff --git a/redash/handlers/visualizations.py b/redash/handlers/visualizations.py index 79bcf7a528..bcddac5440 100644 --- a/redash/handlers/visualizations.py +++ b/redash/handlers/visualizations.py @@ -49,5 +49,10 @@ def post(self, visualization_id): def delete(self, visualization_id): vis = get_object_or_404(models.Visualization.get_by_id_and_org, visualization_id, self.current_org) require_object_modify_permission(vis.query_rel, self.current_user) + self.record_event({ + 'action': 'delete', + 'object_id': visualization_id, + 'object_type': 'visualization', + }) models.db.session.delete(vis) models.db.session.commit() diff --git a/redash/handlers/widgets.py b/redash/handlers/widgets.py index d88908dac2..cae7246221 100644 --- a/redash/handlers/widgets.py +++ b/redash/handlers/widgets.py @@ -78,3 +78,8 @@ def delete(self, widget_id): require_object_modify_permission(widget.dashboard, self.current_user) models.db.session.delete(widget) models.db.session.commit() + self.record_event({ + 'action': 'delete', + 'object_id': widget_id, + 'object_type': 'widget', + }) From 6362537a791b061e134864f4a3a95cbcceb03c6c Mon Sep 17 00:00:00 2001 From: Allen Short Date: Wed, 6 Sep 2017 20:29:50 +0000 Subject: [PATCH 25/48] Run queries with no cached result in public dashboards (re #220) --- redash/serializers.py | 17 +++++++++++++---- tests/handlers/test_embed.py | 12 ++++++++++++ 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/redash/serializers.py b/redash/serializers.py index 641c39ce43..21ca3d1238 100644 --- a/redash/serializers.py +++ b/redash/serializers.py @@ -9,6 +9,7 @@ 
from flask_login import current_user from redash import models from redash.permissions import has_access, view_only +from redash.handlers.query_results import run_query_sync def public_widget(widget): @@ -21,8 +22,15 @@ def public_widget(widget): 'created_at': widget.created_at } - if widget.visualization and widget.visualization.id: - query_data = models.QueryResult.query.get(widget.visualization.query_rel.latest_query_data_id).to_dict() + if (widget.visualization and widget.visualization.id and + widget.visualization.query_rel is not None): + q = widget.visualization.query_rel + # make sure the widget's query has a latest_query_data_id that is + # not null so public dashboards work + if (q.latest_query_data_id is None): + run_query_sync(q.data_source, {}, q.query_text) + + query_data = q.latest_query_data.to_dict() res['visualization'] = { 'type': widget.visualization.type, 'name': widget.visualization.name, @@ -31,9 +39,10 @@ def public_widget(widget): 'updated_at': widget.visualization.updated_at, 'created_at': widget.visualization.created_at, 'query': { + 'id': q.id, 'query': ' ', # workaround, as otherwise the query data won't be loaded. 
- 'name': widget.visualization.query_rel.name, - 'description': widget.visualization.query_rel.description, + 'name': q.name, + 'description': q.description, 'options': {}, 'latest_query_data': query_data } diff --git a/tests/handlers/test_embed.py b/tests/handlers/test_embed.py index 18f119d786..905a6f8672 100644 --- a/tests/handlers/test_embed.py +++ b/tests/handlers/test_embed.py @@ -1,5 +1,8 @@ +import mock + from tests import BaseTestCase from redash.models import db +from redash.query_runner.pg import PostgreSQL class TestEmbedVisualization(BaseTestCase): @@ -97,6 +100,15 @@ def test_inactive_token(self): res = self.make_request('get', '/api/dashboards/public/{}'.format(api_key.api_key), user=False, is_json=False) self.assertEqual(res.status_code, 404) + def test_dashboard_widgets(self): + dashboard = self.factory.create_dashboard() + w1 = self.factory.create_widget(dashboard=dashboard) + w2 = self.factory.create_widget(dashboard=dashboard, visualization=None, text="a text box") + api_key = self.factory.create_api_key(object=dashboard) + with mock.patch.object(PostgreSQL, "run_query") as qr: + qr.return_value = ("[1, 2]", None) + res = self.make_request('get', '/api/dashboards/public/{}'.format(api_key.api_key), user=False, is_json=False) + self.assertEqual(res.status_code, 200) # Not relevant for now, as tokens in api_keys table are only created for dashboards. Once this changes, we should # add this test. 
# def test_token_doesnt_belong_to_dashboard(self): From f2d5cd2254bacf080743defe6c4b7e831f287a28 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Mon, 9 Jul 2018 15:56:10 -0500 Subject: [PATCH 26/48] allow x-axis label truncation (re #249) --- .../visualizations/chart/chart-editor.html | 6 +++++ client/app/visualizations/chart/index.js | 13 ++++++++++ .../app/visualizations/chart/plotly/utils.js | 25 +++++++++++++++++-- 3 files changed, 42 insertions(+), 2 deletions(-) diff --git a/client/app/visualizations/chart/chart-editor.html b/client/app/visualizations/chart/chart-editor.html index e7133ee956..8f4b0837b2 100644 --- a/client/app/visualizations/chart/chart-editor.html +++ b/client/app/visualizations/chart/chart-editor.html @@ -196,6 +196,12 @@ Show Labels
    + +
    + + + How many characters should X Axis Labels be truncated at in the legend? +
    diff --git a/client/app/visualizations/chart/index.js b/client/app/visualizations/chart/index.js index 4946aeb431..e8a95c131d 100644 --- a/client/app/visualizations/chart/index.js +++ b/client/app/visualizations/chart/index.js @@ -282,6 +282,19 @@ function ChartEditor(ColorPalette, clientConfig) { scope.options.legend = { enabled: true }; } + scope.$watch('options.globalSeriesType', (newType, oldType) => { + const defaultXAxisLength = 10; + if (!has(scope.options, 'xAxisLabelLength')) { + scope.options.xAxisLabelLength = defaultXAxisLength; + } + if (oldType !== newType) { + scope.options.xAxisLabelLength = defaultXAxisLength; + if (newType === 'pie') { + scope.options.xAxisLabelLength = 300; + } + } + }, true); + if (scope.columnNames) { each(scope.options.columnMapping, (value, key) => { if (scope.columnNames.length > 0 && !includes(scope.columnNames, key)) { diff --git a/client/app/visualizations/chart/plotly/utils.js b/client/app/visualizations/chart/plotly/utils.js index 537b7c7fe9..733b3d9643 100644 --- a/client/app/visualizations/chart/plotly/utils.js +++ b/client/app/visualizations/chart/plotly/utils.js @@ -209,6 +209,19 @@ function getUnifiedXAxisValues(seriesList, sorted) { return sorted ? sortBy(result, identity) : result; } +const DEFAULT_XAXIS_LABEL_LENGTH = 300; + +// We only truncate category x-axis labels because the other types +// are correctly formatted by Plotly. 
+function truncateCategoryAxis(oldXLabel, options) { + const xAxisLabelLength = parseInt(options.xAxisLabelLength, 10) || DEFAULT_XAXIS_LABEL_LENGTH; + + if (options && options.xAxis && options.xAxis.type === 'category') { + return String(oldXLabel).substr(0, xAxisLabelLength); + } + return oldXLabel; +} + function preparePieData(seriesList, options) { const { cellWidth, cellHeight, xPadding, yPadding, cellsInRow, hasX, @@ -260,9 +273,17 @@ function preparePieData(seriesList, options) { }); }); + const colorPalette = ColorPaletteArray.slice(); return { values: map(serie.data, i => i.y), - labels: map(serie.data, row => (hasX ? normalizeValue(row.x) : `Slice ${index}`)), + labels: map(serie.data, (row, rowIdx) => { + const rowX = hasX ? truncateCategoryAxis(normalizeValue(row.x), options) : `Slice ${index}`; + const rowOpts = options.seriesOptions[rowX]; + if (rowOpts) { + colorPalette[rowIdx] = rowOpts.color; + } + return rowX; + }), type: 'pie', hole: 0.4, marker: { @@ -317,7 +338,7 @@ function prepareChartData(seriesList, options) { const yValues = []; const yErrorValues = []; each(data, (row) => { - const x = normalizeValue(row.x); + const x = truncateCategoryAxis(normalizeValue(row.x), options); const y = normalizeValue(row.y); const yError = normalizeValue(row.yError); const size = normalizeValue(row.size); From d97f76123047724e867650e110e3423f5dbb4113 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Wed, 27 Sep 2017 21:18:32 +0000 Subject: [PATCH 27/48] secure cookies, add X-Content-Type-Options header (bug 1371613) --- redash/__init__.py | 5 +++++ redash/settings/__init__.py | 1 + 2 files changed, 6 insertions(+) diff --git a/redash/__init__.py b/redash/__init__.py index bd9a8e5859..233f44196b 100644 --- a/redash/__init__.py +++ b/redash/__init__.py @@ -128,6 +128,11 @@ def create_app(load_admin=True): app.config['SQLALCHEMY_DATABASE_URI'] = settings.SQLALCHEMY_DATABASE_URI app.config.update(settings.all_settings()) + def set_response_headers(response): + 
response.headers['X-Content-Type-Options'] = 'nosniff' + return response + + app.after_request(set_response_headers) provision_app(app) db.init_app(app) migrate.init_app(app, db) diff --git a/redash/settings/__init__.py b/redash/settings/__init__.py index 44cb71eef4..5fd36c9941 100644 --- a/redash/settings/__init__.py +++ b/redash/settings/__init__.py @@ -14,6 +14,7 @@ def all_settings(): return settings +SESSION_COOKIE_SECURE = True REDIS_URL = os.environ.get('REDASH_REDIS_URL', os.environ.get('REDIS_URL', "redis://localhost:6379/0")) PROXIES_COUNT = int(os.environ.get('REDASH_PROXIES_COUNT', "1")) From 9bcda876ccff179a4a12b69e4312f4a3f775a540 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Tue, 12 Dec 2017 04:47:08 +0000 Subject: [PATCH 28/48] Merge mozilla schema updates with schema from master --- migrations/versions/40384fa03dd1_.py | 40 ++++++++++++++++++++++++++++ migrations/versions/58f810489c47_.py | 28 +++++++++++++++++++ migrations/versions/f9571a5ab4f3_.py | 28 +++++++++++++++++++ migrations/versions/fbc0849e2674_.py | 26 ++++++++++++++++++ 4 files changed, 122 insertions(+) create mode 100644 migrations/versions/40384fa03dd1_.py create mode 100644 migrations/versions/58f810489c47_.py create mode 100644 migrations/versions/f9571a5ab4f3_.py create mode 100644 migrations/versions/fbc0849e2674_.py diff --git a/migrations/versions/40384fa03dd1_.py b/migrations/versions/40384fa03dd1_.py new file mode 100644 index 0000000000..f2c53711c0 --- /dev/null +++ b/migrations/versions/40384fa03dd1_.py @@ -0,0 +1,40 @@ +"""Upgrade 'data_scanned' column to form used in upstream + +Revision ID: 40384fa03dd1 +Revises: 58f810489c47 +Create Date: 2018-01-18 18:44:04.917081 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.sql.expression import func, cast + +# revision identifiers, used by Alembic. 
+revision = '40384fa03dd1' +down_revision = 'fbc0849e2674' +branch_labels = None +depends_on = None + + +def upgrade(): + qr = sa.sql.table('query_results', + sa.sql.column('data_scanned', sa.String), + sa.sql.column('data', sa.String)) + op.execute( + qr.update() + .where(qr.c.data_scanned != '') + .where(qr.c.data_scanned != 'error') + .where(qr.c.data_scanned != 'N/A') + .values(data=cast( + func.jsonb_set(cast(qr.c.data, JSONB), + '{metadata}', + cast('{"data_scanned": ' + + qr.c.data_scanned + '}', + JSONB)), + sa.String))) + op.drop_column('query_results', 'data_scanned') + + +def downgrade(): + op.add_column('query_results', sa.Column('data_scanned', sa.String(length=255), nullable=True)) diff --git a/migrations/versions/58f810489c47_.py b/migrations/versions/58f810489c47_.py new file mode 100644 index 0000000000..1ed4190288 --- /dev/null +++ b/migrations/versions/58f810489c47_.py @@ -0,0 +1,28 @@ +"""add 'data_scanned' column to query_results + +Revision ID: 58f810489c47 +Revises: eb2f788f997e +Create Date: 2017-06-25 21:24:54.942119 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '58f810489c47' +down_revision = 'eb2f788f997e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('query_results', sa.Column('data_scanned', sa.String(length=255), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('query_results', 'data_scanned') + # ### end Alembic commands ### diff --git a/migrations/versions/f9571a5ab4f3_.py b/migrations/versions/f9571a5ab4f3_.py new file mode 100644 index 0000000000..da1ba02d6d --- /dev/null +++ b/migrations/versions/f9571a5ab4f3_.py @@ -0,0 +1,28 @@ +"""Rename 'image_url' to 'profile_image_url' + + a revision was changed after we pulled it from upstream in m12, so it had to + be fixed here. + + +Revision ID: f9571a5ab4f3 +Revises: 40384fa03dd1 +Create Date: 2018-01-18 18:04:07.943843 +""" +from alembic import op + + +# revision identifiers, used by Alembic. +revision = 'f9571a5ab4f3' +down_revision = '40384fa03dd1' +branch_labels = None +depends_on = None + + +def upgrade(): + # Upstream changed the column name in migration revision 7671dca4e604 -- + # see git revision 62e5e3892603502c5f3a6da277c33c73510b8819 + op.alter_column('users', 'image_url', new_column_name='profile_image_url') + + +def downgrade(): + op.alter_column('users', 'profile_image_url', new_column_name='image_url') diff --git a/migrations/versions/fbc0849e2674_.py b/migrations/versions/fbc0849e2674_.py new file mode 100644 index 0000000000..6195141496 --- /dev/null +++ b/migrations/versions/fbc0849e2674_.py @@ -0,0 +1,26 @@ +""" +Merge upstream fulltext search + +This formerly merged the fulltext search changes (6b5be7e0a0ef, 5ec5c84ba61e) +with upstream's 7671dca4e604 - but then those changes moved in the revision +graph to be direct descendants of that upstream revision, so the merge point +has been moved. + +Revision ID: fbc0849e2674 +Revises: 6b5be7e0a0ef, eb2f788f997e +Create Date: 2017-12-12 04:45:34.360587 +""" + +# revision identifiers, used by Alembic. 
+revision = 'fbc0849e2674' +down_revision = ('6b5be7e0a0ef', '58f810489c47') +branch_labels = None +depends_on = None + + +def upgrade(): + pass + + +def downgrade(): + pass From 4ccca6719f52afa203b33d5ef7ddc6c7e296b550 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Wed, 14 Feb 2018 17:52:43 +0000 Subject: [PATCH 29/48] merge upstream db changes --- migrations/versions/15041b7085fe_.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 migrations/versions/15041b7085fe_.py diff --git a/migrations/versions/15041b7085fe_.py b/migrations/versions/15041b7085fe_.py new file mode 100644 index 0000000000..fcb10aa78f --- /dev/null +++ b/migrations/versions/15041b7085fe_.py @@ -0,0 +1,24 @@ +"""empty message + +Revision ID: 15041b7085fe +Revises: f9571a5ab4f3, 969126bd800f +Create Date: 2018-02-14 17:52:17.138127 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '15041b7085fe' +down_revision = ('f9571a5ab4f3', '969126bd800f') +branch_labels = None +depends_on = None + + +def upgrade(): + pass + + +def downgrade(): + pass From 326e0f8a22171ef2ed2c18a540e3f1493de854e7 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Tue, 20 Mar 2018 19:10:22 +0000 Subject: [PATCH 30/48] Support authentication for URL data source (re #330) (#336) * Support authentication for URL data source (re #330) * Refactor authentication support for data sources. Adds a new BaseHTTPQueryRunner class. 
--- redash/query_runner/__init__.py | 109 ++++++++++++++++++++++++- redash/query_runner/jql.py | 50 +++--------- redash/query_runner/url.py | 50 +++--------- tests/query_runner/test_http.py | 136 ++++++++++++++++++++++++++++++++ 4 files changed, 262 insertions(+), 83 deletions(-) create mode 100644 tests/query_runner/test_http.py diff --git a/redash/query_runner/__init__.py b/redash/query_runner/__init__.py index 60bcbe4005..73c5aa5918 100644 --- a/redash/query_runner/__init__.py +++ b/redash/query_runner/__init__.py @@ -1,14 +1,16 @@ -import sys import logging import json +import sys + +import requests -from collections import OrderedDict from redash import settings logger = logging.getLogger(__name__) __all__ = [ 'BaseQueryRunner', + 'BaseHTTPQueryRunner', 'InterruptException', 'BaseSQLQueryRunner', 'TYPE_DATETIME', @@ -90,7 +92,7 @@ def get_data_source_version(self): version = json.loads(data)['rows'][0]['version'] except KeyError as e: raise Exception(e) - + if self.data_source_version_post_process == "split by space take second": version = version.split(" ")[1] elif self.data_source_version_post_process == "split by space take last": @@ -167,6 +169,107 @@ def _get_tables_stats(self, tables_dict): tables_dict[t]['size'] = res[0]['cnt'] +class BaseHTTPQueryRunner(BaseQueryRunner): + response_error = "Endpoint returned unexpected status code" + requires_authentication = False + url_title = 'URL base path' + username_title = 'HTTP Basic Auth Username' + password_title = 'HTTP Basic Auth Password' + + @classmethod + def configuration_schema(cls): + schema = { + 'type': 'object', + 'properties': { + 'url': { + 'type': 'string', + 'title': cls.url_title, + }, + 'username': { + 'type': 'string', + 'title': cls.username_title, + }, + 'password': { + 'type': 'string', + 'title': cls.password_title, + }, + "doc_url": { + "type": "string", + "title": "Documentation URL", + "default": cls.default_doc_url, + }, + "toggle_table_string": { + "type": "string", + "title": 
"Toggle Table String", + "default": "_v", + "info": ( + "This string will be used to toggle visibility of " + "tables in the schema browser when editing a query " + "in order to remove non-useful tables from sight." + ), + } + }, + 'required': ['url'], + 'secret': ['password'] + } + if cls.requires_authentication: + schema['required'] += ['username', 'password'] + return schema + + def get_auth(self): + username = self.configuration.get('username') + password = self.configuration.get('password') + if username and password: + return (username, password) + if self.requires_authentication: + raise ValueError("Username and Password required") + else: + return None + + def get_response(self, url, auth=None, **kwargs): + # Get authentication values if not given + if auth is None: + auth = self.get_auth() + + # Then call requests to get the response from the given endpoint + # URL optionally, with the additional requests parameters. + error = None + response = None + try: + response = requests.get(url, auth=auth, **kwargs) + # Raise a requests HTTP exception with the appropriate reason + # for 4xx and 5xx response status codes which is later caught + # and passed back. + response.raise_for_status() + + # Any other responses (e.g. 2xx and 3xx): + if response.status_code != 200: + error = '{} ({}).'.format( + self.response_error, + response.status_code, + ) + + except requests.HTTPError as exc: + logger.exception(exc) + error = ( + "Failed to execute query. " + "Return Code: {} Reason: {}".format( + response.status_code, + response.text + ) + ) + except requests.RequestException as exc: + # Catch all other requests exceptions and return the error. + logger.exception(exc) + error = str(exc) + except Exception as exc: + # Catch any other exceptions, log it and reraise it. 
+ logger.exception(exc) + raise sys.exc_info()[1], None, sys.exc_info()[2] + + return response, error + + query_runners = {} diff --git a/redash/query_runner/jql.py b/redash/query_runner/jql.py index 61d8fc6598..04c9c8fefc 100644 --- a/redash/query_runner/jql.py +++ b/redash/query_runner/jql.py @@ -1,5 +1,4 @@ import json -import requests import re from collections import OrderedDict @@ -137,41 +136,15 @@ def get_dict_output_field_name(cls,field_name, member_name): return None -class JiraJQL(BaseQueryRunner): +class JiraJQL(BaseHTTPQueryRunner): noop_query = '{"queryType": "count"}' default_doc_url = ("https://confluence.atlassian.com/jirasoftwarecloud/" "advanced-searching-764478330.html") - - @classmethod - def configuration_schema(cls): - return { - 'type': 'object', - 'properties': { - 'url': { - 'type': 'string', - 'title': 'JIRA URL' - }, - 'username': { - 'type': 'string', - }, - 'password': { - 'type': 'string' - }, - "doc_url": { - "type": "string", - "title": "Documentation URL", - "default": cls.default_doc_url - }, - "toggle_table_string": { - "type": "string", - "title": "Toggle Table String", - "default": "_v", - "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." - } - }, - 'required': ['url', 'username', 'password'], - 'secret': ['password'] - } + response_error = "JIRA returned unexpected status code" + requires_authentication = True + url_title = 'JIRA URL' + username_title = 'Username' + password_title = 'Password' @classmethod def name(cls): @@ -199,13 +172,9 @@ def run_query(self, query, user): else: query['maxResults'] = query.get('maxResults', 1000) - response = requests.get(jql_url, params=query, auth=(self.configuration.get('username'), self.configuration.get('password'))) - - if response.status_code == 401 or response.status_code == 403: - return None, "Authentication error. Please check username/password." 
- - if response.status_code != 200: - return None, "JIRA returned unexpected status code ({})".format(response.status_code) + response, error = self.get_response(jql_url, params=query) + if error is not None: + return None, error data = response.json() @@ -219,4 +188,3 @@ def run_query(self, query, user): return None, "Query cancelled by user." register(JiraJQL) - diff --git a/redash/query_runner/url.py b/redash/query_runner/url.py index c99289cca4..cfc1b03864 100644 --- a/redash/query_runner/url.py +++ b/redash/query_runner/url.py @@ -1,34 +1,10 @@ -import requests -from redash.query_runner import BaseQueryRunner, register +from redash.query_runner import BaseHTTPQueryRunner, register -class Url(BaseQueryRunner): +class Url(BaseHTTPQueryRunner): default_doc_url = ("http://redash.readthedocs.io/en/latest/" "datasources.html#url") - @classmethod - def configuration_schema(cls): - return { - 'type': 'object', - 'properties': { - 'url': { - 'type': 'string', - 'title': 'URL base path' - }, - "doc_url": { - "type": "string", - "title": "Documentation URL", - "default": cls.default_doc_url - }, - "toggle_table_string": { - "type": "string", - "title": "Toggle Table String", - "default": "_v", - "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." 
- } - } - } - @classmethod def annotate_query(cls): return False @@ -40,7 +16,6 @@ def run_query(self, query, user): base_url = self.configuration.get("url", None) try: - error = None query = query.strip() if base_url is not None and base_url != "": @@ -52,20 +27,17 @@ def run_query(self, query, user): url = base_url + query - response = requests.get(url) - response.raise_for_status() - json_data = response.content.strip() + response, error = self.get_response(url) + if error is not None: + return None, error - if not json_data: - error = "Got empty response from '{}'.".format(url) + json_data = response.content.strip() - return json_data, error - except requests.RequestException as e: - return None, str(e) + if json_data: + return json_data, None + else: + return None, "Got empty response from '{}'.".format(url) except KeyboardInterrupt: - error = "Query cancelled by user." - json_data = None - - return json_data, error + return None, "Query cancelled by user." register(Url) diff --git a/tests/query_runner/test_http.py b/tests/query_runner/test_http.py new file mode 100644 index 0000000000..e4d88c24a8 --- /dev/null +++ b/tests/query_runner/test_http.py @@ -0,0 +1,136 @@ +import mock +from unittest import TestCase + +import requests +from redash.query_runner import BaseHTTPQueryRunner + + +class RequiresAuthQueryRunner(BaseHTTPQueryRunner): + requires_authentication = True + + +class TestBaseHTTPQueryRunner(TestCase): + + def test_requires_authentication_default(self): + self.assertFalse(BaseHTTPQueryRunner.requires_authentication) + schema = BaseHTTPQueryRunner.configuration_schema() + self.assertNotIn('username', schema['required']) + self.assertNotIn('password', schema['required']) + + def test_requires_authentication_true(self): + schema = RequiresAuthQueryRunner.configuration_schema() + self.assertIn('username', schema['required']) + self.assertIn('password', schema['required']) + + def test_get_auth_with_values(self): + query_runner = BaseHTTPQueryRunner({ + 
'username': 'username', + 'password': 'password' + }) + self.assertEqual(query_runner.get_auth(), ('username', 'password')) + + def test_get_auth_empty(self): + query_runner = BaseHTTPQueryRunner({}) + self.assertIsNone(query_runner.get_auth()) + + def test_get_auth_empty_requires_authentication(self): + query_runner = RequiresAuthQueryRunner({}) + self.assertRaisesRegexp( + ValueError, + "Username and Password required", + query_runner.get_auth + ) + + @mock.patch('requests.get') + def test_get_response_success(self, mock_get): + mock_response = mock.Mock() + mock_response.status_code = 200 + mock_response.text = "Success" + mock_get.return_value = mock_response + + url = 'https://example.com/' + query_runner = BaseHTTPQueryRunner({}) + response, error = query_runner.get_response(url) + mock_get.assert_called_once_with(url, auth=None) + self.assertEqual(response.status_code, 200) + self.assertIsNone(error) + + @mock.patch('requests.get') + def test_get_response_success_custom_auth(self, mock_get): + mock_response = mock.Mock() + mock_response.status_code = 200 + mock_response.text = "Success" + mock_get.return_value = mock_response + + url = 'https://example.com/' + query_runner = BaseHTTPQueryRunner({}) + auth = ('username', 'password') + response, error = query_runner.get_response(url, auth=auth) + mock_get.assert_called_once_with(url, auth=auth) + self.assertEqual(response.status_code, 200) + self.assertIsNone(error) + + @mock.patch('requests.get') + def test_get_response_failure(self, mock_get): + mock_response = mock.Mock() + mock_response.status_code = 301 + mock_response.text = "Redirect" + mock_get.return_value = mock_response + + url = 'https://example.com/' + query_runner = BaseHTTPQueryRunner({}) + response, error = query_runner.get_response(url) + mock_get.assert_called_once_with(url, auth=None) + self.assertIn(query_runner.response_error, error) + + @mock.patch('requests.get') + def test_get_response_httperror_exception(self, mock_get): + 
mock_response = mock.Mock() + mock_response.status_code = 500 + mock_response.text = "Server Error" + http_error = requests.HTTPError() + mock_response.raise_for_status.side_effect = http_error + mock_get.return_value = mock_response + + url = 'https://example.com/' + query_runner = BaseHTTPQueryRunner({}) + response, error = query_runner.get_response(url) + mock_get.assert_called_once_with(url, auth=None) + self.assertIsNotNone(error) + self.assertIn("Failed to execute query", error) + + @mock.patch('requests.get') + def test_get_response_requests_exception(self, mock_get): + mock_response = mock.Mock() + mock_response.status_code = 500 + mock_response.text = "Server Error" + exception_message = "Some requests exception" + requests_exception = requests.RequestException(exception_message) + mock_response.raise_for_status.side_effect = requests_exception + mock_get.return_value = mock_response + + url = 'https://example.com/' + query_runner = BaseHTTPQueryRunner({}) + response, error = query_runner.get_response(url) + mock_get.assert_called_once_with(url, auth=None) + self.assertIsNotNone(error) + self.assertEqual(exception_message, error) + + @mock.patch('requests.get') + def test_get_response_generic_exception(self, mock_get): + mock_response = mock.Mock() + mock_response.status_code = 500 + mock_response.text = "Server Error" + exception_message = "Some generic exception" + exception = ValueError(exception_message) + mock_response.raise_for_status.side_effect = exception + mock_get.return_value = mock_response + + url = 'https://example.com/' + query_runner = BaseHTTPQueryRunner({}) + self.assertRaisesRegexp( + ValueError, + exception_message, + query_runner.get_response, + url + ) From bd401fc2ccf69ab73a1247081870b3a242c4a00b Mon Sep 17 00:00:00 2001 From: Allen Short Date: Wed, 21 Mar 2018 20:38:48 +0000 Subject: [PATCH 31/48] properly rollback failed db commits --- redash/handlers/dashboards.py | 5 +++++ redash/handlers/data_sources.py | 2 ++ 
redash/handlers/users.py | 3 ++- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/redash/handlers/dashboards.py b/redash/handlers/dashboards.py index 2e43b445a0..20ee945715 100644 --- a/redash/handlers/dashboards.py +++ b/redash/handlers/dashboards.py @@ -10,6 +10,7 @@ from redash.permissions import (can_modify, require_admin_or_owner, require_object_modify_permission, require_permission) +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import StaleDataError @@ -181,7 +182,11 @@ def post(self, dashboard_slug): try: models.db.session.commit() except StaleDataError: + models.db.session.rollback() abort(409) + except IntegrityError: + models.db.session.rollback() + abort(400) result = serialize_dashboard(dashboard, with_widgets=True, user=self.current_user) self.record_event({ diff --git a/redash/handlers/data_sources.py b/redash/handlers/data_sources.py index 3d7dbe1948..60de6e04d7 100644 --- a/redash/handlers/data_sources.py +++ b/redash/handlers/data_sources.py @@ -54,6 +54,7 @@ def post(self, data_source_id): try: models.db.session.commit() except IntegrityError as e: + models.db.session.rollback() if req['name'] in e.message: abort(400, message="Data source with the name {} already exists.".format(req['name'])) @@ -127,6 +128,7 @@ def post(self): models.db.session.commit() except IntegrityError as e: + models.db.session.rollback() if req['name'] in e.message: abort(400, message="Data source with the name {} already exists.".format(req['name'])) diff --git a/redash/handlers/users.py b/redash/handlers/users.py index f06e28a228..586420aff9 100644 --- a/redash/handlers/users.py +++ b/redash/handlers/users.py @@ -92,6 +92,7 @@ def post(self): models.db.session.add(user) models.db.session.commit() except IntegrityError as e: + models.db.session.rollback() if "email" in e.message: abort(400, message='Email already taken.') abort(500) @@ -179,7 +180,7 @@ def post(self, user_id): message = "Email already taken." 
else: message = "Error updating record" - + models.db.session.rollback() abort(400, message=message) self.record_event({ From 0e4df2a715e784689cfef327c50693a1c164b25d Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Wed, 28 Feb 2018 21:14:30 +0100 Subject: [PATCH 32/48] Install redash-stmo. In the long run we'll be able to install additional dependencies by having an own Dockerfile to build images based on the Redash image but that installs additional Python dependencies. But until we have a fork with lots of changes ourselves we need to do it this way. Redash-stmo contains the ability to hook up our own Dockerflow library. Refs #13 Refs #37 --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 314045fd1b..f5ec78e3b7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -54,3 +54,4 @@ disposable-email-domains # Uncomment the requirement for ldap3 if using ldap. # It is not included by default because of the GPL license conflict. # ldap3==2.2.4 +redash-stmo>=2018.4.0 From 2af92dc9a7a04881f1613d84624390461be5ab31 Mon Sep 17 00:00:00 2001 From: Jannis Leidel Date: Fri, 23 Mar 2018 05:45:39 +0100 Subject: [PATCH 33/48] Extend the Remote User Auth backend with REMOTE_GROUPS ability (#311) Extend the Remote User Auth backend with the ability to pass remote user groups via a configurable request header similar to the REMOTE_USER header. Refs #37. If enabled the feature allows checks the header value against a configured list of group names, including the ability to use UNIX shell-style wildcards. 
--- redash/authentication/remote_user_auth.py | 15 +++++++++++++++ redash/settings/__init__.py | 7 +++++++ redash/settings/helpers.py | 2 +- 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/redash/authentication/remote_user_auth.py b/redash/authentication/remote_user_auth.py index 9a78da77ab..f159eb7dc9 100644 --- a/redash/authentication/remote_user_auth.py +++ b/redash/authentication/remote_user_auth.py @@ -30,6 +30,21 @@ def login(org_slug=None): logger.error("Cannot use remote user for login when it's not provided in the request (looked in headers['" + settings.REMOTE_USER_HEADER + "'])") return redirect(url_for('redash.index', next=next_path, org_slug=org_slug)) + # Check if there is a header of user groups and if yes + # check it against a list of allowed user groups from the settings + if settings.REMOTE_GROUPS_ENABLED: + remote_groups = settings.set_from_string( + request.headers.get(settings.REMOTE_GROUPS_HEADER) or '' + ) + allowed_groups = settings.REMOTE_GROUPS_ALLOWED + if not allowed_groups.intersection(remote_groups): + logger.error( + "User groups provided in the %s header are not " + "matching the allowed groups.", + settings.REMOTE_GROUPS_HEADER + ) + return redirect(url_for('redash.index', next=next_path)) + logger.info("Logging in " + email + " via remote user") user = create_and_login_user(current_org, email, email) diff --git a/redash/settings/__init__.py b/redash/settings/__init__.py index 5fd36c9941..b91217173b 100644 --- a/redash/settings/__init__.py +++ b/redash/settings/__init__.py @@ -83,6 +83,13 @@ def all_settings(): REMOTE_USER_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_REMOTE_USER_LOGIN_ENABLED", "false")) REMOTE_USER_HEADER = os.environ.get("REDASH_REMOTE_USER_HEADER", "X-Forwarded-Remote-User") +# When enabled this will match the given remote groups request header with a +# configured list of allowed user groups using UNIX shell-style wildcards such +# as * and ?. 
+REMOTE_GROUPS_ENABLED = parse_boolean(os.environ.get("REDASH_REMOTE_GROUPS_ENABLED", "false")) +REMOTE_GROUPS_HEADER = os.environ.get("REDASH_REMOTE_GROUPS_HEADER", "X-Forwarded-Remote-Groups") +REMOTE_GROUPS_ALLOWED = set_from_string(os.environ.get("REDASH_REMOTE_GROUPS_ALLOWED", "")) + # If the organization setting auth_password_login_enabled is not false, then users will still be # able to login through Redash instead of the LDAP server LDAP_LOGIN_ENABLED = parse_boolean(os.environ.get('REDASH_LDAP_LOGIN_ENABLED', 'false')) diff --git a/redash/settings/helpers.py b/redash/settings/helpers.py index aa23e7125a..e55d61001d 100644 --- a/redash/settings/helpers.py +++ b/redash/settings/helpers.py @@ -31,7 +31,7 @@ def array_from_string(s): if "" in array: array.remove("") - return array + return [item.strip() for item in array] def set_from_string(s): From 3f200657b9bf357a15bd5e8b72eedbdca9b87648 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Tue, 16 Jan 2018 22:06:32 +0000 Subject: [PATCH 34/48] Unique names for query parameters (re #164) --- client/app/services/query.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/client/app/services/query.js b/client/app/services/query.js index 0b39a23a86..66e38f3998 100644 --- a/client/app/services/query.js +++ b/client/app/services/query.js @@ -144,7 +144,7 @@ class Parameter { }; } return { - [`p_${this.name}`]: this.value, + [`p_${this.name}_${this.queryId}`]: this.value, }; } @@ -156,7 +156,7 @@ class Parameter { this.setValue([query[keyStart], query[keyEnd]]); } } else { - const key = `p_${this.name}`; + const key = `p_${this.name}_${this.queryId}`; if (has(query, key)) { this.setValue(query[key]); } @@ -219,7 +219,9 @@ class Parameters { }); const parameterExists = p => includes(parameterNames, p.name); - this.query.options.parameters = this.query.options.parameters.filter(parameterExists).map(p => new Parameter(p)); + this.query.options.parameters = this.query.options.parameters + 
.filter(parameterExists) + .map(p => new Parameter(Object.assign({ queryId: this.query.id }, p))); } initFromQueryString(query) { @@ -484,7 +486,7 @@ function QueryResource( params += '&'; } - params += `p_${encodeURIComponent(name)}=${encodeURIComponent(value)}`; + params += `p_${encodeURIComponent(name)}_${this.id}=${encodeURIComponent(value)}`; }); } From 79478628c9d36c0ba824ddda0daaa6829a55ec18 Mon Sep 17 00:00:00 2001 From: Allen Short Date: Tue, 27 Mar 2018 20:58:26 +0000 Subject: [PATCH 35/48] Aggregate query results (re #35) (#339) --- .../components/queries/schedule-dialog.html | 3 + .../app/components/queries/schedule-dialog.js | 13 ++- client/app/pages/alerts-list/index.js | 1 - client/app/pages/queries/view.js | 1 + client/app/services/query-result.js | 10 +++ client/app/services/query.js | 6 +- migrations/versions/9d7678c47452_.py | 34 ++++++++ redash/handlers/api.py | 3 +- redash/handlers/queries.py | 3 + redash/handlers/query_results.py | 29 ++++++- redash/models.py | 53 +++++++++++- redash/serializers.py | 1 + redash/tasks/queries.py | 1 + tests/factories.py | 7 +- tests/handlers/test_queries.py | 80 +++++++++++++++++++ tests/test_models.py | 64 +++++++++++++-- 16 files changed, 294 insertions(+), 15 deletions(-) create mode 100644 migrations/versions/9d7678c47452_.py diff --git a/client/app/components/queries/schedule-dialog.html b/client/app/components/queries/schedule-dialog.html index f9344238a1..aca492cdfe 100644 --- a/client/app/components/queries/schedule-dialog.html +++ b/client/app/components/queries/schedule-dialog.html @@ -19,4 +19,7 @@ Stop scheduling at date/time (format yyyy-MM-ddTHH:mm:ss, like 2016-12-28T14:57:00): +
    diff --git a/client/app/components/queries/schedule-dialog.js b/client/app/components/queries/schedule-dialog.js index db6ebe0320..41c29e031c 100644 --- a/client/app/components/queries/schedule-dialog.js +++ b/client/app/components/queries/schedule-dialog.js @@ -114,11 +114,21 @@ function scheduleUntil() { }; } +function scheduleKeepResults() { + return { + restrict: 'E', + scope: { + query: '=', + saveQuery: '=', + }, + template: '', + }; +} + const ScheduleForm = { controller() { this.query = this.resolve.query; this.saveQuery = this.resolve.saveQuery; - if (this.query.hasDailySchedule()) { this.refreshType = 'daily'; } else { @@ -137,5 +147,6 @@ export default function init(ngModule) { ngModule.directive('queryTimePicker', queryTimePicker); ngModule.directive('queryRefreshSelect', queryRefreshSelect); ngModule.directive('scheduleUntil', scheduleUntil); + ngModule.directive('scheduleKeepResults', scheduleKeepResults); ngModule.component('scheduleDialog', ScheduleForm); } diff --git a/client/app/pages/alerts-list/index.js b/client/app/pages/alerts-list/index.js index 082ec203c1..19869bcc5b 100644 --- a/client/app/pages/alerts-list/index.js +++ b/client/app/pages/alerts-list/index.js @@ -9,7 +9,6 @@ const stateClass = { class AlertsListCtrl { constructor(Alert) { - this.showEmptyState = false; this.showList = false; diff --git a/client/app/pages/queries/view.js b/client/app/pages/queries/view.js index 3336e21d56..6d864c8c02 100644 --- a/client/app/pages/queries/view.js +++ b/client/app/pages/queries/view.js @@ -205,6 +205,7 @@ function QueryViewCtrl( } else { request = pick($scope.query, [ 'schedule', + 'schedule_resultset_size', 'query', 'id', 'description', diff --git a/client/app/services/query-result.js b/client/app/services/query-result.js index c15734c4d4..c9b3ac1a46 100644 --- a/client/app/services/query-result.js +++ b/client/app/services/query-result.js @@ -54,6 +54,7 @@ function addPointToSeries(point, seriesCollection, seriesName) { function 
QueryResultService($resource, $timeout, $q, QueryResultError) { const QueryResultResource = $resource('api/query_results/:id', { id: '@id' }, { post: { method: 'POST' } }); + const QueryResultSetResource = $resource('api/queries/:id/resultset', { id: '@id' }); const Job = $resource('api/jobs/:id', { id: '@id' }); const statuses = { 1: 'waiting', @@ -452,6 +453,15 @@ function QueryResultService($resource, $timeout, $q, QueryResultError) { return queryResult; } + static getResultSet(queryId) { + const queryResult = new QueryResult(); + + QueryResultSetResource.get({ id: queryId }, (response) => { + queryResult.update(response); + }); + + return queryResult; + } loadResult(tryCount) { this.isLoadingResult = true; QueryResultResource.get( diff --git a/client/app/services/query.js b/client/app/services/query.js index 66e38f3998..6e03eeaee7 100644 --- a/client/app/services/query.js +++ b/client/app/services/query.js @@ -449,7 +449,11 @@ function QueryResource( this.latest_query_data_id = null; } - if (this.latest_query_data && maxAge !== 0) { + if (this.schedule_resultset_size) { + if (!this.queryResult) { + this.queryResult = QueryResult.getResultSet(this.id); + } + } else if (this.latest_query_data && maxAge !== 0) { if (!this.queryResult) { this.queryResult = new QueryResult({ query_result: this.latest_query_data, diff --git a/migrations/versions/9d7678c47452_.py b/migrations/versions/9d7678c47452_.py new file mode 100644 index 0000000000..d351153c87 --- /dev/null +++ b/migrations/versions/9d7678c47452_.py @@ -0,0 +1,34 @@ +"""Incremental query results aggregation + +Revision ID: 9d7678c47452 +Revises: 15041b7085fe +Create Date: 2018-03-08 04:36:12.802199 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = '9d7678c47452' +down_revision = '15041b7085fe' +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table('query_resultsets', + sa.Column('query_id', sa.Integer(), nullable=False), + sa.Column('result_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['query_id'], ['queries.id'], ), + sa.ForeignKeyConstraint(['result_id'], ['query_results.id'], ), + sa.PrimaryKeyConstraint('query_id', 'result_id') + ) + op.add_column(u'queries', sa.Column('schedule_resultset_size', sa.Integer(), nullable=True)) + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column(u'queries', 'schedule_resultset_size') + op.drop_table('query_resultsets') + # ### end Alembic commands ### diff --git a/redash/handlers/api.py b/redash/handlers/api.py index fd6fe5fd15..5e7eb483f9 100644 --- a/redash/handlers/api.py +++ b/redash/handlers/api.py @@ -10,7 +10,7 @@ from redash.handlers.data_sources import DataSourceTypeListResource, DataSourceListResource, DataSourceSchemaResource, DataSourceResource, DataSourcePauseResource, DataSourceTestResource, DataSourceVersionResource from redash.handlers.events import EventsResource from redash.handlers.queries import QueryForkResource, QueryRefreshResource, QueryListResource, QueryRecentResource, QuerySearchResource, QueryResource, MyQueriesResource, QueryVersionListResource, ChangeResource -from redash.handlers.query_results import QueryResultListResource, QueryResultResource, JobResource +from redash.handlers.query_results import QueryResultListResource, QueryResultResource, JobResource, QueryResultSetResource from redash.handlers.users import UserResource, UserListResource, UserInviteResource, UserResetPasswordResource, UserDisableResource from redash.handlers.visualizations import VisualizationListResource from redash.handlers.visualizations import VisualizationResource @@ -85,6 +85,7 @@ def json_representation(data, code, headers=None):
api.add_org_resource(QueryRefreshResource, '/api/queries//refresh', endpoint='query_refresh') api.add_org_resource(QueryResource, '/api/queries/', endpoint='query') api.add_org_resource(QueryForkResource, '/api/queries//fork', endpoint='query_fork') +api.add_org_resource(QueryResultSetResource, '/api/queries//resultset', endpoint='query_aggregate_results') api.add_org_resource(QueryVersionListResource, '/api/queries//version', endpoint='query_versions') api.add_org_resource(ChangeResource, '/api/changes/', endpoint='changes') diff --git a/redash/handlers/queries.py b/redash/handlers/queries.py index cb344ba7f5..86b24b7296 100644 --- a/redash/handlers/queries.py +++ b/redash/handlers/queries.py @@ -109,6 +109,7 @@ def post(self): : 0: + q.query_results.append(query_result) query_ids = [q.id for q in queries] logging.info("Updated %s queries with result (%s).", len(query_ids), query_hash) @@ -871,6 +875,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model): data_source = db.relationship(DataSource, backref='queries') latest_query_data_id = Column(db.Integer, db.ForeignKey("query_results.id"), nullable=True) latest_query_data = db.relationship(QueryResult) + query_results = db.relationship("QueryResult", secondary="query_resultsets") name = Column(db.String(255)) description = Column(db.String(4096), nullable=True) query_text = Column("query", db.Text) @@ -886,6 +891,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model): schedule = Column(db.String(10), nullable=True) schedule_failures = Column(db.Integer, default=0) schedule_until = Column(db.DateTime(True), nullable=True) + schedule_resultset_size = Column(db.Integer, nullable=True) visualizations = db.relationship("Visualization", cascade="all, delete-orphan") options = Column(MutableDict.as_mutable(PseudoJSON), default={}) search_vector = Column(TSVectorType('id', 'name', 'description', 'query', @@ -1035,6 +1041,37 @@ def search(cls, term, group_ids, 
user_id=None, include_drafts=False, limit=None) # sort the result using the weight as defined in the search vector column return all_queries.search(term, sort=True).limit(limit) + @classmethod + def delete_stale_resultsets(cls): + delete_count = 0 + texts = [c[0] for c in db.session.query(Query.query_text) + .filter(Query.schedule_resultset_size != None).distinct()] + for text in texts: + queries = (Query.query.filter(Query.query_text == text, + Query.schedule_resultset_size != None) + .order_by(Query.schedule_resultset_size.desc())) + # Multiple queries with the same text may request multiple result sets + # be kept. We start with the one that keeps the most, and delete both + # the unneeded bridge rows and result sets. + first_query = queries.first() + if first_query is not None and first_query.schedule_resultset_size: + resultsets = QueryResultSet.query.filter(QueryResultSet.query_rel == first_query).order_by(QueryResultSet.result_id) + resultset_count = resultsets.count() + if resultset_count > first_query.schedule_resultset_size: + n_to_delete = resultset_count - first_query.schedule_resultset_size + r_ids = [r.result_id for r in resultsets][:n_to_delete] + QueryResultSet.query.filter(QueryResultSet.result_id.in_(r_ids)).delete(synchronize_session=False) + delete_count += QueryResult.query.filter(QueryResult.id.in_(r_ids)).delete(synchronize_session=False) + # By this point there are no stale result sets left. + # Delete unneeded bridge rows for the remaining queries. 
+ for q in queries[1:]: + resultsets = db.session.query(QueryResultSet.result_id).filter(QueryResultSet.query_rel == q).order_by(QueryResultSet.result_id) + n_to_delete = resultsets.count() - q.schedule_resultset_size + if n_to_delete > 0: + stale_r = QueryResultSet.query.filter(QueryResultSet.result_id.in_(resultsets.limit(n_to_delete).subquery())) + stale_r.delete(synchronize_session=False) + return delete_count + @classmethod def search_by_user(cls, term, user, limit=None): return cls.by_user(user).search(term, sort=True).limit(limit) @@ -1116,6 +1153,16 @@ def __repr__(self): return '' % (self.id, self.name or 'untitled') +class QueryResultSet(db.Model): + query_id = Column(db.Integer, db.ForeignKey("queries.id"), + primary_key=True) + query_rel = db.relationship(Query) + result_id = Column(db.Integer, db.ForeignKey("query_results.id"), + primary_key=True) + result = db.relationship(QueryResult) + __tablename__ = 'query_resultsets' + + @vectorizer(db.Integer) def integer_vectorizer(column): return db.func.cast(column, db.Text) diff --git a/redash/serializers.py b/redash/serializers.py index 21ca3d1238..44f43ae454 100644 --- a/redash/serializers.py +++ b/redash/serializers.py @@ -100,6 +100,7 @@ def serialize_query(query, with_stats=False, with_visualizations=False, with_use 'query_hash': query.query_hash, 'schedule': query.schedule, 'schedule_until': query.schedule_until, + 'schedule_resultset_size': query.schedule_resultset_size, 'api_key': query.api_key, 'is_archived': query.is_archived, 'is_draft': query.is_draft, diff --git a/redash/tasks/queries.py b/redash/tasks/queries.py index 8d8d3cbe9b..ff4b08bb26 100644 --- a/redash/tasks/queries.py +++ b/redash/tasks/queries.py @@ -355,6 +355,7 @@ def cleanup_query_results(): deleted_count = models.QueryResult.query.filter( models.QueryResult.id.in_(unused_query_results.subquery()) ).delete(synchronize_session=False) + deleted_count += models.Query.delete_stale_resultsets() models.db.session.commit() 
logger.info("Deleted %d unused query results.", deleted_count) diff --git a/tests/factories.py b/tests/factories.py index 15f85e58cb..00423fdfa7 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -109,7 +109,9 @@ def __call__(self): query_hash=gen_query_hash('SELECT 1'), data_source=data_source_factory.create, org_id=1) - +query_resultset_factory = ModelFactory(redash.models.QueryResultSet, + query_rel=query_factory.create, + result=query_result_factory.create) visualization_factory = ModelFactory(redash.models.Visualization, type='CHART', query_rel=query_factory.create, @@ -295,6 +297,9 @@ def create_query_result(self, **kwargs): return query_result_factory.create(**args) + def create_query_resultset(self, **kwargs): + return query_resultset_factory.create(**kwargs) + def create_visualization(self, **kwargs): args = { 'query_rel': self.create_query() diff --git a/tests/handlers/test_queries.py b/tests/handlers/test_queries.py index 93dfae83d0..135d29c69a 100644 --- a/tests/handlers/test_queries.py +++ b/tests/handlers/test_queries.py @@ -1,3 +1,5 @@ +import json + from tests import BaseTestCase from redash import models from redash.models import db @@ -285,3 +287,81 @@ def test_get(self): rv2 = self.make_request('get', '/api/changes/' + str(ch2.id)) self.assertEqual(rv2.status_code, 200) self.assertEqual(rv2.json['change']['name']['current'], 'version B') + + +class AggregateResultsTests(BaseTestCase): + def test_aggregate(self): + qtxt = "SELECT x FROM mytable;" + q = self.factory.create_query(query_text=qtxt, schedule_resultset_size=3) + qr0 = self.factory.create_query_result( + query_text=qtxt, + data=json.dumps({'columns': ['name', 'color'], + 'rows': [{'name': 'eve', 'color': 'grue'}, + {'name': 'mallory', 'color': 'bleen'}]})) + qr1 = self.factory.create_query_result( + query_text=qtxt, + data=json.dumps({'columns': ['name', 'color'], + 'rows': [{'name': 'bob', 'color': 'green'}, + {'name': 'fred', 'color': 'blue'}]})) + qr2 = 
self.factory.create_query_result( + query_text=qtxt, + data=json.dumps({'columns': ['name', 'color'], + 'rows': [{'name': 'alice', 'color': 'red'}, + {'name': 'eddie', 'color': 'orange'}]})) + qr3 = self.factory.create_query_result( + query_text=qtxt, + data=json.dumps({'columns': ['name', 'color'], + 'rows': [{'name': 'dave', 'color': 'yellow'}, + {'name': 'carol', 'color': 'taupe'}]})) + for qr in (qr0, qr1, qr2, qr3): + self.factory.create_query_resultset(query_rel=q, result=qr) + rv = self.make_request('get', '/api/queries/{}/resultset'.format(q.id)) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.json['query_result']['data'], + {'columns': ['name', 'color'], + 'rows': [ + {'name': 'bob', 'color': 'green'}, + {'name': 'fred', 'color': 'blue'}, + {'name': 'alice', 'color': 'red'}, + {'name': 'eddie', 'color': 'orange'}, + {'name': 'dave', 'color': 'yellow'}, + {'name': 'carol', 'color': 'taupe'} + ]}) + + def test_underfilled_aggregate(self): + qtxt = "SELECT x FROM mytable;" + q = self.factory.create_query(query_text=qtxt, + schedule_resultset_size=3) + qr1 = self.factory.create_query_result( + query_text=qtxt, + data=json.dumps({'columns': ['name', 'color'], + 'rows': [{'name': 'bob', 'color': 'green'}, + {'name': 'fred', 'color': 'blue'}]})) + qr2 = self.factory.create_query_result( + query_text=qtxt, + data=json.dumps({'columns': ['name', 'color'], + 'rows': [{'name': 'alice', 'color': 'red'}, + {'name': 'eddie', 'color': 'orange'}]})) + for qr in (qr1, qr2): + self.factory.create_query_resultset(query_rel=q, result=qr) + rv = self.make_request('get', '/api/queries/{}/resultset'.format(q.id)) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.json['query_result']['data'], + {'columns': ['name', 'color'], + 'rows': [ + {'name': 'bob', 'color': 'green'}, + {'name': 'fred', 'color': 'blue'}, + {'name': 'alice', 'color': 'red'}, + {'name': 'eddie', 'color': 'orange'} + ]}) + + def test_no_aggregate(self): + qtxt = "SELECT x FROM 
mytable;" + q = self.factory.create_query(query_text=qtxt) + self.factory.create_query_result( + query_text=qtxt, + data=json.dumps({'columns': ['name', 'color'], + 'rows': [{'name': 'eve', 'color': 'grue'}, + {'name': 'mallory', 'color': 'bleen'}]})) + rv = self.make_request('get', '/api/queries/{}/resultset'.format(q.id)) + self.assertEqual(rv.status_code, 404) diff --git a/tests/test_models.py b/tests/test_models.py index 4d15eae932..8767b67fcf 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -335,22 +335,74 @@ def test_get_latest_returns_the_last_cached_result_for_negative_ttl(self): class TestUnusedQueryResults(BaseTestCase): def test_returns_only_unused_query_results(self): two_weeks_ago = utcnow() - datetime.timedelta(days=14) - qr = self.factory.create_query_result() - query = self.factory.create_query(latest_query_data=qr) + qt = "SELECT 1" + qr = self.factory.create_query_result(query_text=qt, retrieved_at=two_weeks_ago) + query = self.factory.create_query(query_text=qt, latest_query_data=qr) + unused_qr = self.factory.create_query_result(query_text=qt, retrieved_at=two_weeks_ago) db.session.flush() - unused_qr = self.factory.create_query_result(retrieved_at=two_weeks_ago) self.assertIn((unused_qr.id,), models.QueryResult.unused()) self.assertNotIn((qr.id,), list(models.QueryResult.unused())) def test_returns_only_over_a_week_old_results(self): two_weeks_ago = utcnow() - datetime.timedelta(days=14) - unused_qr = self.factory.create_query_result(retrieved_at=two_weeks_ago) + qt = "SELECT 1" + unused_qr = self.factory.create_query_result(query_text=qt, retrieved_at=two_weeks_ago) db.session.flush() - new_unused_qr = self.factory.create_query_result() - + new_unused_qr = self.factory.create_query_result(query_text=qt) self.assertIn((unused_qr.id,), models.QueryResult.unused()) self.assertNotIn((new_unused_qr.id,), models.QueryResult.unused()) + def test_doesnt_return_live_incremental_results(self): + two_weeks_ago = utcnow() - 
datetime.timedelta(days=14) + qt = "SELECT 1" + qrs = [self.factory.create_query_result(query_text=qt, retrieved_at=two_weeks_ago) + for _ in range(5)] + q = self.factory.create_query(query_text=qt, latest_query_data=qrs[0], + schedule_resultset_size=3) + for qr in qrs: + self.factory.create_query_resultset(query_rel=q, result=qr) + db.session.flush() + self.assertEqual([], list(models.QueryResult.unused())) + + def test_deletes_stale_resultsets(self): + qt = "SELECT 17" + query = self.factory.create_query(query_text=qt, + schedule_resultset_size=5) + for _ in range(10): + r = self.factory.create_query_result(query_text=qt) + self.factory.create_query_resultset(query_rel=query, result=r) + qt2 = "SELECT 100" + query2 = self.factory.create_query(query_text=qt2, schedule_resultset_size=5) + for _ in range(10): + r = self.factory.create_query_result(query_text=qt2) + self.factory.create_query_resultset(query_rel=query2, result=r) + db.session.flush() + self.assertEqual(models.QueryResultSet.query.count(), 20) + self.assertEqual(models.Query.delete_stale_resultsets(), 10) + self.assertEqual(models.QueryResultSet.query.count(), 10) + + def test_deletes_stale_resultsets_with_dupe_queries(self): + qt = "SELECT 17" + query = self.factory.create_query(query_text=qt, + schedule_resultset_size=5) + for _ in range(10): + r = self.factory.create_query_result(query_text=qt) + self.factory.create_query_resultset(query_rel=query, result=r) + query2 = self.factory.create_query(query_text=qt, + schedule_resultset_size=3) + for _ in range(10): + self.factory.create_query_result(query_text=qt) + self.factory.create_query_resultset(query_rel=query2) + qt2 = "SELECT 100" + query3 = self.factory.create_query(query_text=qt2, schedule_resultset_size=5) + for _ in range(10): + r = self.factory.create_query_result(query_text=qt2) + self.factory.create_query_resultset(query_rel=query3, result=r) + db.session.flush() + self.assertEqual(models.QueryResultSet.query.count(), 30) + 
self.assertEqual(models.Query.delete_stale_resultsets(), 10) + self.assertEqual(models.QueryResultSet.query.count(), 13) + class TestQueryAll(BaseTestCase): def test_returns_only_queries_in_given_groups(self): From ee85ed3818dc24a91e329f577c5e64d9785d1ebe Mon Sep 17 00:00:00 2001 From: Jason Thomas Date: Wed, 11 Apr 2018 11:37:18 -0400 Subject: [PATCH 36/48] Updates to docker-entrypoint for worker and scheduler (#364) * Use --max-tasks-per-child as per celery documentation * Set --max-memory-per-child to 1/4th of total system memory * Split exec command over multiple lines * Fix memory variable typo --- bin/docker-entrypoint | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/bin/docker-entrypoint b/bin/docker-entrypoint index 1bed803efd..a91be66fc8 100755 --- a/bin/docker-entrypoint +++ b/bin/docker-entrypoint @@ -5,9 +5,13 @@ worker() { /app/manage.py db upgrade WORKERS_COUNT=${WORKERS_COUNT:-2} QUEUES=${QUEUES:-queries,scheduled_queries,celery} + MAX_MEMORY=$(($(/usr/bin/awk '/MemTotal/ {print $2}' /proc/meminfo)/4)) echo "Starting $WORKERS_COUNT workers for queues: $QUEUES..." - exec /usr/local/bin/celery worker --app=redash.worker -c$WORKERS_COUNT -Q$QUEUES -linfo --maxtasksperchild=10 -Ofair + exec /usr/local/bin/celery worker --app=redash.worker -c$WORKERS_COUNT -Q$QUEUES -linfo \ + --max-tasks-per-child=10 \ + --max-memory-per-child=$MAX_MEMORY \ + -Ofair } scheduler() { @@ -17,7 +21,9 @@ scheduler() { echo "Starting scheduler and $WORKERS_COUNT workers for queues: $QUEUES..." 
- exec /usr/local/bin/celery worker --app=redash.worker --beat -c$WORKERS_COUNT -Q$QUEUES -linfo --maxtasksperchild=10 -Ofair + exec /usr/local/bin/celery worker --app=redash.worker --beat -c$WORKERS_COUNT -Q$QUEUES -linfo \ + --max-tasks-per-child=10 \ + -Ofair } server() { From b9d48b6b255367c89f9aaf5a582a39204829fe35 Mon Sep 17 00:00:00 2001 From: Marina Samuel Date: Fri, 11 May 2018 16:02:26 -0400 Subject: [PATCH 37/48] Closes #396: Integration with Flower. --- docker-compose.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index b454410bff..16d6f13582 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -41,3 +41,13 @@ services: # tests. command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF" restart: unless-stopped + flower: + image: mher/flower:latest + command: flower + environment: + CELERY_BROKER_URL: redis://redis:6379/0 + CELERY_RESULT_BACKEND: redis://redis:6379/0 + ports: + - "5555:5555" + links: + - redis From 6c55cb4750b06d07749fae48454664b12ba3a8cc Mon Sep 17 00:00:00 2001 From: Allen Short Date: Thu, 28 Jun 2018 11:10:12 -0500 Subject: [PATCH 38/48] merge upstream db changes --- migrations/versions/2ba47e9812b1_.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 migrations/versions/2ba47e9812b1_.py diff --git a/migrations/versions/2ba47e9812b1_.py b/migrations/versions/2ba47e9812b1_.py new file mode 100644 index 0000000000..93d0f59268 --- /dev/null +++ b/migrations/versions/2ba47e9812b1_.py @@ -0,0 +1,24 @@ +"""empty message + +Revision ID: 2ba47e9812b1 +Revises: 71477dadd6ef, 9d7678c47452 +Create Date: 2018-07-25 16:09:54.769289 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = '2ba47e9812b1' +down_revision = ('71477dadd6ef', '9d7678c47452', ) +branch_labels = None +depends_on = None + + +def upgrade(): + pass + + +def downgrade(): + pass From 74d19eae72efe3124e53f43c43dcf272108467ed Mon Sep 17 00:00:00 2001 From: Marina Samuel Date: Fri, 27 Apr 2018 10:31:01 -0400 Subject: [PATCH 39/48] Add data source health monitoring via an extension. Refs: #379, #415 --- redash/monitor.py | 2 +- redash/worker.py | 7 +++++++ requirements.txt | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/redash/monitor.py b/redash/monitor.py index f1f241eb26..41fdc0ddb5 100644 --- a/redash/monitor.py +++ b/redash/monitor.py @@ -35,7 +35,7 @@ def get_queues_status(): 'data_sources': ', '.join(sources), 'size': redis_connection.llen(queue) } - + queues['celery'] = { 'size': redis_connection.llen('celery'), 'data_sources': '' diff --git a/redash/worker.py b/redash/worker.py index 629180b1f1..d2add5a218 100644 --- a/redash/worker.py +++ b/redash/worker.py @@ -78,3 +78,10 @@ def __call__(self, *args, **kwargs): def init_celery_flask_app(**kwargs): app = create_app() app.app_context().push() + +@celery.on_after_configure.connect +def add_periodic_tasks(sender, **kwargs): + app = create_app() + periodic_tasks = getattr(app, 'periodic_tasks', {}) + for params in periodic_tasks.values(): + sender.add_periodic_task(**params) diff --git a/requirements.txt b/requirements.txt index f5ec78e3b7..045ded98b3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -54,4 +54,4 @@ disposable-email-domains # Uncomment the requirement for ldap3 if using ldap. # It is not included by default because of the GPL license conflict. # ldap3==2.2.4 -redash-stmo>=2018.4.0 +redash-stmo>=2018.8.1 From 3d7ba67f396cf6051d37e2487069c20b4ae03894 Mon Sep 17 00:00:00 2001 From: Marina Samuel Date: Mon, 16 Jul 2018 16:20:24 -0400 Subject: [PATCH 40/48] Frontend Extensions API. 
--- .circleci/config.yml | 1 + Dockerfile | 2 +- Makefile | 22 ++++++++++++++++++++++ bin/bundle-extensions | 37 +++++++++++++++++++++++++++++++++++++ client/app/config/index.js | 6 ++++++ package.json | 1 + webpack.config.js | 7 ++++++- 7 files changed, 74 insertions(+), 2 deletions(-) create mode 100644 Makefile create mode 100755 bin/bundle-extensions diff --git a/.circleci/config.yml b/.circleci/config.yml index 9c830220f1..1460364639 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -34,6 +34,7 @@ jobs: - run: sudo pip install -r requirements_dev.txt - run: sudo pip install -r requirements.txt - run: sudo npm install + - run: sudo npm run bundle - run: sudo npm run build - run: command: pytest --junitxml=/tmp/test-reports/pytest/junit.xml tests/ diff --git a/Dockerfile b/Dockerfile index bbfaa77f40..cbb333eba2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,7 +15,7 @@ RUN sudo tar --strip-components 1 -xzvf node-v* -C /usr/local # Upgrade npm RUN npm upgrade npm -RUN npm install && npm run build && rm -rf node_modules +RUN npm install && npm run bundle && npm run build && rm -rf node_modules RUN chown -R redash /app USER redash diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000..c776130b5a --- /dev/null +++ b/Makefile @@ -0,0 +1,22 @@ +.PHONY: build bundle compose_build create_database tests test_db clean + +compose_build: + docker-compose build + +test_db: + docker-compose run --rm postgres psql -h postgres -U postgres -c "create database tests" + +create_database: + docker-compose run server create_db + +clean: + docker ps -a -q | xargs docker kill;docker ps -a -q | xargs docker rm + +bundle: + docker-compose run server bin/bundle-extensions + +tests: + docker-compose run server tests + +build: bundle + npm run build diff --git a/bin/bundle-extensions b/bin/bundle-extensions new file mode 100755 index 0000000000..fe4def797d --- /dev/null +++ b/bin/bundle-extensions @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +import os +import 
redash_stmo +from subprocess import call +from distutils.dir_util import copy_tree + +from pkg_resources import iter_entry_points, resource_filename + + +# Make a directory for extensions and set it as an environment variable +# to be picked up by webpack. +EXTENSIONS_RELATIVE_PATH = os.path.join('client', 'app', 'extensions') +EXTENSIONS_DIRECTORY = os.path.join( + os.path.dirname(os.path.dirname(__file__)), + EXTENSIONS_RELATIVE_PATH) + +if not os.path.exists(EXTENSIONS_DIRECTORY): + os.makedirs(EXTENSIONS_DIRECTORY) +os.environ["EXTENSIONS_DIRECTORY"] = EXTENSIONS_RELATIVE_PATH + +for entry_point in iter_entry_points('webpack.bundles'): + extension_data = entry_point.load() + + # This is where the frontend code for an extension lives + # inside of its package. + content_folder_relative = os.path.join( + extension_data['extension_directory'], + extension_data['frontend_content']) + content_folder = resource_filename(redash_stmo.__name__, content_folder_relative) + + # This is where we place our extensions folder. 
+ destination = os.path.join( + EXTENSIONS_DIRECTORY, + extension_data['extension_directory']) + + copy_tree(content_folder, destination) diff --git a/client/app/config/index.js b/client/app/config/index.js index aecdaf14b0..d51affad70 100644 --- a/client/app/config/index.js +++ b/client/app/config/index.js @@ -82,6 +82,11 @@ function registerComponents() { registerAll(context); } +function registerExtensions() { + const context = require.context('%', true, /^((?![\\/]test[\\/]).)*\.js$/); + registerAll(context); +} + function registerServices() { const context = require.context('@/services', true, /^((?![\\/]test[\\/]).)*\.js$/); registerAll(context); @@ -142,6 +147,7 @@ markdownFilter(ngModule); dateTimeFilter(ngModule); registerComponents(); registerPages(); +registerExtensions(); registerVisualizations(ngModule); export default ngModule; diff --git a/package.json b/package.json index d036f12353..5f53bc5fbd 100644 --- a/package.json +++ b/package.json @@ -6,6 +6,7 @@ "scripts": { "start": "webpack-dev-server", "dev": "REDASH_BACKEND=https://dev.redashapp.com npm start", + "bundle": "bin/bundle-extensions", "build": "rm -rf ./client/dist/ && NODE_ENV=production webpack", "watch": "webpack --watch --progress --colors -d", "analyze": "rm -rf ./client/dist/ && BUNDLE_ANALYZER=on webpack", diff --git a/webpack.config.js b/webpack.config.js index 3d9200a513..50eea339c0 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -17,6 +17,10 @@ const redashBackend = process.env.REDASH_BACKEND || "http://localhost:5000"; const basePath = fs.realpathSync(path.join(__dirname, "client")); const appPath = fs.realpathSync(path.join(__dirname, "client", "app")); +const extensionsRelativePath = process.env.EXTENSIONS_DIRECTORY || + path.join("client", "app", "extensions"); +const extensionPath = fs.realpathSync(path.join(__dirname, extensionsRelativePath)); + const config = { entry: { app: ["./client/app/index.js", "./client/app/assets/less/main.less"], @@ -29,7 +33,8 @@ const 
config = { }, resolve: { alias: { - "@": appPath + "@": appPath, + "%": extensionPath } }, plugins: [ From 44bb2b528ad6027fb60e096cd619a7fdb50e5235 Mon Sep 17 00:00:00 2001 From: Marina Samuel Date: Thu, 6 Sep 2018 12:05:50 -0400 Subject: [PATCH 41/48] Frontend extension API should be under the redash.extensions entry. --- bin/bundle-extensions | 36 +++++++++++++++++++----------------- redash/extensions.py | 17 ++++++++++++++--- 2 files changed, 33 insertions(+), 20 deletions(-) diff --git a/bin/bundle-extensions b/bin/bundle-extensions index fe4def797d..8416aab776 100755 --- a/bin/bundle-extensions +++ b/bin/bundle-extensions @@ -1,37 +1,39 @@ #!/usr/bin/env python import os -import redash_stmo from subprocess import call from distutils.dir_util import copy_tree -from pkg_resources import iter_entry_points, resource_filename +from pkg_resources import iter_entry_points, resource_filename, resource_isdir + # Make a directory for extensions and set it as an environment variable # to be picked up by webpack. EXTENSIONS_RELATIVE_PATH = os.path.join('client', 'app', 'extensions') EXTENSIONS_DIRECTORY = os.path.join( - os.path.dirname(os.path.dirname(__file__)), - EXTENSIONS_RELATIVE_PATH) + os.path.dirname(os.path.dirname(__file__)), + EXTENSIONS_RELATIVE_PATH) if not os.path.exists(EXTENSIONS_DIRECTORY): os.makedirs(EXTENSIONS_DIRECTORY) os.environ["EXTENSIONS_DIRECTORY"] = EXTENSIONS_RELATIVE_PATH -for entry_point in iter_entry_points('webpack.bundles'): - extension_data = entry_point.load() +for entry_point in iter_entry_points('redash.extensions'): + # This is where the frontend code for an extension lives + # inside of its package. + content_folder_relative = os.path.join( + entry_point.name, 'bundle') + (root_module, _) = os.path.splitext(entry_point.module_name) + + if not resource_isdir(root_module, content_folder_relative): + continue - # This is where the frontend code for an extension lives - # inside of its package. 
- content_folder_relative = os.path.join( - extension_data['extension_directory'], - extension_data['frontend_content']) - content_folder = resource_filename(redash_stmo.__name__, content_folder_relative) + content_folder = resource_filename(root_module, content_folder_relative) - # This is where we place our extensions folder. - destination = os.path.join( - EXTENSIONS_DIRECTORY, - extension_data['extension_directory']) + # This is where we place our extensions folder. + destination = os.path.join( + EXTENSIONS_DIRECTORY, + entry_point.name) - copy_tree(content_folder, destination) + copy_tree(content_folder, destination) diff --git a/redash/extensions.py b/redash/extensions.py index 78125842d3..e00a8b164c 100644 --- a/redash/extensions.py +++ b/redash/extensions.py @@ -1,4 +1,5 @@ -from pkg_resources import iter_entry_points +import os +from pkg_resources import iter_entry_points, resource_isdir, resource_listdir def init_extensions(app): @@ -10,5 +11,15 @@ def init_extensions(app): for entry_point in iter_entry_points('redash.extensions'): app.logger.info('Loading Redash extension %s.', entry_point.name) - extension = entry_point.load() - app.redash_extensions[entry_point.name] = extension(app) + try: + extension = entry_point.load() + app.redash_extensions[entry_point.name] = extension(app) + except ImportError: + app.logger.info('%s does not have a callable and will not be loaded.', entry_point.name) + (root_module, _) = os.path.splitext(entry_point.module_name) + content_folder_relative = os.path.join(entry_point.name, 'bundle') + + # If it's a frontend extension only, store a list of files in the bundle directory. 
+ if resource_isdir(root_module, content_folder_relative): + app.redash_extensions[entry_point.name] = resource_listdir( + root_module, content_folder_relative) From b4209b1c036f0b64921632d3de105de6c0011c00 Mon Sep 17 00:00:00 2001 From: Marina Samuel Date: Thu, 6 Sep 2018 12:08:05 -0400 Subject: [PATCH 42/48] Closes #537: Port datasource URL code to an extension. --- client/app/pages/queries/query.html | 6 +- redash/models.py | 9 +- redash/query_runner/__init__.py | 62 ++++++----- redash/query_runner/athena.py | 5 - redash/query_runner/big_query.py | 89 +++++++-------- redash/query_runner/cass.py | 81 +++++++------- redash/query_runner/dynamodb_sql.py | 43 ++++--- redash/query_runner/elasticsearch.py | 49 ++++---- redash/query_runner/google_spreadsheets.py | 32 +++--- redash/query_runner/graphite.py | 5 - redash/query_runner/hive_ds.py | 48 ++++---- redash/query_runner/impala_ds.py | 74 ++++++------- redash/query_runner/influx_db.py | 30 ++--- redash/query_runner/jql.py | 2 - redash/query_runner/mongodb.py | 48 ++++---- redash/query_runner/mssql.py | 79 +++++++------ redash/query_runner/mysql.py | 61 +++++----- redash/query_runner/oracle.py | 55 +++++---- redash/query_runner/pg.py | 123 ++++++++++----------- redash/query_runner/presto.py | 53 ++++----- redash/query_runner/python.py | 38 +++---- redash/query_runner/script.py | 40 +++---- redash/query_runner/sqlite.py | 31 +++--- redash/query_runner/treasuredata.py | 59 +++++----- redash/query_runner/url.py | 3 - redash/query_runner/vertica.py | 68 +++++------- requirements.txt | 2 +- 27 files changed, 534 insertions(+), 661 deletions(-) diff --git a/client/app/pages/queries/query.html b/client/app/pages/queries/query.html index 9bb586590b..1fffd3bb2c 100644 --- a/client/app/pages/queries/query.html +++ b/client/app/pages/queries/query.html @@ -84,8 +84,7 @@

    {{ds.name}} - {{dataSource.type_name}} documentation - {{ dataSource.type_name }} documentation + @@ -171,8 +170,7 @@

    - {{dataSource.type_name}} documentation - {{dataSource.type_name}} +