diff --git a/.babelrc b/.babelrc index 32a31f43fddc..3200c2c40002 100644 --- a/.babelrc +++ b/.babelrc @@ -1,16 +1,29 @@ { "plugins": [ + "babel-plugin-styled-components", "transform-flow-strip-types", "add-react-displayname", "transform-decorators-legacy", ["transform-builtin-extend", { "globals": ["Error", "Array"] - }] + }], + "syntax-trailing-function-commas" ], "presets": ["es2015", "stage-0", "react"], "env": { "development": { "presets": [] + }, + "extract": { + "plugins": [ + ["c-3po", { + "extract": { + "output": "locales/metabase-frontend.pot" + }, + "discover": ["t", "jt"], + "numberedExpressions": true + }] + ] } } } diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000000..b95d181d3c26 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,628 @@ +defaults: &defaults + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: circleci/clojure:lein-2.8.1-node-browsers + +restore-be-deps-cache: &restore-be-deps-cache + keys: + - be-deps-{{ checksum "project.clj" }} + - be-deps- + +restore-fe-deps-cache: &restore-fe-deps-cache + keys: + - fe-deps-{{ checksum "yarn.lock" }} + - fe-deps- + +version: 2.1 +jobs: + +######################################################################################################################## +# CHECKOUT # +######################################################################################################################## + + checkout: + <<: *defaults + steps: + - restore_cache: + keys: + - source-{{ .Branch }}-{{ .Revision }} + - source-{{ .Branch }} + - source- + - checkout + - save_cache: + key: source-{{ .Branch }}-{{ .Revision }} + paths: + - .git + # The basic idea here is to generate a file with checksums for all the backend source files, and save it as + # `./backend-checksums.txt`. 
Then we'll use the checksum of that files for uberjar caching; thus we can reuse + # the same uberjar for integration tests across any build where the backend files are the same + - run: + name: Generate checksums of all backend source files to use as Uberjar cache key + command: > + for file in `find ./src -type f -name '*.clj' | sort`; + do echo `md5sum $file` >> backend-checksums.txt; + done + - persist_to_workspace: + root: /home/circleci/ + paths: + - metabase/metabase + +######################################################################################################################## +# BACKEND # +######################################################################################################################## + + be-deps: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: lein deps + - save_cache: + key: be-deps-{{ checksum "project.clj" }} + paths: + - /home/circleci/.m2 + + be-tests: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests + command: lein with-profile +ci test + no_output_timeout: 5m + + be-linter-eastwood: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run Eastwood linter + command: lein with-profile +ci eastwood + no_output_timeout: 5m + + be-linter-docstring-checker: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run dockstring-checker + command: lein with-profile +ci docstring-checker + no_output_timeout: 5m + + be-linter-bikeshed: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run dockstring-checker + command: lein with-profile +ci bikeshed + no_output_timeout: 5m + + 
be-linter-reflection-warnings: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run dockstring-checker + command: ./bin/reflection-linter + no_output_timeout: 5m + + be-tests-mysql: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: circleci/clojure:lein-2.8.1-node-browsers + - image: circleci/mysql:5.7.23 + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests (MySQL) + environment: + ENGINES: h2,mysql + MB_ENCRYPTION_SECRET_KEY: Orw0AAyzkO/kPTLJRxiyKoBHXa/d6ZcO+p+gpZO/wSQ= + MB_DB_TYPE: mysql + MB_DB_HOST: localhost + MB_DB_PORT: 3306 + MB_DB_DBNAME: circle_test + MB_DB_USER: root + MB_MYSQL_TEST_USER: root + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh mysql || + lein with-profile +ci test + no_output_timeout: 5m + + be-tests-postgres: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: circleci/clojure:lein-2.8.1-node-browsers + - image: circleci/postgres:9.6-alpine + environment: + POSTGRES_USER: circle_test + POSTGRES_DB: circle_test + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests (Postgres) + environment: + ENGINES: h2,postgres + MB_DB_TYPE: postgres + MB_DB_PORT: 5432 + MB_DB_HOST: localhost + MB_DB_DBNAME: circle_test + MB_DB_USER: circle_test + MB_POSTGRESQL_TEST_USER: circle_test + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh postgres || + lein with-profile +ci test + no_output_timeout: 5m + + be-tests-sparksql: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: circleci/clojure:lein-2.8.1-node-browsers + - image: metabase/spark:2.1.1 + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Make 
plugins dir + command: mkdir /home/circleci/metabase/metabase/plugins + - run: + name: Download SparkSQL deps JAR + command: wget --output-document=plugins/spark-deps.jar https://s3.amazonaws.com/sparksql-deps/metabase-sparksql-deps-1.2.1.spark2-standalone.jar + - run: + name: Wait for SparkSQL to be ready + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh sparksql || + while ! nc -z localhost 10000; do sleep 0.1; done + no_output_timeout: 5m + - run: + name: Run backend unit tests (SparkSQL) + environment: + ENGINES: h2,sparksql + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh sparksql || + lein with-profile +ci test + no_output_timeout: 5m + + be-tests-mongo: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: circleci/clojure:lein-2.8.1-node-browsers + - image: circleci/mongo:3.2 + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests (MongoDB 3.2) + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh mongo || + lein with-profile +ci test + no_output_timeout: 5m + + be-tests-vertica: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: circleci/clojure:lein-2.8.1-node-browsers + - image: sumitchawla/vertica + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Make plugins dir + command: mkdir /home/circleci/metabase/metabase/plugins + - run: + name: Download Vertica JAR + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh vertica || + wget --output-document=plugins/vertica-jdbc-7.1.2-0.jar $VERTICA_JDBC_JAR + no_output_timeout: 5m + - run: + name: Run backend unit tests (Vertica) + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh vertica || + lein with-profile +ci test + no_output_timeout: 5m + + be-tests-sqlserver: + <<: *defaults + 
steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests (SQL Server) + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh sqlserver || + lein with-profile +ci test + no_output_timeout: 5m + + be-tests-bigquery: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests (BigQuery) + environment: + ENGINES: h2,bigquery + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh bigquery || + lein with-profile +ci test + no_output_timeout: 5m + + be-tests-sqlite: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests (SQLite) + environment: + ENGINES: h2,sqlite + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh sqlite || + lein with-profile +ci test + + be-tests-druid: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests (Druid) + environment: + ENGINES: h2,druid + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh druid || + lein with-profile +ci test + no_output_timeout: 5m + + be-tests-redshift: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Run backend unit tests (Redshift) + environment: + ENGINES: h2,redshift + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh redshift || + lein with-profile +ci test + no_output_timeout: 5m + + + be-tests-presto: + working_directory: /home/circleci/metabase/metabase/ + docker: + - image: circleci/clojure:lein-2.8.1-node-browsers + - image: metabase/presto-mb-ci + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: 
*restore-be-deps-cache + - run: + name: Run backend unit tests (Presto) + environment: + ENGINES: h2,presto + MB_PRESTO_TEST_HOST: localhost + MB_PRESTO_TEST_PORT: 8080 + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh presto || + lein with-profile +ci test + no_output_timeout: 5m + + + be-tests-oracle: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - run: + name: Make plugins dir + command: mkdir /home/circleci/metabase/metabase/plugins + - run: + name: Download Oracle JAR + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh oracle || + wget --output-document=/home/circleci/metabase/metabase/plugins/ojdbc7.jar $ORACLE_JDBC_JAR + - run: + name: Run backend unit tests (Oracle) + environment: + ENGINES: h2,oracle + command: > + /home/circleci/metabase/metabase/.circleci/skip-driver-tests.sh oracle || + lein with-profile +ci test + no_output_timeout: 5m + + +######################################################################################################################## +# FRONTEND # +######################################################################################################################## + + fe-deps: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-fe-deps-cache + - run: + name: Run yarn + command: SAUCE_CONNECT_DOWNLOAD_ON_INSTALL=true yarn + no_output_timeout: 5m + - save_cache: + key: fe-deps-{{ checksum "yarn.lock" }} + paths: + - /home/circleci/.yarn + - /home/circleci/.yarn-cache + - /home/circleci/metabase/metabase/node_modules + + fe-linter-eslint: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-fe-deps-cache + - run: + name: Run ESLint linter + command: yarn lint-eslint + no_output_timeout: 5m + + fe-linter-prettier: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + 
<<: *restore-fe-deps-cache + - run: + name: Run Prettier formatting linter + command: yarn lint-prettier + no_output_timeout: 5m + + fe-linter-flow: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-fe-deps-cache + - run: + name: Run Flow type checker + command: yarn flow + no_output_timeout: 5m + + fe-tests-karma: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-fe-deps-cache + - run: + name: Run frontend tests (karma) + command: yarn run test-karma + no_output_timeout: 5m + + fe-tests-unit: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-fe-deps-cache + - run: + name: Run frontend unit tests + command: yarn run test-unit + no_output_timeout: 5m + + build-uberjar: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-be-deps-cache + - restore_cache: + keys: + - uberjar-{{ checksum "./backend-checksums.txt" }} + - run: + name: Build uberjar if needed + command: > + if [ ! -f './target/uberjar/metabase.jar' ]; then ./bin/build version uberjar; fi + no_output_timeout: 5m + - save_cache: + key: uberjar-{{ checksum "./backend-checksums.txt" }} + paths: + - /home/circleci/metabase/metabase/target/uberjar/metabase.jar + + fe-tests-integrated: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - restore_cache: + <<: *restore-fe-deps-cache + - restore_cache: + keys: + - uberjar-{{ checksum "./backend-checksums.txt" }} + - run: + name: Generate version file + command: ./bin/build version + - run: + name: Run frontend integrated tests + command: yarn run test-integrated-no-build + no_output_timeout: 5m + + +######################################################################################################################## +# DEPLOYMENT, ETC. 
# +######################################################################################################################## + + deploy-master: + <<: *defaults + steps: + - attach_workspace: + at: /home/circleci/ + - run: ./bin/deploy-webhook $DEPLOY_WEBHOOK + + +######################################################################################################################## +# WORKFLOWS # +######################################################################################################################## + +workflows: + version: 2 + build: + jobs: + - checkout + - be-deps: + requires: + - checkout + - be-tests: + requires: + - be-deps + - be-linter-eastwood: + requires: + - be-deps + - be-linter-docstring-checker: + requires: + - be-deps + - be-linter-bikeshed: + requires: + - be-deps + - be-linter-reflection-warnings: + requires: + - be-deps + - be-tests-mysql: + requires: + - be-tests + - be-tests-postgres: + requires: + - be-tests + - be-tests-sparksql: + requires: + - be-tests + - be-tests-mongo: + requires: + - be-tests + - be-tests-sqlserver: + requires: + - be-tests + - be-tests-bigquery: + requires: + - be-tests + - be-tests-sqlite: + requires: + - be-tests + - be-tests-presto: + requires: + - be-tests + - be-tests-oracle: + requires: + - be-tests + - be-tests-druid: + requires: + - be-tests + - be-tests-redshift: + requires: + - be-tests + - be-tests-vertica: + requires: + - be-tests + - fe-deps: + requires: + - checkout + - fe-linter-eslint: + requires: + - fe-deps + - fe-linter-prettier: + requires: + - fe-deps + - fe-linter-flow: + requires: + - fe-deps + - fe-tests-karma: + requires: + - fe-deps + - fe-tests-unit: + requires: + - fe-deps + - build-uberjar: + requires: + - be-deps + - fe-tests-integrated: + requires: + - build-uberjar + - fe-deps + - deploy-master: + requires: + - be-linter-eastwood + - be-linter-docstring-checker + - be-linter-bikeshed + - be-linter-reflection-warnings + - be-tests + - be-tests-mysql + - be-tests-postgres + 
- be-tests-sparksql + - be-tests-mongo + - be-tests-sqlserver + - be-tests-bigquery + - be-tests-sqlite + - be-tests-presto + - be-tests-oracle + - be-tests-druid + - be-tests-redshift + - be-tests-vertica + - fe-linter-eslint + - fe-linter-prettier + - fe-linter-flow + - fe-tests-karma + - fe-tests-unit + - fe-tests-integrated + filters: + branches: + only: master diff --git a/.circleci/skip-driver-tests.sh b/.circleci/skip-driver-tests.sh new file mode 100755 index 000000000000..d0ee1ff9fddf --- /dev/null +++ b/.circleci/skip-driver-tests.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# Determines whether we should skip tests for a driver, usage: +# +# ./.circleci/skip-driver-tests.sh oracle +# +# Returns false if the commit message contains [ci all], [ci drivers], or [ci ], +# or if the current branch is master or a release branch. + +set -eu + +COMMIT_MESSAGE=`git log -1 --oneline` + +! [[ "$CIRCLE_BRANCH" =~ ^master|release-.+$ ]] && + ! [[ "$COMMIT_MESSAGE" == *"[ci all]"* ]] && + ! [[ "$COMMIT_MESSAGE" == *"[ci drivers]"* ]] && + ! [[ "$COMMIT_MESSAGE" == *"[ci $1]"* ]] && + echo "Skipping driver tests: $1" diff --git a/.dir-locals.el b/.dir-locals.el index 274958fe9eb6..e737fa70faac 100644 --- a/.dir-locals.el +++ b/.dir-locals.el @@ -10,37 +10,11 @@ ;; Define custom indentation for functions inside metabase. ;; This list isn't complete; add more forms as we come across them. (define-clojure-indent - (api-let 2) (assert 1) (assoc 1) - (auto-parse 1) - (catch-api-exceptions 0) - (check 1) - (checkp 1) - (context 2) - (create-database-definition 1) (ex-info 1) - (execute-query 1) (execute-sql! 
2) (expect 0) - (expect-with-all-engines 0) - (expect-with-engine 1) - (expect-with-engines 1) - (let-400 1) - (let-404 1) - (let-500 1) (match 1) - (match-$ 1) (merge-with 1) - (post-select 1) - (pre-delete 1) - (pre-insert 1) - (pre-update 1) - (project 1) - (qp-expect-with-engines 1) - (render-file 1) - (resolve-private-vars 1) - (select 1) - (sync-in-context 2) - (when-testing-engine 1) (with-redefs-fn 1))))))) diff --git a/.dockerignore b/.dockerignore index 17cd3d69e4d2..e76ea27e44bc 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,4 +3,8 @@ docs/* OSX/* target/* +**node_modules **metabase.jar + +.dockerignore +Dockerfile diff --git a/.editorconfig b/.editorconfig index 1dfee5e6ced8..7ce521f7d487 100644 --- a/.editorconfig +++ b/.editorconfig @@ -21,10 +21,10 @@ indent_size = 2 indent_size = 2 [*.html] -indent_size = 4 +indent_size = 2 [*.js] -indent_size = 4 +indent_size = 2 [*.jsx] -indent_size = 4 +indent_size = 2 diff --git a/.eslintrc b/.eslintrc index aa4c8e9f6361..07a483fb6b80 100644 --- a/.eslintrc +++ b/.eslintrc @@ -2,35 +2,12 @@ "rules": { "strict": [2, "never"], "no-undef": 2, + "no-var": 1, "no-unused-vars": [1, {"vars": "all", "args": "none"}], + "no-empty": [1, { "allowEmptyCatch": true }], + "curly": [1, "all"], "import/no-commonjs": 1, - "quotes": 0, - "camelcase": 0, - "eqeqeq": 0, - "key-spacing": 0, - "no-underscore-dangle": 0, - "curly": 0, - "no-use-before-define": 0, - "comma-dangle": 0, - "space-infix-ops": 0, - "no-shadow": 0, - "no-empty": 0, - "no-extra-bind": 0, - "eol-last": 0, - "consistent-return": 0, - "yoda": 0, - "no-multi-spaces": 0, - "no-mixed-spaces-and-tabs": 0, - "no-alert": 0, "no-console": 0, - "dot-notation": 0, - "space-unary-ops": 0, - "semi": 0, - "global-strict": 0, - "new-cap": 0, - "no-fallthrough": 0, - "no-useless-escape": 0, - "no-case-declarations": 0, "react/no-is-mounted": 2, "react/prefer-es6-class": 2, "react/display-name": 1, @@ -39,7 +16,8 @@ "react/no-did-update-set-state": 0, 
"react/no-find-dom-node": 0, "flowtype/define-flow-type": 1, - "flowtype/use-flow-type": 1 + "flowtype/use-flow-type": 1, + "no-color-literals": 1 }, "globals": { "pending": false diff --git a/.flowconfig b/.flowconfig index 269aa17fef4c..6f040b3101cb 100644 --- a/.flowconfig +++ b/.flowconfig @@ -1,8 +1,18 @@ [ignore] .*/node_modules/react/node_modules/.* .*/node_modules/postcss-import/node_modules/.* +.*/node_modules/react-resizable/.* .*/node_modules/documentation/.* .*/node_modules/.*/\(lib\|test\).*\.json$ +.*/node_modules/react-element-to-jsx-string/.* +.*/node_modules/react-element-to-jsx-string/.* +.*/node_modules/resize-observer-polyfill/.* +.*/node_modules/react-virtualized/.* +.*/node_modules/styled-components/.* +.*/node_modules/grid-styled/.* +.*/node_modules/update-notifier/.* +.*/node_modules/boxen/.* +.*/node_modules/libnpx/.* [include] .*/frontend/.* diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 614c0ce6f34c..58275417d725 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -7,6 +7,10 @@ If there's an existing issue, please add a :+1: reaction to the description of the issue. One way we prioritize issues is by the number of :+1: reactions on their descriptions. Please DO NOT add `+1` or :+1: comments. +### Security Disclosure + +Security is very important to us. If discover any issue regarding security, please disclose the information responsibly by sending an email to security@metabase.com and not by creating a GitHub issue. 
+ ### Database support For requests for new database drivers, please include the name diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index ef2e2989c07f..dd7b1148ff66 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,5 +1,7 @@ -###### TODO +###### Before submitting the PR, please make sure you do the following +- [ ] If there are changes to the backend codebase, run the backend tests with `lein test && lein eastwood && lein bikeshed && lein docstring-checker && ./bin/reflection-linter` +- [ ] Run the frontend and integration tests with `yarn lint && yarn flow && yarn test`) - [ ] Sign the [Contributor License Agreement](https://docs.google.com/a/metabase.com/forms/d/1oV38o7b9ONFSwuzwmERRMi9SYrhYeOrkbmNaq9pOJ_E/viewform) (unless it's a tiny documentation change). diff --git a/.gitignore b/.gitignore index 38ecf1822a3d..e854252eb57f 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ pom.xml.asc *.class /.lein-env /.lein-failures +/.lein-plugins /.lein-repl-history /.nrepl-port .idea/ @@ -45,3 +46,13 @@ bin/release/aws-eb/metabase-aws-eb.zip /build.xml /test-report-* /crate-* +*.po~ +/locales/metabase-*.pot +/stats.json +coverage-summary.json +.DS_Store +bin/node_modules/ +*.log +*.trace.db +/metabase.iml +/backend-checksums.txt diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 000000000000..bf357fbbc081 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,3 @@ +{ + "trailingComma": "all" +} diff --git a/Dockerfile b/Dockerfile index 12313c85fdbe..a8751a36229d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,43 +1,80 @@ -# NOTE: this Dockerfile builds Metabase from source. 
We recommend deploying the pre-built -# images hosted on Docker Hub https://hub.docker.com/r/metabase/metabase/ which use the -# Dockerfile located at ./bin/docker/Dockerfile +################### +# STAGE 1: builder +################### -FROM java:openjdk-8-jre-alpine +FROM openjdk:8-jdk-alpine as builder -ENV JAVA_HOME=/usr/lib/jvm/default-jvm -ENV PATH /usr/local/bin:$PATH -ENV LEIN_ROOT 1 +WORKDIR /app/source ENV FC_LANG en-US ENV LC_CTYPE en_US.UTF-8 -# install core build tools -RUN apk add --update nodejs git wget bash python make g++ java-cacerts ttf-dejavu fontconfig && \ - npm install -g yarn && \ - ln -sf "${JAVA_HOME}/bin/"* "/usr/bin/" +# bash: various shell scripts +# wget: installing lein +# git: ./bin/version +# nodejs: frontend building +# make: backend building +# gettext: translations +RUN apk add --update bash nodejs nodejs-npm git wget make gettext -# fix broken cacerts -RUN rm -f /usr/lib/jvm/default-jvm/jre/lib/security/cacerts && \ - ln -s /etc/ssl/certs/java/cacerts /usr/lib/jvm/default-jvm/jre/lib/security/cacerts +# yarn: frontend dependencies +RUN npm install -g yarn -# install lein +# lein: backend dependencies and building ADD https://raw.github.com/technomancy/leiningen/stable/bin/lein /usr/local/bin/lein RUN chmod 744 /usr/local/bin/lein +RUN lein upgrade + +# install dependencies before adding the rest of the source to maximize caching + +# backend dependencies +ADD project.clj . +RUN lein deps + +# frontend dependencies +ADD yarn.lock package.json ./ +RUN yarn -# add the application source to the image -ADD . /app/source +# add the rest of the source +ADD . . 
# build the app -WORKDIR /app/source RUN bin/build -# remove unnecessary packages & tidy up -RUN apk del nodejs git wget python make g++ -RUN rm -rf /root/.lein /root/.m2 /root/.node-gyp /root/.npm /root/.yarn /root/.yarn-cache /tmp/* /var/cache/apk/* /app/source/node_modules +# install updated cacerts to /etc/ssl/certs/java/cacerts +RUN apk add --update java-cacerts + +# import AWS RDS cert into /etc/ssl/certs/java/cacerts +ADD https://s3.amazonaws.com/rds-downloads/rds-combined-ca-bundle.pem . +RUN keytool -noprompt -import -trustcacerts -alias aws-rds \ + -file rds-combined-ca-bundle.pem \ + -keystore /etc/ssl/certs/java/cacerts \ + -keypass changeit -storepass changeit + +# ################### +# # STAGE 2: runner +# ################### + +FROM java:openjdk-8-jre-alpine as runner + +WORKDIR /app + +ENV FC_LANG en-US +ENV LC_CTYPE en_US.UTF-8 + +# dependencies +RUN apk add --update bash ttf-dejavu fontconfig + +# add fixed cacerts +COPY --from=builder /etc/ssl/certs/java/cacerts /usr/lib/jvm/default-jvm/jre/lib/security/cacerts + +# add Metabase script and uberjar +RUN mkdir -p bin target/uberjar +COPY --from=builder /app/source/target/uberjar/metabase.jar /app/target/uberjar/ +COPY --from=builder /app/source/bin/start /app/bin/ # expose our default runtime port EXPOSE 3000 -# build and then run it -WORKDIR /app/source -ENTRYPOINT ["./bin/start"] +# run it +ENTRYPOINT ["/app/bin/start"] diff --git a/OSX/Metabase.xcodeproj/project.pbxproj b/OSX/Metabase.xcodeproj/project.pbxproj index 852019b3738f..2d365e2dab97 100644 --- a/OSX/Metabase.xcodeproj/project.pbxproj +++ b/OSX/Metabase.xcodeproj/project.pbxproj @@ -537,6 +537,7 @@ isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; + ARCHS = "$(ARCHS_STANDARD_64_BIT)"; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; @@ -569,6 +570,7 @@ MACOSX_DEPLOYMENT_TARGET = 10.9; ONLY_ACTIVE_ARCH = YES; SDKROOT = macosx; + VALID_ARCHS = x86_64; }; name = 
Debug; }; @@ -576,6 +578,7 @@ isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; + ARCHS = "$(ARCHS_STANDARD_64_BIT)"; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; @@ -601,7 +604,9 @@ GCC_WARN_UNUSED_VARIABLE = YES; LD_RUNPATH_SEARCH_PATHS = "@loader_path/../Frameworks"; MACOSX_DEPLOYMENT_TARGET = 10.9; + ONLY_ACTIVE_ARCH = YES; SDKROOT = macosx; + VALID_ARCHS = x86_64; }; name = Release; }; diff --git a/OSX/Metabase/Backend/AppDelegate.h b/OSX/Metabase/Backend/AppDelegate.h index 6a5a0a3df945..a34c5c173ff1 100644 --- a/OSX/Metabase/Backend/AppDelegate.h +++ b/OSX/Metabase/Backend/AppDelegate.h @@ -14,4 +14,7 @@ @property (readonly) NSUInteger port; +- (void)stopMetabaseTask; +- (void)startMetabaseTask; + @end diff --git a/OSX/Metabase/Backend/AppDelegate.m b/OSX/Metabase/Backend/AppDelegate.m index 44966ed48409..7badb813f661 100644 --- a/OSX/Metabase/Backend/AppDelegate.m +++ b/OSX/Metabase/Backend/AppDelegate.m @@ -33,50 +33,35 @@ + (AppDelegate *)instance { return sInstance; } -- (id)init { - if (self = [super init]) { - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(taskTimedOut:) name:MetabaseTaskTimedOutNotification object:nil]; - } - return self; -} - -- (void)dealloc { - [[NSNotificationCenter defaultCenter] removeObserver:self]; -} - - (void)applicationDidFinishLaunching:(NSNotification *)aNotification { sInstance = self; [[SUUpdater sharedUpdater] checkForUpdatesInBackground]; - self.task = [MetabaseTask task]; - self.healthChecker.port = self.task.port; + [self startMetabaseTask]; [self.healthChecker start]; } - (void)applicationDidBecomeActive:(NSNotification *)notification { // re-start the health checker if it's not checking like it should be : the HEALTH CHECKER HEALTH CHECKER - if (self.healthChecker.lastCheckTime) { - const CFTimeInterval timeSinceLastHealthCheck = CFAbsoluteTimeGetCurrent() - self.healthChecker.lastCheckTime; - if 
(timeSinceLastHealthCheck > 5.0f) { - NSLog(@"Last health check was %.0f ago, restarting health checker!", timeSinceLastHealthCheck); - [self.healthChecker start]; - } - } - // (re)start the health checker just to be extra double-safe it's still running + [self.healthChecker start]; } - (void)applicationWillTerminate:(NSNotification *)notification { - self.task = nil; + [self stopMetabaseTask]; } -#pragma mark - Notifications +#pragma mark - Static Methods + +- (void)startMetabaseTask { + self.task = [MetabaseTask task]; + self.healthChecker.port = self.task.port; +} -- (void)taskTimedOut:(NSNotification *)notification { - NSLog(@"Metabase task timed out. Restarting..."); - [self.healthChecker resetTimeout]; - self.task = [MetabaseTask task]; +- (void)stopMetabaseTask { + [self.task disableTerminationAlert]; + self.task = nil; } @@ -92,7 +77,9 @@ - (TaskHealthChecker *)healthChecker { - (void)setTask:(MetabaseTask *)task { [_task terminate]; _task = task; - [task launch]; + dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ + [task launch]; + }); } - (NSUInteger)port { diff --git a/OSX/Metabase/Backend/MetabaseTask.h b/OSX/Metabase/Backend/MetabaseTask.h index f71efc48b2b6..7d1da0738046 100644 --- a/OSX/Metabase/Backend/MetabaseTask.h +++ b/OSX/Metabase/Backend/MetabaseTask.h @@ -16,6 +16,10 @@ - (void)launch; +/// Remove the task termination handler that pops up the 'Task died unexpectedly' alert. +/// For cases when we want to kill the Metabase task without freaking the user out, e.g. 
for Reset Password +- (void)disableTerminationAlert; + - (NSUInteger)port; @end diff --git a/OSX/Metabase/Backend/MetabaseTask.m b/OSX/Metabase/Backend/MetabaseTask.m index 40010f23bc5d..f55d6685abd8 100644 --- a/OSX/Metabase/Backend/MetabaseTask.m +++ b/OSX/Metabase/Backend/MetabaseTask.m @@ -11,8 +11,11 @@ @interface MetabaseTask () @property (nonatomic) NSUInteger port; +@property (nonatomic, strong) NSMutableArray *lastMessages; ///< 10 most recently logged messages. + @end + @implementation MetabaseTask + (MetabaseTask *)task { @@ -34,6 +37,12 @@ - (void)readHandleDidRead:(NSString *)message { regex = [NSRegularExpression regularExpressionWithPattern:@"\\[\\d+m" options:0 error:nil]; message = [regex stringByReplacingMatchesInString:message options:0 range:NSMakeRange(0, message.length) withTemplate:@""]; + + // add the message to the recently logged messages. If we now have more than 5 remove the oldest + [self.lastMessages addObject:message]; + if (self.lastMessages.count > 10) [self.lastMessages removeObjectAtIndex:0]; + + // log the message the normal way as well NSLog(@"%@", message); } @@ -63,7 +72,7 @@ - (void)launch { self.task = [[NSTask alloc] init]; self.task.launchPath = JREPath(); - self.task.environment = @{@"MB_DB_FILE": [DBPath() stringByAppendingString:@";AUTO_SERVER=TRUE"], + self.task.environment = @{@"MB_DB_FILE": DBPath(), @"MB_PLUGINS_DIR": PluginsDirPath(), @"MB_JETTY_PORT": @(self.port), @"MB_CLIENT": @"OSX"}; @@ -74,9 +83,15 @@ - (void)launch { __weak MetabaseTask *weakSelf = self; self.task.terminationHandler = ^(NSTask *task){ - NSLog(@"\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Task terminated with exit code %d !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!", task.terminationStatus); + NSLog(@"\n\n!!!!! 
Task terminated with exit code %d !!!!!\n\n", task.terminationStatus); + dispatch_async(dispatch_get_main_queue(), ^{ - if ([[NSAlert alertWithMessageText:@"Fatal Error" defaultButton:@"Restart" alternateButton:@"Quit" otherButton:nil informativeTextWithFormat:@"The Metabase server terminated unexpectedly."] runModal] == NSAlertDefaultReturn) { + if ([[NSAlert alertWithMessageText:@"Fatal Error" + defaultButton:@"Restart" + alternateButton:@"Quit" + otherButton:nil + informativeTextWithFormat:@"The Metabase server terminated unexpectedly.\n\nMessages:\n%@", [weakSelf.lastMessages componentsJoinedByString:@""]] // components should already have newline at end + runModal] == NSAlertDefaultReturn) { [weakSelf launch]; } else { exit(task.terminationStatus); @@ -94,6 +109,10 @@ - (void)terminate { _port = 0; } +- (void)disableTerminationAlert { + self.task.terminationHandler = nil; +} + #pragma mark - Getters / Setters @@ -105,5 +124,11 @@ - (NSUInteger)port { } return _port; } + + +- (NSMutableArray *)lastMessages { + if (!_lastMessages) _lastMessages = [NSMutableArray array]; + return _lastMessages; +} @end diff --git a/OSX/Metabase/Backend/ResetPasswordTask.m b/OSX/Metabase/Backend/ResetPasswordTask.m index 8c672066a914..1260ff631316 100644 --- a/OSX/Metabase/Backend/ResetPasswordTask.m +++ b/OSX/Metabase/Backend/ResetPasswordTask.m @@ -6,6 +6,7 @@ // Copyright (c) 2015 Metabase. All rights reserved. 
// +#import "AppDelegate.h" #import "ResetPasswordTask.h" @interface ResetPasswordTask () @@ -19,28 +20,29 @@ @implementation ResetPasswordTask - (void)resetPasswordForEmailAddress:(NSString *)emailAddress success:(void (^)(NSString *))successBlock error:(void (^)(NSString *))errorBlock { dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{ + // first, we need to stop the main Metabase task so we can access the DB + NSLog(@"Stopping Metabase task in order to reset password..."); + [[AppDelegate instance] stopMetabaseTask]; + self.task = [[NSTask alloc] init]; - NSString *dbPath = [DBPath() stringByAppendingString:@";IFEXISTS=TRUE;AUTO_SERVER=TRUE"]; - self.task.environment = @{@"MB_DB_FILE": dbPath, @"HOME": @"/Users/camsaul"}; - // time travelers from the future: this is hardcoded since I'm the only one who works on this. I give you permission to fix it - Cam #define DEBUG_RUN_LEIN_TASK 0 #if DEBUG_RUN_LEIN_TASK - self.task.environment = @{@"MB_DB_FILE": dbPath}; + self.task.environment = @{@"MB_DB_FILE": DBPath()}; self.task.currentDirectoryPath = @"/Users/cam/metabase"; self.task.launchPath = @"/usr/local/bin/lein"; self.task.arguments = @[@"run", @"reset-password", emailAddress]; - NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' lein run reset-password %@", dbPath, emailAddress); + NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' lein run reset-password %@", DBPath(), emailAddress); #else - self.task.environment = @{@"MB_DB_FILE": dbPath}; + self.task.environment = @{@"MB_DB_FILE": DBPath()}; self.task.launchPath = JREPath(); self.task.arguments = @[@"-Djava.awt.headless=true", // this prevents the extra java icon from popping up in the dock when running @"-Xverify:none", // disable bytecode verification for faster launch speed, not really needed here since JAR is packaged as part of signed .app @"-jar", UberjarPath(), @"reset-password", emailAddress]; - NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' %@ -jar %@ 
reset-password %@", dbPath, JREPath(), UberjarPath(), emailAddress); + NSLog(@"Launching ResetPasswordTask\nMB_DB_FILE='%@' %@ -jar %@ reset-password %@", DBPath(), JREPath(), UberjarPath(), emailAddress); #endif __weak ResetPasswordTask *weakSelf = self; @@ -54,6 +56,10 @@ - (void)resetPasswordForEmailAddress:(NSString *)emailAddress success:(void (^)( } else { errorBlock(weakSelf.output.length ? weakSelf.output : @"An unknown error has occured."); } + + // now restart the main Metabase task + NSLog(@"Reset password complete, restarting Metabase task..."); + [[AppDelegate instance] startMetabaseTask]; }); }; diff --git a/OSX/Metabase/Backend/TaskHealthChecker.h b/OSX/Metabase/Backend/TaskHealthChecker.h index 3d6f94e136b6..aea242abae48 100644 --- a/OSX/Metabase/Backend/TaskHealthChecker.h +++ b/OSX/Metabase/Backend/TaskHealthChecker.h @@ -8,17 +8,14 @@ static NSString * const MetabaseTaskBecameHealthyNotification = @"MetabaseTaskBecameHealthyNotification"; static NSString * const MetabaseTaskBecameUnhealthyNotification = @"MetabaseTaskBecameUnhealthyNotification"; -static NSString * const MetabaseTaskTimedOutNotification = @"MetabaseTaskTimedOutNotification"; /// Manages the MetabaseTask (server) and restarts it if it gets unresponsive @interface TaskHealthChecker : NSObject @property () NSUInteger port; +/// (re)start the health checker - (void)start; - (void)stop; -- (void)resetTimeout; - -- (CFAbsoluteTime)lastCheckTime; @end diff --git a/OSX/Metabase/Backend/TaskHealthChecker.m b/OSX/Metabase/Backend/TaskHealthChecker.m index cfb2221e83b6..1a3d13bb65bf 100644 --- a/OSX/Metabase/Backend/TaskHealthChecker.m +++ b/OSX/Metabase/Backend/TaskHealthChecker.m @@ -9,21 +9,16 @@ #import "TaskHealthChecker.h" /// Check out health every this many seconds -static const CGFloat HealthCheckIntervalSeconds = 2.0f; +static const CGFloat HealthCheckIntervalSeconds = 1.0f; /// This number should be lower than HealthCheckIntervalSeconds so requests don't end up piling up static 
const CGFloat HealthCheckRequestTimeout = 1.75f; -/// After this many seconds of being unhealthy, consider the task timed out so it can be killed -static const CFTimeInterval TimeoutIntervalSeconds = 60.0f; - @interface TaskHealthChecker () @property (strong, nonatomic) NSOperationQueue *healthCheckOperationQueue; @property (strong, nonatomic) NSTimer *healthCheckTimer; @property (nonatomic) BOOL healthy; -@property CFAbsoluteTime lastHealthyTime; -@property CFAbsoluteTime lastCheckTime; /// Set this to YES after server has started successfully one time /// we'll hold of on the whole killing the Metabase server until it launches one time, I guess @@ -39,17 +34,11 @@ - (void)dealloc { #pragma mark - Local Methods -- (void)resetTimeout { - self.lastHealthyTime = CFAbsoluteTimeGetCurrent(); -} - - (void)start { NSLog(@"(re)starting health checker @ 0x%zx...", (size_t)self); self.healthCheckOperationQueue = [[NSOperationQueue alloc] init]; self.healthCheckOperationQueue.maxConcurrentOperationCount = 1; - [self resetTimeout]; - dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{ self.healthCheckTimer = [NSTimer timerWithTimeInterval:HealthCheckIntervalSeconds target:self selector:@selector(checkHealth) userInfo:nil repeats:YES]; self.healthCheckTimer.tolerance = HealthCheckIntervalSeconds / 2.0f; @@ -64,8 +53,6 @@ - (void)stop { } - (void)checkHealth:(void(^)(BOOL healthy))completion { - self.lastCheckTime = CFAbsoluteTimeGetCurrent(); - /// Cancel any pending checks so they don't pile up indefinitely [self.healthCheckOperationQueue cancelAllOperations]; @@ -117,29 +104,11 @@ - (void)setHealthCheckOperationQueue:(NSOperationQueue *)healthCheckOperationQue } - (void)setHealthy:(BOOL)healthy { - if (healthy) { - self.lastHealthyTime = CFAbsoluteTimeGetCurrent(); - } else { - const CFTimeInterval timeSinceWasLastHealthy = CFAbsoluteTimeGetCurrent() - self.lastHealthyTime; - - if (timeSinceWasLastHealthy >= TimeoutIntervalSeconds) { - __weak 
TaskHealthChecker *weakSelf = self; - if (!self.hasEverBeenHealthy) { - NSLog(@"We've been waiting %.0f seconds, what's going on?", timeSinceWasLastHealthy); - return; - } - [[NSNotificationCenter defaultCenter] postNotificationName:MetabaseTaskTimedOutNotification object:weakSelf]; - } - } - if (_healthy == healthy) return; _healthy = healthy; - NSLog(@"\n\n" - "+--------------------------------------------------------------------+\n" - "| Server status has transitioned to: %@ |\n" - "+--------------------------------------------------------------------+\n\n", (healthy ? @"HEALTHY " : @"NOT HEALTHY")); - + NSLog(@"Server is now %@", healthy ? @"HEALTHY" : @"UNHEALTHY"); + __weak TaskHealthChecker *weakSelf = self; NSString *notification = healthy ? MetabaseTaskBecameHealthyNotification : MetabaseTaskBecameUnhealthyNotification; [[NSNotificationCenter defaultCenter] postNotificationName:notification object:weakSelf]; diff --git a/OSX/Metabase/UI/MainMenu.xib b/OSX/Metabase/UI/MainMenu.xib index be3bd67104bd..df9d9d4e6518 100644 --- a/OSX/Metabase/UI/MainMenu.xib +++ b/OSX/Metabase/UI/MainMenu.xib @@ -1,8 +1,9 @@ - + - - + + + @@ -174,16 +175,15 @@ - - - - + + + - + - + @@ -211,7 +211,7 @@ - + diff --git a/OSX/Metabase/UI/MainViewController.m b/OSX/Metabase/UI/MainViewController.m index 14263b876daf..14ce608c24ec 100644 --- a/OSX/Metabase/UI/MainViewController.m +++ b/OSX/Metabase/UI/MainViewController.m @@ -38,6 +38,8 @@ @interface MainViewController () @property (nonatomic) BOOL loading; +@property (nonatomic, strong) NSString *launchRoute; ///< redirect to this URL on launch if set. Used for password reset to take you to reset password page. 
+ @end @implementation MainViewController @@ -91,7 +93,14 @@ - (void)dealloc { - (void)taskBecameHealthy:(NSNotification *)notification { dispatch_async(dispatch_get_main_queue(), ^{ - [self loadMainPage]; + + if (self.launchRoute) { + [self navigateToRoute:self.launchRoute]; + self.launchRoute = nil; + } else { + [self loadMainPage]; + } + dispatch_async(dispatch_get_main_queue(), ^{ self.loading = NO; }); @@ -107,12 +116,17 @@ - (void)taskBecameUnhealthy:(NSNotification *)notification { #pragma mark - Local Methods +- (void)navigateToRoute:(nonnull NSString *)route { + NSString *urlString = [BaseURL() stringByAppendingString:route]; + NSLog(@"Connecting to Metabase instance, navigating to page: %@", urlString); + NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:urlString]]; + request.cachePolicy = NSURLCacheStorageAllowedInMemoryOnly; + [self.webView.mainFrame loadRequest:request]; + +} + - (void)loadMainPage { - NSLog(@"Connecting to Metabase instance at: %@", BaseURL()); - - NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:BaseURL()]]; - request.cachePolicy = NSURLCacheStorageAllowedInMemoryOnly; - [self.webView.mainFrame loadRequest:request]; + [self navigateToRoute:@"/"]; } - (void)downloadWithMethod:(NSString *)methodString url:(NSString *)urlString params:(NSDictionary *)paramsDict extensions:(NSArray *)extensions { @@ -251,12 +265,9 @@ - (IBAction)resetPassword:(id)sender { - (void)resetPasswordWindowController:(ResetPasswordWindowController *)resetPasswordWindowController didFinishWithResetToken:(NSString *)resetToken { self.resetPasswordWindowController = nil; - - NSString *passwordResetURLString = [NSString stringWithFormat:@"%@/auth/reset_password/%@", BaseURL(), resetToken]; - NSLog(@"Navigating to password reset URL '%@'...", passwordResetURLString); - - NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:passwordResetURLString]]; - 
[self.webView.mainFrame loadRequest:request]; + + // now tell the app to reroute to the reset password page once Metabase relauches + self.launchRoute = [@"/auth/reset_password/" stringByAppendingString:resetToken]; } diff --git a/Procfile b/Procfile index 513dddcb3848..91d50767bbcd 100644 --- a/Procfile +++ b/Procfile @@ -1 +1 @@ -web: ./bin/start +web: HEROKU=true ./bin/start diff --git a/README.md b/README.md index 2925f60e9e50..292677ef4e6b 100644 --- a/README.md +++ b/README.md @@ -6,18 +6,17 @@ Metabase is the easy, open source way for everyone in your company to ask questi [![Latest Release](https://img.shields.io/github/release/metabase/metabase.svg?label=latest%20release)](https://github.com/metabase/metabase/releases) [![GitHub license](https://img.shields.io/badge/license-AGPL-05B8CC.svg)](https://raw.githubusercontent.com/metabase/metabase/master/LICENSE.txt) [![Circle CI](https://circleci.com/gh/metabase/metabase.svg?style=svg&circle-token=3ccf0aa841028af027f2ac9e8df17ce603e90ef9)](https://circleci.com/gh/metabase/metabase) -[![Leiningen Dependencies Status](https://jarkeeper.com/metabase/metabase/status.svg)](https://jarkeeper.com/metabase/metabase) -[![NPM Dependencies Status](https://david-dm.org/metabase/metabase.svg)](https://david-dm.org/metabase/metabase) +[![Gitter chat](https://badges.gitter.im/metabase/metabase.png)](https://gitter.im/metabase/metabase) # Features -- 5 minute [setup](http://www.metabase.com/docs/latest/setting-up-metabase) (We're not kidding) -- Let anyone on your team [ask questions](http://www.metabase.com/docs/latest/users-guide/04-asking-questions) without knowing SQL -- Rich beautiful [dashboards](http://www.metabase.com/docs/latest/users-guide/06-sharing-answers) with auto refresh and fullscreen +- 5 minute [setup](http://metabase.com/docs/latest/setting-up-metabase.html) (We're not kidding) +- Let anyone on your team [ask questions](http://metabase.com/docs/latest/users-guide/04-asking-questions.html) without knowing 
SQL +- Rich beautiful [dashboards](http://metabase.com/docs/latest/users-guide/06-sharing-answers.html) with auto refresh and fullscreen - SQL Mode for analysts and data pros -- Create canonical [segments and metrics](http://www.metabase.com/docs/latest/administration-guide/07-segments-and-metrics) for your team to use -- Send data to Slack or email on a schedule with [Pulses](http://www.metabase.com/docs/latest/users-guide/10-pulses) -- View data in Slack anytime with [MetaBot](http://www.metabase.com/docs/latest/users-guide/11-metabot) -- [Humanize data](http://www.metabase.com/docs/latest/administration-guide/03-metadata-editing) for your team by renaming, annotating and hiding fields +- Create canonical [segments and metrics](http://metabase.com/docs/latest/administration-guide/07-segments-and-metrics.html) for your team to use +- Send data to Slack or email on a schedule with [Pulses](http://metabase.com/docs/latest/users-guide/10-pulses.html) +- View data in Slack anytime with [MetaBot](http://metabase.com/docs/latest/users-guide/11-metabot.html) +- [Humanize data](http://metabase.com/docs/latest/administration-guide/03-metadata-editing.html) for your team by renaming, annotating and hiding fields For more information check out [metabase.com](http://www.metabase.com) @@ -97,6 +96,11 @@ To get started with a development installation of the Metabase, follow the instr Then take a look at our [Contribution Guide](docs/contributing.md) for information about our process and where you can fit in! +Talk to other contributors [in our Gitter room](https://gitter.im/metabase/metabase). + +# Internationalization +We want Metabase to be avaliable in as many languages as possible. 
See what translations are avaliable and help contribute to internationalization using our project [over at POEditor](https://poeditor.com/join/project/ynjQmwSsGh) + # Extending and Deep Integrations Metabase also allows you to hit our Query API directly from Javascript to integrate the simple analytics we provide with your own application or third party services to do things like: diff --git a/app.json b/app.json index c7ec46163075..c9d6bcaf1133 100644 --- a/app.json +++ b/app.json @@ -5,7 +5,8 @@ "business intelligence", "analytics", "dashboard", - "charting" + "charting", + "metabase" ], "website": "http://www.metabase.com/", "repository": "https://github.com/metabase/metabase", diff --git a/bin/aws-eb-docker/.ebextensions/01_metabase.config b/bin/aws-eb-docker/.ebextensions/01_metabase.config index eea4d292fd34..a961774d2939 100644 --- a/bin/aws-eb-docker/.ebextensions/01_metabase.config +++ b/bin/aws-eb-docker/.ebextensions/01_metabase.config @@ -15,13 +15,14 @@ container_commands: #command: true #ignoreErrors: false - 01_server-name: - command: ".ebextensions/metabase_config/metabase-setup.sh server_name" - test: test $NGINX_SERVER_NAME + # do server_https first to avoid overwriting other config changes + 01_server_https: + command: ".ebextensions/metabase_config/metabase-setup.sh server_https" ignoreErrors: true - 02_server_https: - command: ".ebextensions/metabase_config/metabase-setup.sh server_https" + 02_server_name: + command: ".ebextensions/metabase_config/metabase-setup.sh server_name" + test: test $NGINX_SERVER_NAME ignoreErrors: true 03_log_x_real_ip: diff --git a/bin/build b/bin/build index aa4388776d26..b16fc136b90b 100755 --- a/bin/build +++ b/bin/build @@ -20,15 +20,25 @@ frontend-deps() { } frontend() { + frontend-deps echo "Running 'webpack' with NODE_ENV=production assemble and minify frontend assets..." 
&& NODE_ENV=production ./node_modules/.bin/webpack --bail } frontend-fast() { + frontend-deps echo "Running 'webpack' with NODE_ENV=development to assemble frontend assets..." && NODE_ENV=development ./node_modules/.bin/webpack --bail --devtool eval } +translations() { + echo "Running './bin/i18n/build-translation-resources' to build translation resources..." + if ! ./bin/i18n/build-translation-resources; then + echo "Building translation resources failed, please install 'gettext', or build without translations by running './bin/build no-translations'." + exit 1 + fi +} + sample-dataset() { if [ -f resources/sample-dataset.db.mv.db ]; then echo "Sample Dataset already generated." @@ -44,7 +54,11 @@ uberjar() { } all() { - version && frontend-deps && frontend && sample-dataset && uberjar + version && translations && frontend && sample-dataset && uberjar +} + +no-translations() { + version && frontend && sample-dataset && uberjar } # Default to running all but let someone specify one or more sub-tasks to run instead if desired diff --git a/bin/build-for-test b/bin/build-for-test new file mode 100755 index 000000000000..9192d4a74ddc --- /dev/null +++ b/bin/build-for-test @@ -0,0 +1,42 @@ +#!/usr/bin/env bash + +set -eu + +VERSION_PROPERTY_NAME="src_hash" + +source-hash() { + # hash all the files that might change a backend-only uberjar build (for integration tests) + ( + find src project.clj resources/sample-dataset.db.mv.db -type f -print0 | xargs -0 shasum ; + find resources -type f \( -iname \*.clj -o -iname \*.edn -o -iname \*.yaml -o -iname \*.properties \) -not -name "version.properties" -print0 | xargs -0 shasum ; + ) | shasum | awk '{ print $1 }' +} + +uberjar-hash() { + # java -jar target/uberjar/metabase.jar version | grep -oE 'source_hash [a-f0-9]+' | awk '{ print $2 }' + # pulling the version.properties directly from the jar is much faster + unzip -c target/uberjar/metabase.jar version.properties | grep "$VERSION_PROPERTY_NAME" | cut -f2 -d= +} + 
+check-uberjar-hash() { + expected_hash=$(source-hash) + actual_hash=$(uberjar-hash) + if [ "$expected_hash" == "$actual_hash" ]; then + return 0 + else + return 1 + fi +} + +build-uberjar-for-test() { + ./bin/build version + echo "$VERSION_PROPERTY_NAME=$(source-hash)" >> resources/version.properties + ./bin/build uberjar +} + +if [ ! -f "target/uberjar/metabase.jar" ] || ! check-uberjar-hash; then + echo "Building uberjar for testing" + build-uberjar-for-test +else + echo "Uberjar already up to date for testing" +fi diff --git a/bin/ci b/bin/ci deleted file mode 100755 index 7b47bf54b409..000000000000 --- a/bin/ci +++ /dev/null @@ -1,192 +0,0 @@ -#!/usr/bin/env bash - -# this ensures any failures along the way result in a CI failure -set -eu - -node-0() { - is_enabled "drivers" && export ENGINES="h2,mongo,mysql,bigquery" || export ENGINES="h2" - if is_engine_enabled "mongo"; then - run_step install-mongodb - fi - MB_MYSQL_TEST_USER=ubuntu run_step lein-test -} -node-1() { - is_enabled "drivers" && export ENGINES="h2,sqlserver,oracle" || export ENGINES="h2" - if is_engine_enabled "oracle"; then - run_step install-oracle - fi - MB_DB_TYPE=postgres MB_DB_DBNAME=circle_test MB_DB_PORT=5432 MB_DB_USER=ubuntu MB_DB_HOST=localhost \ - run_step lein-test -} -node-2() { - is_enabled "drivers" && export ENGINES="h2,postgres,sqlite,presto" || export ENGINES="h2" - if is_engine_enabled "crate"; then - run_step install-crate - fi - if is_engine_enabled "presto"; then - run_step install-presto - fi - MB_ENCRYPTION_SECRET_KEY='Orw0AAyzkO/kPTLJRxiyKoBHXa/d6ZcO+p+gpZO/wSQ=' MB_DB_TYPE=mysql MB_DB_DBNAME=circle_test MB_DB_PORT=3306 MB_DB_USER=ubuntu MB_DB_HOST=localhost \ - MB_PRESTO_TEST_HOST=localhost MB_PRESTO_TEST_PORT=8080 MB_POSTGRESQL_TEST_USER=ubuntu \ - run_step lein-test -} -node-3() { - is_enabled "drivers" && export ENGINES="h2,redshift,druid,vertica" || export ENGINES="h2" - if is_engine_enabled "vertica"; then - run_step install-vertica - fi - # this is redundant 
with node 0 unless one of the non-H2 driver tests is enabled - if [ ENGINES != "h2" ]; then - run_step lein-test - fi -} -node-4() { - run_step lein bikeshed - run_step lein docstring-checker - run_step ./bin/reflection-linter -} -node-5() { - run_step lein eastwood - run_step yarn run lint - run_step yarn run flow - run_step yarn run test-unit - run_step yarn run test-karma -} -node-6() { - run_step ./bin/build version sample-dataset uberjar - run_step yarn run test-integrated -} - - -install-crate() { - sudo add-apt-repository ppa:crate/stable -y - sudo apt-get update - sudo apt-get install -y crate - # ulimit setting refused Crate service to start on CircleCI container - so comment it - sudo sed -i '/MAX_LOCKED_MEMORY/s/^/#/' /etc/init/crate.conf - echo "psql.port: 5200" | sudo tee -a /etc/crate/crate.yml - sudo service crate restart -} - -install-mongodb() { - sudo apt-get purge mongodb-org* - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 - echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.0.list - sudo apt-get update - sudo apt-get install -y mongodb-org - sudo service mongod restart -} - -install-oracle() { - wget --output-document=plugins/ojdbc7.jar $ORACLE_JDBC_JAR -} - -install-vertica() { - wget --output-document=plugins/vertica-jdbc-7.1.2-0.jar $VERTICA_JDBC_JAR - docker run --detach --publish 5433:5433 sumitchawla/vertica - sleep 60 -} - -install-presto() { - docker run --detach --publish 8080:8080 wiill/presto-mb-ci - sleep 10 -} - -lein-test() { - lein test -} - -if [ -z ${CIRCLE_BRANCH_REGEX+x} ]; then - CIRCLE_BRANCH_REGEX='^master|release-.+$' -fi - -is_enabled() { - (echo "$CIRCLE_BRANCH" | grep -qE "$CIRCLE_BRANCH_REGEX") || - [[ "$CIRCLE_COMMIT_MESSAGE" == *"[ci $1]"* ]] || - [[ "$CIRCLE_COMMIT_MESSAGE" == *"[ci all]"* ]] -} - -is_engine_enabled() { - [[ "$ENGINES" == *"$1"* ]] -} - -# print a summary on exit -status=0 -summary="" - -# 
records the time and exit code of each step -run_step() { - status=0 - start=$(date +%s) - # run in the background then `wait` so fail_fast can interrupt it - "$@" & - wait $! || status=$? - elapsed=$(expr $(date +%s) - $start || true) - summary="${summary}status=$status time=$elapsed command=$@\n" - return $status -} - -summary() { - # if last status was failure then fail the rest of the nodes - if [ $status != 0 ]; then - fail_fast - fi - echo -e "========================================" - echo -en "$summary" - echo -e "========================================" -} - -trap summary EXIT - -fail_fast() { - echo -e "========================================" - echo -e "Failing fast! Stopping other nodes..." - # Touch a file to differentiate between a local failure and a - # failure triggered by another node - touch '/tmp/local-fail' - # ssh to the other CircleCI nodes and send SIGUSR1 to tell them to exit early - for (( i = 0; i < $CIRCLE_NODE_TOTAL; i++ )); do - if [ $i != $CIRCLE_NODE_INDEX ]; then - ssh node$i 'touch /tmp/fail; pkill -SIGUSR1 -f "bash ./bin/ci"' 2> /dev/null || true - fi - done -} - -exit_early() { - echo -e "========================================" - echo -e "Exited early! Did not necesssarily pass!" - pkill -TERM -P $$ || true - exit 0 -} - -trap exit_early SIGUSR1 - -if [ -z ${CIRCLE_SHA1+x} ]; then - export CIRCLE_SHA1="$(git rev-parse HEAD)" -fi - -if [ -z ${CIRCLE_BRANCH+x} ]; then - export CIRCLE_BRANCH="$(git rev-parse --abbrev-ref HEAD)" -fi - -export CIRCLE_COMMIT_MESSAGE="$(git log --format=oneline -n 1 $CIRCLE_SHA1)" - -# This local-fail check is to guard against two nodes failing at the -# same time. Both nodes ssh to each node and drop /tmp/fail. Those -# failing nodes then get here and see and the other node has told it -# to exit early. This results in both nodes exiting early, and thus -# not failing, causing the build to succeed -if [[ -f "/tmp/fail" && ! 
-f "/tmp/local-fail" ]]; then - exit_early -fi - -if [ -z ${CIRCLE_NODE_INDEX+x} ]; then - # If CIRCLE_NODE_INDEX isn't set, read node numbers from the args - # Useful for testing locally. - for i in "$@"; do - node-$i - done -else - # Normal mode on CircleCI - node-$CIRCLE_NODE_INDEX -fi diff --git a/bin/colopocalypse b/bin/colopocalypse new file mode 100755 index 000000000000..a9517c8c99f8 --- /dev/null +++ b/bin/colopocalypse @@ -0,0 +1,398 @@ +#!./node_modules/.bin/babel-node + +const glob = require("glob"); +const fs = require("fs"); +const path = require("path"); +const Color = require("color"); +const colorDiff = require("color-diff"); +const _ = require("underscore"); +const j = require("jscodeshift"); + +const { replaceStrings } = require("./lib/codemod"); + +const POSTCSS_CONFIG = require("../postcss.config.js"); +const cssVariables = + POSTCSS_CONFIG.plugins["postcss-cssnext"].features.customProperties.variables; +// console.log(cssVariables); + +// these are a bit liberal regexes but that's probably ok +const COLOR_REGEX = /(?:#[a-fA-F0-9]{3}(?:[a-fA-F0-9]{3})?\b|(?:rgb|hsl)a?\(\s*\d+\s*(?:,\s*\d+(?:\.\d+)?%?\s*){2,3}\))/g; +const COLOR_REGEX_WITH_LINE = /(?:#[a-fA-F0-9]{3}(?:[a-fA-F0-9]{3})?\b|(?:rgb|hsl)a?\(\s*\d+\s*(?:,\s*\d+(?:\.\d+)?%?\s*){2,3}\)).*/g; + +const CSS_SIMPLE_VAR_REGEX = /^var\(([^)]+)\)$/; +const CSS_COLOR_VAR_REGEX = /^color\(var\(([^)]+)\) shade\(([^)]+)\)\)$/; +const CSS_VAR_REGEX = /var\([^)]+\)|color\(var\([^)]+\) shade\([^)]+\)\)/g; + +const FILE_GLOB = "frontend/src/**/*.{css,js,jsx}"; +const FILE_GLOB_IGNORE = [ + "**/metabase/lib/colors.js", + "**/metabase/css/core/colors.css", + "**/metabase/auth/components/AuthScene.jsx", + "**/metabase/icon_paths.js", + // // recast messes up these file and they don't have any colors so just ignore them: + // "**/metabase/query_builder/components/FieldList.jsx", + // "**/metabase/query_builder/components/filters/FilterPopover.jsx", + // 
"**/metabase/visualizations/components/TableInteractive.jsx", +]; + +const COLORS_CSS_PATH = "frontend/src/metabase/css/core/colors.css"; +const COLORS_JS_PATH = "frontend/src/metabase/lib/colors.js"; + +const varForName = name => `--color-${name}`; + +const colors = { + // themeable colors + + brand: "#509EE3", + + accent1: "#9CC177", + accent2: "#A989C5", + accent3: "#EF8C8C", + accent4: "#F9D45C", + + accent5: "#F1B556", + accent6: "#A6E7F3", + accent7: "#7172AD", + + // general purpose + + white: "#FFFFFF", + black: "#2E353B", + + // semantic colors + + success: "#84BB4C", + error: "#ED6E6E", + warning: "#F9CF48", + + "text-dark": "#2E353B", // "black" + "text-medium": "#93A1AB", + "text-light": "#DCE1E4", + "text-white": "#FFFFFF", // "white" + + "bg-black": "#2E353B", // "black" + "bg-dark": "#93A1AB", + "bg-medium": "#EDF2F5", + "bg-light": "#F9FBFC", + "bg-white": "#FFFFFF", // "white" + + shadow: "#F4F5F6", + border: "#D7DBDE", +}; + +function paletteForColors(colors) { + return Object.entries(colors).map(([name, colorValue]) => { + const color = Color(colorValue); + return { + name, + color, + R: color.red(), + G: color.green(), + B: color.blue(), + }; + }); +} + +const PRIMARY_AND_SECONDARY_NAMES = [ + "brand", + "accent1", + "accent2", + "accent3", + "accent4", +]; +const TEXT_COLOR_NAMES = [ + "text-dark", + "text-medium", + "text-light", + "text-white", +]; +const BACKGROUND_COLOR_NAMES = [ + "bg-black", + "bg-dark", + "bg-medium", + "bg-light", + "bg-white", +]; +const SEMANTIC_NAMES = ["success", "error", "warning"]; + +const PALETTE_FOREGROUND = paletteForColors( + _.pick( + colors, + ...TEXT_COLOR_NAMES, + ...PRIMARY_AND_SECONDARY_NAMES, + ...SEMANTIC_NAMES, + ), +); +const PALETTE_BACKGROUND = paletteForColors( + _.pick( + colors, + ...BACKGROUND_COLOR_NAMES, + ...PRIMARY_AND_SECONDARY_NAMES, + ...SEMANTIC_NAMES, + ), +); +const PALETTE_BORDER = paletteForColors( + _.pick(colors, "border", ...PRIMARY_AND_SECONDARY_NAMES), +); +const 
PALETTE_SHADOW = paletteForColors(_.pick(colors, "shadow")); + +// basically everything except border/shadow +const PALETTE_OTHER = paletteForColors( + _.pick( + colors, + ...TEXT_COLOR_NAMES, + ...BACKGROUND_COLOR_NAMES, + ...PRIMARY_AND_SECONDARY_NAMES, + ...SEMANTIC_NAMES, + ), +); + +function paletteForCSSProperty(property) { + if (property) { + if (property === "color" || /text|font/i.test(property)) { + return PALETTE_FOREGROUND; + } else if (/bg|background/i.test(property)) { + return PALETTE_BACKGROUND; + } else if (/border/i.test(property)) { + return PALETTE_BORDER; + } else if (/shadow/i.test(property)) { + return PALETTE_SHADOW; + } + } + if (property != undefined) { + console.log("unknown pallet for property", property); + } + return PALETTE_OTHER; +} + +function getBestCandidate(color, palette) { + const closest = colorDiff.closest( + { R: color.red(), G: color.green(), B: color.blue() }, + palette, + ); + let bestName = closest.name; + let bestColor = closest.color; + if (color.alpha() < 1) { + bestColor = bestColor.alpha(color.alpha()); + } + return [bestName, bestColor]; +} + +function toJSValue(newColorName, newColor) { + if (newColor.alpha() < 1) { + return newColor.string(); + } else { + return newColor.hex(); + } +} + +function toCSSValue(newColorName, newColor) { + if (newColor.alpha() < 1) { + return `color(var(${varForName(newColorName)}) alpha(-${Math.round( + 100 * (1 - newColor.alpha()), + )}%))`; + } else { + return `var(${varForName(newColorName)})`; + } +} + +function lineAtIndex(lines, index) { + let charIndex = 0; + for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { + charIndex += lines[lineIndex].length + 1; + if (charIndex >= index) { + return lines[lineIndex]; + } + } +} + +function lineUpToIndex(lines, index) { + let charIndex = 0; + for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) { + const lineStart = charIndex; + charIndex += lines[lineIndex].length + 1; + if (charIndex >= index) { + return 
lines[lineIndex].slice(0, index - lineStart); + } + } +} + +function cssPropertyAtIndex(lines, index) { + const line = lineAtIndex(lines, index); + const prefix = lineUpToIndex(lines, index); + if (line) { + const match = + // matches property names at the beginning of the line + line.match(/^\s*([a-zA-Z0-9-]+):/) || + // matches property names leading up to the rule value + prefix.match(/(^|[^a-zA-Z0-9-])([a-zA-Z0-9-]+)\s*:\s*"?$/); + if (match) { + return match[1].trim(); + } else { + console.warn("no property", line); + } + } else { + console.warn("no line at that index! this should not happen"); + } +} + +function replaceCSSColorValues(content) { + const lines = content.split("\n"); + return content.replace(COLOR_REGEX, (color, index) => { + const palette = paletteForCSSProperty(cssPropertyAtIndex(lines, index)); + const [newColorName, newColor] = getBestCandidate(Color(color), palette); + return toCSSValue(newColorName, newColor); + }); +} + +function replaceJSColorValues(content) { + if (COLOR_REGEX.test(content)) { + // console.log("processing"); + return replaceStrings(content, COLOR_REGEX, (value, propertyName) => { + const palette = paletteForCSSProperty(propertyName); + const [newColorName, newColor] = getBestCandidate(Color(value), palette); + // console.log(value, propertyName, "=>", newColorName); + // return j.identifier(newColorName.replace(/\W/g, "_")); + // return j.stringLiteral(toJSValue(newColorName, newColor)); + return j.memberExpression( + j.identifier("colors"), + /\W/.test(newColorName) + ? 
j.literal(newColorName) + : j.identifier(newColorName), + ); + }); + } else { + // console.log("skipping"); + return content; + } +} + +function replaceCSSColorVariables(content) { + const lines = content.split("\n"); + return content.replace(CSS_VAR_REGEX, (variable, index) => { + const match = variable.match(/^var\(--color-(.*)\)$/); + if (match && colors[match[1]]) { + // already references a color, don't change it + return variable; + } + const color = resolveCSSVariableColor(variable); + if (color) { + const palette = paletteForCSSProperty(cssPropertyAtIndex(lines, index)); + const [newColorName, newColor] = getBestCandidate(Color(color), palette); + return toCSSValue(newColorName, newColor); + } else { + return variable; + } + }); +} + +function resolveCSSVariableColor(value) { + try { + if (value) { + if (COLOR_REGEX.test(value)) { + return Color(value); + } + const colorVarMatch = value.match(CSS_COLOR_VAR_REGEX); + if (colorVarMatch) { + const color = resolveCSSVariableColor(cssVariables[colorVarMatch[1]]); + if (color) { + const shade = parseFloat(colorVarMatch[2]) / 100; + return Color(color).mix(Color("black"), shade); + } + } + const varMatch = value.match(CSS_SIMPLE_VAR_REGEX); + if (varMatch) { + const color = resolveCSSVariableColor(cssVariables[varMatch[1]]); + if (color) { + return color; + } + } + } + } catch (e) { + console.warn(e); + } + return null; +} + +function processFiles(files) { + for (const file of files) { + let content = fs.readFileSync(file, "utf-8"); + try { + if (/\.css/.test(file)) { + content = replaceCSSColorVariables(replaceCSSColorValues(content)); + } else if (/\.jsx?/.test(file)) { + let newContent = replaceJSColorValues(content); + if (newContent !== content && !/\/colors.js/.test(file)) { + newContent = ensureHasColorsImport(newContent); + } + content = newContent; + } else { + console.warn("unknown file type", file); + } + fs.writeFileSync(file, content); + } catch (e) { + console.log("failed to process", file, e); + } + 
} + + // do this last so we don't replace them + prependCSSVariablesBlock(); + prependJSVariablesBlock(); +} + +function ensureHasColorsImport(content) { + // TODO: implement + return content; +} + +function prependCSSVariablesBlock() { + const colorsVarsBlock = ` +/* NOTE: DO NOT ADD COLORS WITHOUT EXTREMELY GOOD REASON AND DESIGN REVIEW + * NOTE: KEEP SYNCRONIZED WITH COLORS.JS + */ +:root { +${Object.entries(colors) + .map(([name, color]) => ` ${varForName(name)}: ${color};`) + .join("\n")} +}\n\n`; + + const content = fs.readFileSync(COLORS_CSS_PATH, "utf-8"); + if (content.indexOf("NOTE: DO NOT ADD COLORS") < 0) { + fs.writeFileSync(COLORS_CSS_PATH, colorsVarsBlock + content); + } +} + +function prependJSVariablesBlock() { + // TODO: remove window.colors and inject `import colors from "metabase/lib/colors";` in each file where it's required + const colorsVarsBlock = ` +// NOTE: DO NOT ADD COLORS WITHOUT EXTREMELY GOOD REASON AND DESIGN REVIEW +// NOTE: KEEP SYNCRONIZED WITH COLORS.CSS +const colors = window.colors = ${JSON.stringify(colors, null, 2)}; +export default colors;\n\n`; + + const content = fs.readFileSync(COLORS_JS_PATH, "utf-8"); + if (content.indexOf("NOTE: DO NOT ADD COLORS") < 0) { + const anchor = "export const brand = "; + fs.writeFileSync( + COLORS_JS_PATH, + content.replace(anchor, colorsVarsBlock + anchor), + ); + } +} + +function run() { + const fileGlob = process.argv[2] || FILE_GLOB; + glob( + path.join(__dirname, "..", fileGlob), + { ignore: FILE_GLOB_IGNORE }, + (err, files) => { + if (err) { + console.error(err); + } else { + processFiles(files); + } + }, + ); +} + +run(); diff --git a/bin/docker/build_image.sh b/bin/docker/build_image.sh index 83cb7b729003..25007e8b6e5b 100755 --- a/bin/docker/build_image.sh +++ b/bin/docker/build_image.sh @@ -1,4 +1,6 @@ -#!/bin/bash +#! /usr/bin/env bash + +set -e BASEDIR=$(dirname $0) PROJECT_ROOT="$BASEDIR/../.." 
@@ -26,8 +28,8 @@ if [ "$4" == "--latest" ]; then LATEST="YES" fi -if [ "$PUBLISH" == "YES" ] && [ -z "$DOCKERHUB_EMAIL" -o -z "$DOCKERHUB_USERNAME" -o -z "$DOCKERHUB_PASSWORD" ]; then - echo "In order to publish an image to Dockerhub you must set \$DOCKERHUB_EMAIL, \$DOCKERHUB_USERNAME and \$DOCKERHUB_PASSWORD before running." +if [ "$PUBLISH" == "YES" ] && [ -z "$DOCKERHUB_USERNAME" -o -z "$DOCKERHUB_PASSWORD" ]; then + echo "In order to publish an image to Dockerhub you must set \$DOCKERHUB_USERNAME and \$DOCKERHUB_PASSWORD before running." exit 1 fi @@ -77,7 +79,7 @@ if [ "$PUBLISH" == "YES" ]; then echo "Publishing image ${DOCKER_IMAGE} to Dockerhub" # make sure that we are logged into dockerhub - docker login --email="${DOCKERHUB_EMAIL}" --username="${DOCKERHUB_USERNAME}" --password="${DOCKERHUB_PASSWORD}" + docker login --username="${DOCKERHUB_USERNAME}" --password="${DOCKERHUB_PASSWORD}" # push the built image to dockerhub docker push ${DOCKER_IMAGE} @@ -99,4 +101,3 @@ fi rm -f ${BASEDIR}/metabase.jar echo "Done" - diff --git a/bin/docker/run_metabase.sh b/bin/docker/run_metabase.sh index 8d719dab26f6..915c463a4d50 100755 --- a/bin/docker/run_metabase.sh +++ b/bin/docker/run_metabase.sh @@ -110,7 +110,11 @@ export MB_DB_FILE=$new_db_dir/$(basename $db_file) chown metabase:metabase $new_db_dir $new_db_dir/* 2>/dev/null # all that fussing makes this safe # Setup Java Options -JAVA_OPTS="${JAVA_OPTS} -Dlogfile.path=target/log -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -server" +JAVA_OPTS="${JAVA_OPTS} -XX:+IgnoreUnrecognizedVMOptions" +JAVA_OPTS="${JAVA_OPTS} -Dfile.encoding=UTF-8" +JAVA_OPTS="${JAVA_OPTS} -Dlogfile.path=target/log" +JAVA_OPTS="${JAVA_OPTS} -server" +JAVA_OPTS="${JAVA_OPTS} --add-modules=java.xml.bind" # needed for Java 9 (Oracle VM only) because java.xml.bind is no longer on SE classpath by default since it's EE if [ ! 
-z "$JAVA_TIMEZONE" ]; then JAVA_OPTS="${JAVA_OPTS} -Duser.timezone=${JAVA_TIMEZONE}" diff --git a/bin/i18n/build-translation-frontend-resource b/bin/i18n/build-translation-frontend-resource new file mode 100755 index 000000000000..6ac27035b3eb --- /dev/null +++ b/bin/i18n/build-translation-frontend-resource @@ -0,0 +1,72 @@ +#!/usr/bin/env node + +// This program compiles a ".po" translations file to a JSON version suitable for use on the frontend +// It removes strings that aren't used on the frontend, and other extraneous information like comments + +const fs = require("fs"); +const _ = require("underscore"); +const gParser = require("gettext-parser"); + +// NOTE: this function replace xgettext "{0}" style references with c-3po "${ 0 }" style references +function replaceReferences(str) { + return str.replace(/\{ *(\d+) *\}/g, "${ $1 }"); +} + +if (process.argv.length !== 4) { + console.log( + "USAGE: build-translation-frontend-resource input.po output.json" + ); + process.exit(1); +} + +const inputFile = process.argv[2]; +const outputFile = process.argv[3]; + +console.log(`Compiling ${inputFile} for frontend...`); + +const translationObject = gParser.po.parse(fs.readFileSync(inputFile, "utf-8")); + +// NOTE: unsure why the headers are duplicated in a translation for "", but we don't need it +delete translationObject.translations[""][""]; + +let fuzzyCount = 0; +let emptyCount = 0; +for (const id in translationObject.translations[""]) { + const translation = translationObject.translations[""][id]; + const { reference, flag } = translation.comments || {}; + // delete the translation, we'll add it back in if needed + delete translationObject.translations[""][id]; + if ( + // only include translations used on the frontend + !/(^|\n)frontend\//.test(reference) + ) { + continue; + } + // don't include empty translations + if (_.isEqual(translation.msgstr, [""])) { + emptyCount++; + continue; + } + // don't include fuzzy translations + if (flag === "fuzzy") { + 
fuzzyCount++; + continue; + } + // remove comments + delete translation.comments; + // delete msgid since it's redundant, we have to add it back in on the frontend though + delete translation.msgid; + // replace references in translations + translation.msgstr = translation.msgstr.map(str => replaceReferences(str)); + // replace references in msgid + translationObject.translations[""][replaceReferences(id)] = translation; +} + +if (emptyCount > 0) { + console.warn(`+ Warning: removed ${emptyCount} empty translations`); +} +if (fuzzyCount > 0) { + console.warn(`+ Warning: removed ${fuzzyCount} fuzzy translations`); +} + +fs.writeFileSync(outputFile, JSON.stringify(translationObject), "utf-8"); diff --git a/bin/i18n/build-translation-resources b/bin/i18n/build-translation-resources new file mode 100755 index 000000000000..a968e3169974 --- /dev/null +++ b/bin/i18n/build-translation-resources @@ -0,0 +1,48 @@ +#!/bin/sh + +set -eu + +# gettext installed via homebrew is "keg-only", add it to the PATH +if [ -d "/usr/local/opt/gettext/bin" ]; then + export PATH="/usr/local/opt/gettext/bin:$PATH" +fi + +POT_NAME="locales/metabase.pot" +LOCALES=$(find locales -type f -name "*.po" -exec basename {} .po \;) + +if [ -z "$LOCALES" ]; then + LOCALES_QUOTED="" +else + LOCALES_QUOTED=" $(echo "$LOCALES" | awk '{ printf "\"%s\" ", $0 }')" +fi + +FRONTEND_LANG_DIR="resources/frontend_client/app/locales" + +# backend +# NOTE: include "en" even though we don't have a .po file for it because it's the default? 
+cat << EOF > "resources/locales.clj" +{ + :locales #{"en"$LOCALES_QUOTED} + :packages ["metabase"] + :bundle "metabase.Messages" +} +EOF + +mkdir -p "$FRONTEND_LANG_DIR" + +for LOCALE in $LOCALES; do + LOCALE_FILE="locales/$LOCALE.po" + # frontend + # NOTE: just copy these for now, but eventually precompile from .po to .json + ./bin/i18n/build-translation-frontend-resource \ + "$LOCALE_FILE" \ + "$FRONTEND_LANG_DIR/$LOCALE.json" + + # backend + msgfmt \ + --java2 \ + -d "resources" \ + -r "metabase.Messages" \ + -l "$LOCALE" \ + "$LOCALE_FILE" +done diff --git a/bin/i18n/update-translation b/bin/i18n/update-translation new file mode 100755 index 000000000000..afa3da9a36af --- /dev/null +++ b/bin/i18n/update-translation @@ -0,0 +1,22 @@ +#!/bin/sh + +set -eu + +# gettext installed via homebrew is "keg-only", add it to the PATH +if [ -d "/usr/local/opt/gettext/bin" ]; then + export PATH="/usr/local/opt/gettext/bin:$PATH" +fi + +POT_NAME="locales/metabase.pot" +PO_NAME="locales/$1.po" + +if [ $# -lt 1 ]; then + echo "USAGE: update-translation en_US" + exit 1 +fi + +if [ -f "$PO_NAME" ]; then + exec msgmerge -U "$PO_NAME" "$POT_NAME" +else + exec msginit -i "$POT_NAME" -o "$PO_NAME" -l "$1" +fi diff --git a/bin/i18n/update-translation-template b/bin/i18n/update-translation-template new file mode 100755 index 000000000000..15eba26961d1 --- /dev/null +++ b/bin/i18n/update-translation-template @@ -0,0 +1,59 @@ +#!/bin/sh + +set -eu + +# gettext installed via homebrew is "keg-only", add it to the PATH +if [ -d "/usr/local/opt/gettext/bin" ]; then + export PATH="/usr/local/opt/gettext/bin:$PATH" +fi + +# check xgettext is installed +if ! command -v xgettext > /dev/null; then + echo 'Please install the "xgettext" command (e.x. 
`brew install gettext`)' + exit 1 +fi + +POT_NAME="locales/metabase.pot" +POT_BACKEND_NAME="locales/metabase-backend.pot" +POT_FRONTEND_NAME="locales/metabase-frontend.pot" + +mkdir -p "locales" + +# update frontend pot + +# NOTE: about twice as fast to call babel directly rather than a full webpack build +BABEL_ENV=extract ./node_modules/.bin/babel -q -x .js,.jsx -o /dev/null frontend/src +# BABEL_ENV=extract BABEL_DISABLE_CACHE=1 yarn run build + +# NOTE: replace c-3po's "${ 0 }" style references with xgettext "{0}" style references for consistency +sed -i".bak" -E 's/\$\{ *([0-9]+) *\}/{\1}/g' "$POT_FRONTEND_NAME" +rm "$POT_FRONTEND_NAME.bak" + +# update backend pot + +# xgettext before 0.19 does not understand --add-location=file. Even CentOS +# 7 ships with an older gettext. We will therefore generate full location +# info on those systems, and only file names where xgettext supports it +LOC_OPT=$(xgettext --add-location=file -f - /dev/null 2>&1 && echo --add-location=file || echo --add-location) + +find src -name "*.clj" | xgettext \ + --from-code=UTF-8 \ + --language=lisp \ + --copyright-holder='Metabase ' \ + --package-name="metabase" \ + --msgid-bugs-address="docs@metabase.com" \ + -k \ + -kmark:1 -ki18n/mark:1 \ + -ktrs:1 -ki18n/trs:1 \ + -ktru:1 -ki18n/tru:1 \ + -ktrun:1,2 -ki18n/trun:1,2 \ + -ktrsn:1,2 -ki18n/trsn:1,2 \ + $LOC_OPT \ + --add-comments --sort-by-file \ + -o $POT_BACKEND_NAME -f - + +sed -i".bak" 's/charset=CHARSET/charset=UTF-8/' "$POT_BACKEND_NAME" +rm "$POT_BACKEND_NAME.bak" + +# merge frontend and backend pots +msgcat "$POT_FRONTEND_NAME" "$POT_BACKEND_NAME" > "$POT_NAME" diff --git a/bin/i18n/update-translations b/bin/i18n/update-translations new file mode 100755 index 000000000000..85dfefe8e29f --- /dev/null +++ b/bin/i18n/update-translations @@ -0,0 +1,7 @@ +#!/bin/sh + +set -eu + +./bin/i18n/update-translation-template + +find locales -name "*.po" -exec sh -c './bin/i18n/update-translation $(basename {} .po)' \; diff --git 
a/bin/lib/codemod.js b/bin/lib/codemod.js new file mode 100644 index 000000000000..7ef4bfe676fd --- /dev/null +++ b/bin/lib/codemod.js @@ -0,0 +1,140 @@ +const j = require("jscodeshift"); + +function getPropertyName(path) { + const parent = path.parentPath.value; + if (parent.type === "Property") { + if (parent.key.type === "Identifier") { + return parent.key.name; + } else if (parent.key.type === "Literal") { + return parent.key.value; + } + } + if (parent.type === "JSXAttribute") { + return parent.name.name; + } +} + +function splitMatches(string, regex) { + const results = []; + let current = 0; + string.replace(regex, (match, index) => { + results.push(string.slice(current, index)); + results.push(match); + current = index + match.length; + }); + results.push(string.slice(current)); + return results; +} + +function extractMatches( + string, + regex, + replacer = str => j.stringLiteral(str), + quasis = [], + expressions = [], +) { + const components = splitMatches(string, regex); + for (let cIndex = 0; cIndex < components.length; cIndex++) { + if (cIndex % 2) { + expressions.push(replacer(components[cIndex])); + } else { + const quasi = j.templateElement( + { cooked: components[cIndex], raw: components[cIndex] }, + false, + ); + quasis.push(quasi); + } + } + return components.length > 1; +} + +function makeTemplate(quasis, expressions) { + if ( + quasis.length === 2 && + quasis[0].value.raw === "" && + quasis[1].value.raw === "" + ) { + return expressions[0]; + } else { + return j.templateLiteral(quasis, expressions); + } +} + +exports.replaceStrings = function replaceStrings(source, regex, replacer) { + const root = j(source, { parser: require("flow-parser") }); + root + .find(j.Literal) + .filter( + path => + // match only string literals + typeof path.value.value === "string" && + // don't match strings that are property keys + !( + path.parentPath.value.type && path.parentPath.value.key == path.value + ), + ) + .replaceWith(path => { + const stringValue = 
path.value.value; + const propertyName = getPropertyName(path); + + const quasis = []; + const expressions = []; + if ( + extractMatches( + stringValue, + regex, + str => replacer(str, propertyName), + quasis, + expressions, + ) + ) { + const value = makeTemplate(quasis, expressions); + // wrap non string literals in JSXExpressionContainer + if ( + path.parentPath.value.type === "JSXAttribute" && + (value.type !== "Literal" || typeof value.value !== "string") + ) { + return j.jsxExpressionContainer(value); + } else { + return value; + } + } else { + return path.value; + } + }); + root + .find(j.TemplateLiteral) + // .filter(path => typeof path.value.value.raw === "string") + .replaceWith(path => { + const propertyName = getPropertyName(path); + + let modified = false; + const quasis = []; + const expressions = []; + + for (let qIndex = 0; qIndex < path.value.quasis.length; qIndex++) { + const quasiValue = path.value.quasis[qIndex].value.raw; + if ( + extractMatches( + quasiValue, + regex, + str => replacer(str, propertyName), + quasis, + expressions, + ) + ) { + modified = true; + } + if (qIndex < path.value.expressions.length) { + expressions.push(path.value.expressions[qIndex]); + } + } + + if (modified) { + return makeTemplate(quasis, expressions); + } else { + return path.value; + } + }); + return root.toSource(); +}; diff --git a/bin/reflection-linter b/bin/reflection-linter index c661f4b91ef5..86adf6b88d32 100755 --- a/bin/reflection-linter +++ b/bin/reflection-linter @@ -1,13 +1,13 @@ -#!/usr/bin/env bash +#! /usr/bin/env bash -echo -e "\e[1;34mChecking for reflection warnings. This may take a few minutes, so sit tight...\e[0m" +printf "\e[1;34mChecking for reflection warnings. This may take a few minutes, so sit tight...\e[0m\n" -warnings=`lein check-reflection-warnings 2>&1 | grep Reflection | grep metabase | uniq` +warnings=`lein with-profile +ci check-reflection-warnings 2>&1 | grep Reflection | grep metabase | sort | uniq` if [ ! 
-z "$warnings" ]; then - echo -e "\e[1;31mYour code has cased introduced some reflection warnings.\e[0m 😞" + printf "\e[1;31mYour code has introduced some reflection warnings.\e[0m 😞\n" echo "$warnings"; exit -1; fi -echo -e "\e[1;32mNo reflection warnings! Success.\e[0m" +printf "\e[1;32mNo reflection warnings! Success.\e[0m\n" diff --git a/bin/start b/bin/start index 8301a047b22a..4ea1d92aeba6 100755 --- a/bin/start +++ b/bin/start @@ -82,4 +82,43 @@ if [ ! -z "$RDS_HOSTNAME" ]; then export MB_DB_PORT=$RDS_PORT fi -exec java -Dfile.encoding=UTF-8 $JAVA_OPTS -jar ./target/uberjar/metabase.jar +# Determine whether we're on Heroku on a free, hobby, 1x dyno or 2x dyno +# +# We set $HEROKU in the Procfile, so we know we're on Heroku when started from the +# Procfile. +# +# We need to override the $JAVA_OPTS and give it a slightly lower memory limit +# because Heroku tends to think we can use more memory than we actually can. + +if [ -n "$HEROKU" ]; then + echo " -> Heroku detected" + if [ `ulimit -u` = 256 ]; then + # free, hobby or 1x dyno, it defaults to giving us 300m but that still ends + # up going over the 512MB limit for the dyno. + echo " => 1x dyno" + JAVA_OPTS="$JAVA_OPTS -Xmx248m" # This seems to be the right amount that prevents the dyno from going over the quota + fi + if [ `ulimit -u` = 512 ]; then + # 2x dyno, it defaults to giving us 800m but that still ends + # up going over the 1024MB limit for the dyno. + echo " => 2x dyno" + JAVA_OPTS="$JAVA_OPTS -Xmx496m" # This seems to be the right amount that prevents the dyno from going over the quota + fi + + # Set a few other additional options to minimize memory usage as well. + JAVA_OPTS="$JAVA_OPTS -XX:-UseGCOverheadLimit" # Disable limit to amount of time spent in GC. 
Better slow than not working at all + JAVA_OPTS="$JAVA_OPTS -XX:+UseConcMarkSweepGC" # ConcMarkSweepGC seems to cause less OOM issues in my testing on low-mem Heroku envs + JAVA_OPTS="$JAVA_OPTS -XX:+CMSClassUnloadingEnabled" # Not 100% sure this does anything in Java 8 but if it does, we want to enable it + JAVA_OPTS="$JAVA_OPTS -XX:+UseCompressedOops" # Use 32-bit pointers. Reduces memory usage and GC events + JAVA_OPTS="$JAVA_OPTS -XX:+UseCompressedClassPointers" # Same as above. See also http://blog.leneghan.com/2012/03/reducing-java-memory-usage-and-garbage.html +fi + +# Other Java options +JAVA_OPTS="$JAVA_OPTS -XX:+IgnoreUnrecognizedVMOptions" # Don't barf if we see an option we don't understand (e.g. Java 9 option on Java 7/8) +JAVA_OPTS="$JAVA_OPTS -Djava.awt.headless=true" # don't try to start AWT. Not sure this does anything but better safe than wasting memory +JAVA_OPTS="$JAVA_OPTS -Dfile.encoding=UTF-8" # Use UTF-8 +JAVA_OPTS="$JAVA_OPTS --add-modules=java.xml.bind" # Enable access to java.xml.bind module (Java 9) + +echo "Using these JAVA_OPTS: ${JAVA_OPTS}" + +exec java $JAVA_OPTS -jar ./target/uberjar/metabase.jar diff --git a/bin/version b/bin/version index 57e96205b733..d23c5f3eb292 100755 --- a/bin/version +++ b/bin/version @@ -1,6 +1,6 @@ #!/usr/bin/env bash -VERSION="v0.26.1" +VERSION="v0.31.0-snapshot" # dynamically pull more interesting stuff from latest git commit HASH=$(git show-ref --head --hash=7 head) # first 7 letters of hash should be enough; that's what GitHub uses diff --git a/circle.yml b/circle.yml deleted file mode 100644 index 42dfbb60e542..000000000000 --- a/circle.yml +++ /dev/null @@ -1,44 +0,0 @@ -machine: - timezone: - America/Los_Angeles - java: - version: - openjdk7 - node: - version: 6.7.0 - services: - - docker -dependencies: - override: - - lein deps - - npm install -g 'yarn@>=0.16.0' - # Forces the Sauce Connect binary to be downloaded during dependencies phase so it's cached - - SAUCE_CONNECT_DOWNLOAD_ON_INSTALL=true 
yarn - - mkdir plugins - cache_directories: - - "~/.yarn" - - "~/.yarn-cache" -database: - post: - # MySQL doesn't load named timezone information automatically, you have to run this command to load it - # TODO - we only really need to do this step if we're testing against MySQL - - mysql_tzinfo_to_sql /usr/share/zoneinfo | mysql -u ubuntu mysql -test: - override: - - ./bin/ci: - parallel: true -deployment: - master: - branch: master - commands: - - ./bin/deploy-webhook $DEPLOY_WEBHOOK -general: - artifacts: - - target/uberjar/metabase.jar - - screenshots -experimental: - notify: - branches: - only: - - master - - /release-.*/ diff --git a/docs/administration-guide/01-managing-databases.md b/docs/administration-guide/01-managing-databases.md index fd6a932cbcda..acea451c050a 100644 --- a/docs/administration-guide/01-managing-databases.md +++ b/docs/administration-guide/01-managing-databases.md @@ -1,10 +1,10 @@ ## Managing Databases -If you already connected your database during the installation, you’ve probably a covered a lot of this info. But if you need to add another database or manage the settings of the one you already have connected, just click the circle with your initials in the top right of Metabase and select the **Admin Panel**. +If you already connected your database during the installation, you’ve probably a covered a lot of this territory. But if you need to add another database or manage the settings of one you've already have connected, just click the settings icon in the top right of Metabase and select **Admin**. ![profiledropdown](images/ProfileDropdown.png) -Cool, now you’re in the admin panel. Next, select **Databases** from the menu bar at the top of the screen to see your databases. +Cool, now you’re in the administration section of Metabase. Next, select **Databases** from the menu bar at the top of the screen to see your databases. ### Adding a Database Connection @@ -24,6 +24,7 @@ Now you’ll see a list of your databases. 
To connect another database to Metaba * [Vertica](databases/vertica.md) * Presto * Google Analytics +* [SparkSQL](databases/spark.md) To add a database, you'll need its connection information. diff --git a/docs/administration-guide/02-setting-up-email.md b/docs/administration-guide/02-setting-up-email.md index 98bd2bbc926b..c3398b24a39c 100644 --- a/docs/administration-guide/02-setting-up-email.md +++ b/docs/administration-guide/02-setting-up-email.md @@ -45,5 +45,5 @@ You should see this form: --- -## Next: editing your metadata -Taking just a few minutes to edit and add info to your database’s metadata can greatly enhance your experience with Metabase. Let’s learn [how to edit your metadata](03-metadata-editing.md). +## Next: setting up Slack +If you want to use Slack to enhance the Metabase experience then let's do that now. Let’s learn [how to set up Slack](09-setting-up-slack.md). diff --git a/docs/administration-guide/03-metadata-editing.md b/docs/administration-guide/03-metadata-editing.md index ecda1bad02a5..bff10187eaf2 100644 --- a/docs/administration-guide/03-metadata-editing.md +++ b/docs/administration-guide/03-metadata-editing.md @@ -91,8 +91,15 @@ Another option is custom remapping, which is currently only possible for numeric ![Remapping form](./images/remapping/custom-mapping.png) +### Picking the filter UI for a field + +Metabase will automatically try to pick the best kind of filter interface for each field based on that field's type and the number of different values in it. Fields with only a few possible choices, like a `Gender` field, will display a dropdown list by default when filtering on them; fields with more than 100 possible selections will show a search box with autocomplete. + +If Metabase picked the wrong kind of filter UI for one of your fields, you can manually change it. 
You can choose from a drop down list, a search box, or just a plain input box: + +![Filter options](./images/filter-options.png) --- -## Next: managing users -Let’s learn how to add, remove, and edit users in the [managing users section](04-managing-users.md). +## Next: creating segments and metrics +Learn how to create canonical definitions of your commonly used [segments and metrics](07-segments-and-metrics.md). diff --git a/docs/administration-guide/04-managing-users.md b/docs/administration-guide/04-managing-users.md index 013985fe52d4..ee0f2e8e0fbc 100644 --- a/docs/administration-guide/04-managing-users.md +++ b/docs/administration-guide/04-managing-users.md @@ -1,6 +1,6 @@ ## Managing User Accounts -To start managing users, first go to the **Admin Panel** by clicking on the dropdown menu in the top right of Metabase and selecting Admin Panel. +To start managing users, first go to the **Metabase Admin** section by clicking on the dropdown menu in the top right of Metabase and selecting Admin. ![Profile dropdown](images/ProfileDropdown.png) @@ -13,11 +13,13 @@ To add a new user account, click **Add person** in the upper right corner. You If you’ve already [configured Metabase to use email](02-setting-up-email.md), Metabase will send the new user an invite email. Otherwise, it’ll give you a temporary password that you’ll have to send to the person you’re inviting by hand. -### Removing a user -To delete a user's account, click on the three dots icon on the right of a user’s row and select **Remove** from the dropdown. Deleting an account will mark it as inactive and prevent it from being used in the future - but it *won’t* delete that user's saved questions or dashboards. +### Deactivating a user +To deactivate a user's account, click on the three dots icon on the right of a user’s row and select **Deactivate** from the dropdown. 
Deactivating an account will mark it as inactive and prevent the user from logging in - but it *won’t* delete that user's saved questions or dashboards. ![Remove a user](images/RemoveUser.png) +To reactivate a deactivated user, click on the Deactivated tab at the top of the list of your users to see the list of inactive users. Click on the icon on the far right to reactivate that user, allowing them to log in to Metabase again. + ### Editing a user You can edit a user’s name and email address by clicking the three dots icon and choosing **Edit Details**. Note: be careful when changing a user’s email address, because *this will change the address they’ll use to log in to Metabase*. @@ -25,11 +27,16 @@ A user can always reset their password using the forgot password link on the login screen, but if you want to do this for them, just click the three dots icon and choose Reset Password. If you haven’t configured your email settings yet, you’ll be given a temporary password that you’ll have to share with that user. Otherwise, they’ll receive a password reset email. ### Changing a user’s role -Right now, the only role a user can have is either User or Admin. The only difference is that Admins can access the Admin Panel and make changes there. +Right now, the only special role a user can have is Admin. The only difference is that Admins can access the Admin Panel and make changes there, and can set [permissions on collections](06-collections.md). + +To make a user an admin, click on the Groups dropdown and click the check mark next to the Administrators group. + +### Adding users to Groups +Adding users to groups allows you to assign [data access](05-setting-permissions.md) and [collection permissions](06-collections.md) to them. 
The next two articles in this guide will teach you how to set those permissions, but to add users to one or more groups, just click the Groups dropdown and click the checkboxes next to the group(s) you want to add the user to. -To change a user’s role, just click on it to open a dropdown and make your selection. +Check out this article for more on [creating and managing user groups](05-setting-permissions.md). --- -## Next: setting data permissions -Find out how to create user groups and define what data they can access in the [next section](05-setting-permissions.md). +## Next: Single Sign-On +Learn how to [configure Single Sign-On](10-single-sign-on.md) to let users sign in or sign up with just a click. diff --git a/docs/administration-guide/05-setting-permissions.md b/docs/administration-guide/05-setting-permissions.md index 82662c684e25..ad54d5234401 100644 --- a/docs/administration-guide/05-setting-permissions.md +++ b/docs/administration-guide/05-setting-permissions.md @@ -8,6 +8,8 @@ Metabase uses a group-based approach to set permissions and restrictions on your A user can be a member of multiple groups, and if one of the groups they’re in has access to a particular database or table, but another group they’re a member of does not, then they **will** have access to that database. +In addition to setting permissions on your databases and tables, you can also [set access permissions on the collections](06-collections.md) where your dashboards, questions, and pulses are saved. Collection permissions are not set from the Admin section; set them by clicking on the edit/pencil icon in the top-right of the screen when viewing a collection. + ### Groups To view and manage your groups, go to the Admin Panel, click on the People section, and then click on Groups from the side menu. 
@@ -20,7 +22,10 @@ You’ll notice that you already have two default groups: Administrators and All You’ll also see that you’re a member of the **Administrators** group — that’s why you were able to go to the Admin Panel in the first place. So, to make someone an admin of Metabase you just need to add them to this group. Metabase admins can log into the Admin Panel and make changes there, and they always have unrestricted access to all data that you have in your Metabase instance. So be careful who you add to the Administrator group! -The **All Users** group is another special one. Every Metabase user is always a member of this group, though they can also be a member of as many other groups as you want. We recommend using the All Users group as a way to set default access levels for new Metabase users. If you have [Google single sign-on](10-single-sign-on.md) enabled, new users who join that way will be automatically added to the All Users group. (**Important note:** as we mentioned above, a user is given the *most permissive* setting she has for a given database/schema/table across *all* groups she is in. Because of that, it is important that your All Users group should never have *greater* access for an item than a group for which you're trying to restrict access — otherwise the more permissive setting will win out.) +The **All Users** group is another special one. Every Metabase user is always a member of this group, though they can also be a member of as many other groups as you want. We recommend using the All Users group as a way to set default access levels for new Metabase users. If you have [Google single sign-on](10-single-sign-on.md) enabled, new users who join that way will be automatically added to the All Users group. + +#### An important note on the All Users group +As we mentioned above, a user is given the *most permissive* setting she has for a given database/schema/table across *all* groups she is in. 
Because of that, it is important that your All Users group should never have *greater* access for an item than a group for which you're trying to restrict access — otherwise the more permissive setting will win out. This goes for both data access as well as [collection permission](06-collections.md) settings. If you’ve set up the [Slack integration](09-setting-up-slack.md) and enabled [Metabot](../users-guide/11-metabot.md), you’ll also see a special **Metabot** group, which will allow you to restrict which questions your users will be able to access in Slack via Metabot. @@ -77,4 +82,4 @@ Pulses act a bit differently with regard to permissions. When a user creates a n --- ## Next: custom segments and metrics -Learn how to create collections of questions to organize things and decide who gets to see what in the [next section](06-collections.md). +Metabase lets you create and set permissions on collections of dashboards and questions. [Learn how](06-collections.md). diff --git a/docs/administration-guide/06-collections.md b/docs/administration-guide/06-collections.md index f7e504ca7974..f9f92c831902 100644 --- a/docs/administration-guide/06-collections.md +++ b/docs/administration-guide/06-collections.md @@ -1,44 +1,63 @@ ## Creating Collections for Your Saved Questions --- -Collections are a great way to organize your saved questions and decide who gets to see and edit things. Collections could be things like, "Important Metrics," "Marketing KPIs," or "Questions about users." Multiple [user groups](05-setting-permissions.md) can be given access to the same collections, so we don't necessarily recommend naming collections after user groups. +![Collection detail](images/collections/collection-detail.png) + +Collections are a great way to organize your dashboards, saved questions, and pulses, and to decide who gets to see and edit things. Collections could be things like, "Important Metrics," "Product Team," "Marketing KPIs," or "Questions about users." 
Collections can even contain other collections, allowing you to create an organizational structure that fits your team. You can also choose which user groups should have what level of access to your collections (more on that below). + +Metabase starts out with a default top-level collection which is called "Our analytics," which every other collection is saved inside of. This page will teach you how to create and manage your collections. For more information on organizing saved questions and using collections, [check out this section of the User's Guide](../users-guide/06-sharing-answers.md). ### Creating and editing collections -Only administrators of Metabase can create and edit collections. From the Questions section of Metabase, click on the `Create a collection` button. Give your collection a name, choose a color for it, and give it a description if you'd like. +If a user has Curate access for a collection, they can create new sub-collections inside it and edit the contents of the collection. From the detail view of any collection, click on the `Create a collection` button to make a new one. Give your collection a name, choose where it should live, and give it a description if you'd like. + +![Create collection](images/collections/create-collection.png) + +By default, new collections will have the same permissions settings as the collection it was created in (its "parent" collection), but you can change those settings from the Edit menu. + +### Pinning things in collections +![Pins](images/collections/pinned-items.png) + +One great feature in Metabase is that you can pin the most important couple of items in each of your collections to the top. Pinning an item in a collection turns it into a big, eye-catching card that will help make sure that folks who are browsing your Metabase instance will always know what's most important. 
-![Permissions empty state](images/collections/collections-empty-state.png) +Any user with curate permissions for a collection can pin items in it, making it easy to delegate curation responsibilities to other members of your team. To pin something, you can either click and drag it to the top of the page, or click on its menu and choose the pin action. (Note that collections themselves can't be pinned.) ### Setting permissions for collections -Collection permissions are similar to data permissions. Rather than going to the Admin Panel, you set permissions on collections by clicking on the lock icon in the top-right of the Questions screen or the top-right of a collection screen. +Collection permissions are similar to [data access permissions](05-setting-permissions.md). Rather than going to the Admin Panel, you set permissions on collections by clicking on the sharing icon in the top-right of the screen while viewing the collection and clicking on `Edit permissions`. Only Administrators can edit collection permissions. Each [user group](05-setting-permissions.md) can have either View, Curate, or No access to a collection: -![Permissions grid](images/collections/permission-grid.png) +- **View access:** the user can see all the questions, dashboards, and pulses in the collection. If the user does not have permission to view some or all of the questions included in a given dashboard or pulse then those questions will not be visible to them; but any questions that are saved in this collection *will* be visible to them, *even if the user doesn't have access to the underlying data used in the question.* +- **Curate access:** the user can edit, move, and archive items saved in this collection, and can save or move new items into it. They can also create new sub-collections within this collection. In order to archive a sub-collection within this collection, they'll need to have Curate access for it and any and all collections within it. 
+- **No access:** the user won't see this collection listed, and doesn't have access to any of the items saved within it. -You'll see a table with your user groups along the top and all your collections down along the left. A user group can have View access, Curate access, or no access to a given collection. +![Permissions](images/collections/collection-permissions.png) -- View access: can see all the questions in the collection, **even if the user doesn't have access to the underlying data used to create the question.** -- Curate access: can additionally move questions in or out of the collection, and edit the questions in the collection. -- No access: won't see the collection listed on the Questions page, and can't see questions from this collection in dashboards or when creating a Pulse. +If you want to see the bigger picture of what permissions your user groups have for all your collections, just click the link that says `See all collection permissions`. You'll see a table with your user groups along the top and all your collections down along the left. Click the `View collections` link under any collection that contains more collections to zoom in and see its contents: -Just like with data access permissions, collection permissions are *additive*, meaning that if a user belongs to more than one group, if one of their groups has a more restrictive setting for a collection than another one of their groups, they'll be given the *more permissive* setting. This is especially important to remember when dealing with the All Users group: since all users are members of this group, if you give the All Users group Curate access to a collection, then *all* users will be given that access for that collection, even if they also belong to a group with *less* access than that. 
+![Full permissions grid](images/collections/permission-grid.png) -### The "Everything Else" section -If a question isn't saved within a collection, it will be placed in the Everything Else section of the main Questions page. **All your Metabase users can see questions in this section**, provided they have data access permission. +Just like with data access permissions, collection permissions are *additive*, meaning that if a user belongs to more than one group, if one of their groups has a more restrictive setting for a collection than another one of their groups, they'll be given the *more permissive* setting. This is especially important to remember when dealing with the All Users group: since all users are members of this group, if you give the All Users group Curate access to a collection, then *all* users will be given Curate access for that collection, even if they also belong to a group with *less* access than that. -### Archiving collections -You can archive collections similarly to how you can archive questions. Click the archive icon in the top-right of the collection screen to archive it. This will also archive all questions in the collection, and importantly it will also remove all of those questions from all dashboards and Pulses that use those questions. So be careful! +### Permissions and sub-collections +One nuance with how collection permissions work has to do with sub-collections. A user group can be given access to a collection located somewhere within one or more sub-collections *without* having to have access to every collection "above" it. E.g., if a user group had access to the "Super Secret Collection" that's saved several layers deep within a "Marketing" collection that the group does *not* have access to, the "Super Secret Collection" would show up at the top-most level that the group *does* have access to. 
+ +### Personal collections +![Personal collections](images/collections/personal-collections.png) + +Each user has a personal collection where they're always allowed to save things, even if they don't have Curate permissions for any other collections. Administrators can see and edit the contents of every user's personal collection (even those belonging to other Administrators) by clicking on the "All personal collections" link from the "Our analytics" collection. -To restore a collection and its contents, click the `View Archive` icon in the top-right of the main Questions screen to see the archive, then hover over an item to reveal the `Unarchive` icon on the far right of the item. Questions within archived collections are not individually listed in the archive, so if you want to unarchive a specific question from an archived collection, you have to unarchive that whole collection. +A personal collection works just like any other collection except that its permissions can't be changed. If a sub-collection within a personal collection is moved to a different collection, it will inherit the permissions of that collection. -### What about labels? -Older versions of Metabase provided labels as a way to organize and filter saved questions. If you were already using labels, you'll still be able to edit and use them for now from the Labels dropdown on lists of saved questions. However, **labels will be removed from Metabase in an upcoming version.** If your instance of Metabase was not using labels previously, you won't see the label tools at all anymore. +![Personal collection detail](images/collections/personal-collection-detail.png) + +### Archiving collections +Users with curate permission for a collection can archive collections. Click the edit icon in the top-right of the collection screen and select `Archive this collection` to archive it. This will also archive all questions, dashboards, pulses, and all other sub-collections and their contents. 
Importantly, this will also remove any archived questions from all dashboards and Pulses that use them. -What should you do if you want to prepare for the impending removal of labels? We recommend creating collections that match your most important labels, and moving the matching labeled questions into those collections. +**Note:** the "Our analytics" collection and personal collections can't be archived. -If you don't want to remove all the labels from your questions yet, we recommend at least ensuring that none of your questions have more than a single label. That way, if in the future we provide a migration tool that converts labels to collections automatically, there won't be any ambiguity with your labels. +You can always *unarchive* things by clicking on the More menu from a collection and selecting `View the archive`, then clicking the un-archive button next to an archived item. Questions within archived collections are not individually listed in the archive, so if you want to unarchive a specific question from an archived collection, you have to unarchive that whole collection. --- -## Next: custom segments and metrics -Learn how to define custom segments and commonly referenced metrics in the [next section](07-segments-and-metrics.md). +## Next: data sandboxing +To set even more advanced data permissions based on user attributes, check out [data sandboxes](17-data-sandboxes.md). diff --git a/docs/administration-guide/07-segments-and-metrics.md b/docs/administration-guide/07-segments-and-metrics.md index 312c5ed34d05..7a9a78176b04 100644 --- a/docs/administration-guide/07-segments-and-metrics.md +++ b/docs/administration-guide/07-segments-and-metrics.md @@ -27,7 +27,7 @@ A custom metric is an easy way to refer to a computed number that you reference So, you create a custom metric in a very similar way to how you create segments: start by clicking on the **Add a Metric** link from a table’s detail view in the Admin Panel. 
![Add metric](images/AddMetric.png) -Here your presented with a slightly different version of the query builder, which only lets you select filters and aggregations. Filters are optional: a metric only requires an aggregation on a field. Note that you can use segments in the definition of metrics — pretty cool, right? Go ahead and select your filters, if any, and choose your aggregation. Give your metric a name and a description, and click **Save changes** when you’re done. Just like with segments, you can use the **Preview** button to see how your metric looks in the query builder before you save it. +Here you're presented with a slightly different version of the query builder, which only lets you select filters and aggregations. Filters are optional: a metric only requires an aggregation on a field. Note that you can use segments in the definition of metrics — pretty cool, right? Go ahead and select your filters, if any, and choose your aggregation. Give your metric a name and a description, and click **Save changes** when you’re done. Just like with segments, you can use the **Preview** button to see how your metric looks in the query builder before you save it. ![abc](images/CreateMetric.png) Your new metric will now be available from the View dropdown in the query builder, under **Common Metrics**. @@ -46,5 +46,5 @@ Lastly, you can also view the revision history for each segment and metric from --- -## Next: configuring Metabase -There are a few other settings you configure in Metabase. [Learn how](08-configuration-settings.md). +## Next: managing user accounts +Let's learn how to [create and manage accounts for your users](04-managing-users.md). 
diff --git a/docs/administration-guide/08-configuration-settings.md b/docs/administration-guide/08-configuration-settings.md index 363b52dbf94a..0bad0e6362fb 100644 --- a/docs/administration-guide/08-configuration-settings.md +++ b/docs/administration-guide/08-configuration-settings.md @@ -9,14 +9,19 @@ How you’d like to refer to this instance of Metabase. The base URL of this Metabase instance. The base URL is used in emails to allow users to click through to their specific instance. Make sure to include http:// or https:// to make sure it’s reachable. ### Report Timezone -The **report timezone** sets the default time zone for displaying times. The timezone is used when breaking out data by dates. +The **report timezone** sets the default time zone for displaying times. The timezone is used when breaking out data by dates. -*Setting the default timezone will not change the timezone of any data in your database*. If the underlying times in your database aren't assigned to a timezone, Metabase will use the report timezone as the default timezone. +*Setting the default timezone will not change the timezone of any data in your database*. If the underlying times in your database aren't assigned to a timezone, Metabase will use the report timezone as the default timezone. ### Anonymous Tracking This option turns determines whether or not you allow anonymous data about your usage of Metabase to be sent back to us to help us improve the product. *Your database’s data is never tracked or sent*. +### Friendly Table and Field Names +By default, Metabase attempts to make field and table names more readable by changing things like `somehorriblename` to `Some Horrible Name`. This does not work well for languages other than English, or for fields that have lots of abbreviations or codes in them. If you'd like to turn this setting off, you can do so from the Admin Panel under Settings > General > Friendly Table and Field Names. 
+ +To manually fix field or table names if they still look wrong, you can go to the Metadata section of the Admin Panel, select the database that contains the table or field you want to edit, select the table, and then edit the name(s) in the input boxes that appear. + --- -## Next: Setting up Slack -If you want to use Slack to enhance the Metabase experience then lets do that now. Let’s learn [how to setup Slack](09-setting-up-slack.md). +## Next: caching query results +Metabase makes it easy to [automatically cache results](14-caching.md) for queries that take a long time to run. diff --git a/docs/administration-guide/09-setting-up-slack.md b/docs/administration-guide/09-setting-up-slack.md index 023a0e36436a..1127a2893b8c 100644 --- a/docs/administration-guide/09-setting-up-slack.md +++ b/docs/administration-guide/09-setting-up-slack.md @@ -15,13 +15,13 @@ You should see this form: Then just click on the large and conveniently placed button `Create a Slack Bot User for Metabot` which will open a new browser tab and send you over to Slack to create the Bot user account. -Click over to the tab that was opened and you'll now be on the Slack Bot creation page. +Click over to the tab that was opened and you'll now be on the Slack Bot creation page. ![Slack API Auth](images/SlackAPIAuth.png) Now give the Bot user a helpful name (we suggest `Metabot`) and click the `Add bot integration` button and a bot user will be generated for you. Look for the Bot's API token in the next page. It will look like `xoxp-etc-etc-etc` and all you need to do is copy that value and head back to Metabase. -Paste the value into the text box for `Slack API Token` and click the button to save your changes. +Paste the value into the text box for `Slack API Token` and click the button to save your changes. Now go to Slack and create a new channel named `metabase_files`. Due to the Slack api, we'll need this to attach graphs to pulses and Metabot answers. @@ -29,5 +29,5 @@ That's it! 
Metabase will automatically run a quick test to check that the API t --- -## Next: Single Sign-On -Learn how to [configure Single Sign-On](10-single-sign-on.md) to let users sign in or sign up with just a click. +## Next: configuring Metabase +There are a few other settings you configure in Metabase. [Learn how](08-configuration-settings.md). diff --git a/docs/administration-guide/10-single-sign-on.md b/docs/administration-guide/10-single-sign-on.md index 142030502425..43f3920a78fe 100644 --- a/docs/administration-guide/10-single-sign-on.md +++ b/docs/administration-guide/10-single-sign-on.md @@ -4,6 +4,8 @@ Enabling Google Sign-In or LDAP lets your team log in with a click instead of us ![Authentication](./images/authentication.png) +If you'd like to have your users authenticate with SAML, we offer a paid feature that lets you do just that. [Learn more about authenticating with SAML](16-authenticating-with-saml.md) + As time goes on we may add other auth providers. If you have a service you’d like to see work with Metabase please let us know by [filing an issue](http://github.com/metabase/metabase/issues/new). ### Enabling Google Sign-In @@ -37,13 +39,13 @@ Click the `Configure` button in the LDAP section of the Authentication page, and Click the toggle at the top of the form to enable LDAP, then fill in the form with the information about your LDAP server. -Metabase will pull out three main attributes from your LDAP directory - email (defaulting to the `mail` attribute), first name (defaulting to the `givenName` attribute) and last name (defaulting to the `sn` attribute). If your LDAP setup uses other attributes for these, you can edit this under the "Attributes" portion of the form. +Metabase will pull out three main attributes from your LDAP directory - email (defaulting to the `mail` attribute), first name (defaulting to the `givenName` attribute) and last name (defaulting to the `sn` attribute). 
If your LDAP setup uses other attributes for these, you can edit this under the "Attributes" portion of the form. ![Attributes](./images/ldap-attributes.png) -If you have user groups in Metabase you are using to control access, it is often tedious to have to manually assign a user to a group after they're logged in via SSO. You can take advantage of the groups your LDAP directory uses by enabling Group Mappings, and specifying which LDAP group corresponds to which user group on your Metabase server. +If you have user groups in Metabase you are using to control access, it is often tedious to have to manually assign a user to a group after they're logged in via SSO. You can take advantage of the groups your LDAP directory uses by enabling Group Mappings, and specifying which LDAP group corresponds to which user group on your Metabase server. --- -## Next: Creating a Getting Started Guide -Learn how to easily [make a Getting Started Guide](11-getting-started-guide.md) for your team. +## Next: Authenticating with SAML +If you use a SAML-based identity provider for SSO, [learn how to connect it to Metabase](16-authenticating-with-saml.md). \ No newline at end of file diff --git a/docs/administration-guide/11-getting-started-guide.md b/docs/administration-guide/11-getting-started-guide.md index 1ed5c5fd879a..e4cbfcf74d2a 100644 --- a/docs/administration-guide/11-getting-started-guide.md +++ b/docs/administration-guide/11-getting-started-guide.md @@ -1,23 +1,3 @@ -## Creating a Getting Started Guide +## Getting Started Guide -In most places we’ve worked, there’s typically an email or a Google doc that that gets forwarded around to new hires that describes how to use the analytics systems available. Some more sophisticated setups use an internal wiki or other website that has an inventory of what’s available. But we believe that the best way to keep information like this current is to have it be documented in the application itself. 
Metabase now lets you create a cheatsheet to help new users know which dashboards, metrics, and reports are the most important. It also provides a place to document caveats for use, advice on who to contact for help, and more. - -To get started, click on the `Guide` link in the main nav. You'll see a screen like this: - -![Blank guide](images/gsg/blank-guide.png) - -Before you've even created your guide, this page gives you some links that you can use to explore the data you have in Metabase. But for now, click the button to begin making your guide. Now you'll see a list of sections that you can include in your guide: - -![Sections](images/gsg/sections.png) - -You can highlight your company's most important dashboard, [metrics](07-segments-and-metrics.md) that you commonly refer to (and the dimensions by which they're most often grouped), and tables and [segments](07-segments-and-metrics.md) that are useful or interesting. There's also a place to write a little bit more about "gotchas" or caveats with your data that your users should know about before they start exploring things and drawing conclusions. Lastly, you can optionally include an email address for your users to contact in case they're still confused about things. - -If you click on a section, it'll expand and let you select the items that you want to include in that section: - -![Picking items](images/gsg/pick-items.png) - -To remove an item you've added, just click the X icon in the top-right of the item. When you're all done adding things to your guide, click the `Save` button in the blue bar at the top of the screen. To make edits to your guide, simply click the `Edit` link in the top-right of the guide. Once you click `Save`, you'll see your brand new Getting Started Guide! - -![Finished Guide](images/gsg/finished-guide.png) - -Clicking on the title of an item you've included in your guide will take you to the item itself. 
Clicking on `Learn more` will take you to a more detailed entry about that item so you can explore it and related items in more detail. +The Getting Started Guide was removed from Metabase in version 0.30. Instead, a great way of helping your teammates find their way around Metabase is by pinning the most important dashboards or questions in each of your collections to make it clear what's most important. diff --git a/docs/administration-guide/12-public-links.md b/docs/administration-guide/12-public-links.md index eca21a5d07e1..490b29fb5f70 100644 --- a/docs/administration-guide/12-public-links.md +++ b/docs/administration-guide/12-public-links.md @@ -12,4 +12,7 @@ Next, exit the Admin Panel and go to the dashboard or question that you want to ### Copy, paste, and share! Now just copy and share the public link URL with whomever you please. If you want to embed your dashboard or question in a simple web page or blog post, then copy and paste the iframe snippet to your destination of choice. +--- + +## Next: embedding dashboards and charts in other applications If you're trying to do more complex, integrated embedding in your own web application, then you can check out the [documentation for that feature](13-embedding.md). diff --git a/docs/administration-guide/13-embedding.md b/docs/administration-guide/13-embedding.md index 950d545bfe4f..92f6caf1196e 100644 --- a/docs/administration-guide/13-embedding.md +++ b/docs/administration-guide/13-embedding.md @@ -30,9 +30,9 @@ You can also see all questions and dashboards that have been marked as "Embeddab Once you've enabled the embedding feature on your Metabase instance, you should then go to the individual questions and dashboards you wish to embed to set them up for embedding. 
-### Embedding Charts and Dashboards +### Embedding charts and dashboards -To mark a given question or dashboard, click on the sharing icon +To make a question or dashboard embeddable, click the sharing icon on it: ![Share icon](images/embedding/02-share-icon.png) @@ -51,12 +51,12 @@ Importantly, you will need to hit "Publish" when you first set up a chart or da We provide code samples for common front end template languages as well as some common back-end web frameworks and languages. You may also use these as starting points for writing your own versions in other platforms. -### Embedding Charts and Dashboards with locked parameters +### Embedding charts and dashboards with locked parameters If you wish to have a parameter locked down to prevent your embedding application's end users from seeing other users' data, you can mark parameters as "Locked."Once a parameter is marked as Locked, it is not displayed as a filter widget, and must be set by the embedding application's server code. ![Locked parameters](images/embedding/06-locked.png) -### Resizing Dashboards to fit their content +### Resizing dashboards to fit their content Dashboards are a fixed aspect ratio, so if you'd like to ensure they're automatically sized vertically to fit their contents you can use the [iFrame Resizer](https://github.com/davidjbradshaw/iframe-resizer) script. Metabase serves a copy for convenience: ``` @@ -65,3 +65,7 @@ Dashboards are a fixed aspect ratio, so if you'd like to ensure they're automati ### Reference applications To see concrete examples of how to embed Metabase in applications under a number of common frameworks, check out our [reference implementations](https://github.com/metabase/embedding-reference-apps) on Github. + + +## Premium embedding +If you'd like to embed Metabase dashboards or charts in your application without the "Powered by Metabase" attribution, you can purchase premium embedding from the Metabase store. 
[Find out more here](https://store.metabase.com/product/embedding). diff --git a/docs/administration-guide/14-caching.md b/docs/administration-guide/14-caching.md index 963a0bd28bd0..0ffb783c20bc 100644 --- a/docs/administration-guide/14-caching.md +++ b/docs/administration-guide/14-caching.md @@ -19,3 +19,8 @@ Instead of setting an absolute number of minutes or seconds for a cached result #### Max cache entry size Lastly, you can set the maximum size of each question's cache in kilobytes, to prevent them from taking up too much space on your server. + +--- + +## Next: customize how Metabase looks +With just a few clicks, you can [change Metabase's color palette, logo, and more](15-whitelabeling.md). diff --git a/docs/administration-guide/15-whitelabeling.md b/docs/administration-guide/15-whitelabeling.md new file mode 100644 index 000000000000..080ab1135499 --- /dev/null +++ b/docs/administration-guide/15-whitelabeling.md @@ -0,0 +1,23 @@ +## White labeling Metabase + +**This feature is available in the Enterprise edition of Metabase** + +White labeling lets you customize the way Metabase looks so that it matches your company’s branding. Go to the Admin Panel and click on the White Labeling section to start customizing things. Here’s what you can do: + +### Change the name of the application +You can change every place in the app that says “Metabase” to something like “Acme Analytics,” or whatever you want to call your Metabase app. + +### Logo +You can replace Metabase’s familiar, tasteful, inspired-yet-not-threateningly-avant-garde dotted M logo with your very own. For things to work best, the logo you upload should be an SVG file that looks good when it’s around 60px tall. (In other words, ask the nearest designer for help.) + +### Change the color palette +You can customize the colors that Metabase uses throughout the app: +* **Primary color:** by default, this is the lovely blue that is used for the nav bar, links, buttons, and more. 
+* **Nav bar color:** you can optionally specify a separate color for the nav bar; otherwise it defaults to the Primary color. +* **Accent colors:** these are the colors that are used for some icons, secondary buttons, elements of the graphical query builder, and chart colors. +* **Additional chart colors:** as the name implies, these are additional colors that are included in the charting options to give your lines, bars, and pie slices some extra pizazz. + +--- + +## Next: editing your metadata +Taking just a few minutes to edit and add info to your database’s metadata can greatly enhance your experience with Metabase. Let’s learn [how to edit your metadata](03-metadata-editing.md). diff --git a/docs/administration-guide/16-authenticating-with-saml.md b/docs/administration-guide/16-authenticating-with-saml.md new file mode 100644 index 000000000000..67f05ae5560d --- /dev/null +++ b/docs/administration-guide/16-authenticating-with-saml.md @@ -0,0 +1,39 @@ +## Authenticating with SAML + +**This feature is available in the Enterprise edition of Metabase** + +Connecting Metabase to your SAML identity provider lets your team access Metabase with ease through SSO. + +### Enabling SAML authentication +First, head over to the Settings section of the Admin Panel, then click on the Authentication tab. Click the `Configure` button in the SAML section of the Authentication page, and you'll see this form: + +![SAML form](images/saml-form.png) + +Click the toggle at the top of the form to enable SAML authentication, then fill in the form with the information about your identity provider. **Make sure to turn this on**, otherwise SAML-based authentication won't work, even if all of your settings are right. + +Here's a breakdown of each of the settings: + +**Identity Provider (IDP) URI:** This is where Metabase will redirect login requests. That is, it's where your users go to log in to your SSO.
+ +**Identity Provider Certificate:** This is an encoded certificate that we will use when connecting to the IDP URI. This will look like a big blob of text that you'll want to copy and paste carefully — the spacing is important! + +#### Settings for signing SSO requests (optional) +These are additional settings you can fill in to sign SSO requests to ensure they don’t get tampered with. + +**SAML keystore path:** the absolute path to the keystore file to use for signing SAML requests. + +**SAML keystore password:** if it wasn't already self-evident, this is just the password for opening the keystore. + +**SAML keystore alias:** the alias for the key that Metabase should use for signing SAML requests. + +#### Settings for user attribute configuration (optional) +These settings allow Metabase to automatically get each user's email address and first and last name. + +The settings that Metabase defaults to here might work for you out of the box, but you can override them if you know that your settings are different. + +Each of these input boxes needs a URI that points to the location of a SAML attribute. + +--- + +## Next: Authenticating with JWT +If SAML isn't your bag, you can also use a JSON web token based identity provider. [Learn how](18-authenticating-with-jwt.md). diff --git a/docs/administration-guide/17-data-sandboxes.md b/docs/administration-guide/17-data-sandboxes.md new file mode 100644 index 000000000000..b4d1188c57bf --- /dev/null +++ b/docs/administration-guide/17-data-sandboxes.md @@ -0,0 +1,111 @@ +## Sandboxing your data + +**This feature is available in the Enterprise edition of Metabase** + +Say you have users who you want to be able to log into your Metabase instance, but who should only be able to view data that pertains to them. For example, you might have some customers or partners who you want to let view your Orders table, but you only want them to see their orders.
Metabase has a feature called sandboxing that lets you do just that. + + +The way it works is that you pick a table that you want to sandbox for users in a certain group, then customize how exactly you want to filter that table for those users. For this to work in most cases you’ll first need to add attributes to your users so that Metabase will know how to filter things for them specifically. + +### Getting user attributes +There are two ways to add attributes to your users: + +1. Get them automatically by connecting to your SAML Single Sign-On (SSO) provider. For instructions on connecting SAML SSO to Metabase, [check out this article](16-authenticating-with-saml.md). +2. You can also add attributes manually to a user by going to the People section of the Admin Panel, and clicking on the “…” menu on the far right of a user’s name in the table you’ll see there. Click on Edit Details from that menu to add and edit a user’s attributes. + +Now that your users have attributes, you’ll be able to sandbox tables, and automatically filter them based on these user attributes. + +### Filtering a sandboxed table +Metabase gives you two options for filtering a sandboxed table: + +#### Option 1: filter using a column in the table +The simplest way to filter a sandboxed table is to pick a column in the sandboxed table and match it up with a user attribute so that any time a user with sandboxed access to this table views it, they’ll only see rows in the table where that column’s value is equal to the value that user has for that attribute. + +#### Option 2: create a custom view of the table with a saved question +If you’re trying to do something more custom or complex, Metabase also gives you the option of creating a custom view for a sandboxed table using a saved question. You can also use variables in a saved SQL/native question and map those to user attributes to do even more sophisticated filtering. 
As an example, you might have columns in your Orders table that you don’t want any of your customers to see, so you could create a SQL-based saved question which only returns the columns you want them to see. That question could also have a variable in its `where` clause that you could map to a user attribute, like `where orders.user_id = {user_id_attr_var}` to additionally filter the question based on each user’s user ID attribute. + +#### An example setup +That was a mouthful, so here’s an example. We’ll sandbox our Orders table so that any user in our Customers group will only be able to see rows in the Orders table where the Customer ID column matches the user’s customer_id attribute. + +First we’ve made sure our example user has an attribute that we’ll be able to use in our filter: + +![User details](images/sandboxing/edit-user-details.png) + +Then we’ll head over to the Permissions section of the Admin Panel, and we’ll click on View Tables next to the Sample Dataset to see the permissions our user groups have for the tables in this database. We want to give the Customers group sandboxed access to the Orders table, so we’ll click on that box in the permissions grid and choose “Grant sandboxed access:” + +![Grant sandboxed access](images/sandboxing/grant-sandboxed-access.png) + +Metabase will ask us first if we want to restrict this user group to “limited access” to this database. That just means they won’t have full access to all of the tables in this database, which is exactly what we want. + +![Confirm modal](images/sandboxing/change-access-confirm-modal.png) + +Next we’ll see a worksheet that will ask us how we want to filter this table for these users. We’ll leave it on the default selection. Below that, there’s an area where we get to add our filters. We want to filter using the User ID column in the Orders table where the column equals each user’s user_id attribute. So we’ll select that column and that user attribute from the dropdown menus. 
At the bottom of the worksheet, there’s a summary of how things will work. + +![Sandbox settings](images/sandboxing/select-user-attribute.png) + +We’ll click Done, then we’ll click Save Changes at the top of the screen to save the changes we’ve made to our permissions. If we ever want to edit how this table should be filtered for users in this group, we can just click on the blue box and select “Edit sandboxed access.” + +![Edit access](images/sandboxing/edit-sandboxed-access.png) + +To test this out, we’ll open up a new incognito browser window and log in with our test user account. We’ll click on the Sample Dataset on the home page and then pick the Orders table. As you can see here, this user correctly only sees orders where the User ID column is equal to 1, because that’s what this user’s user_id attribute is. + +![Filtered table](images/sandboxing/filtered-table.png) + +If this user views any charts, dashboards, or even automated x-ray explorations that include this sandboxed Orders data, those will also be correctly filtered to only show the data they’re allowed to see. + +Another great thing about sandboxing is that this user can still use all of the easy and powerful exploration and charting features of Metabase to explore this sandboxed data. For example, they can create a chart like this one to see a breakdown of their orders by product type: + +![Filtered pie chart](images/sandboxing/filtered-pie-chart.png) + + +#### Advanced sandbox examples +As we mentioned above, the second way you can create a sandbox is by using a saved question to define a customized view of a table to display. When a user with sandboxed access to a table queries that table, behind the scenes they'll really be using that saved question as the source data for their query. 
+ +**Example 1: hiding specific columns** +In this example I have a table called `People` that I want users in my Marketing team to be able to see, but I don't want them to see most of these sensitive columns that have personal information in them: + +![Original People table](images/sandboxing/advanced-example-1-people-table.png) + +So what I can do is create a query that only returns the columns in that table that I *do* want them to see, like this: + +![Filtering question](images/sandboxing/advanced-example-1-filtering-question.png) + +Now, when I go to the Permissions section and grant this group sandboxed access to this table, I'll select the second option and select the saved question I just created, like so: + +![Sandbox options](images/sandboxing/advanced-example-1-sandbox-modal.png) + +To verify things are working correctly, I'll log in as a test user in the Marketing group, and when I go to open up the `People` table, you'll see that I actually am shown the results of the filtering question instead: + +![Sandboxed results](images/sandboxing/advanced-example-1-results.png) + +**Note:** this filtering will also happen when a user with sandboxed access goes to look at a chart that uses data from the sandboxed table. If the chart uses any columns that aren't included in the sandboxed version of the table, the chart will not load for that user. + +**Example 2: using variables in a saved question** +To create even more powerful and nuanced filters, you can use variables in a filtering question in conjunction with user attributes. + +In this example, I'm going to give users in a specific group access to my `Orders` table, but I'll filter out which columns they can see, and I'll also make it so that they only see rows where the "Customer ID" column equals the user's `customer_id` attribute. 
+ +Here's the table I'm going to filter: + +![Original Orders table](images/sandboxing/advanced-example-2-orders-table.png) + +The filtering question that I'll create will exclude columns that I don't want these users to see, and I'll also add in an optional `WHERE` clause which defines a variable, `cid`, that I can then reference in my sandbox. Here's what it looks like: + +![Filtering question](images/sandboxing/advanced-example-2-filtering-question.png) + +Going back over to the Permissions section, when I open up the sandboxed access modal and select the second option and select my filtering question, I'll see an additional section which allows me to map the variable I defined in my question with a user attribute: + +![Sandboxing options](images/sandboxing/advanced-example-2-sandboxing-options.png) + +My user's attribute is defined like this, and I got here by clicking on the `…` icon next to this user's name in the People section: + +![User attributes](images/sandboxing/advanced-example-2-user-attributes.png) + +Now, when I log in as this user and look at the `Orders` table, I only see the columns I included in the filtering question, and the rows are filtered as I specified in my `WHERE` clause: + +![Results](images/sandboxing/advanced-example-2-results.png) + +--- + +## Next: sharing and embedding with public links +Want to share certain dashboards or questions with the world? You can do that with [public links](12-public-links.md). diff --git a/docs/administration-guide/18-authenticating-with-jwt.md b/docs/administration-guide/18-authenticating-with-jwt.md new file mode 100644 index 000000000000..8cb426ee6635 --- /dev/null +++ b/docs/administration-guide/18-authenticating-with-jwt.md @@ -0,0 +1,32 @@ +## JWT-based Authentication + +**This feature is available in the Enterprise edition of Metabase** + +You can connect Metabase to your JWT-based identity provider to allow your Metabase users to authenticate through it. 
+ +### Enabling JWT authentication +First, navigate to the Settings section of the Admin area, then click on the Authentication tab. Click the `Configure` button in the JWT section of this page, and you'll see this form: + +![JWT form](images/JWT-auth-form.png) + +Click the toggle at the top of the form to enable JWT-based authentication, then fill in the form with the information about your identity provider. **Make sure to turn this on**, otherwise JWT authentication won't work, even if all of your settings are right. + +Here's a breakdown of each of the settings: + +**Identity Provider URI:** This is where Metabase will redirect login requests. That is, it's where your users go to log in through your identity provider. + +**String Used by the JWT Signing Key:** This is a string used to seed the private key that is used to validate JWT messages. + +#### User attribute configuration (optional) +These are additional settings you can fill in to pass user attributes to Metabase. + +**Email attribute:** the key to retrieve each JWT user's email address. + +**First Name attribute:** the key to retrieve each JWT user's first name. + +**Last Name attribute:** if you guessed that this is the key to retrieve each JWT user's last name, well then you have been paying attention. + +--- + +## Next: setting data permissions +Find out how to create user groups and define what data they can access in the [next section](05-setting-permissions.md). diff --git a/docs/administration-guide/databases/bigquery.md b/docs/administration-guide/databases/bigquery.md index 58f3576f6678..833636f59522 100644 --- a/docs/administration-guide/databases/bigquery.md +++ b/docs/administration-guide/databases/bigquery.md @@ -21,9 +21,12 @@ Starting in v0.15.0 Metabase provides a driver for connecting to BigQuery direct Metabase will now begin inspecting your BigQuery Dataset and finding any tables and fields to build up a sense for the schema.
Give it a little bit of time to do its work and then you're all set to start querying. -## Using Standard SQL +## Using Legacy SQL -By default, Metabase tells BigQuery to interpret queries as [Legacy SQL](https://cloud.google.com/bigquery/docs/reference/legacy-sql). If you prefer using -[Standard SQL](https://cloud.google.com/bigquery/docs/reference/standard-sql/) instead, you can tell Metabase to do so by including a `#standardSQL` directive at the beginning of your query: +As of version 0.30.0, Metabase tells BigQuery to interpret SQL queries as [Standard SQL](https://cloud.google.com/bigquery/docs/reference/standard-sql/). If you prefer using [Legacy SQL](https://cloud.google.com/bigquery/docs/reference/legacy-sql) instead, you can tell Metabase to do so by including a `#legacySQL` directive at the beginning of your query, for example: -![Enabling Standard SQL](../images/bigquery_standard_sql.png) +```sql +#legacySQL +SELECT * +FROM [my_dataset.my_table] +``` diff --git a/docs/administration-guide/databases/oracle.md b/docs/administration-guide/databases/oracle.md index 7c696262f560..7232641bfa64 100644 --- a/docs/administration-guide/databases/oracle.md +++ b/docs/administration-guide/databases/oracle.md @@ -36,3 +36,24 @@ If you're running Metabase from the Mac App, the plugins directory defaults to ` ``` Finally, you can choose a custom plugins directory if the default doesn't suit your needs by setting the environment variable `MB_PLUGINS_DIR`. + + +### Adding Additional Dependencies with Java 9 + +Java version 9 has introduced a new module system that places some additional restrictions on class loading. To use +Metabase drivers that require extra external dependencies, you'll need to include them as part of the classpath at +launch time. 
Run Metabase as follows: + +```bash +# Unix +java -cp metabase.jar:plugins/* metabase.core +``` + +On Windows, use a semicolon instead: + +```powershell +# Windows +java -cp metabase.jar;plugins/* metabase.core +``` + +The default Docker images use Java 8 so this step is only needed when running the JAR directly. diff --git a/docs/administration-guide/databases/spark.md b/docs/administration-guide/databases/spark.md new file mode 100644 index 000000000000..40dee1ed3787 --- /dev/null +++ b/docs/administration-guide/databases/spark.md @@ -0,0 +1,74 @@ +## Working with SparkSQL in Metabase + +Starting in v0.29.0, Metabase provides a driver for connecting to SparkSQL databases. Under the hood, Metabase uses SparkSQL's +JDBC driver and other dependencies; due to the sheer size of this dependency, we can't include it as part of Metabase. Luckily, downloading it yourself + and making it available to Metabase is straightforward and only takes a few minutes. + +### Downloading the SparkSQL JDBC Driver JAR + +You can download the required dependencies [here](https://s3.amazonaws.com/sparksql-deps/metabase-sparksql-deps-1.2.1.spark2-standalone.jar). + +### Adding the SparkSQL JDBC Driver JAR to the Metabase Plugins Directory + +Metabase will automatically make the SparkSQL driver available if it finds the SparkSQL dependencies JAR in the Metabase plugins +directory when it starts up. All you need to do is create the directory, move the JAR you just downloaded into it, and restart +Metabase. + +By default, the plugins directory is called `plugins`, and lives in the same directory as the Metabase JAR. 
+ +For example, if you're running Metabase from a directory called `/app/`, you should move the SparkSQL dependencies JAR to +`/app/plugins/`: + +```bash +# example directory structure for running Metabase with SparkSQL support +/app/metabase.jar +/app/plugins/metabase-sparksql-deps-1.2.1.spark2-standalone.jar +``` + +If you're running Metabase from the Mac App, the plugins directory defaults to `~/Library/Application Support/Metabase/Plugins/`: + +```bash +# example directory structure for running Metabase Mac App with SparkSQL support +/Users/camsaul/Library/Application Support/Metabase/Plugins/metabase-sparksql-deps-1.2.1.spark2-standalone.jar +``` + +Finally, you can choose a custom plugins directory if the default doesn't suit your needs by setting the environment variable +`MB_PLUGINS_DIR`. + + +### Adding Additional Dependencies with Java 9 + +Java version 9 has introduced a new module system that places some additional restrictions on class loading. To use +Metabase drivers that require extra external dependencies, you'll need to include them as part of the classpath at +launch time. Run Metabase as follows: + +```bash +# Unix +java -cp metabase.jar:plugins/* metabase.core +``` + +On Windows, use a semicolon instead: + +```powershell +# Windows +java -cp metabase.jar;plugins/* metabase.core +``` + +The default Docker images use Java 8 so this step is only needed when running the JAR directly. + + +### Using SparkSQL with a Custom Metabase Build + +The SparkSQL dependencies JAR contains additional classes inside the `metabase` Java package, the same package +the core Metabase code lives in. When multiple JARs include classes in the same package, Java requires them to +be signed with the same signing certificate. The official Metabase JAR and SparkSQL dependencies JAR are signed +with the same certificate, so everything works as expected. 
+ +If you build a custom Metabase JAR, however, Java will refuse to load the SparkSQL dependencies JAR provided +above, because your JAR will not be signed with the same certificate (if you signed it at all). You will need to +build the SparkSQL dependencies JAR yourself, and, if applicable, sign it with the same certificate you signed +your custom Metabase JAR with. + +The SparkSQL dependencies project can be found at +[https://github.com/metabase/sparksql-deps](https://github.com/metabase/sparksql-deps). Instructions for building +the JAR are provided in the README. diff --git a/docs/administration-guide/databases/vertica.md b/docs/administration-guide/databases/vertica.md index a7b1491abccb..4448051aa0d5 100644 --- a/docs/administration-guide/databases/vertica.md +++ b/docs/administration-guide/databases/vertica.md @@ -36,5 +36,25 @@ If you're running Metabase from the Mac App, the plugins directory defaults to ` /Users/camsaul/Library/Application Support/Metabase/Plugins/vertica-jdbc-8.0.0-0.jar ``` -If you are running the Docker image or you want to use another directory for plugins, you should then specify a custom plugins directory by setting the environment variable `MB_PLUGINS_DIR`. +If you are running the Docker image or you want to use another directory for plugins, you should then specify a custom plugins directory by setting the environment variable `MB_PLUGINS_DIR`. + +### Adding Additional Dependencies with Java 9 + +Java version 9 has introduced a new module system that places some additional restrictions on class loading. To use +Metabase drivers that require extra external dependencies, you'll need to include them as part of the classpath at +launch time. 
Run Metabase as follows: + +```bash +# Unix +java -cp metabase.jar:plugins/* metabase.core +``` + +On Windows, use a semicolon instead: + +```powershell +# Windows +java -cp metabase.jar;plugins/* metabase.core +``` + +The default Docker images use Java 8 so this step is only needed when running the JAR directly. diff --git a/docs/administration-guide/images/JWT-auth-form.png b/docs/administration-guide/images/JWT-auth-form.png new file mode 100644 index 000000000000..7961969a6fe7 Binary files /dev/null and b/docs/administration-guide/images/JWT-auth-form.png differ diff --git a/docs/administration-guide/images/ProfileDropdown.png b/docs/administration-guide/images/ProfileDropdown.png index fff0346f64e1..0ee4f72d9c5c 100644 Binary files a/docs/administration-guide/images/ProfileDropdown.png and b/docs/administration-guide/images/ProfileDropdown.png differ diff --git a/docs/administration-guide/images/RemoveUser.png b/docs/administration-guide/images/RemoveUser.png index c453c5138d22..89eafb83b49c 100644 Binary files a/docs/administration-guide/images/RemoveUser.png and b/docs/administration-guide/images/RemoveUser.png differ diff --git a/docs/administration-guide/images/collections/collection-detail.png b/docs/administration-guide/images/collections/collection-detail.png new file mode 100644 index 000000000000..5dd7fe927776 Binary files /dev/null and b/docs/administration-guide/images/collections/collection-detail.png differ diff --git a/docs/administration-guide/images/collections/collection-permissions.png b/docs/administration-guide/images/collections/collection-permissions.png new file mode 100644 index 000000000000..f03d4b6c6578 Binary files /dev/null and b/docs/administration-guide/images/collections/collection-permissions.png differ diff --git a/docs/administration-guide/images/collections/collections-empty-state.png b/docs/administration-guide/images/collections/collections-empty-state.png deleted file mode 100644 index 6fb4d376bdd1..000000000000 Binary 
files a/docs/administration-guide/images/collections/collections-empty-state.png and /dev/null differ diff --git a/docs/administration-guide/images/collections/create-collection.png b/docs/administration-guide/images/collections/create-collection.png new file mode 100644 index 000000000000..390c573dad30 Binary files /dev/null and b/docs/administration-guide/images/collections/create-collection.png differ diff --git a/docs/administration-guide/images/collections/permission-grid.png b/docs/administration-guide/images/collections/permission-grid.png index f8190d72b12f..673084902d24 100644 Binary files a/docs/administration-guide/images/collections/permission-grid.png and b/docs/administration-guide/images/collections/permission-grid.png differ diff --git a/docs/administration-guide/images/collections/personal-collection-detail.png b/docs/administration-guide/images/collections/personal-collection-detail.png new file mode 100644 index 000000000000..a167776a850a Binary files /dev/null and b/docs/administration-guide/images/collections/personal-collection-detail.png differ diff --git a/docs/administration-guide/images/collections/personal-collections.png b/docs/administration-guide/images/collections/personal-collections.png new file mode 100644 index 000000000000..bd557deaa6a1 Binary files /dev/null and b/docs/administration-guide/images/collections/personal-collections.png differ diff --git a/docs/administration-guide/images/collections/pinned-items.png b/docs/administration-guide/images/collections/pinned-items.png new file mode 100644 index 000000000000..0453c64cb496 Binary files /dev/null and b/docs/administration-guide/images/collections/pinned-items.png differ diff --git a/docs/administration-guide/images/filter-options.png b/docs/administration-guide/images/filter-options.png new file mode 100644 index 000000000000..6e728d8dc939 Binary files /dev/null and b/docs/administration-guide/images/filter-options.png differ diff --git 
a/docs/administration-guide/images/saml-form.png b/docs/administration-guide/images/saml-form.png new file mode 100644 index 000000000000..716eefc3682d Binary files /dev/null and b/docs/administration-guide/images/saml-form.png differ diff --git a/docs/administration-guide/images/sandboxing/advanced-example-1-filtering-question.png b/docs/administration-guide/images/sandboxing/advanced-example-1-filtering-question.png new file mode 100644 index 000000000000..708108abcf2d Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-1-filtering-question.png differ diff --git a/docs/administration-guide/images/sandboxing/advanced-example-1-people-table.png b/docs/administration-guide/images/sandboxing/advanced-example-1-people-table.png new file mode 100644 index 000000000000..e9a4c6dcbcc1 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-1-people-table.png differ diff --git a/docs/administration-guide/images/sandboxing/advanced-example-1-results.png b/docs/administration-guide/images/sandboxing/advanced-example-1-results.png new file mode 100644 index 000000000000..a30269f8d0f5 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-1-results.png differ diff --git a/docs/administration-guide/images/sandboxing/advanced-example-1-sandbox-modal.png b/docs/administration-guide/images/sandboxing/advanced-example-1-sandbox-modal.png new file mode 100644 index 000000000000..4a37f0c51604 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-1-sandbox-modal.png differ diff --git a/docs/administration-guide/images/sandboxing/advanced-example-2-filtering-question.png b/docs/administration-guide/images/sandboxing/advanced-example-2-filtering-question.png new file mode 100644 index 000000000000..b7eed672a763 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-2-filtering-question.png differ diff --git 
a/docs/administration-guide/images/sandboxing/advanced-example-2-orders-table.png b/docs/administration-guide/images/sandboxing/advanced-example-2-orders-table.png new file mode 100644 index 000000000000..34b20e2e57c9 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-2-orders-table.png differ diff --git a/docs/administration-guide/images/sandboxing/advanced-example-2-results.png b/docs/administration-guide/images/sandboxing/advanced-example-2-results.png new file mode 100644 index 000000000000..765e380ab49c Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-2-results.png differ diff --git a/docs/administration-guide/images/sandboxing/advanced-example-2-sandboxing-options.png b/docs/administration-guide/images/sandboxing/advanced-example-2-sandboxing-options.png new file mode 100644 index 000000000000..6712c45019b9 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-2-sandboxing-options.png differ diff --git a/docs/administration-guide/images/sandboxing/advanced-example-2-user-attributes.png b/docs/administration-guide/images/sandboxing/advanced-example-2-user-attributes.png new file mode 100644 index 000000000000..395041bc3748 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/advanced-example-2-user-attributes.png differ diff --git a/docs/administration-guide/images/sandboxing/change-access-confirm-modal.png b/docs/administration-guide/images/sandboxing/change-access-confirm-modal.png new file mode 100644 index 000000000000..78dc47f560e9 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/change-access-confirm-modal.png differ diff --git a/docs/administration-guide/images/sandboxing/edit-sandboxed-access.png b/docs/administration-guide/images/sandboxing/edit-sandboxed-access.png new file mode 100644 index 000000000000..9f023f85b009 Binary files /dev/null and 
b/docs/administration-guide/images/sandboxing/edit-sandboxed-access.png differ diff --git a/docs/administration-guide/images/sandboxing/edit-user-details.png b/docs/administration-guide/images/sandboxing/edit-user-details.png new file mode 100644 index 000000000000..d631deebc8e3 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/edit-user-details.png differ diff --git a/docs/administration-guide/images/sandboxing/filtered-pie-chart.png b/docs/administration-guide/images/sandboxing/filtered-pie-chart.png new file mode 100644 index 000000000000..3de98721c37c Binary files /dev/null and b/docs/administration-guide/images/sandboxing/filtered-pie-chart.png differ diff --git a/docs/administration-guide/images/sandboxing/filtered-table.png b/docs/administration-guide/images/sandboxing/filtered-table.png new file mode 100644 index 000000000000..3c6aa4176408 Binary files /dev/null and b/docs/administration-guide/images/sandboxing/filtered-table.png differ diff --git a/docs/administration-guide/images/sandboxing/grant-sandboxed-access.png b/docs/administration-guide/images/sandboxing/grant-sandboxed-access.png new file mode 100644 index 000000000000..f6d36359dfda Binary files /dev/null and b/docs/administration-guide/images/sandboxing/grant-sandboxed-access.png differ diff --git a/docs/administration-guide/images/sandboxing/select-user-attribute.png b/docs/administration-guide/images/sandboxing/select-user-attribute.png new file mode 100644 index 000000000000..df7ccd78c59f Binary files /dev/null and b/docs/administration-guide/images/sandboxing/select-user-attribute.png differ diff --git a/docs/administration-guide/start.md b/docs/administration-guide/start.md index bf6b44a2e719..72db64ccd767 100644 --- a/docs/administration-guide/start.md +++ b/docs/administration-guide/start.md @@ -2,23 +2,35 @@ Are you in charge of managing Metabase for your organization? Then you're in the right spot. You are the chosen one. 
-**This guide will teach you about:** - +**Getting things set up:** * [Connecting Metabase to databases in your organization](01-managing-databases.md) * [Enabling features that send email (SMTP)](02-setting-up-email.md) +* [Setting up Slack integration](09-setting-up-slack.md) +* [Configuring settings](08-configuration-settings.md) +* [Caching query results](14-caching.md) +* [Customizing how Metabase looks with white labeling*](15-whitelabeling.md) + +**Curating your data:** * [Editing your database metadata](03-metadata-editing.md) +* [Creating segments and metrics](07-segments-and-metrics.md) + +**Adding and managing users:** * [Managing user accounts](04-managing-users.md) +* [Authenticating with Google Sign-In or LDAP](10-single-sign-on.md) +* [Authenticating with SAML*](16-authenticating-with-saml.md) +* [Authenticating with JWT*](18-authenticating-with-jwt.md) + +**Setting permissions and access:** * [Setting data permissions](05-setting-permissions.md) * [Creating and managing collections](06-collections.md) -* [Creating segments and metrics](07-segments-and-metrics.md) -* [Configuring settings](08-configuration-settings.md) -* [Setting up Slack integration](09-setting-up-slack.md) -* [Authenticating with Google Sign-In or LDAP](10-single-sign-on.md) -* [Creating a Getting Started Guide for your team](11-getting-started-guide.md) +* [Sandboxing data based on user attributes*](17-data-sandboxes.md) + +**Embedding and sharing with public links:** * [Sharing dashboards and questions with public links](12-public-links.md) * [Embedding Metabase in other Applications](13-embedding.md) -* [Caching query results](14-caching.md) -First things first, you'll need to install Metabase. If you haven’t done that yet, our [Installation Guide](../operations-guide/start.md#installing-and-running-metabase) will help you through the process. +** - Available in the Enterprise edition of Metabase* + +First things first, you'll need to install Metabase. 
If you haven’t done that yet, our [Installation Guide](../operations-guide/start.html#installing-and-running-metabase) will help you through the process. -Already done with that? Then let’s start with going over [connecting Metabase to your database](01-managing-databases.md). +Already done with that? Then let’s start by going over how to [connect Metabase to your database](01-managing-databases.md). diff --git a/docs/api-documentation.md b/docs/api-documentation.md index cc23fffd5a56..d9adda78e32a 100644 --- a/docs/api-documentation.md +++ b/docs/api-documentation.md @@ -1,4 +1,4 @@ -# API Documentation for Metabase v0.24.0-snapshot +# API Documentation for Metabase v0.30.0-snapshot ## `GET /api/activity/` @@ -10,6 +10,246 @@ Get recent activity. Get the list of 10 things the current user has been viewing most recently. +## `DELETE /api/alert/:id` + +Delete an Alert. (DEPRECATED -- don't delete a Alert anymore -- archive it instead.) + +##### PARAMS: + +* **`id`** + + +## `GET /api/alert/` + +Fetch all alerts + +##### PARAMS: + +* **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). + + +## `GET /api/alert/question/:id` + +Fetch all questions for the given question (`Card`) id + +##### PARAMS: + +* **`id`** + + +## `POST /api/alert/` + +Create a new Alert. + +##### PARAMS: + +* **`alert_condition`** value must be one of: `goal`, `rows`. + +* **`card`** value must be a map with the keys `id`, `include_csv`, and `include_xls`. + +* **`channels`** value must be an array. Each value must be a map. The array cannot be empty. + +* **`alert_first_only`** value must be a boolean. + +* **`alert_above_goal`** value may be nil, or if non-nil, value must be a boolean. + +* **`new-alert-request-body`** + + +## `PUT /api/alert/:id` + +Update a `Alert` with ID. + +##### PARAMS: + +* **`id`** + +* **`alert_condition`** value may be nil, or if non-nil, value must be one of: `goal`, `rows`. 
+ +* **`card`** value may be nil, or if non-nil, value must be a map with the keys `id`, `include_csv`, and `include_xls`. + +* **`channels`** value may be nil, or if non-nil, value must be an array. Each value must be a map. The array cannot be empty. + +* **`alert_first_only`** value may be nil, or if non-nil, value must be a boolean. + +* **`alert_above_goal`** value may be nil, or if non-nil, value must be a boolean. + +* **`archived`** value may be nil, or if non-nil, value must be a boolean. + +* **`alert-updates`** + + +## `PUT /api/alert/:id/unsubscribe` + +Unsubscribes a user from the given alert + +##### PARAMS: + +* **`id`** + + +## `GET /api/automagic-dashboards/:entity/:entity-id-or-query` + +Return an automagic dashboard for entity `entity` with id `id`. + +##### PARAMS: + +* **`entity`** Invalid entity type + +* **`entity-id-or-query`** + +* **`show`** invalid show value + + +## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query` + +Return an automagic dashboard analyzing cell in automagic dashboard for entity `entity` + defined by + query `cell-query`. + +##### PARAMS: + +* **`entity`** Invalid entity type + +* **`entity-id-or-query`** + +* **`cell-query`** value couldn't be parsed as base64 encoded JSON + +* **`show`** invalid show value + + +## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/compare/:comparison-entity/:comparison-entity-id-or-query` + +Return an automagic comparison dashboard for cell in automagic dashboard for entity `entity` + with id `id` defined by query `cell-query`; compared with entity `comparison-entity` with id + `comparison-entity-id-or-query.`. + +##### PARAMS: + +* **`entity`** Invalid entity type + +* **`entity-id-or-query`** + +* **`cell-query`** value couldn't be parsed as base64 encoded JSON + +* **`show`** invalid show value + +* **`comparison-entity`** Invalid comparison entity type.
Can only be one of "table", "segment", or "adhoc" + +* **`comparison-entity-id-or-query`** + + +## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule` + +Return an automagic dashboard analyzing cell in question with id `id` defined by + query `cell-query` using rule `rule`. + +##### PARAMS: + +* **`entity`** Invalid entity type + +* **`entity-id-or-query`** + +* **`cell-query`** value couldn't be parsed as base64 encoded JSON + +* **`prefix`** invalid value for prefix + +* **`rule`** invalid value for rule name + +* **`show`** invalid show value + + +## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/cell/:cell-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` + +Return an automagic comparison dashboard for cell in automagic dashboard for entity `entity` + with id `id` defined by query `cell-query` using rule `rule`; compared with entity + `comparison-entity` with id `comparison-entity-id-or-query.`. + +##### PARAMS: + +* **`entity`** Invalid entity type + +* **`entity-id-or-query`** + +* **`cell-query`** value couldn't be parsed as base64 encoded JSON + +* **`prefix`** invalid value for prefix + +* **`rule`** invalid value for rule name + +* **`show`** invalid show value + +* **`comparison-entity`** Invalid comparison entity type. Can only be one of "table", "segment", or "adhoc" + +* **`comparison-entity-id-or-query`** + + +## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/compare/:comparison-entity/:comparison-entity-id-or-query` + +Return an automagic comparison dashboard for entity `entity` with id `id` compared with entity + `comparison-entity` with id `comparison-entity-id-or-query.` + +##### PARAMS: + +* **`entity`** Invalid entity type + +* **`entity-id-or-query`** + +* **`show`** invalid show value + +* **`comparison-entity`** Invalid comparison entity type.
Can only be one of "table", "segment", or "adhoc" + +* **`comparison-entity-id-or-query`** + + +## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule` + +Return an automagic dashboard for entity `entity` with id `id` using rule `rule`. + +##### PARAMS: + +* **`entity`** Invalid entity type + +* **`entity-id-or-query`** + +* **`prefix`** invalid value for prefix + +* **`rule`** invalid value for rule name + +* **`show`** invalid show value + + +## `GET /api/automagic-dashboards/:entity/:entity-id-or-query/rule/:prefix/:rule/compare/:comparison-entity/:comparison-entity-id-or-query` + +Return an automagic comparison dashboard for entity `entity` with id `id` using rule `rule`; + compared with entity `comparison-entity` with id `comparison-entity-id-or-query.`. + +##### PARAMS: + +* **`entity`** Invalid entity type + +* **`entity-id-or-query`** + +* **`prefix`** invalid value for prefix + +* **`rule`** invalid value for rule name + +* **`show`** invalid show value + +* **`comparison-entity`** Invalid comparison entity type. Can only be one of "table", "segment", or "adhoc" + +* **`comparison-entity-id-or-query`** + + +## `GET /api/automagic-dashboards/database/:id/candidates` + +Return a list of candidates for automagic dashboards ordered by interestingness. + +##### PARAMS: + +* **`id`** + + ## `DELETE /api/card/:card-id/favorite` Unfavorite a Card. @@ -21,7 +261,7 @@ Unfavorite a Card. ## `DELETE /api/card/:card-id/public_link` -Delete the publically-accessible link to this Card. +Delete the publicly-accessible link to this Card. You must be a superuser to do this. @@ -32,7 +272,7 @@ You must be a superuser to do this. ## `DELETE /api/card/:id` -Delete a `Card`. +Delete a Card. (DEPRECATED -- don't delete a Card anymore -- archive it instead.) ##### PARAMS: @@ -41,17 +281,9 @@ Delete a `Card`. ## `GET /api/card/`
Option filter param `f` can be used to change the set of Cards that are returned; default is `all`, - but other options include `mine`, `fav`, `database`, `table`, `recent`, `popular`, and `archived`. See corresponding implementation - functions above for the specific behavior of each filter option. :card_index: - - Optionally filter cards by LABEL or COLLECTION slug. (COLLECTION can be a blank string, to signify cards with *no collection* should be returned.) - - NOTES: - - * Filtering by LABEL is considered *deprecated*, as `Labels` will be removed from an upcoming version of Metabase in favor of `Collections`. - * LABEL and COLLECTION params are mutually exclusive; if both are specified, LABEL will be ignored and Cards will only be filtered by their `Collection`. - * If no `Collection` exists with the slug COLLECTION, this endpoint will return a 404. +Get all the Cards. Option filter param `f` can be used to change the set of Cards that are returned; default is + `all`, but other options include `mine`, `fav`, `database`, `table`, `recent`, `popular`, and `archived`. See + corresponding implementation functions above for the specific behavior of each filter option. :card_index: ##### PARAMS: @@ -59,10 +291,6 @@ Get all the `Cards`. Option filter param `f` can be used to change the set of Ca * **`model_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -* **`label`** value may be nil, or if non-nil, value must be a non-blank string. - -* **`collection`** value may be nil, or if non-nil, value must be a string. - ## `GET /api/card/:id` @@ -73,16 +301,26 @@ Get `Card` with ID. * **`id`** +## `GET /api/card/:id/related` + +Return related entities. + +##### PARAMS: + +* **`id`** + + ## `GET /api/card/embeddable` -Fetch a list of Cards where `enable_embedding` is `true`. The cards can be embedded using the embedding endpoints and a signed JWT. +Fetch a list of Cards where `enable_embedding` is `true`. 
The cards can be embedded using the embedding endpoints + and a signed JWT. You must be a superuser to do this. ## `GET /api/card/public` -Fetch a list of Cards with public UUIDs. These cards are publically-accessible *if* public sharing is enabled. +Fetch a list of Cards with public UUIDs. These cards are publicly-accessible *if* public sharing is enabled. You must be a superuser to do this. @@ -93,45 +331,39 @@ Create a new `Card`. ##### PARAMS: -* **`dataset_query`** +* **`visualization_settings`** value must be a map. * **`description`** value may be nil, or if non-nil, value must be a non-blank string. -* **`display`** value must be a non-blank string. +* **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero. -* **`name`** value must be a non-blank string. +* **`result_metadata`** value may be nil, or if non-nil, value must be an array of valid results column metadata maps. -* **`visualization_settings`** value must be a map. +* **`metadata_checksum`** value may be nil, or if non-nil, value must be a non-blank string. * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. +* **`name`** value must be a non-blank string. -## `POST /api/card/:card-id/favorite` - -Favorite a Card. - -##### PARAMS: +* **`dataset_query`** -* **`card-id`** +* **`display`** value must be a non-blank string. -## `POST /api/card/:card-id/labels` +## `POST /api/card/:card-id/favorite` -Update the set of `Labels` that apply to a `Card`. - (This endpoint is considered DEPRECATED as Labels will be removed in a future version of Metabase.) +Favorite a Card. ##### PARAMS: * **`card-id`** -* **`label_ids`** value must be an array. Each value must be an integer greater than zero. - ## `POST /api/card/:card-id/public_link` -Generate publically-accessible links for this Card. Returns UUID to be used in public links. 
- (If this Card has already been shared, it will return the existing public link rather than creating a new one.) - Public sharing must be enabled. +Generate publicly-accessible links for this Card. Returns UUID to be used in public links. (If this Card has + already been shared, it will return the existing public link rather than creating a new one.) Public sharing must + be enabled. You must be a superuser to do this. @@ -155,7 +387,8 @@ Run the query associated with a Card. ## `POST /api/card/:card-id/query/:export-format` -Run the query associated with a Card, and return its results as a file in the specified format. Note that this expects the parameters as serialized JSON in the 'parameters' parameter +Run the query associated with a Card, and return its results as a file in the specified format. Note that this + expects the parameters as serialized JSON in the 'parameters' parameter ##### PARAMS: @@ -168,8 +401,8 @@ Run the query associated with a Card, and return its results as a file in the sp ## `POST /api/card/collections` -Bulk update endpoint for Card Collections. Move a set of `Cards` with CARD_IDS into a `Collection` with COLLECTION_ID, - or remove them from any Collections by passing a `null` COLLECTION_ID. +Bulk update endpoint for Card Collections. Move a set of `Cards` with CARD_IDS into a `Collection` with + COLLECTION_ID, or remove them from any Collections by passing a `null` COLLECTION_ID. ##### PARAMS: @@ -178,6 +411,15 @@ Bulk update endpoint for Card Collections. Move a set of `Cards` with CARD_IDS i * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. +## `POST /api/card/related` + +Return related entities for an ad-hoc query. + +##### PARAMS: + +* **`query`** + + ## `PUT /api/card/:id` Update a `Card`. @@ -190,10 +432,18 @@ Update a `Card`. * **`archived`** value may be nil, or if non-nil, value must be a boolean. 
+* **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`result_metadata`** value may be nil, or if non-nil, value must be an array of valid results column metadata maps. + +* **`metadata_checksum`** value may be nil, or if non-nil, value must be a non-blank string. + * **`enable_embedding`** value may be nil, or if non-nil, value must be a boolean. * **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. +* **`card-updates`** + * **`name`** value may be nil, or if non-nil, value must be a non-blank string. * **`embedding_params`** value may be nil, or if non-nil, value must be a valid embedding params map. @@ -207,11 +457,11 @@ Update a `Card`. ## `GET /api/collection/` -Fetch a list of all Collections that the current user has read permissions for. - This includes `:can_write`, which means whether the current user is allowed to add or remove Cards to this Collection; keep in mind - that regardless of this status you must be a superuser to modify properties of Collections themselves. +Fetch a list of all Collections that the current user has read permissions for (`:can_write` is returned as an + additional property of each Collection so you can tell which of these you have write permissions for.) - By default, this returns non-archived Collections, but instead you can show archived ones by passing `?archived=true`. + By default, this returns non-archived Collections, but instead you can show archived ones by passing + `?archived=true`. ##### PARAMS: @@ -220,12 +470,28 @@ Fetch a list of all Collections that the current user has read permissions for. ## `GET /api/collection/:id` -Fetch a specific (non-archived) Collection, including cards that belong to it. 
+Fetch a specific Collection with standard details added + +##### PARAMS: + +* **`id`** + + +## `GET /api/collection/:id/items` + +Fetch a specific Collection's items with the following options: + + * `model` - only include objects of a specific `model`. If unspecified, returns objects of all models + * `archived` - when `true`, return archived objects *instead* of unarchived ones. Defaults to `false`. ##### PARAMS: * **`id`** +* **`model`** value may be nil, or if non-nil, value must be one of: `card`, `collection`, `dashboard`, `pulse`. + +* **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). + ## `GET /api/collection/graph` @@ -234,12 +500,35 @@ Fetch a graph of all Collection Permissions. You must be a superuser to do this. +## `GET /api/collection/root` + +Return the 'Root' Collection object with standard details added + + +## `GET /api/collection/root/items` + +Fetch objects that the current user should see at their root level. As mentioned elsewhere, the 'Root' Collection + doesn't actually exist as a row in the application DB: it's simply a virtual Collection where things with no + `collection_id` exist. It does, however, have its own set of Permissions. + + This endpoint will actually show objects with no `collection_id` for Users that have Root Collection + permissions, but for people without Root Collection perms, we'll just show the objects that have an effective + location of `/`. + + This endpoint is intended to power a 'Root Folder View' for the Current User, so regardless you'll see all the + top-level objects you're allowed to access. + +##### PARAMS: + +* **`model`** value may be nil, or if non-nil, value must be one of: `card`, `collection`, `dashboard`, `pulse`. + +* **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). + + ## `POST /api/collection/` Create a new Collection. -You must be a superuser to do this. 
- ##### PARAMS: * **`name`** value must be a non-blank string. @@ -248,25 +537,29 @@ You must be a superuser to do this. * **`description`** value may be nil, or if non-nil, value must be a non-blank string. +* **`parent_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -## `PUT /api/collection/:id` -Modify an existing Collection, including archiving or unarchiving it. +## `PUT /api/collection/:id` -You must be a superuser to do this. +Modify an existing Collection, including archiving or unarchiving it, or moving it. ##### PARAMS: * **`id`** -* **`name`** value must be a non-blank string. +* **`name`** value may be nil, or if non-nil, value must be a non-blank string. -* **`color`** value must be a string that matches the regex `^#[0-9A-Fa-f]{6}$`. +* **`color`** value may be nil, or if non-nil, value must be a string that matches the regex `^#[0-9A-Fa-f]{6}$`. * **`description`** value may be nil, or if non-nil, value must be a non-blank string. * **`archived`** value may be nil, or if non-nil, value must be a boolean. +* **`parent_id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`collection-updates`** + ## `PUT /api/collection/graph` @@ -281,7 +574,7 @@ You must be a superuser to do this. ## `DELETE /api/dashboard/:dashboard-id/public_link` -Delete the publically-accessible link to this Dashboard. +Delete the publicly-accessible link to this Dashboard. You must be a superuser to do this. @@ -341,6 +634,15 @@ Get `Dashboard` with ID. * **`id`** +## `GET /api/dashboard/:id/related` + +Return related entities. + +##### PARAMS: + +* **`id`** + + ## `GET /api/dashboard/:id/revisions` Fetch `Revisions` for `Dashboard` with ID. @@ -352,14 +654,16 @@ Fetch `Revisions` for `Dashboard` with ID. ## `GET /api/dashboard/embeddable` -Fetch a list of Dashboards where `enable_embedding` is `true`. The dashboards can be embedded using the embedding endpoints and a signed JWT. 
+Fetch a list of Dashboards where `enable_embedding` is `true`. The dashboards can be embedded using the embedding + endpoints and a signed JWT. You must be a superuser to do this. ## `GET /api/dashboard/public` -Fetch a list of Dashboards with public UUIDs. These dashboards are publically-accessible *if* public sharing is enabled. +Fetch a list of Dashboards with public UUIDs. These dashboards are publicly-accessible *if* public sharing is + enabled. You must be a superuser to do this. @@ -372,16 +676,22 @@ Create a new `Dashboard`. * **`name`** value must be a non-blank string. +* **`description`** value may be nil, or if non-nil, value must be a string. + * **`parameters`** value must be an array. Each value must be a map. +* **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`dashboard`** ## `POST /api/dashboard/:dashboard-id/public_link` -Generate publically-accessible links for this Dashboard. Returns UUID to be used in public links. - (If this Dashboard has already been shared, it will return the existing public link rather than creating a new one.) - Public sharing must be enabled. +Generate publicly-accessible links for this Dashboard. Returns UUID to be used in public links. (If this + Dashboard has already been shared, it will return the existing public link rather than creating a new one.) Public + sharing must be enabled. You must be a superuser to do this. @@ -398,7 +708,7 @@ Add a `Card` to a `Dashboard`. * **`id`** -* **`cardId`** value must be an integer greater than zero. +* **`cardId`** value may be nil, or if non-nil, value must be an integer greater than zero. * **`parameter_mappings`** value must be an array. Each value must be a map. @@ -427,12 +737,33 @@ Revert a `Dashboard` to a prior `Revision`. * **`revision_id`** value must be an integer greater than zero. 
+## `POST /api/dashboard/save` + +Save a denormalized description of dashboard. + +##### PARAMS: + +* **`dashboard`** + + +## `POST /api/dashboard/save/collection/:parent-collection-id` + +Save a denormalized description of dashboard into collection with ID `:parent-collection-id`. + +##### PARAMS: + +* **`parent-collection-id`** + +* **`dashboard`** + + ## `PUT /api/dashboard/:id` Update a `Dashboard`. - Usually, you just need write permissions for this Dashboard to do this (which means you have appropriate permissions for the Cards belonging to this Dashboard), - but to change the value of `enable_embedding` you must be a superuser. + Usually, you just need write permissions for this Dashboard to do this (which means you have appropriate + permissions for the Cards belonging to this Dashboard), but to change the value of `enable_embedding` you must be a + superuser. ##### PARAMS: @@ -444,16 +775,20 @@ Update a `Dashboard`. * **`archived`** value may be nil, or if non-nil, value must be a boolean. +* **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero. + * **`show_in_getting_started`** value may be nil, or if non-nil, value must be a boolean. * **`enable_embedding`** value may be nil, or if non-nil, value must be a boolean. +* **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`dash-updates`** + * **`name`** value may be nil, or if non-nil, value must be a non-blank string. * **`caveats`** value may be nil, or if non-nil, value must be a string. -* **`dashboard`** - * **`embedding_params`** value may be nil, or if non-nil, value must be a valid embedding params map. * **`id`** @@ -491,11 +826,15 @@ Delete a `Database`. ## `GET /api/database/` -Fetch all `Databases`. +Fetch all `Databases`. `include_tables` means we should hydrate the Tables belonging to each DB. `include_cards` here + means we should also include virtual Table entries for saved Questions, e.g. 
so we can easily use them as source + Tables in queries. Default for both is `false`. ##### PARAMS: -* **`include_tables`** +* **`include_tables`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). + +* **`include_cards`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). ## `GET /api/database/:id` @@ -551,6 +890,32 @@ Get metadata about a `Database`, including all of its `Tables` and `Fields`. * **`id`** +## `GET /api/database/:id/schema/:schema` + +Returns a list of tables for the given database `id` and `schema` + +##### PARAMS: + +* **`id`** + +* **`schema`** + + +## `GET /api/database/:id/schemas` + +Returns a list of all the schemas found for the database `id` + +##### PARAMS: + +* **`id`** + + +## `GET /api/database/:virtual-db/metadata` + +Endpoint that provides metadata for the Saved Questions 'virtual' database. Used for fooling the frontend + and allowing it to treat the Saved Questions virtual DB just like any other database. + + ## `POST /api/database/` Add a new `Database`. @@ -565,12 +930,49 @@ You must be a superuser to do this. * **`details`** value must be a map. -* **`is_full_sync`** +* **`is_full_sync`** value may be nil, or if non-nil, value must be a boolean. + +* **`is_on_demand`** value may be nil, or if non-nil, value must be a boolean. + +* **`schedules`** value may be nil, or if non-nil, value must be a valid map of schedule maps for a DB. + + +## `POST /api/database/:id/discard_values` + +Discards all saved field values for this `Database`. + +You must be a superuser to do this. + +##### PARAMS: + +* **`id`** + + +## `POST /api/database/:id/rescan_values` + +Trigger a manual scan of the field values for this `Database`. + +You must be a superuser to do this. + +##### PARAMS: + +* **`id`** ## `POST /api/database/:id/sync` -Update the metadata for this `Database`. +Update the metadata for this `Database`. This happens asynchronously. 
+ +##### PARAMS: + +* **`id`** + + +## `POST /api/database/:id/sync_schema` + +Trigger a manual update of the schema metadata for this `Database`. + +You must be a superuser to do this. ##### PARAMS: @@ -584,6 +986,19 @@ Add the sample dataset as a new `Database`. You must be a superuser to do this. +## `POST /api/database/validate` + +Validate that we can connect to a database given a set of details. + +You must be a superuser to do this. + +##### PARAMS: + +* **`engine`** value must be a valid database engine. + +* **`details`** value must be a map. + + ## `PUT /api/database/:id` Update a `Database`. @@ -592,21 +1007,25 @@ You must be a superuser to do this. ##### PARAMS: -* **`id`** +* **`engine`** value may be nil, or if non-nil, value must be a valid database engine. -* **`name`** value must be a non-blank string. +* **`schedules`** value may be nil, or if non-nil, value must be a valid map of schedule maps for a DB. -* **`engine`** value must be a valid database engine. +* **`points_of_interest`** value may be nil, or if non-nil, value must be a string. -* **`details`** value must be a map. +* **`description`** value may be nil, or if non-nil, value must be a string. + +* **`name`** value may be nil, or if non-nil, value must be a non-blank string. + +* **`caveats`** value may be nil, or if non-nil, value must be a string. * **`is_full_sync`** -* **`description`** +* **`details`** value may be nil, or if non-nil, value must be a map. -* **`caveats`** +* **`id`** -* **`points_of_interest`** +* **`is_on_demand`** ## `POST /api/dataset/` Execute a query and retrieve the results in the usual format. ##### PARAMS: -* **`database`** +* **`database`** value must be an integer. + +* **`query`** ## `POST /api/dataset/:export-format` @@ -640,6 +1061,13 @@ Get historical query execution duration. * **`query`** +## `DELETE /api/email/` + +Clear all email related settings. You must be a superuser to do this + +You must be a superuser to do this.
+ + ## `POST /api/email/test` Send a test email. You must be a superuser to do this. @@ -671,6 +1099,50 @@ Fetch a Card via a JSON Web Token signed with the `embedding-secret-key`. * **`token`** +## `GET /api/embed/card/:token/field/:field-id/remapping/:remapped-id` + +Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with + embedded Cards. + +##### PARAMS: + +* **`token`** + +* **`field-id`** + +* **`remapped-id`** + +* **`value`** value must be a non-blank string. + + +## `GET /api/embed/card/:token/field/:field-id/search/:search-field-id` + +Search for values of a Field that is referenced by an embedded Card. + +##### PARAMS: + +* **`token`** + +* **`field-id`** + +* **`search-field-id`** + +* **`value`** value must be a non-blank string. + +* **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. + + +## `GET /api/embed/card/:token/field/:field-id/values` + +Fetch FieldValues for a Field that is referenced by an embedded Card. + +##### PARAMS: + +* **`token`** + +* **`field-id`** + + ## `GET /api/embed/card/:token/query` Fetch the results of running a Card using a JSON Web Token signed with the `embedding-secret-key`. @@ -684,71 +1156,174 @@ Fetch the results of running a Card using a JSON Web Token signed with the `embe * **`token`** -* **`&`** +* **`&`** + +* **`query-params`** + + +## `GET /api/embed/card/:token/query/:export-format` + +Like `GET /api/embed/card/query`, but returns the results as a file in the specified format. + +##### PARAMS: + +* **`token`** + +* **`export-format`** value must be one of: `csv`, `json`, `xlsx`. + +* **`&`** + +* **`query-params`** + + +## `GET /api/embed/dashboard/:token` + +Fetch a Dashboard via a JSON Web Token signed with the `embedding-secret-key`. 
+ + Token should have the following format: + + {:resource {:dashboard }} + +##### PARAMS: + +* **`token`** + + +## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id` + +Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the `embedding-secret-key` + +##### PARAMS: + +* **`token`** + +* **`dashcard-id`** + +* **`card-id`** + +* **`&`** + +* **`query-params`** + + +## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id/:export-format` + +Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the `embedding-secret-key` + return the data in one of the export formats + +##### PARAMS: + +* **`token`** + +* **`export-format`** value must be one of: `csv`, `json`, `xlsx`. + +* **`dashcard-id`** + +* **`card-id`** + +* **`&`** + +* **`query-params`** + + +## `GET /api/embed/dashboard/:token/field/:field-id/remapping/:remapped-id` + +Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with + embedded Dashboards. + +##### PARAMS: + +* **`token`** + +* **`field-id`** + +* **`remapped-id`** + +* **`value`** value must be a non-blank string. + + +## `GET /api/embed/dashboard/:token/field/:field-id/search/:search-field-id` + +Search for values of a Field that is referenced by a Card in an embedded Dashboard. + +##### PARAMS: + +* **`token`** + +* **`field-id`** -* **`query-params`** +* **`search-field-id`** +* **`value`** value must be a non-blank string. -## `GET /api/embed/card/:token/query/:export-format` +* **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. -Like `GET /api/embed/card/query`, but returns the results as a file in the specified format. + +## `GET /api/embed/dashboard/:token/field/:field-id/values` + +Fetch FieldValues for a Field that is used as a param in an embedded Dashboard. 
##### PARAMS: * **`token`** -* **`export-format`** value must be one of: `csv`, `json`, `xlsx`. +* **`field-id`** -* **`&`** -* **`query-params`** +## `DELETE /api/field/:id/dimension` +Remove the dimension associated to field at ID -## `GET /api/embed/dashboard/:token` +##### PARAMS: -Fetch a Dashboard via a JSON Web Token signed with the `embedding-secret-key`. +* **`id`** - Token should have the following format: - {:resource {:dashboard }} +## `GET /api/field/:id` + +Get `Field` with ID. ##### PARAMS: -* **`token`** +* **`id`** -## `GET /api/embed/dashboard/:token/dashcard/:dashcard-id/card/:card-id` +## `GET /api/field/:id/related` -Fetch the results of running a Card belonging to a Dashboard using a JSON Web Token signed with the `embedding-secret-key`. +Return related entities. - Token should have the following format: +##### PARAMS: - {:resource {:dashboard } - :params } +* **`id`** - Additional dashboard parameters can be provided in the query string, but params in the JWT token take precedence. -##### PARAMS: +## `GET /api/field/:id/remapping/:remapped-id` -* **`token`** +Fetch remapped Field values. -* **`dashcard-id`** +##### PARAMS: -* **`card-id`** +* **`id`** -* **`&`** +* **`remapped-id`** -* **`query-params`** +* **`value`** -## `GET /api/field/:id` +## `GET /api/field/:id/search/:search-id` -Get `Field` with ID. +Search for values of a Field that match values of another Field when breaking out by the ##### PARAMS: * **`id`** +* **`search-id`** + +* **`value`** value must be a non-blank string. + +* **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. + ## `GET /api/field/:id/summary` @@ -761,99 +1336,119 @@ Get the count and distinct count of `Field` with ID. ## `GET /api/field/:id/values` -If `Field`'s special type derives from `type/Category`, or its base type is `type/Boolean`, return - all distinct values of the field, and a map of human-readable values defined by the user. 
+If a Field's value of `has_field_values` is `list`, return a list of all the distinct values of the Field, and (if + defined by a User) a map of human-readable remapped values. ##### PARAMS: * **`id`** -## `POST /api/field/:id/value_map_update` +## `GET /api/field/field-literal%2C:field-name%2Ctype%2F:field-type/values` -Update the human-readable values for a `Field` whose special type is `category`/`city`/`state`/`country` - or whose base type is `type/Boolean`. +Implementation of the field values endpoint for fields in the Saved Questions 'virtual' DB. This endpoint is just a + convenience to simplify the frontend code. It just returns the standard 'empty' field values response. ##### PARAMS: -* **`id`** - -* **`values_map`** value must be a map. +* **`_`** -## `PUT /api/field/:id` +## `POST /api/field/:id/dimension` -Update `Field` with ID. +Sets the dimension for the given field at ID ##### PARAMS: * **`id`** -* **`caveats`** value may be nil, or if non-nil, value must be a non-blank string. +* **`type`** value must be one of: `external`, `internal`. -* **`description`** value may be nil, or if non-nil, value must be a non-blank string. +* **`name`** value must be a non-blank string. -* **`display_name`** value may be nil, or if non-nil, value must be a non-blank string. +* **`human_readable_field_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -* **`fk_target_field_id`** value may be nil, or if non-nil, value must be an integer. -* **`points_of_interest`** value may be nil, or if non-nil, value must be a non-blank string. +## `POST /api/field/:id/discard_values` -* **`special_type`** value may be nil, or if non-nil, value must be a valid field type. +Discard the FieldValues belonging to this Field. Only applies to fields that have FieldValues. If this Field's + Database is set up to automatically sync FieldValues, they will be recreated during the next cycle. 
-* **`visibility_type`** value may be nil, or if non-nil, value must be one of: `details-only`, `hidden`, `normal`, `retired`, `sensitive`. +You must be a superuser to do this. +##### PARAMS: -## `GET /api/geojson/:key` +* **`id`** -Fetch a custom GeoJSON file as defined in the `custom-geojson` setting. (This just acts as a simple proxy for the file specified for KEY). -##### PARAMS: +## `POST /api/field/:id/rescan_values` -* **`key`** value must be a non-blank string. +Manually trigger an update for the FieldValues for this Field. Only applies to Fields that are eligible for + FieldValues. +You must be a superuser to do this. -## `GET /api/getting-started/` +##### PARAMS: -Fetch basic info for the Getting Started guide. +* **`id`** -## `DELETE /api/label/:id` +## `POST /api/field/:id/values` -[DEPRECATED] Delete a `Label`. :label: +Update the fields values and human-readable values for a `Field` whose special type is + `category`/`city`/`state`/`country` or whose base type is `type/Boolean`. The human-readable values are optional. ##### PARAMS: * **`id`** +* **`value-pairs`** value must be an array. Each value must be an array. + + +## `PUT /api/field/:id` + +Update `Field` with ID. -## `GET /api/label/` +##### PARAMS: -[DEPRECATED] List all `Labels`. :label: +* **`visibility_type`** value may be nil, or if non-nil, value must be one of: `details-only`, `hidden`, `normal`, `retired`, `sensitive`. +* **`display_name`** value may be nil, or if non-nil, value must be a non-blank string. -## `POST /api/label/` +* **`points_of_interest`** value may be nil, or if non-nil, value must be a non-blank string. -[DEPRECATED] Create a new `Label`. :label: +* **`description`** value may be nil, or if non-nil, value must be a non-blank string. -##### PARAMS: +* **`special_type`** value may be nil, or if non-nil, value must be a valid field type. -* **`name`** value must be a non-blank string. 
+* **`has_field_values`** value may be nil, or if non-nil, value must be one of: `auto-list`, `list`, `none`, `search`. + +* **`caveats`** value may be nil, or if non-nil, value must be a non-blank string. + +* **`fk_target_field_id`** value may be nil, or if non-nil, value must be an integer greater than zero. -* **`icon`** value may be nil, or if non-nil, value must be a non-blank string. +* **`id`** -## `PUT /api/label/:id` +## `GET /api/geojson/:key` -[DEPRECATED] Update a `Label`. :label: +Fetch a custom GeoJSON file as defined in the `custom-geojson` setting. (This just acts as a simple proxy for the + file specified for KEY). ##### PARAMS: -* **`id`** +* **`key`** value must be a non-blank string. -* **`name`** value may be nil, or if non-nil, value must be a non-blank string. -* **`icon`** value may be nil, or if non-nil, value must be a non-blank string. +## `PUT /api/ldap/settings` + +Update LDAP related settings. You must be a superuser to do this. + +You must be a superuser to do this. + +##### PARAMS: + +* **`settings`** value must be a map. ## `DELETE /api/metric/:id` @@ -889,6 +1484,15 @@ You must be a superuser to do this. * **`id`** +## `GET /api/metric/:id/related` + +Return related entities. + +##### PARAMS: + +* **`id`** + + ## `GET /api/metric/:id/revisions` Fetch `Revisions` for `Metric` with ID. @@ -1059,14 +1663,14 @@ You must be a superuser to do this. ## `PUT /api/permissions/graph` -Do a batch update of Permissions by passing in a modified graph. This should return the same graph, - in the same format, that you got from `GET /api/permissions/graph`, with any changes made in the wherever neccesary. - This modified graph must correspond to the `PermissionsGraph` schema. - If successful, this endpoint returns the updated permissions graph; use this as a base for any further modifications. +Do a batch update of Permissions by passing in a modified graph. 
This should return the same graph, in the same
+ format, that you got from `GET /api/permissions/graph`, with any changes made wherever necessary. This
+ modified graph must correspond to the `PermissionsGraph` schema. If successful, this endpoint returns the updated
+ permissions graph; use this as a base for any further modifications.

- Revisions to the permissions graph are tracked. If you fetch the permissions graph and some other third-party modifies it before you can submit
- you revisions, the endpoint will instead make no changes andr eturn a 409 (Conflict) response. In this case, you should fetch the updated graph
- and make desired changes to that.
+ Revisions to the permissions graph are tracked. If you fetch the permissions graph and some other third-party
+ modifies it before you can submit your revisions, the endpoint will instead make no changes and return a
+ 409 (Conflict) response. In this case, you should fetch the updated graph and make desired changes to that.

You must be a superuser to do this.

@@ -1138,16 +1742,62 @@ Fetch the results of running a Card belonging to a Dashboard you're considering

## `GET /api/public/card/:uuid`

-Fetch a publically-accessible Card an return query results as well as `:card` information. Does not require auth credentials. Public sharing must be enabled.
+Fetch a publicly-accessible Card and return query results as well as `:card` information. Does not require auth
+ credentials. Public sharing must be enabled.
+
+##### PARAMS:
+
+* **`uuid`**
+
+
+## `GET /api/public/card/:uuid/field/:field-id/remapping/:remapped-id`
+
+Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with public
+ Cards.
+
+##### PARAMS:
+
+* **`uuid`**
+
+* **`field-id`**
+
+* **`remapped-id`**
+
+* **`value`** value must be a non-blank string. 
+ + +## `GET /api/public/card/:uuid/field/:field-id/search/:search-field-id` + +Search for values of a Field that is referenced by a public Card. ##### PARAMS: * **`uuid`** +* **`field-id`** + +* **`search-field-id`** + +* **`value`** value must be a non-blank string. + +* **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. + + +## `GET /api/public/card/:uuid/field/:field-id/values` + +Fetch FieldValues for a Field that is referenced by a public Card. + +##### PARAMS: + +* **`uuid`** + +* **`field-id`** + ## `GET /api/public/card/:uuid/query` -Fetch a publically-accessible Card an return query results as well as `:card` information. Does not require auth credentials. Public sharing must be enabled. +Fetch a publicly-accessible Card an return query results as well as `:card` information. Does not require auth + credentials. Public sharing must be enabled. ##### PARAMS: @@ -1158,7 +1808,8 @@ Fetch a publically-accessible Card an return query results as well as `:card` in ## `GET /api/public/card/:uuid/query/:export-format` -Fetch a publically-accessible Card and return query results in the specified format. Does not require auth credentials. Public sharing must be enabled. +Fetch a publicly-accessible Card and return query results in the specified format. Does not require auth + credentials. Public sharing must be enabled. ##### PARAMS: @@ -1171,7 +1822,7 @@ Fetch a publically-accessible Card and return query results in the specified for ## `GET /api/public/dashboard/:uuid` -Fetch a publically-accessible Dashboard. Does not require auth credentials. Public sharing must be enabled. +Fetch a publicly-accessible Dashboard. Does not require auth credentials. Public sharing must be enabled. ##### PARAMS: @@ -1180,7 +1831,8 @@ Fetch a publically-accessible Dashboard. Does not require auth credentials. Publ ## `GET /api/public/dashboard/:uuid/card/:card-id` -Fetch the results for a Card in a publically-accessible Dashboard. 
Does not require auth credentials. Public sharing must be enabled. +Fetch the results for a Card in a publicly-accessible Dashboard. Does not require auth credentials. Public + sharing must be enabled. ##### PARAMS: @@ -1191,9 +1843,53 @@ Fetch the results for a Card in a publically-accessible Dashboard. Does not requ * **`parameters`** value may be nil, or if non-nil, value must be a valid JSON string. +## `GET /api/public/dashboard/:uuid/field/:field-id/remapping/:remapped-id` + +Fetch remapped Field values. This is the same as `GET /api/field/:id/remapping/:remapped-id`, but for use with public + Dashboards. + +##### PARAMS: + +* **`uuid`** + +* **`field-id`** + +* **`remapped-id`** + +* **`value`** value must be a non-blank string. + + +## `GET /api/public/dashboard/:uuid/field/:field-id/search/:search-field-id` + +Search for values of a Field that is referenced by a Card in a public Dashboard. + +##### PARAMS: + +* **`uuid`** + +* **`field-id`** + +* **`search-field-id`** + +* **`value`** value must be a non-blank string. + +* **`limit`** value may be nil, or if non-nil, value must be a valid integer greater than zero. + + +## `GET /api/public/dashboard/:uuid/field/:field-id/values` + +Fetch FieldValues for a Field that is referenced by a Card in a public Dashboard. + +##### PARAMS: + +* **`uuid`** + +* **`field-id`** + + ## `GET /api/public/oembed` -oEmbed endpoint used to retrieve embed code and metadata for a (public) Metabase URL. +oEmbed endpoint used to retreive embed code and metadata for a (public) Metabase URL. ##### PARAMS: @@ -1208,7 +1904,7 @@ oEmbed endpoint used to retrieve embed code and metadata for a (public) Metabase ## `DELETE /api/pulse/:id` -Delete a `Pulse`. +Delete a Pulse. (DEPRECATED -- don't delete a Pulse anymore -- archive it instead.) ##### PARAMS: @@ -1217,7 +1913,11 @@ Delete a `Pulse`. 
## `GET /api/pulse/` -Fetch all `Pulses` +Fetch all Pulses + +##### PARAMS: + +* **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). ## `GET /api/pulse/:id` @@ -1231,12 +1931,12 @@ Fetch `Pulse` with ID. ## `GET /api/pulse/form_input` -Provides relevant configuration information and user choices for creating/updating `Pulses`. +Provides relevant configuration information and user choices for creating/updating Pulses. ## `GET /api/pulse/preview_card/:id` -Get HTML rendering of a `Card` with ID. +Get HTML rendering of a Card with `id`. ##### PARAMS: @@ -1245,7 +1945,7 @@ Get HTML rendering of a `Card` with ID. ## `GET /api/pulse/preview_card_info/:id` -Get JSON object containing HTML rendering of a `Card` with ID and other information. +Get JSON object containing HTML rendering of a Card with `id` and other information. ##### PARAMS: @@ -1254,7 +1954,7 @@ Get JSON object containing HTML rendering of a `Card` with ID and other informat ## `GET /api/pulse/preview_card_png/:id` -Get PNG rendering of a `Card` with ID. +Get PNG rendering of a Card with `id`. ##### PARAMS: @@ -1269,11 +1969,15 @@ Create a new `Pulse`. * **`name`** value must be a non-blank string. -* **`cards`** value must be an array. Each value must be a map. The array cannot be empty. +* **`cards`** value must be an array. Each value must satisfy one of the following requirements: 1) value must be a map with the following keys `(collection_id, description, display, id, include_csv, include_xls, name)` 2) value must be a map with the keys `id`, `include_csv`, and `include_xls`. The array cannot be empty. * **`channels`** value must be an array. Each value must be a map. The array cannot be empty. -* **`skip_if_empty`** value must be a boolean. +* **`skip_if_empty`** value may be nil, or if non-nil, value must be a boolean. + +* **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. 
+ +* **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero. ## `POST /api/pulse/test` @@ -1284,28 +1988,38 @@ Test send an unsaved pulse. * **`name`** value must be a non-blank string. -* **`cards`** value must be an array. Each value must be a map. The array cannot be empty. +* **`cards`** value must be an array. Each value must satisfy one of the following requirements: 1) value must be a map with the following keys `(collection_id, description, display, id, include_csv, include_xls, name)` 2) value must be a map with the keys `id`, `include_csv`, and `include_xls`. The array cannot be empty. * **`channels`** value must be an array. Each value must be a map. The array cannot be empty. -* **`skip_if_empty`** value must be a boolean. +* **`skip_if_empty`** value may be nil, or if non-nil, value must be a boolean. + +* **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`collection_position`** value may be nil, or if non-nil, value must be an integer greater than zero. ## `PUT /api/pulse/:id` -Update a `Pulse` with ID. +Update a Pulse with `id`. ##### PARAMS: * **`id`** -* **`name`** value must be a non-blank string. +* **`name`** value may be nil, or if non-nil, value must be a non-blank string. -* **`cards`** value must be an array. Each value must be a map. The array cannot be empty. +* **`cards`** value may be nil, or if non-nil, value must be an array. Each value must satisfy one of the following requirements: 1) value must be a map with the following keys `(collection_id, description, display, id, include_csv, include_xls, name)` 2) value must be a map with the keys `id`, `include_csv`, and `include_xls`. The array cannot be empty. -* **`channels`** value must be an array. Each value must be a map. The array cannot be empty. +* **`channels`** value may be nil, or if non-nil, value must be an array. Each value must be a map. The array cannot be empty. 
+ +* **`skip_if_empty`** value may be nil, or if non-nil, value must be a boolean. + +* **`collection_id`** value may be nil, or if non-nil, value must be an integer greater than zero. + +* **`archived`** value may be nil, or if non-nil, value must be a boolean. -* **`skip_if_empty`** value must be a boolean. +* **`pulse-updates`** ## `GET /api/revision/` @@ -1332,6 +2046,17 @@ Revert an object to a prior revision. * **`revision_id`** value must be an integer. +## `GET /api/search/` + +Search Cards, Dashboards, Collections and Pulses for the substring `q`. + +##### PARAMS: + +* **`q`** value may be nil, or if non-nil, value must be a non-blank string. + +* **`archived`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). + + ## `DELETE /api/segment/:id` Delete a `Segment`. @@ -1361,6 +2086,15 @@ You must be a superuser to do this. * **`id`** +## `GET /api/segment/:id/related` + +Return related entities. + +##### PARAMS: + +* **`id`** + + ## `GET /api/segment/:id/revisions` Fetch `Revisions` for `Segment` with ID. @@ -1448,7 +2182,7 @@ Login. ##### PARAMS: -* **`email`** value must be a valid email address. +* **`username`** value must be a non-blank string. * **`password`** value must be a non-blank string. @@ -1508,6 +2242,17 @@ You must be a superuser to do this. * **`key`** value must be a non-blank string. +## `PUT /api/setting/` + +Update multiple `Settings` values. You must be a superuser to do this. + +You must be a superuser to do this. + +##### PARAMS: + +* **`settings`** + + ## `PUT /api/setting/:key` Create/update a `Setting`. You must be a superuser to do this. @@ -1538,6 +2283,8 @@ Special endpoint for creating the first user during setup. * **`engine`** +* **`schedules`** value may be nil, or if non-nil, value must be a valid map of schedule maps for a DB. + * **`allow_tracking`** value may be nil, or if non-nil, value must satisfy one of the following requirements: 1) value must be a boolean. 
2) value must be a valid boolean string ('true' or 'false'). * **`email`** value must be a valid email address. @@ -1556,6 +2303,8 @@ Special endpoint for creating the first user during setup. * **`details`** +* **`is_on_demand`** + * **`last_name`** value must be a non-blank string. @@ -1567,10 +2316,6 @@ Validate that we can connect to a database given a set of details. * **`engine`** value must be a valid database engine. -* **`host`** - -* **`port`** - * **`details`** * **`token`** Token does not match the setup token. @@ -1619,8 +2364,8 @@ Get all foreign keys whose destination is a `Field` that belongs to this `Table` Get metadata about a `Table` useful for running queries. Returns DB, fields, field FKs, and field values. - By passing `include_sensitive_fields=true`, information *about* sensitive `Fields` will be returned; in no case - will any of its corresponding values be returned. (This option is provided for use in the Admin Edit Metadata page). + By passing `include_sensitive_fields=true`, information *about* sensitive `Fields` will be returned; in no case will + any of its corresponding values be returned. (This option is provided for use in the Admin Edit Metadata page). ##### PARAMS: @@ -1629,6 +2374,54 @@ Get metadata about a `Table` useful for running queries. * **`include_sensitive_fields`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). +## `GET /api/table/:id/related` + +Return related entities. + +##### PARAMS: + +* **`id`** + + +## `GET /api/table/card__:id/fks` + +Return FK info for the 'virtual' table for a Card. This is always empty, so this endpoint + serves mainly as a placeholder to avoid having to change anything on the frontend. + + +## `GET /api/table/card__:id/query_metadata` + +Return metadata for the 'virtual' table for a Card. + +##### PARAMS: + +* **`id`** + + +## `POST /api/table/:id/discard_values` + +Discard the FieldValues belonging to the Fields in this Table. 
Only applies to fields that have FieldValues. If + this Table's Database is set up to automatically sync FieldValues, they will be recreated during the next cycle. + +You must be a superuser to do this. + +##### PARAMS: + +* **`id`** + + +## `POST /api/table/:id/rescan_values` + +Manually trigger an update for the FieldValues for the Fields belonging to this Table. Only applies to Fields that + are eligible for FieldValues. + +You must be a superuser to do this. + +##### PARAMS: + +* **`id`** + + ## `PUT /api/table/:id` Update `Table` with ID. @@ -1639,24 +2432,25 @@ Update `Table` with ID. * **`display_name`** value may be nil, or if non-nil, value must be a non-blank string. -* **`entity_type`** value may be nil, or if non-nil, value must be one of: `event`, `person`, `photo`, `place`. +* **`entity_type`** value may be nil, or if non-nil, value must be a valid entity type (keyword or string). * **`visibility_type`** value may be nil, or if non-nil, value must be one of: `cruft`, `hidden`, `technical`. -* **`description`** +* **`description`** value may be nil, or if non-nil, value must be a non-blank string. -* **`caveats`** +* **`caveats`** value may be nil, or if non-nil, value must be a non-blank string. -* **`points_of_interest`** +* **`points_of_interest`** value may be nil, or if non-nil, value must be a non-blank string. -* **`show_in_getting_started`** +* **`show_in_getting_started`** value may be nil, or if non-nil, value must be a boolean. ## `GET /api/tiles/:zoom/:x/:y/:lat-field-id/:lon-field-id/:lat-col-idx/:lon-col-idx/` -This endpoints provides an image with the appropriate pins rendered given a MBQL QUERY (passed as a GET query string param). - We evaluate the query and find the set of lat/lon pairs which are relevant and then render the appropriate ones. - It's expected that to render a full map view several calls will be made to this endpoint in parallel. 
+This endpoints provides an image with the appropriate pins rendered given a MBQL QUERY (passed as a GET query + string param). We evaluate the query and find the set of lat/lon pairs which are relevant and then render the + appropriate ones. It's expected that to render a full map view several calls will be made to this endpoint in + parallel. ##### PARAMS: @@ -1690,7 +2484,13 @@ You must be a superuser to do this. ## `GET /api/user/` -Fetch a list of all active `Users` for the admin People page. +Fetch a list of `Users` for the admin People page or for Pulses. By default returns only active users. If + `include_deactivated` is true, return all Users (active and inactive). (Using `include_deactivated` requires + superuser permissions.) + +##### PARAMS: + +* **`include_deactivated`** value may be nil, or if non-nil, value must be a valid boolean string ('true' or 'false'). ## `GET /api/user/:id` @@ -1709,7 +2509,7 @@ Fetch the current `User`. ## `POST /api/user/` -Create a new `User`, or or reactivate an existing one. +Create a new `User`, return a 400 if the email address is already taken You must be a superuser to do this. @@ -1723,6 +2523,8 @@ You must be a superuser to do this. * **`password`** +* **`login_attributes`** value may be nil, or if non-nil, value must be a map with each value either a string or number. + ## `POST /api/user/:id/send_invite` @@ -1737,13 +2539,13 @@ You must be a superuser to do this. ## `PUT /api/user/:id` -Update a `User`. +Update an existing, active `User`. ##### PARAMS: * **`id`** -* **`email`** value must be a valid email address. +* **`email`** value may be nil, or if non-nil, value must be a valid email address. * **`first_name`** value may be nil, or if non-nil, value must be a non-blank string. @@ -1751,6 +2553,8 @@ Update a `User`. * **`is_superuser`** +* **`login_attributes`** value may be nil, or if non-nil, value must be a map with each value either a string or number. 
+
+

## `PUT /api/user/:id/password`

@@ -1774,6 +2578,17 @@ Indicate that a user has been informed about the vast intricacies of 'the' Query

* **`id`**


+## `PUT /api/user/:id/reactivate`
+
+Reactivate user at `:id`
+
+You must be a superuser to do this.
+
+##### PARAMS:
+
+* **`id`**
+
+
## `GET /api/util/logs`

Logs.

@@ -1783,7 +2598,7 @@ You must be a superuser to do this.

## `GET /api/util/random_token`

-Return a cryptographically secure random 32-byte token, encoded as a hexadecimal string.
+Return a cryptographically secure random 32-byte token, encoded as a hexadecimal string.

Intended for use when creating a value for `embedding-secret-key`.

@@ -1801,4 +2616,4 @@ Endpoint that checks if the supplied password meets the currently configured pas

##### PARAMS:

-* **`password`** Insufficient password strength
+* **`password`** Insufficient password strength
\ No newline at end of file
diff --git a/docs/contributing.md b/docs/contributing.md
index fc7f8ea40009..632bb4ecea3f 100644
--- a/docs/contributing.md
+++ b/docs/contributing.md
@@ -6,7 +6,7 @@ In this guide, we'll discuss how Metabase is built. This should give you a good

## What we're trying to build

-Metabase is all about letting non-technical users get access to the their organization's data. We're trying to maximize the amount of power that can be comfortably used by someone who understands their business, is quantitatively bent, but probably only comfortable with Excel.
+Metabase is all about letting non-technical users get access to their organization's data. We're trying to maximize the amount of power that can be comfortably used by someone who understands their business, is quantitatively bent, but probably only comfortable with Excel. 

It's important to keep in mind these goals of the Metabase project. Many times proposals will be marked "Out of Scope" or otherwise deprioritized. 
This doesn't mean the proposal isn't useful, or that we wouldn't be interested in seeing it done as a side project or as an experimental branch. However, it does mean that we won't point the core team or contributors to it in the near term. Issues that are slightly out of scope will be kept open in case there is community support (and ideally contributions). @@ -15,7 +15,7 @@ To get a sense for the end goals, make sure to read the [Zen of Metabase](../zen ## Our product process: -The core team runs a pretty well defined product process. It is actively being tweaked, but the below is a pretty faithful description of it at the time of writing. You should have a clear idea of how we work before jumping in with a PR. +The core team runs a pretty well defined product process. It is actively being tweaked, but the below is a pretty faithful description of it at the time of writing. You should have a clear idea of how we work before jumping in with a PR. ### A) Identify product needs from the community @@ -29,22 +29,25 @@ We typically will collect a group of issues or suggestions into a new topline fe Once a feature has been defined, typically it will be taken on by a product designer. Here, they will produce low fi mocks, get feedback from our users and community, and iterate. -Once the main UX flows have been dialed in, there will be a hi-fidelity visual design. +Once the main UX flows have been dialed in, there will be a hi-fidelity visual design. Features that are ready for design are are tagged [Design Needed](https://github.com/metabase/metabase/labels/Design%2FNeeded). Once a feature has had a reasonably complete visual design it should be tagged [Help Wanted](https://github.com/metabase/metabase/labels/Help%20Wanted). ### D) Build the feature Once a feature is tagged [Help Wanted](https://github.com/metabase/metabase/labels/Help%20Wanted), it is considered ready to be built. 
A core team member (or you, awesomely helpful person that you are) can start working on it. -Once one or more people have started to work on a feature, it should be marked [In Progress](https://github.com/metabase/metabase/labels/In%20Progress). Once there is a branch+some code, a pull request is opened, linked to the feature + any issues that were pulled together to inform the feature. + +If you're building something that users will see in Metabase, please refer to the [Style Guide](https://localhost:3000/_internal) while running the development environment to learn how and when to use various Metabase UI elements. + +Once one or more people have started to work on a feature, it should be marked [In Progress](https://github.com/metabase/metabase/labels/In%20Progress). Once there is a branch+some code, a pull request is opened, linked to the feature + any issues that were pulled together to inform the feature. ### E) Verification and merging All PRs that involve more than an insignificant change should be reviewed. See our [Code Review Process](code-reviews.md). - -If all goes well, the feature gets coded up, verified and then the pull request gets merged! High-fives all around. -If there are tests missing, code style concerns or specific architectural issues in the pull request, they should be fixed before merging. We have a very high bar on both code and product quality and it's important that this be maintained going forward, so please be patient with us here. +If all goes well, the feature gets coded up, verified and then the pull request gets merged! High-fives all around. + +If there are tests missing, code style concerns or specific architectural issues in the pull request, they should be fixed before merging. We have a very high bar on both code and product quality and it's important that this be maintained going forward, so please be patient with us here. 
## Ways to help: @@ -72,14 +75,14 @@ By our definition, "Bugs" are situations where the program doesn't do what it wa There are a lot of docs. We often have difficulties keeping them up to date. If you are reading them and you notice inconsistencies, errors or outdated information, please help up keep them current! -### Working on features +### Working on features -Some features, eg Database drivers, don't have any user facing pixels. These are a great place to start off contributing as they don't require as much communication, discussions about tradeoffs and process in general. +Some features, eg Database drivers, don't have any user facing pixels. These are a great place to start off contributing as they don't require as much communication, discussions about tradeoffs and process in general. -In situations where a design has already been done, we can always use some help. Chime in on a pull request or an issue and offer to help. +In situations where a design has already been done, we can always use some help. Chime in on a pull request or an issue and offer to help. Generally speaking, any issue in [Help Wanted](https://github.com/metabase/metabase/labels/Help%20Wanted) is fair game. ### #YOLO JUST SUBMIT A PR -If you come up with something really cool, and want to share it with us, just submit a PR. If it hasn't gone through the above process, we probably won't merge it as is, but if it's compelling, we're more than willing to help you via code review, design review and generally OCD nitpicking so that it fits into the rest of our codebase. +If you come up with something really cool, and want to share it with us, just submit a PR. If it hasn't gone through the above process, we probably won't merge it as is, but if it's compelling, we're more than willing to help you via code review, design review and generally OCD nitpicking so that it fits into the rest of our codebase. 
diff --git a/docs/developers-guide-athena.md b/docs/developers-guide-athena.md new file mode 100644 index 000000000000..50e637be8133 --- /dev/null +++ b/docs/developers-guide-athena.md @@ -0,0 +1,316 @@ +# AWS Athena Tests + +NOTE: These instructions are only for running tests with AWS Athena Driver, +for general developer instructions see our [developers' guide](developers-guide.md). + +##### I'm using `metabase-athena/test-db` but you can choose your preferred s3 target + +## Generating Data Files + +To generate data files for AWS Athena tables, execute: +```clojure +(metabase.test.data.athena/export-data) +``` + +## Upload generated data to S3 + +If you have the AWS CLI you can sync your output folder with + +``` +aws s3 sync /tmp/metabase/test/ s3://metabase-athena/test-db/ +``` + +## Create the Test DB + + * Go to [AWS Glue Console](https://console.aws.amazon.com/glue/home) and create a test db; + * Go to [AWS Athena Console](https://console.aws.amazon.com/athena/home), +choose your db and create the tables. + +#### Why not to run the crawler + +Some fields are identified as `string` when they should be `date` or `timestamp`. +You can run the crawler and then fix the types manually. 
+ +##### Categories + +``` +CREATE EXTERNAL TABLE `categories`( + `name` string COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='id,name') +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/categories/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='28', + 'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='75', + 'sizeKey'='2118', + 'typeOfData'='file') +``` + +##### Checkins + +``` +CREATE EXTERNAL TABLE `checkins`( + `user_id` int COMMENT 'from deserializer', + `venue_id` int COMMENT 'from deserializer', + `date` timestamp COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='date,id,user_id,venue_id') +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/checkins/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='66', + 'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='1003', + 'sizeKey'='66203', + 'typeOfData'='file') +``` + +##### Cities + +``` +CREATE EXTERNAL TABLE `cities`( + `name` string COMMENT 'from deserializer', + `latitude` double COMMENT 'from deserializer', + `longitude` double COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='id,latitude,longitude,name') 
+STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/cities/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='87', + 'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='151', + 'sizeKey'='13179', + 'typeOfData'='file') +``` + +##### Incidents + +``` +CREATE EXTERNAL TABLE `incidents`( + `severity` int COMMENT 'from deserializer', + `timestamp` bigint COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='id,severity,timestamp') +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/incidents/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='49', + 'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='201', + 'sizeKey'='9892', + 'typeOfData'='file') +``` + +##### Places + +``` +CREATE EXTERNAL TABLE `places`( + `name` string COMMENT 'from deserializer', + `liked` boolean COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='id,liked,name') +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/places/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='40', + 
'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='3', + 'sizeKey'='121', + 'typeOfData'='file') +``` + +##### Sightings + +``` +CREATE EXTERNAL TABLE `sightings`( + `city_id` int COMMENT 'from deserializer', + `category_id` int COMMENT 'from deserializer', + `timestamp` int COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='category_id,city_id,id,timestamp') +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/sightings/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='63', + 'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='1004', + 'sizeKey'='63264', + 'typeOfData'='file') +``` + +##### Tips + +``` +CREATE EXTERNAL TABLE `tips`( + `text` string COMMENT 'from deserializer', + `url` struct COMMENT 'from deserializer', + `venue` struct,phone:string,id:string> COMMENT 'from deserializer', + `source` struct,tags:array,yelpphotoid:string,categories:array,foursquarephotoid:string,mayor:string> COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='id,source,text,url,venue') +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/tips/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='637', + 'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='500', + 
'sizeKey'='318670', + 'typeOfData'='file') +``` + +##### Users + +``` +CREATE EXTERNAL TABLE `users`( + `name` string COMMENT 'from deserializer', + `last_login` timestamp COMMENT 'from deserializer', + `password` string COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='id,last_login,name,password') +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/users/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='117', + 'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='15', + 'sizeKey'='1768', + 'typeOfData'='file') +``` + +##### Venues + +``` +CREATE EXTERNAL TABLE `venues`( + `name` string COMMENT 'from deserializer', + `latitude` double COMMENT 'from deserializer', + `longitude` double COMMENT 'from deserializer', + `price` int COMMENT 'from deserializer', + `category_id` int COMMENT 'from deserializer', + `id` int COMMENT 'from deserializer') +ROW FORMAT SERDE + 'org.openx.data.jsonserde.JsonSerDe' +WITH SERDEPROPERTIES ( + 'paths'='category_id,id,latitude,longitude,name,price') +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION + 's3://metabase-athena/test-db/venues/' +TBLPROPERTIES ( + 'CrawlerSchemaDeserializerVersion'='1.0', + 'CrawlerSchemaSerializerVersion'='1.0', + 'UPDATED_BY_CRAWLER'='metabase', + 'averageRecordSize'='102', + 'classification'='json', + 'compressionType'='none', + 'objectCount'='1', + 'recordCount'='100', + 'sizeKey'='10206', + 'typeOfData'='file') +``` + +## Run it! 
+ +``` +MB_ATHENA_S3_STAGING_DIR=[bucket to save queries] MB_ATHENA_SCHEMA=[your test db] MB_ATHENA_REGION=[same region of athena db and staging dir] MB_ATHENA_USER=[AWS KEY] MB_ATHENA_PASSWORD=[AWS SECRET] ENGINES=h2,athena lein test # we need to add h2 too +``` diff --git a/docs/developers-guide.md b/docs/developers-guide.md index 75d60d761c01..c42a3bda79ec 100644 --- a/docs/developers-guide.md +++ b/docs/developers-guide.md @@ -4,6 +4,7 @@ * [How to set up a development environment](#development-environment) * [How to run the Metabase Server](#development-server-quick-start) * [How to contribute back to the Metabase project](#contributing) +* [How to add support in Metabase for other languages](#internationalization) # Contributing @@ -14,6 +15,11 @@ For significant feature additions, it is expected that discussion will have take We don't like getting sued, so before merging any pull request, we'll need each person contributing code to sign a Contributor License Agreement [here](https://docs.google.com/a/metabase.com/forms/d/1oV38o7b9ONFSwuzwmERRMi9SYrhYeOrkbmNaq9pOJ_E/viewform) +# Development on Windows + +The development scripts are designed for Linux/Mac environment, so we recommend using the latest Windows 10 version with [WSL (Windows Subsystem for Linux)](https://msdn.microsoft.com/en-us/commandline/wsl/about) and [Ubuntu on Windows](https://www.microsoft.com/store/p/ubuntu/9nblggh4msv6). The Ubuntu Bash shell works well for both backend and frontend development. + +If you have problems with your development environment, make sure that you are not using any development commands outside the Bash shell. As an example, Node dependencies installed in normal Windows environment will not work inside Ubuntu Bash environment. # Install Prerequisites @@ -24,6 +30,7 @@ These are the set of tools which are required in order to complete any build of 3. [Yarn package manager for Node.js](https://yarnpkg.com/) 3. 
[Leiningen (http://leiningen.org/)](http://leiningen.org/) +If you are developing on Windows, make sure to use Ubuntu on Windows and follow instructions for Ubuntu/Linux instead of installing ordinary Windows versions. # Build Metabase @@ -70,13 +77,6 @@ Start the frontend build process with yarn run build-hot -Caveat - Yarn does not properly support `build-hot` on Windows 8/10. You will need to manually build the frontend client with - - yarn run build - -This will get you a full development server running on port :3000 by default. - - ## Frontend development We use these technologies for our FE build process to allow us to use modules, es6 syntax, and css variables. @@ -111,10 +111,10 @@ $ yarn run build-watch All frontend tests are located in `frontend/test` directory. Run all frontend tests with ``` -./bin/build version uberjar && yarn run test +yarn run test ``` -which will first build the backend JAR and then run integration, unit and Karma browser tests in sequence. +which will first build the backend JAR and then run integration, unit and Karma browser tests in sequence. ### Jest integration tests Integration tests simulate realistic sequences of user interactions. They render a complete DOM tree using [Enzyme](http://airbnb.io/enzyme/docs/api/index.html) and use temporary backend instances for executing API calls. 
@@ -123,16 +123,16 @@ Integration tests use an enforced file naming convention `.inte Useful commands: ```bash -./bin/build version uberjar # Builds the JAR without frontend assets; run this every time you need to update the backend +lein run refresh-integration-test-db-metadata # Scan the sample dataset and re-run sync/classification/field values caching yarn run test-integrated-watch # Watches for file changes and runs the tests that have changed -yarn run test-integrated-watch -- TestFileName # Watches the files in paths that match the given (regex) string +yarn run test-integrated-watch TestFileName # Watches the files in paths that match the given (regex) string ``` The way integration tests are written is a little unconventional so here is an example that hopefully helps in getting up to speed: ``` import { - login, + useSharedAdminLogin, createTestStore, } from "__support__/integrated_tests"; import { @@ -149,12 +149,11 @@ describe("Query builder", () => { beforeAll(async () => { // Usually you want to test stuff where user is already logged in // so it is convenient to login before any test case. - // Remember `await` here! - await login() + useSharedAdminLogin() }) it("should let you run a new query", async () => { - // Create a superpowered Redux store. + // Create a superpowered Redux store. // Remember `await` here! const store = await createTestStore() @@ -194,17 +193,17 @@ You can also skim through [`__support__/integrated_tests.js`](https://github.com ### Jest unit tests -Unit tests are focused around isolated parts of business logic. +Unit tests are focused around isolated parts of business logic. Unit tests use an enforced file naming convention `.unit.js` to separate them from integration tests. 
``` -yarn run jest-test # Run all tests at once -yarn run jest-test-watch # Watch for file changes +yarn run test-unit # Run all tests at once +yarn run test-unit-watch # Watch for file changes ``` ### Karma browser tests -If you need to test code which uses browser APIs that are only available in real browsers, you can add a Karma test to `frontend/test/legacy-karma` directory. +If you need to test code which uses browser APIs that are only available in real browsers, you can add a Karma test to `frontend/test/legacy-karma` directory. ``` yarn run test-karma # Run all tests once @@ -265,6 +264,38 @@ Start up an instant cheatsheet for the project + dependencies by running lein instant-cheatsheet +## Internationalization +We are an application with lots of users all over the world. To help them use Metabase in their own language, we mark all of our strings as i18n. +### The general workflow for developers is: + +1. Tag strings in the frontend using `t` and `jt` ES6 template literals (see more details in https://c-3po.js.org/): + +```javascript +const someString = t`Hello ${name}!`; +const someJSX =
{ jt`Hello ${name}` }
+``` + +and in the backend using `trs` and related macros (see more details in https://github.com/puppetlabs/clj-i18n): + +```clojure +(trs "Hello {0}!" name) +``` + +2. When you have added/edited tagged strings in the code, run `./bin/i18n/update-translations` to update the base `locales/metabase.pot` template and each existing `locales/LOCALE.po` + +### The workflow for translators in starting a new translation, or editing an existing one: + +1. You should run `./bin/i18n/update-translations` first to ensure the latest strings have been extracted. +2. If you're starting a new translation or didn't run update-translations then run `./bin/i18n/update-translation LOCALE` +3. Edit ./locales/LOCALE.po +4. `Run ./bin/i18n/build-translation-resources` +5. Restart or rebuild Metabase, Test, repeat 2 and 3 +6. Commit changes to ./locales/LOCALE.po and ./resources/frontend_client/app/locales/LOCALE.json + + +To try it out, change your browser's language (e.x. chrome://settings/?search=language) to one of the locales to see it working. Run metabase with the `JAVA_TOOL_OPTIONS=-Duser.language=LOCALE` environment variable set to set the locale on the backend, e.x. for pulses and emails (eventually we'll also add a setting in the app) + + ## License Copyright © 2017 Metabase, Inc diff --git a/docs/faq.md b/docs/faq.md index 78f5e2aa1a9c..6eaca0ddaa32 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -48,7 +48,7 @@ However, in [Metabase version 0.25 we introduced nested queries](http://www.meta By default, Metabase attempts to make field names more readable by changing things like `somehorriblename` to `Some Horrible Name`. This does not work well for languages other than English, or for fields that have lots of abbreviations or codes in them. If you'd like to turn this setting off, you can do so from the Admin Panel under Settings > General > Friendly Table and Field Names. -Note that even with this setting turned off, Metabase will replace underscores with spaces. 
To manually fix field or table names if they still look wrong, you can go to the Metadata section of the Admin Panel, select the database that contains the table or field you want to edit, select the table, and then edit the name(s) in the input boxes that appear. +To manually fix field or table names if they still look wrong, you can go to the Metadata section of the Admin Panel, select the database that contains the table or field you want to edit, select the table, and then edit the name(s) in the input boxes that appear. ## Dashboards diff --git a/docs/information-collection.md b/docs/information-collection.md index 9a26b9ada610..5856fcfdedfb 100644 --- a/docs/information-collection.md +++ b/docs/information-collection.md @@ -2,9 +2,13 @@ Metabase uses Google Analytics to collect anonymous usage information from the installed servers that enable this feature. Below are the events we have instrumented, as well as the information we collect about the user performing the action and the instance being used. +We also will phone home some anonymized metrics from the metabase application server on a nightly basis. Note that we don't collect any usernames, any emails, the server IP, database details of any kind or any personally identifiable information in this process. + While this list of anonymous information we collect might seem long, it’s useful to compare this to other alternatives. With a typical SaaS platform, not only will this information be collected, but it will also be accompanied by information about your data, how often it is accessed, the specific queries that you use, specific numbers of records all tied to your company and current plan. -We collect this information to improve your experience and the quality of Metabase, and in the list below, we spell out exactly why we collect each bit of information. +We collect this information to improve your experience and the quality of Metabase. 
We use the information you voluntarily share to understand how our users are actually using our product, what kinds of features to prioritize, and how many items to target in our design process. For example, without knowing what the distribution of the number of accounts on each instance in our install base is, we can't know whether the tools we provide can't scale until someone complains. And even then, we only hear complaints and not the people who are quietly happily using us. We're striving to create the best product possible. + +In the list below, we spell out exactly why we collect each bit of information. If you prefer not to provide us with this anonymous usage data, please go to your instance’s admin section and toggle off the option for `Anonymous Tracking`. @@ -39,7 +43,7 @@ While we will closely follow reported issues and feature requests, we aim to mak NOTE: we never capture any specific details in any of our tracking methodology such as user details, table names, field names, etc. collected data is limited to the types of actions users are taking with the product. -### Events +### Google Analytics Events | Category | Action | Why we collect this | |---------|--------|--------------------| @@ -51,3 +55,32 @@ NOTE: we never capture any specific details in any of our tracking methodology s | Admin Settings | We capture some very basic stats about when settings are updated and if there are ever errors. We also capture non-intrusive settings such as the chosen timezone. | We use this information to make sure that users aren't having problems managing their Metabase instance and it provides us some sense for the most common configuration choices so we can optimize for those cases. | | Databases | We simply capture when databases are created or removed and what types of databases are being used | This helps Metabase ensure that we spend the most time and attention on the types of databases that are most popular to users. 
| | Data Model | The saving and updates on tables, fields, segments, and metrics are all counted, along with a few other details such as what types of special metadata choices are made. | We use this data to help ensure that Metabase provides an appropriate set of options for users to describe their data and also gives us a sense for how much time users spend marking up their schemas. | + + +### Server-side Analytics +| Metric | An example of why we collect this | +|---------|--------| +| Number of Users/Admins and whether SSO is enabled | To understand which auth methods are being used, and whether to prioritize features that scale with # of users. | +| Number of user groups | To understand how complicated a permissions model most of our users have, and to make sure that we don't over-simplify our designs. | +| Number of Dashboards | Whether we need to provide ways to organize dashboards. | +| Number of Cards per Dashboard | Do we need to provide more structure to make long dashboards easier to parse?| +| Number of Dashboards per Card | Are our users only creating a card to put it in a dashboard or are they used in many places? | +| Types of Databases | Which database driver bugs to prioritize | +| Number of pulses with attachments | Are people using attachments? | +| Number of alerts | Are people using alerts? Do they typically have a few or does each user have them?| +| Number of Collections | Do we need to add additional organization tools? | +| Number of Databases | Are users using a single DB or many? How large should the icons for a database in the databrowser be? | +| Number of Schemas | Are users actively using namespaces on redshift? Do we actually need to design for 100s of schemas or is that just a small percentage of our users? | +| Number of Tables | What kind of data models are people using? 
Do we need table search?| +| Number of Fields | Can we pre-fetch all the fields in our metadata api to improve performance for most users, or should we fetch them per table to scale more efficiently? | +| Number of Segments | Are people using segments widely? If so, should we surface them up higher in the UI? | +| Number of Metrics | Are metrics common? If not, should we remove the "Metrics" option in the New Question Flow| +| Number of Queries Run | How many queries do our most active instances run per day? Do we need to improve caching? | +| Number of Query Errors | Do we need to change how we display errors in the logs? Are they being spammed? | +| Query Latencies | What percentage of our user base runs queries that allow for iterative querying (<1second) | +| Timezone | We have a bug in a certain timezone, how many users are in that timezone? | +| Language | How many non-english speaking users do we have? How fast should we be pushing internationalization?| +| OS + JVM Version | Can we deprecate Java 7 already? | + +Note that this list is meant to be representative, not exhaustive or exact (the actual code that runs to generate these metrics can be audited at https://github.com/metabase/metabase/blob/master/src/metabase/util/stats.clj) + diff --git a/docs/operations-guide/enable-jmx.md b/docs/operations-guide/enable-jmx.md new file mode 100644 index 000000000000..3556912cd7a1 --- /dev/null +++ b/docs/operations-guide/enable-jmx.md @@ -0,0 +1,114 @@ + +## Monitoring Your Metabase Instance + +This guide assumes that you have the VisualVM tool installed +locally. VisualVM is included with OpenJDK and the Oracle JDK and is +found in the `bin` directory of the JDK install. Some Linux +distributions separate VisualVM from the JDK, in which case it's a +separate `visualvm` package. 
+ +### Connecting to a Local Metabase Instance + +If you have VisualVM installed on your Metabase server and are able to +run VisualVM there, this is the easiest path as there is no need to +set up remote communication with your Metabase instance. In this +scenario, start Metabase like you would normally and separately start +VisualVM. Metabase will be listed among the locally running +applications + +![localprocess](images/LocalProcessVisualVM.png) + +### Connecting to a Remote Metabase Instance + +Monitoring a remote Metabase instance (or a local instance running in +a docker container) is probably more common, but requires a bit more +setup. First we need to specify some system properties that let the +JVM know that we want to allow remote monitoring. Assuming we are +running Metabase using `java -jar metabase.jar`, we'd need to change the +invocation to the below command, which includes the properties: + +``` +java --add-to-start=jmx,jmx-remote \ + -Dcom.sun.management.jmxremote \ + -Dcom.sun.management.jmxremote.port=1099 \ + -Dcom.sun.management.jmxremote.rmi.port=1099 \ + -Dcom.sun.management.jmxremote.authenticate=false \ + -Dcom.sun.management.jmxremote.ssl=false \ + -Dcom.sun.management.jmxremote.local.only=false \ + -Djava.rmi.server.hostname= \ + -jar metabase.jar +``` + +Port `1099` is a typical RMI/JMX port, but it can be any accessible port. + +*Note:* The above command opens up your application to monitoring by +anyone and should only be used when on a trusted network for a short +period of time. Securing this connection is possible, see [the Oracle +docs](https://docs.oracle.com/javase/8/docs/technotes/guides/management/agent.html) +for more information. + +Users running Metabase in a `docker` container will need to specify +the system properties detailed above and will also need to ensure the +port is open. Docker allows specifying environment variables via a +separate file that can be passed into the `docker run` invocation. 
You +can create a file called `metabase-vars.env` with `JAVA_OPTS` +specified: + +``` +JAVA_OPTS=-Dcom.sun.management.jmxremote.port=1099 -Dcom.sun.management.jmxremote.rmi.port=1099 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.local.only=false -Djava.rmi.server.hostname= +``` + +*Note:* This file expects to have each environment variable on its own line with no line breaks + +``` +docker run --env-file=metabase-vars.env -d -p 3000:3000 -p 1099:1099 -h --name metabase metabase/metabase +``` + +The addition of `-p 1099:1099` opens the JMX port for monitoring and the +`--env-file=metabase-vars.env` passes in the extra JMX related +environment variables. With the instance started, VisualVM needs to +know how to connect to the running instance. First add a new remote +instance: + +![addremotehost](images/AddRemoteHost.png) + +Then use the hostname you specified above: + +![sethostname](images/SetRemoteHostName.png) + +*Note:* Your local machine needs to be able to resolve the hostname +you specified, which might require a hosts entry + +![addjmxhost](images/ClickAddJMXHost.png) + +The port specified for the JMX host needs to match the system property +and the exposed port (if using Docker): + +![jmxport](images/EnterJMXPort.png) + +Next open the new remote JMX process: + +![jmxinstance](images/OpenRemoteInstance.png) + +### Runtime Information + +Connecting to a running Metabase instance with VisualVM will make lots +of runtime information available. This guide won't go over all of the +possibilities of the tool, but will highlight a few important pieces. + +When running into memory related issues, typically the first question +we want to know is what is consuming extra memory? A heap dump will +take a snapshot of everything in memory at that specific point of +time. That memory snapshot can be analyzed later by tools like the +[Eclipse Memory Analyzer Tool](https://www.eclipse.org/mat/). 
Create a +heap dump from the "Monitor" tab: + +![heapdump](images/HeapDump.png) + +Another useful picture of a running Metabase system is a Thread +Dump. In cases when Metabase appears hung or extremely slow, a thread +dump will indicate what each thread is executing (or blocked on) for +that specific point in time. Collect a thread dump via the "Threads" +tab: + +![threaddump](images/ThreadDump.png) diff --git a/docs/operations-guide/images/AddRemoteHost.png b/docs/operations-guide/images/AddRemoteHost.png new file mode 100644 index 000000000000..c13ce5304a61 Binary files /dev/null and b/docs/operations-guide/images/AddRemoteHost.png differ diff --git a/docs/operations-guide/images/ClickAddJMXHost.png b/docs/operations-guide/images/ClickAddJMXHost.png new file mode 100644 index 000000000000..56420b406005 Binary files /dev/null and b/docs/operations-guide/images/ClickAddJMXHost.png differ diff --git a/docs/operations-guide/images/EnterJMXPort.png b/docs/operations-guide/images/EnterJMXPort.png new file mode 100644 index 000000000000..5ec7c464ff74 Binary files /dev/null and b/docs/operations-guide/images/EnterJMXPort.png differ diff --git a/docs/operations-guide/images/HeapDump.png b/docs/operations-guide/images/HeapDump.png new file mode 100644 index 000000000000..b7c1a30e6bfd Binary files /dev/null and b/docs/operations-guide/images/HeapDump.png differ diff --git a/docs/operations-guide/images/LocalProcessVisualVM.png b/docs/operations-guide/images/LocalProcessVisualVM.png new file mode 100644 index 000000000000..ad311dcae865 Binary files /dev/null and b/docs/operations-guide/images/LocalProcessVisualVM.png differ diff --git a/docs/operations-guide/images/OpenRemoteInstance.png b/docs/operations-guide/images/OpenRemoteInstance.png new file mode 100644 index 000000000000..20618794b426 Binary files /dev/null and b/docs/operations-guide/images/OpenRemoteInstance.png differ diff --git a/docs/operations-guide/images/SetRemoteHostName.png 
b/docs/operations-guide/images/SetRemoteHostName.png new file mode 100644 index 000000000000..ade0429abd69 Binary files /dev/null and b/docs/operations-guide/images/SetRemoteHostName.png differ diff --git a/docs/operations-guide/images/ThreadDump.png b/docs/operations-guide/images/ThreadDump.png new file mode 100644 index 000000000000..e56b2417f2fb Binary files /dev/null and b/docs/operations-guide/images/ThreadDump.png differ diff --git a/docs/operations-guide/running-metabase-on-debian.md b/docs/operations-guide/running-metabase-on-debian.md index 2fa2a4ac9611..4178d8b451a5 100644 --- a/docs/operations-guide/running-metabase-on-debian.md +++ b/docs/operations-guide/running-metabase-on-debian.md @@ -104,7 +104,7 @@ In `/etc/init.d/metabase`, replace configurable items (they look like ` export MB_DB_PASS= export MB_DB_HOST=localhost -java -jar metabase.jar load-from-h2 +java -jar metabase.jar load-from-h2 /path/to/metabase.db # do not include .mv.db or .h2.db suffix ``` -It is expected that you will run the command against a brand new (empty!) database and Metabase will handle all of the work of creating the database schema and migrating the data for you. +It is expected that you will run the command against a brand-new (empty!) database; Metabase will handle all of the work of creating the database schema and migrating the data for you. ###### Notes -* It is required that wherever you are running this migration command can connect to the target MySQL or Postgres database. So if you are attempting to move the data to a cloud database make sure you take that into consideration. +* It is required that you can connect to the target MySQL or Postgres database in whatever environment you are running this migration command in. So, if you are attempting to move the data to a cloud database, make sure you take that into consideration. * The code that handles these migrations uses a Postgres SQL command that is only available in Postgres 9.4 or newer versions. 
Please make sure your Postgres database is version 9.4 or newer. +* H2 automatically adds a `.h2.db` or `.mv.db` extension to the database path you specify, so make sure the path to the DB file you pass to the command *does not* include it. For example, if you have a file named `/path/to/metabase.db.h2.db`, call the command with `load-from-h2 /path/to/metabase.db`. # Running Metabase database migrations manually -When Metabase is starting up it will typically attempt to determine if any changes are required to the application database and it will execute those changes automatically. If for some reason you wanted to see what these changes are and run them manually on your database then we let you do that. +When Metabase is starting up, it will typically attempt to determine if any changes are required to the application database, and, if so, will execute those changes automatically. If for some reason you wanted to see what these changes are and run them manually on your database then we let you do that. Simply set the following environment variable before launching Metabase: @@ -322,3 +328,40 @@ Metabase to use your own logging configuration file by passing a `-Dlog4j.config java -Dlog4j.configuration=file:/path/to/custom/log4j.properties -jar metabase.jar The easiest way to get started customizing logging would be to use a copy of default `log4j.properties` file linked to above and adjust that to meet your needs. Keep in mind that you'll need to restart Metabase for changes to the file to take effect. + +# [Monitoring via JMX](enable-jmx.md) + +Diagnosing performance-related issues can be a challenge. Luckily the JVM ships with tools that can help diagnose many common issues. Enabling JMX and using a tool like VisualVM can help diagnose issues related to running out of memory, a hung Metabase instance and slow response times. See [Monitoring via JMX](enable-jmx.md) for more information on setting this up.
+ +# Java Versions + +Metabase will run on Java version 8 or greater; Java 8 is the easiest and most common choice. + +## Running on Java 8 + +Running on Java 8 is the easiest path to running Metabase. There are no additional parameters required, if launching from a Jar the below invocation will work: + + java -jar metabase.jar + +## Running on Java 9 or Newer + +To use Metabase on Java 9 with Oracle, Vertica, SparkSQL, or other drivers that require external dependencies, +you'll need to tweak the way you launch Metabase. + +Java version 9 has introduced a new module system that places some additional restrictions on class loading. To use +Metabase drivers that require extra external dependencies, you'll need to include them as part of the classpath at +launch time. Run Metabase as follows: + +```bash +# Unix +java -cp metabase.jar:plugins/* metabase.core +``` + +On Windows, use a semicolon instead: + +```powershell +# Windows +java -cp metabase.jar;plugins/* metabase.core +``` + +The default Docker images use Java 8 so this step is only needed when running the JAR directly. diff --git a/docs/setting-up-metabase.md b/docs/setting-up-metabase.md index 8ca5ea5d1798..ac6f97c072f8 100644 --- a/docs/setting-up-metabase.md +++ b/docs/setting-up-metabase.md @@ -25,7 +25,7 @@ If you’re ready to connect, here’s what you’ll need: * The **username** you use for the database * The **password** you use for the database -If you’re using Heroku, here are [instructions on how to get this information](./administration-guide/01-managing-databases.md#heroku-databases). If you’re an Amazon RDS kind of person, you can follow [these instructions](./administration-guide/01-managing-databases.md#rds-databases). +If you’re using Heroku, here are [instructions on how to get this information](./administration-guide/01-managing-databases.html#heroku-databases). 
If you’re an Amazon RDS kind of person, you can follow [these instructions](./administration-guide/01-managing-databases.html#rds-databases). If you don't have this information handy, the person responsible for administering the database should have it. diff --git a/docs/troubleshooting-guide/email.md b/docs/troubleshooting-guide/email.md index 4c798c64162f..aeac1772bbef 100644 --- a/docs/troubleshooting-guide/email.md +++ b/docs/troubleshooting-guide/email.md @@ -19,6 +19,7 @@ 4. For user accounts specifically, did you previously create an account under this email and then delete it? This occasionally results in that email address being "claimed". +5. Make sure that the HOSTNAME is being set correctly. EC2 instances in particular have those set to the local ip, and some email delivery services such as GMail will error out in this situation. ## Specific Problems: @@ -27,4 +28,4 @@ ### Metabase can't send email via Office365 We see users report issues with sending email via Office365. We recommend using a different email delivery service if you can. 
-https://github.com/metabase/metabase/issues/4272 \ No newline at end of file +https://github.com/metabase/metabase/issues/4272 diff --git a/docs/troubleshooting-guide/running.md b/docs/troubleshooting-guide/running.md index ea1949c58b4a..911647793416 100644 --- a/docs/troubleshooting-guide/running.md +++ b/docs/troubleshooting-guide/running.md @@ -1,28 +1,6 @@ ## Specific Problems: - -### Metabase fails to start due to PermGen OutOfMemoryErrors - -On Java 7, Metabase may fail to launch with a message like - - java.lang.OutOfMemoryError: PermGen space - -or one like - - Exception: java.lang.OutOfMemoryError thrown from the UncaughtExceptionHandler - -If this happens, setting a few JVM options should fix your issue: - - java -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:MaxPermSize=256m -jar target/uberjar/metabase.jar - -You can also pass JVM arguments by setting the environment variable `JAVA_TOOL_OPTIONS`, e.g. - - JAVA_TOOL_OPTIONS='-XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:MaxPermSize=256m' - -Alternatively, you can upgrade to Java 8 instead, which will fix the issue as well. - - ### Metabase fails to start due to Heap Space OutOfMemoryErrors Normally, the JVM can figure out how much RAM is available on the system and automatically set a sensible upper bound for heap memory usage. On certain shared hosting @@ -40,3 +18,11 @@ As above, you can use the environment variable `JAVA_TOOL_OPTIONS` to set JVM ar for example. docker run -d -p 3000:3000 -e "JAVA_TOOL_OPTIONS=-Xmx2g" metabase/metabase + +### Diagnosing memory issues causing OutOfMemoryErrors + +If the Metabase instance starts and runs for a significant amount of time before running out of memory, there might be an event (i.e. a large query) triggering the `OutOfMemoryError`. One way to help diagnose where the memory is being used is to enable heap dumps when an OutOfMemoryError is triggered. 
To enable this, you need to add two flags to the `java` invocation: + + java -Xmx2g -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/path/to/a/directory -jar metabase.jar + +The `-XX:HeapDumpPath` flag is optional, with the current directory being the default. When an `OutOfMemoryError` occurs, it will dump an `hprof` file to the directory specified. These can be large (i.e. the size of the `-Xmx` argument) so ensure your disk has enough space. These `hprof` files can be read with many different tools, such as `jhat` included with the JDK or the [Eclipse Memory Analyzer Tool](https://www.eclipse.org/mat/). diff --git a/docs/users-guide/01-what-is-metabase.md b/docs/users-guide/01-what-is-metabase.md index 3ba08779f728..1ed4cc455ccc 100644 --- a/docs/users-guide/01-what-is-metabase.md +++ b/docs/users-guide/01-what-is-metabase.md @@ -1,6 +1,54 @@ ## What is Metabase? -Metabase is an open source business intelligence tool. It lets you ask questions about your data and displays answers in formats that make sense, whether that's a bar graph or a detailed table. -Your questions can be saved for later, making it easy to come back to them, or you can group questions into great looking dashboards. Metabase also makes it easy to share questions and dashboards with the rest of your team. +Metabase is an open source business intelligence tool. It lets you ask questions about your data, and displays answers in formats that make sense, whether that's a bar graph or a detailed table. + +Your questions can be saved for later, making it easy to come back to them, or you can group questions into great looking dashboards. Metabase also makes it easy to share questions and dashboards with the rest of your team. + +## Finding your way around + +So, you've [gotten Metabase up and running](../operations-guide/start.md) and [connected it to your data](../administration-guide/01-managing-databases.md). It's time to give you the lay of the land.
+ +### The home page + +![The home page](./images/metabase-homepage.png) + +Fresh out of the box, Metabase will show you a few things on the home page: +* Some [automatic explorations](14-x-rays.md) of your tables that you can look at and save as a dashboard if you like any of them. +* An area where things you or your teammates create will show up, along with a link to see all the dashboards, questions, and pulses you have. +* A list of the databases you've connected to Metabase. + +![Our data](./images/our-data.png) + +Once you've created some [dashboards](07-dashboards.md), any of them that you pin will show up on the homepage for all of your teammates, so that when they log in to Metabase they'll know right where to go. + +### Browse your data + +![Browse data](./images/browse-data.png) + +If you connected your database to Metabase during setup, you'll see it listed at the bottom of the homepage along with the sample dataset that Metabase comes with. Click on a database to see its contents. You can click on a table to see its rows, or you can also click on the bolt icon to x-ray a table and see an automatic exploration of it, or click on the book icon to go to the data reference view for that table to learn more about it. + +### Explore your analytics + +As you and your team create dashboards and collections, they'll start to show up on the homepage. Click on a collection in the "Our analytics" section to see its contents, or click "browse all items" to see everything you and your team have made. [More about exploring](03-basic-exploration.md) + +### Ask a question or write a query + +Click the `Ask a question` button in the top-right of Metabase to start a new custom exploration of one of your tables, or to write a new SQL or native query if you want to really dig in. + +### Make a new dashboard or pulse + +In Metabase, dashboards are made up of saved questions that you can arrange and resize as you please.
They're a great way to track important metrics and stats that you care about. Pulses are what regularly scheduled reports are called in Metabase. They can be sent out either via email, Slack, or both. + +To make a dashboard or pulse, click the plus (+) icon in the top-right of the main navigation bar. + +![Create menu](./images/create-menu.png) + +### Use search to quickly find things + +![Search results](./images/search-results.png) + +The search bar that's always present at the top of the screen lets you search through your dashboards, collections, saved questions, and pulses in an instant. Just type part of the title of the thing you're looking for and hit enter. + +## A primer on databases To fully understand how to use Metabase, it’s useful to have at least a high-level understanding of databases, so we'll discuss [the basics of databases](02-database-basics.md) next. diff --git a/docs/users-guide/03-basic-exploration.md b/docs/users-guide/03-basic-exploration.md index 47da7d701aed..b2df4bb89ded 100644 --- a/docs/users-guide/03-basic-exploration.md +++ b/docs/users-guide/03-basic-exploration.md @@ -1,30 +1,44 @@ ### Exploring in Metabase -As long as you're not the very first user in your team's Metabase, the easiest way to start exploring your data is by looking at dashboards, charts, and lists that your teammates have already created. + +#### See what your teammates have made +As long as you're not the very first user in your team's Metabase, the easiest way to start exploring your data is by looking at dashboards, charts, and lists that your teammates have already created. The best place to start is by checking out any dashboards that might be pinned on your home page or in the collections you have access to. + +#### Browse your data +Alternatively, you can dive right in to exploring the data in Metabase by clicking on one of the databases at the bottom of the home page, and then clicking on a table to see it. 
You can also click on the bolt icon on any table to see an automatic exploration of its data. Give it a try! + +![Browse data](./images/browse-data.png) + +#### Exploring collections +Collections in Metabase are a lot like folders. They're where all your team's dashboards and charts are kept. To explore a collection just click on one in the "Our analytics" section of the home page, or click on `Browse all items` to see everything. + +![A collection](./images/collection-detail.png) + +If your teammates are cool, they'll have pinned some important dashboards or questions within your collections; if so, those important or useful items will show up nice and large at the top of a collection. Collections also have a list of any other items that are saved within them, as well as a list of other collections that are saved inside the current one. #### Exploring dashboards -Click on the `Dashboards` nav item to see all the dashboards your teammates have created. Dashboards are simply collections of charts and numbers that you want to be able to refer back to regularly. (You can learn more about dashboards [here](07-dashboards.md)) +Dashboards are simply collections of charts and numbers that you want to be able to refer back to regularly. You can learn more about dashboards [here](07-dashboards.md). -If you click on a part of a chart, such as a bar in a bar chart, or a dot on a line chart, you'll see a menu with actions you can take to dive deeper into that result, or to branch off from it in a different direction. +If you click on a part of a chart, such as a bar in a bar chart, or a dot on a line chart, you'll see a menu with actions you can take to dive deeper into that result, to branch off from it in a different direction, or to [x-ray](14-x-rays.md) it to see an automatic exploration of the thing you clicked on.
![Drill through](images/drill-through/drill-through.png) In this example of pie orders by type over time, clicking on a dot on this line chart gives us the ability to: -- Zoom in — i.e., see just the banana cream pie orders in June 2017 over time -- View these Orders — which lets us see a list of banana cream pie orders in June 2017 -- Break out by a category — this lets us do things like see the banana cream pie orders in June 2017 broken out by the status of the customer (e.g., `new` or `VIP`, etc.) or other different aspects of the order. Different charts will have different break out options, such as Location and Time. +- **Zoom in** — i.e., see just the banana cream pie orders in June 2017 over time +- **View these Orders** — which lets us see a list of banana cream pie orders in June 2017 +- **Break out by a category** — this lets us do things like see the banana cream pie orders in June 2017 broken out by the status of the customer (e.g., `new` or `VIP`, etc.) or other different aspects of the order. Different charts will have different break out options, such as Location and Time. + +**Note that charts created with SQL don't currently have these action options.** Other charts as well as table cells will often also allow you to go to a filtered view of that chart or table. You can click on one of the inequality symbols to see that chart where, for example, the value of the Subtotal column is less than $100, or where the Purchased-at timestamp is greater than (i.e., after) April 1, 2017. ![Inequality filters](images/drill-through/inequality-filters.png) -Lastly, clicking on the ID of an item in table gives you the option to go to a detail view for that single record. (E.g., you can click on a customer's ID to see the profile view for that one customer.) - -**Note that charts created with SQL don't currently have these action options.** +Lastly, clicking on the ID of an item in a table gives you the option to go to a detail view for that single record. 
(E.g., you can click on a customer's ID to see the profile view for that one customer.) #### Exploring saved questions -In Metabase parlance, every chart or number on a dashboard is called a "question." Clicking on the title of a question on a dashboard will take you to a detail view of that question. You'll also end up at this detail view if you use one of the actions mentioned above. You can also browse all the questions your teammates have saved by clicking the `Questions` link in the main navigation. +In Metabase parlance, every chart or number on a dashboard is called a "question." Clicking on the title of a question on a dashboard will take you to a detail view of that question. You'll also end up at this detail view if you use one of the actions mentioned above. -When you're viewing the detail view of a question, you can use all the same actions mentioned above. You can also click on the headings of tables to see more options, like viewing the sum of the values in a column, or finding the minimum or maximum value in it. +When you're looking at the detail view of a question, you can use all the same actions mentioned above. You can also click on the headings of tables to see more options, like viewing the sum of the values in a column, or finding the minimum or maximum value in it. ![Heading actions](images/drill-through/heading-actions.png) @@ -33,15 +47,17 @@ Additionally, the question detail page has an Explore button in the bottom-right ![Action menu](images/drill-through/actions.png) Here's a list of all the actions: -* Table actions +* **Table actions** + - `X-ray this` will show you an automatic exploration and summary of the data in this table. [Learn more about x-rays](14-x-rays.md) - `Count of rows by time` lets you see how many rows there were in this table over time. - `Summarize this segment` gives you options of various summarization functions (sum, average, maximum, etc.) you can use on this table to arrive at a number. 
-* Chart and pivot table actions +* **Chart and pivot table actions** - `Break outs` will be listed depending on the question, and include the option to break out by a category, location, or time. For example, if you're looking at the count of total orders over time, you might be able to further break that out by customer gender, if that information is present. - `View this as a table` does what it says. Every chart has a table behind it that is providing the data for the chart, and this action lets you see that table. - `View the underlying records` shows you the un-summarized list of records underlying the chart or number you're currently viewing. + - `X-ray this question` will show you an automatic [x-ray](14-x-rays.md) exploration of this question's results. --- -## Next: Asking new questions +## Next: Asking custom questions So what do you do if you can't find an existing dashboard or question that's exactly what you're looking for? Let's learn about [asking our own new questions](04-asking-questions.md) diff --git a/docs/users-guide/04-asking-questions.md b/docs/users-guide/04-asking-questions.md index 509b4c846b8f..1989b06a1f67 100644 --- a/docs/users-guide/04-asking-questions.md +++ b/docs/users-guide/04-asking-questions.md @@ -1,21 +1,20 @@ -## Asking questions +## Asking custom questions --- Metabase's two core concepts are questions and their corresponding answers. Everything else is based around questions and answers. To ask Metabase a question, click the New Question button at the top of the screen.
### Ways to start a new question -If an administrator has [defined some metrics or segments](../administration-guide/07-segments-and-metrics.md), when you click on the New Question button, you'll see a screen like this one: +If an administrator has [defined some metrics](../administration-guide/07-segments-and-metrics.md), when you click on the `Ask a question` button in the top bar you'll see a screen like this one: ![New question options](images/new-question-all-options.png) You can start your new question: - from an existing metric -- from an existing segment - from scratch with the Question Builder interface -- or using the SQL / native query editor +- using the SQL / native query editor -Asking a new question about a **metric** or a **segment** is often a great place to start. +Asking a new question about a **metric** is often a great place to start. #### Asking a new question about a metric @@ -31,25 +30,23 @@ You can also use the Action Menu in the bottom-right of the screen to choose a b ![Metric action menu](images/metric-action-menu.png) -#### Asking a new question about a segment +#### Asking a new question about a table -A **segment** is any kind of list or table of things that your company cares about: returning users, orders that used a certain promo code, or sales leads that need to be followed up with are all examples of possible segments. +Another quick way to start a new question is by clicking on one of your connected databases at the bottom of the homepage, and picking a table that you have a question about. You'll immediately see the table and the graphical question builder so that you can keep exploring. -Selecting the Segment option from the new question menu will show you a list of your company's segments. 
When you click on one, you'll see a list, like this one: +![Browse data](./images/browse-data.png) -![Californians segment](images/segment-californians.png) - -When viewing a segment or a table, you can click on the headings of columns to see options for ways to explore more, like seeing the distribution of the values a column has, or the number of distinct values: +When viewing a table you can also click on the headings of columns to see options for ways to explore more, like clicking on the Age column of your Users table to see how many Users you have per age group (that's called a "distribution"): ![Table heading actions](images/table-heading-actions.png) -You can also use the Action Menu when viewing a segment or table to see any metrics that are related, or to summarize the table. +You can also use the Action Menu when viewing a table to see any metrics in it, or to summarize the table. ![Table action menu](images/segment-actions.png) #### Asking a new custom question -If your team hasn't set up any metrics or segments, or if you have a question that isn't covered by an existing question or segment, you can create a custom question using the Question Builder interface by clicking "Custom." Or, if you're an advanced user, you can click "SQL" to go straight to the SQL/native query editor. +If you have a question that isn't covered by an existing question, you can create a new custom question using the Question Builder interface by clicking "Custom." Or, if you're an advanced user, you can click "SQL" to go straight to the SQL/native query editor. 
### Using the Question Builder interface @@ -61,7 +58,7 @@ Metabase has a simple graphical question builder that looks like this: The question builder is made up of four distinct sections, from left to right: - **Data**, where you pick the source data you want to ask a question about - **Filters**, where you can optionally add one or more filters to narrow down your source data -- **View**, where you choose what you want to see — raw table data, a basic metric, or a saved metric +- **View**, where you choose what you want to see — raw table data, a basic metric, or a "common" metric that an administrator has defined - **Groupings**, where you can group or break out your metric by time, location, or other categories #### Source data @@ -86,7 +83,7 @@ You can use most saved questions as source data, provided you have [permission]( #### Filters --- -Filtering your data lets you exclude information that you don’t want. You can filter by any field in the table you're working with, or by any tables that are connected through a foreign key. Filters narrow down the source data to an interesting subset, like "active users" or "bookings after June 15th, 2015." +Filtering your data lets you exclude information that you don’t want. You can filter by any field in the table you're working with, or by any tables that are connected through a foreign key. Filters narrow down the source data to an interesting subset, like "active users" or "bookings after June 15th, 2015." Different fields will have different filter options based on what kind of data type they are. There are four universal filter options, or “operators,” that can be applied to any field. These operators are: @@ -105,27 +102,25 @@ Fields that are comparable, like numbers or dates, can also be filtered using th ##### Filtering by dates -If filtering by dates, a date picker will appear to allow you to select dates easily. You have two main options for picking your date: relative or specific. 
+If filtering by dates, a date picker will appear to allow you to select dates easily, and will default to the previous 30 days. -**Specific Dates** -This is the most basic way to select dates. You just click on the date you want from the calendar. If you click on a second date, the picker will select all the dates in between the two you clicked on, creating a range. Clicking on any date while you have a range selected will clear the range. You can also use the **All before** and **All after** buttons to quickly select all dates before or after the one you’ve selected. +Click on the first dropdown to change the kind of date filter you're using. The rest of the popup menu will change depending on this first selection. -**Relative Dates** -Relative dates are how we more commonly talk about time: “how many customers did we have **last month**?” We talk about time relative to today. +One important thing to understand when filtering by time or dates like this is the difference between specific and relative dates: -In practice, if you select **Past 30 days** from the Relative Date calendar picker, this would be the same as selecting those same dates from the Specific Date picker — *unless* you save your question and look at it again tomorrow. +**Specific dates** are things like November 1, 2010, or June 3 – July 12, 2017; they always refer to the same date(s). -Now the relative date will be referencing the past 30 days from *today*, *not* from the day you saved the question. This is a really useful way of creating and saving questions that stay up-to-date: you can always know what your total sales were in the past 7 days, for example. +**Relative dates** are things like "the past 30 days," or "the current week;" as time passes, the dates these refer to change. Relative dates are a useful way to set up a filter on a question so that it stays up to date by showing you for example how many users visited your website in the last 7 days. 
##### Using segments -If your Metabase admins have created special named filters, called segments, for the table you’re viewing, they’ll appear at the top of the filter dropdown in purple text with a star next to them. These are shortcuts to sets of filters that are commonly used in your organization. They might be something like “Active Users,” or “Most Popular Products.” +If your Metabase administrators have created special named filters for the table you're viewing they’ll appear at the top of the filter dropdown in purple text with a star next to them. These are called "segments," and they're shortcuts to filters that are commonly used in your organization. They might be called things like “Active Users,” or “Most Popular Products.” #### Selecting answer output in the View section --- The next section of the question builder is where you select what you want the output of your answer to be, under the View dropdown. You’re basically telling Metabase, “I want to view…” Metabase can output the answer to your question in four different ways: ##### 1. Raw data -Raw Data is just a table with the answer listed in rows. It's useful when you want to see the actual data you're working with, rather than a sum or average, etc., or when you're exploring a small table with a limited number of records. +Raw Data is just a table with the answer listed in rows. It's useful when you want to see the actual data you're working with, rather than a sum or average, etc., or when you're exploring a small table with a limited number of records. When you filter your data to see groups of interesting users, orders, etc., Raw Data will show you an output of each individual record that matches your question's criteria. 
diff --git a/docs/users-guide/05-visualizing-results.md b/docs/users-guide/05-visualizing-results.md index 1febf66335ee..ffaac59c4954 100644 --- a/docs/users-guide/05-visualizing-results.md +++ b/docs/users-guide/05-visualizing-results.md @@ -9,6 +9,7 @@ In Metabase, an answer to a question can be visualized in a number of ways: * Table * Line chart * Bar chart +* Row chart * Area chart * Scatterplot or bubble chart * Pie/donut chart @@ -38,19 +39,49 @@ Progress bars are for comparing a single number result to a goal value that you ![Progress bar](images/visualizations/progress.png) #### Tables -The Table option is good for looking at tabular data (duh), or for lists of things like users. The options allow you to hide and rearrange fields in the table you're looking at. If your table is a result that contains one metric and two dimensions, Metabase will also automatically pivot your table, like in the example below (the example shows the count of orders grouped by the review rating for that order and the category of the product that was purchased; you can tell it's pivoted because the grouping field values are all in the first column and first row). You can turn this behavior off in the chart settings. +The Table option is good for looking at tabular data (duh), or for lists of things like users or orders. The visualization options for tables allow you to add, hide, or rearrange fields in the table you're looking at. + +##### Adding or hiding fields + +![Additional fields](images/visualizations/add-fields.png) + +Open up the visualization options and you'll see the Data tab, which displays all the fields currently being shown in the table, as well as more fields from linked tables that you can add to the current table view. + +To hide a field, click the X icon on it; that'll send it down to the "More fields" area in case you want to bring it back. To add a linked field, just click the + icon on it, which will bring it to the "Visible fields" section. 
Click and drag any of the fields listed there to rearrange the order in which they appear. + +**Note:** changing these options doesn't change the actual table itself; it just creates a custom view of it that you can save as a "question" in Metabase and refer back to later, share with others, or add to a dashboard. + +##### Conditional formatting +Sometimes it's helpful to highlight certain rows or columns in your tables when they meet a specific condition. You can set up conditional formatting rules by going to the visualization settings while looking at any table, then clicking on the `Formatting` tab. + +![Conditional formatting](images/visualizations/conditional-formatting.png) + +When you add a new rule, you'll first need to pick which column(s) should be affected. For now, you can only pick numeric columns. Your columns can be formatted in one of two ways: + +1. **Single color:** pick this if you want to highlight cells in the column if they're greater than, less than, or equal to a specific number. You can optionally highlight the whole row of a cell that matches the condition you pick so that it's easier to spot as you scroll down your table. +2. **Color range:** choose this option if you want to tint all the cells in the column from smallest to largest or vice versa. + +You can set as many rules on a table as you want. If two or more rules disagree with each other, the rule that's on the top of your list of rules will win. You can click and drag your rules to reorder them, and click on a rule to edit it. + +##### Pivoted tables +If your table is a result that contains one metric and two dimensions, Metabase will also automatically "pivot" your table, like in the example below (the example shows the count of orders grouped by the review rating for that order and the category of the product that was purchased; you can tell it's pivoted because the grouping field values are all in the first column and first row).
You can turn this behavior off in the chart settings. ![Pivot table](images/visualizations/pivot.png) #### Line, bar, and area charts -Line charts are best for displaying the trend of a number over time, especially when you have lots of x-axis values. Bar charts are great for displaying a metric grouped by a category (e.g., the number of users you have by country), and they can also be useful for showing a number over time if you have a smaller number of x-axis values (like orders per month this year). +Line charts are best for displaying the trend of a number over time, especially when you have lots of x-axis values. Bar charts are great for displaying a number grouped by a category (e.g., the number of users you have by country), and they can also be useful for showing a number over time if you have a smaller number of x-axis values (like orders per month this year). ![Bar chart](images/visualizations/bar.png) -Area charts are useful when comparing the the proportions between two metrics over time. Both bar and area charts can be stacked. +Area charts are useful when comparing the proportions of two metrics over time. Both bar and area charts can be stacked. ![Stacked area chart](images/visualizations/area.png) +#### Row charts +If you're trying to group a number by a field that has a lot of possible values, like a Vendor or Product Title field, try visualizing it as a row chart. Metabase will show you the bars in descending order of size, with a final bar at the bottom for items that didn't fit. + +![Row chart](images/visualizations/row.png) + ##### Histograms If you have a bar chart like Count of Users by Age, where the x-axis is a number, you'll get a special kind of chart called a **histogram**, where each bar represents a range of values (called a "bin"). Note that Metabase will automatically bin your results any time you use a number as a grouping, even if you aren't viewing a bar chart. 
Questions that use latitude and longitude will also get binned automatically. diff --git a/docs/users-guide/06-sharing-answers.md b/docs/users-guide/06-sharing-answers.md index ddb895afd222..c293c56f5bbf 100644 --- a/docs/users-guide/06-sharing-answers.md +++ b/docs/users-guide/06-sharing-answers.md @@ -1,15 +1,15 @@ -## Sharing your questions and answers +## Sharing and organizing your questions and answers --- ### How to save a question Whenever you’ve arrived at an answer that you want to save for later, click the **SAVE** button in the top right of the screen. This will also save the visualization option you’ve chosen for your answer. -![savebutton](images/SaveButton.png) +![Save button](images/SaveButton.png) -A pop-up box will appear, you to give your question a name and a description. We suggest phrasing the names for your questions in the form of a question, such as, “How many customers did we have last month?” After saving your question, you'll be asked if you want to add it to a dashboard. +A pop-up box will appear, prompting you to give your question a name and description, and to pick which collection to save it in. Note that your administrator might have set things up so that you're only allowed to save questions in certain collections, but you can always save things in your Personal Collection. After saving your question, you'll be asked if you want to add it to a new or existing dashboard. -Now, whenever you want to refer to your question again you can find it in the saved questions list by clicking on the **Questions** link from the main navigation. To edit your question, go to it and click the pencil icon in the top-right. +Now, whenever you want to refer to your question again you can find it by searching for it in the search bar at the top of Metabase, or by navigating to the collection where you saved it. To edit your question, go to it and click the pencil icon in the top-right. 
### Sharing questions with public links If your Metabase administrator has enabled [public sharing](../administration-guide/12-public-links.md) on a saved question or dashboard, you can go to that question or dashboard and click on the sharing icon to find its public links. Public links can be viewed by anyone, even if they don't have access to Metabase. You can also use the public embedding code to embed your question or dashboard in a simple web page or blog post. @@ -17,37 +17,50 @@ If your Metabase administrator has enabled [public sharing](../administration-gu ![Share icon](images/share-icon.png) ### Organizing and finding your saved questions -After your team has been using Metabase for a while, you’ll probably end up with lots of saved questions. The Questions page has several tools that’ll help you organize things and find what you’re looking for. +After your team has been using Metabase for a while, you’ll probably end up with lots of saved questions. Metabase has several ways to help you organize things and find what you’re looking for. -![Questions](images/saved-questions.png) +![Our analytics](images/our-analytics-page.png) #### Collections -Administrators of Metabase can create collections to put saved questions in. Depending on the permissions you've been given to collections, you'll be able to view the questions inside, edit them, and move questions from one collection to another. Questions that aren't saved in any collection will appear in the "Everything else" section of the main Questions page, and are visible to all Metabase users in your organization. If you're an administrator of your Metabase instance, here are [instructions for creating collections and managing permissions](../administration-guide/06-collections.md). +Collections are the main way to organize questions, as well as dashboards and pulses. 
[Administrators can give you different kinds of access](../administration-guide/06-collections.md) to each collection: -#### Labels -Older versions of Metabase included a way to add labels to your questions, but this feature will be going away in a future version of Metabase. Currently, if your team was already using labels, you'll still be able to edit and apply them to questions. Here are some [suggestions for switching from labels to collections](../administration-guide/06-collections.md#what-about-labels). +- **View access:** you can see the collection and its contents, but you can't modify anything or put anything new into the collection. +- **Curate access:** you can edit, move, or archive the collection and its contents. You can also move or save new things in it and create new collections inside of it, and can also pin items in the collection to the top of the screen. Only administrators can edit permissions for collections, however. +- **No access:** you can't see the collection or its contents. If you have access to a dashboard, but it contains questions that are saved in a collection you don't have access to, those questions will show a permissions notification instead of the chart or table. -#### Shortcuts -At the top of lists of saved questions you’ll find a dropdown with shortcuts to your favorite questions (mark a question as a favorite by clicking on the star icon that appears when you hover over it), questions you’ve recently viewed, questions that you’ve saved personally, and popular questions that are used the most by your team. +#### Your personal collection +In addition to the collections you and your teammates have made, you'll also always have your own personal collection that only you and administrators can see. To find it, click on the "browse all items" button on the homepage and click on "my personal collection" in the list of collections. 
-#### Search and filtering -On the main Questions page, you can search through all of your collections for a particular question using the search box in the top-right. You can also filter lists of saved questions by typing in the `Filter the list…` area. +You can use your personal collection as a scratch space to put experiments and explorations that you don't think would be particularly interesting to the rest of your team, or as a work-in-progress space where you can work on things and then move them to a shared place once they're ready. -#### Moving -To move a question into a collection, or from one collection to another, hover over it and click on the right-arrow icon that appears on the far right of the question. Note that you have to have permission to edit the collection that you're moving a question into, and the collection you're moving the question out of. +#### Pinned items -#### Archiving -Sometimes questions outlive their usefulness and need to be sent to Question Heaven. To archive a question, just click on the box icon that appears on the far right when you hover over a question. Collections can also be archived and unarchived, but only by Metabase administrators. +![Pins](images/pinned-items.png) + +In each collection, you can pin important or useful dashboards or questions to make them stick to the top of the screen. Pinned items will also be displayed as large cards to make them stand out well. If you have Curate permissions for a collection, you can pin and un-pin things, and drag and drop pins to change their order. -Note that archiving a question removes it from all dashboards or Pulses where it appears, so be careful! +Any dashboards that are pinned in the main "Our analytics" collection will also show up on the homepage. -If you have second thoughts and want to bring an archived question back, you can see all your archived questions from the **Archive** icon at the top-right of the Questions page. 
To unarchive a question, hover over it and click the box icon that appears on the far right. +#### Search -#### Selecting multiple questions -Clicking on the icon to the left of questions let's you select several at once so that you can move or archive many questions at once. +![Search results](./images/search-results.png) + +Type into the search box that's at the top of Metabase and hit enter to search through all the dashboards, questions, collections, and pulses your team has. + +#### Moving +To move a question, dashboard, or pulse into a collection, or from one collection to another, just click and drag it onto the collection where you want it to go. You can also click on the `…` menu to the right of the question and pick the Move action. If you're trying to move several things at once, click on the items' icons to select them, then click the Move action that pops up at the bottom of the screen. ![Selecting questions](images/question-checkbox.png) +Note that you have to have Curate permission for the collection that you're moving a question into *and* the collection you're moving the question out of. + +#### Archiving +Sometimes questions outlive their usefulness and need to be sent to Question Heaven. To archive a question or dashboard, just click on the `…` menu that appears on the far right when you hover over a question and pick the Archive action. You'll only see that option if you have "curate" permission for the current collection. You can also archive multiple items at once, the same way as you move multiple items. Note that archiving a question removes it from all dashboards or Pulses where it appears, so be careful! + +You can also archive *collections* as long as you have curate permissions for the collection you're trying to archive, the collection *it's* inside of, as well as any and all collections inside of *it*. Archiving a collection archives all of its contents as well. 
+ +If you have second thoughts and want to bring an archived item back, you can see all your archived questions from the archive; click the menu icon in the top-right of any collection page to get to the archive. To unarchive a question, hover over it and click the unarchive icon that appears on the far right. + --- ## Next: creating dashboards diff --git a/docs/users-guide/07-dashboards.md b/docs/users-guide/07-dashboards.md index 841591df76b4..4c1e6ddff48b 100644 --- a/docs/users-guide/07-dashboards.md +++ b/docs/users-guide/07-dashboards.md @@ -10,19 +10,39 @@ Have a few key performance indicators that you want to be able to easily check? You can make as many dashboards as you want. Go nuts. ### How to create a dashboard -Once you have a question saved, you can create a dashboard. Click the **Dashboards** link at the top of the screen, then click the plus icon in the top-right to create a new dashboard. Give your new dashboard a name and a description, then click **Create**, and you’ll be taken to your shiny new dashboard. +Click the plus (+) icon in the top-right of the screen to open the menu to create a new dashboard. Give your new dashboard a name and a description, choose which collection it should be saved in, then click **Create**, and you’ll be taken to your shiny new dashboard. ![Create Dashboard](images/dashboards/DashboardCreate.png) ### Adding saved questions to a dashboard -You can add a newly saved question to a dashboard directly from the window that pops up after you save the question, or by clicking the Add to Dashboard icon in the top-right of a question page. You can also go to one of your dashboards and click the plus icon in the top right to add any of your saved questions to the dashboard. +You can add a newly saved question to a dashboard directly from the window that pops up after you save the question, or by clicking the Add to Dashboard icon in the top-right of a question page. 
You can also go to one of your dashboards and click the plus icon in the top right to add any of your saved questions to the dashboard. Dashboards and the questions they contain do not need to be saved in the same collection. Once you add a question to your dashboard, it’ll look something like this: ![First Dashboard](images/dashboards/FirstDashboard.png) +### Adding headings or descriptions with text cards +Another neat thing you can do is add text cards to your dashboards so that you can write descriptions, explanations, notes, or even add images. Text cards can also be used to create separations between sections of charts in your dashboards. + +To add a new text card, create a new dashboard or edit an existing one and click on the text card button in the top-right: + +![Text card button](images/dashboards/text-cards/text-card-button.png) + +Your new, empty text card will appear. It has two modes: writing and previewing. Toggle between the modes by clicking the eye or pencil icons in the top-left of the card: + +![New text card](images/dashboards/text-cards/new-text-card.png) + +You can use [Markdown](http://commonmark.org/help/) to format the text in your text card, create inline tables or code snippets, or even embed linked images (easy on the GIFs, friends). + +![Markdown](images/dashboards/text-cards/markdown.png) + +Click the eyeball icon to see what your formatted Markdown will look like when you save the card: + +![Result](images/dashboards/text-cards/result.png) + + ### Arranging cards -Each question on a dashboard is in its own card that you can move around or resize as you see fit; just click the edit icon that looks like a pencil in the top-right of the dashboard screen. +Each question on a dashboard is in its own card that you can move around or resize as you see fit; just click the edit icon that looks like a pencil in the top-right of the dashboard screen. Once you're in edit mode you'll see a grid appear. 
You can move and resize the cards in the dashboard to your liking and they'll snap to the grid. @@ -37,14 +57,12 @@ Questions in your dashboard will automatically update their display based on the ### Archiving a dashboard Archiving a dashboard does not archive the individual saved questions on it — it just archives the dashboard. To archive a dashboard while viewing it, click the pencil icon to enter edit mode, then click the Archive button. -You can view all of your archived dashboards by clicking the box icon in the top-right of the Dashboards page. Archived dashboards in this list can be unarchived by clicking the icon of the box with the upward arrow next to that dashboard. - -(Note: as of Metabase v0.24, dashboards can no longer be permanently deleted; only archived.) +You can view all of your archived items by clicking the menu icon in the top-right of any collection page. Archived dashboards in this list can be unarchived by clicking the icon of the box with the upward arrow next to that dashboard. ### Finding dashboards -After a while, your team might have a lot of dashboards. To make it a little easier to find dashboards that you look at often, you can mark a dashboard as a favorite by clicking the star icon on it from the dashboards list. You can use the filter dropdown in the top of the list to view only your favorite dashboards, or only the ones that you created yourself. +After a while, your team might have a lot of dashboards. To make it a little easier to find dashboards that your team looks at often, you can pin them to the top of the collection by clicking and dragging them to the top or by opening the `…` menu and selecting the Pin action. -![Filter list](images/dashboards/FilterDashboards.png) +You can also search for any dashboard (or question, collection, or pulse) by its title in the big search box at the top of Metabase. 
### Fullscreen dashboards @@ -91,7 +109,7 @@ Some tips: * Place the most important saved question cards near the top of the dashboard, and/or make them bigger than the other cards. That will help draw people’s attention to what matters most. * If you have more than 10 cards on a dashboard, think about breaking the dashboard into two separate ones. You don't want to overwhelm people with too much information, and each dashboard should revolve around one theme or topic. Remember — you can make as many dashboards as you want, so you don’t have to cram everything into just one. -* Consider [adding filters to your dashboard](07-dashboards.md#dashboard-filters) to make them more useful and flexible. For example, instead of your dashboard being full of questions that are restricted to a specific time span, you can make more general questions and use dashboard filters to change the time span you're looking at. +* Consider [adding filters to your dashboard](08-dashboard-filters.md) to make them more useful and flexible. For example, instead of your dashboard being full of questions that are restricted to a specific time span, you can make more general questions and use dashboard filters to change the time span you're looking at. --- diff --git a/docs/users-guide/08-dashboard-filters.md b/docs/users-guide/08-dashboard-filters.md index 6328af63a8b7..89f302bb448c 100644 --- a/docs/users-guide/08-dashboard-filters.md +++ b/docs/users-guide/08-dashboard-filters.md @@ -13,7 +13,7 @@ To add a filter to a dashboard, first enter dashboard editing mode, then click t ![Add a Filter](images/dashboard-filters/01-add-filter.png) You can choose from a number of filter types: Time, Location, ID, or Other Categories. 
The type of filter you choose will determine what the filter widget will look like, and will also determine what fields you’ll be able to filter your cards by: -* **Time:** when picking a Time filter, you'll also be prompted to pick a specific type of filter widget: Month and Year, Quarter and Year, Single Date, Date Range, or Relative Date. Single Date and Date Range will provide a calendar widget, while the other options all provide slightly different dropdown interfaces for picking values. +* **Time:** when picking a Time filter, you'll also be prompted to pick a specific type of filter widget: Month and Year, Quarter and Year, Single Date, Date Range, Relative Date, or All Options. "Single Date" and "Date Range" will provide a calendar widget, while the other options all provide slightly different dropdown interfaces for picking values. Choose "All Options" to get a widget that's just like the time filter in the graphical query builder. * **Location:** there are four types of Location filters to choose from: City, State, ZIP or Postal Code, and Country. These will all show up as input box widgets unless the field(s) you're filtering contain fewer than 40 distinct possible values, in which case the widget will be a dropdown. * **ID:** this filter provides a simple input box where you can type the ID of a user, order, etc. * **Other Categories:** this is a flexible filter type that will let you create either a dropdown or input box to filter on any category field in your cards. Whether the filter widget is displayed as a dropdown or an input box is dependent on the field(s) you pick to filter on: if there are fewer than 40 distinct possible values for that field, you'll see a dropdown; otherwise you'll see an input box. (A future version of Metabase will include type-ahead search suggestions for the input box widget.) 
@@ -29,8 +29,7 @@ Now we’ve entered a new mode where we’ll need to wire up each card on our da So here’s what we’re doing — when we pick a month and year with our new filter, the filter needs to know which field in the card to filter on. For example, if we have a `Total Orders` card, and each order has a `Date Ordered` as well as a `Date Delivered`, we have to pick which of those fields to filter — do we want to see all the orders *placed* in January, or do we want to see all the orders *delivered* in January? So, for each card on our dashboard, we’ll pick a date field to connect to the filter. If one of your cards says there aren’t any valid fields, that just means that card doesn’t contain any fields that match the kind of filter you chose. #### Filtering SQL-based cards -Note that if your dashboard includes cards that were created using the SQL/native query editor, you'll need to add a bit of additional markup to the SQL in those cards in order to use a dashboard filter on them. [Using SQL parameters](13-sql-parameters.md) - +**Important:** note that if your dashboard includes saved questions that were created using the SQL/native query editor, you'll need to [add a bit of additional markup to your query](13-sql-parameters.md) to add a "field filter variable" in order to use a dashboard filter with your SQL/native questions. ![Select fields](images/dashboard-filters/04-select-fields.png) @@ -58,6 +57,24 @@ Once you’ve added a filter to your dashboard, just click on it to select a val ![Using a filter](images/dashboard-filters/08-use-filter.png) +### Choosing between a dropdown or autocomplete for your filter + +Picking selections for a filter with lots of options is easier than ever before. If the field you're using for a filter has more than 100 unique values, you'll now automatically see a search box with autocomplete suggestions. 
+ +![Autocomplete](images/dashboard-filters/autocomplete.png) + +Fields with fewer than 100 distinct values will display a list of all the options. + +![List](images/dashboard-filters/list.png) + +In both cases, you can pick one or multiple selections for your filter. + +![Multi-select](images/dashboard-filters/multi-select.png) + +If Metabase somehow picked the wrong behavior for your field, admins can go to the Data Model section of the admin panel and click on the gear icon by the field in question to manually choose between a list, a search box, or just a plain input box. + +![Search options](images/dashboard-filters/search-options.png) + ### Best practices Here are a few tips to get the most out of dashboard filters: diff --git a/docs/users-guide/09-multi-series-charting.md b/docs/users-guide/09-multi-series-charting.md index f626a7d641e2..174839b81ee6 100644 --- a/docs/users-guide/09-multi-series-charting.md +++ b/docs/users-guide/09-multi-series-charting.md @@ -43,10 +43,8 @@ Once you have your chart looking how you’d like, hit done and your changes wil #### A quick note about SQL based questions. Metabase has less information about SQL based questions, so we cannot guarantee if they can be added reliably. You'll see a little warning sign next to SQL questions to indicate this and when you try adding them just be aware it may not work. -### Combining scalars -If you need to compare flat numbers and get a sense of how they differ, Metabase also lets you turn multiple scalars into a bar chart. To do this, follow the same process outlined above. While editing a dashboard, click “edit data” on the scalar of your choice and then select the other scalars you’d like to see represented on the bar chart. - -At Metabase, we use this to create simple funnel visualizations. +### Combining Number charts +If you need to compare single numbers and get a sense of how they differ, Metabase also lets you turn multiple Number charts into a bar chart.
To do this, follow the same process outlined above. While editing a dashboard, click “edit data” on the Number chart of your choice and then select the other saved question(s) you’d like to see represented on the bar chart. (At Metabase, we use this to create simple funnel visualizations.) ### Creating a multi-series visualization in the query builder. If you’re creating a new question in the query builder, you can also view the result as a multi-series visualization. To do this you’ll need to add two dimensions to your question and use an aggregation that isn’t just “raw data.” diff --git a/docs/users-guide/10-pulses.md b/docs/users-guide/10-pulses.md index aff3ee85570a..36612bed7911 100644 --- a/docs/users-guide/10-pulses.md +++ b/docs/users-guide/10-pulses.md @@ -1,18 +1,16 @@ -## Sharing Updates with Pulses +## Sharing updates with pulses The Pulses feature in Metabase gives you the ability to automatically send regular updates to your teammates to help everyone keep track of changes to the metrics that matter to you most. You can deliver a pulse via email or [Slack](https://slack.com/), on the schedule of your choice. -You can create a pulse and view all of the existing pulses by clicking the `Pulses` link from the top menu. Click `Create a pulse` to get started. +To create a new pulse, click the plus (+) button in the top-right of Metabase and select `New pulse`. -![Create a pulse](images/pulses/01-empty-state.png) - -### Name It -First, choose a name for your pulse. This will show up in the email subject line and the Slack message title, so choose something that will let people know what kind of updates the pulse will contain, like “Daily Marketing Update,” or “Users Metrics.” +### Name it +First, choose a name for your pulse. 
This will show up in the email subject line and the Slack message title, so choose something that will let people know what kind of updates the pulse will contain, like “Daily Marketing Update,” or “Users Metrics.” Next, choose which collection it should be saved in so that it's easy to find in the future. ![Giving it a name](images/pulses/02-name-it.png) -### Pick Your Data -Before you can create a pulse, you’ll need to have some [saved questions](06-sharing-answers.md). You can choose up to five of them to put into a single pulse. Click the dropdown to see a list of all your saved questions. You can type in the dropdown to help filter and find the question you’re looking for. +### Pick your data +Before you can create a pulse, you’ll need to have some [saved questions](06-sharing-answers.md). Click the dropdown to see a list of all your saved questions. You can type in the dropdown to help filter and find the question you’re looking for. ![Pick your data](images/pulses/03-pick-your-data.png) @@ -20,31 +18,43 @@ When you select a saved question, Metabase will show you a preview of how it’l ![Behold! The metamorphosis.](images/pulses/04-transformation.png) -Currently, there are a few restrictions on what kinds of saved questions you can put into a pulse: +Now you can include tables in your pulses as well. They'll be capped to 10 columns and 20 rows, and for emailed pulses the rest of the results will be included automatically as a file attachment, with a limit of 2,000 rows. -* Raw data can’t be put in a pulse -* Tables will be cropped to a maximum of three columns and 10 rows -* Bar charts (and pie charts which get turned into bar charts) will be cropped to one column for the labels, one column for the values, and 10 total rows +![Table in pulse](images/pulses/table.png) -### Choose How and When to Deliver Your Data -Each pulse you create can be delivered by email, Slack, or both. You can also set a different delivery schedule for email versus Slack. 
To deliver by email, just type in the email addresses you want to send the pulse to, separated by commas. Then, choose to either send it daily, weekly, or monthly, and the time you want it to be sent. +#### Attaching a .csv or .xls with results +You can also optionally include the results of a saved question in an emailed pulse as a .csv or .xls file attachment. Just click the paperclip icon on an included saved question to add the attachment. Click the paperclip again to remove the attachment. -![Setting the email schedule](images/pulses/05-email-schedule.png) +![Attach button](images/pulses/attachments/attach-button.png) -To send via Slack, you’ll need to choose which channel you want to post the pulse in, whether you want it to post hourly or daily, and at what time. Again, the schedule for Slack can be different from the schedule for email. +Choose between a .csv or .xls file by clicking on the text buttons: + +![Attached](images/pulses/attachments/attached.png) + +Your attachments will be included in your emailed pulse just like a regular email attachment: -Once you’re done, just click `Create pulse`. You’ll see your new pulse, along with its recipients, and the saved questions that are included in the pulse. If anyone else on your team wants to subscribe to a pulse that’s delivered by email, they can click the button that says `Get this email` from the Pulses screen. +![Email attachment](images/pulses/attachments/email.png) -![A beautiful, completed pulse](images/pulses/06-created.png) +#### Limitations +Currently, there are a few restrictions on what kinds of saved questions you can put into a pulse: + +* Raw data questions are capped to 10 columns and 20 rows. For emailed pulses, the rest of the results will be included automatically as a file attachment, with a limit of 2,000 rows. +* Pivot tables will be cropped to a maximum of three columns and 10 rows. 
+* Bar charts (and pie charts which get turned into bar charts) will be cropped to one column for the labels, one column for the values, and 10 total rows. + +### Choose how and when to deliver your data +Each pulse you create can be delivered by email, Slack, or both. You can also set a different delivery schedule for email versus Slack. To deliver by email, just type in the Metabase user names or email addresses you want to send the pulse to, separated by commas. Then, choose to either send it daily, weekly, or monthly, and the time at which you want it to be sent. -### Editing and Deleting a Pulse -If you ever need to make changes to a pulse, just hover over the pulse from the list and click the `edit` button that appears. +![Setting the email schedule](images/pulses/05-email-schedule.png) + +To send via Slack, you’ll need to choose which channel you want to post the pulse in, whether you want it to post hourly or daily, and at what time. Again, the schedule for Slack can be different from the schedule for email. -![Edit button](images/pulses/07-edit-button.png) +Once you’re done, just click `Create pulse` and you’ll see your new pulse in the collection where you chose to save it. -If you want to delete a pulse, you can find that option at the bottom of the edit screen. Just remember: if you delete a pulse, no one will receive it anymore. +### Editing or archiving a pulse +If you ever need to make changes to a pulse, just navigate to the collection where it's saved and click on it, or search for it in the big search bar at the top of Metabase. -![The danger zone](images/pulses/08-delete.png) +If a pulse has outlived its usefulness, you can archive it by clicking on the Archive button at the bottom of the pulse's detail page. Just remember: if you archive a pulse, no one will receive it anymore.
You can unarchive a pulse just like you can with questions and dashboards by navigating to the archive from the top-right button while viewing any collection and clicking on the View Archive menu option, then clicking on the `Unarchive this` button on the far right next to the pulse. --- diff --git a/docs/users-guide/13-sql-parameters.md b/docs/users-guide/13-sql-parameters.md index bc36b96f8a35..2794f2512d8f 100644 --- a/docs/users-guide/13-sql-parameters.md +++ b/docs/users-guide/13-sql-parameters.md @@ -34,7 +34,7 @@ WHERE {% raw %}{{created_at}}{% endraw %} ``` ##### Creating SQL question filters using field filter variables -First, insert a variable tag in your SQL, like `{{my_var}}`. Then, in the side panel, select the `Field Filter` variable type, and choose which field to map your variable to. In order to display a filter widget, you'll have to choose a field whose Type in the Data Model section of the Admin Panel is one of the following: +First, insert a variable tag in your SQL, like `{% raw %}{{my_var}}{% endraw %}`. Then, in the side panel, select the `Field Filter` variable type, and choose which field to map your variable to. In order to display a filter widget, you'll have to choose a field whose Type in the Data Model section of the Admin Panel is one of the following: - Category - City - Entity Key @@ -44,6 +44,7 @@ First, insert a variable tag in your SQL, like `{{my_var}}`. Then, in the side p - UNIX Timestamp (Seconds) - UNIX Timestamp (Milliseconds) - ZIP or Postal Code + The field can also be a datetime one (which can be left as `No special type` in the Data Model). You'll then see a dropdown labeled `Widget`, which will let you choose the kind of filter widget you want on your question, which is especially useful for datetime fields (you can select `None` if you don't want a widget at all). 
**Note:** If you're not seeing the option to display a filter widget, make sure the mapped field is set to one of the above types, and then try manually syncing your database from the Databases section of the Admin Panel to force Metabase to scan and cache the field's values. @@ -64,7 +65,7 @@ Filter widgets **can't** be displayed if the variable is mapped to a field marke If you input a default value for your field filter, this value will be selected in the filter whenever you come back to this question. If you clear out the filter, though, no value will be passed (i.e., not even the default value). The default value has no effect on the behavior of your SQL question when viewed in a dashboard. ##### Connecting a SQL question to a dashboard filter -In order for a saved SQL question to be usable with a dashboard filter, it must contain at least one field filter. The kind of dashboard filter that can be used with the SQL question depends on the field that you map to the question's field filter(s). For example, if you have a field filter called `{{var}}` and you map it to a State field, you can map a Location dashboard filter to your SQL question. In this example, you'd create a new dashboard or go to an existing one, click the Edit button, and the SQL question that contains your State field filter, add a new dashboard filter or edit an existing Location filter, then click the dropdown on the SQL question card to see the State field filter. [Learn more about dashboard filters here](08-dashboard-filters.md). +In order for a saved SQL question to be usable with a dashboard filter, it must contain at least one field filter. The kind of dashboard filter that can be used with the SQL question depends on the field that you map to the question's field filter(s). For example, if you have a field filter called `{% raw %}{{var}}{% endraw %}` and you map it to a State field, you can map a Location dashboard filter to your SQL question. 
In this example, you'd create a new dashboard or go to an existing one, click the Edit button, add the SQL question that contains your State field filter, add a new dashboard filter or edit an existing Location filter, then click the dropdown on the SQL question card to see the State field filter. [Learn more about dashboard filters here](08-dashboard-filters.md). ![Field filter](images/sql-parameters/state-field-filter.png) @@ -93,5 +94,5 @@ WHERE True --- -## That’s it! -If you still have questions, or want to share Metabase tips and tricks, head over to our [discussion board](http://discourse.metabase.com/). See you there! +## Next: automated x-ray explorations +Learn about how to easily and quickly see automatic explorations of your data with Metabase's powerful [x-ray feature](14-x-rays.md). diff --git a/docs/users-guide/14-x-rays.md b/docs/users-guide/14-x-rays.md index afa722830f1e..153994c1790a 100644 --- a/docs/users-guide/14-x-rays.md +++ b/docs/users-guide/14-x-rays.md @@ -1,46 +1,76 @@ -## X-rays and Comparisons +## X-rays --- -X-rays and comparisons are two powerful new features in Metabase that allow you to get deeper statistical reports about your segments, fields, and time series. +X-rays are a fast and easy way to get automatic insights and explorations of your data. -### Time series x-rays -To view an x-ray report for a time series, open up a saved time series question (any kind of chart or table with a metric broken out by time), click on the Action Menu in the bottom-right of the screen, and select "X-ray this question:"
-![Time series x-ray action](images/x-ray-action-time-series.png) +![X-ray action in drill-through menu](images/x-rays/drill-through.png) -You'll get an in-depth analysis of your time series question, including growth rates, the distribution of values, and seasonality: +### Comparisons -![Time series x-ray](images/x-ray-time-series.png) +When you click on a bar or point on a chart, you can now also choose the Compare action from the menu that pops up to see how the thing you've clicked on compares to the rest of the data. -### Segment, table, and field x-rays -To view an x-ray for a segment, table, or field, first go to the Data Reference, then navigate to the thing you want to x-ray, then select the x-ray option in the lefthand menu: +![Compare menu](images/x-rays/x-ray-compare-popover.png) -![X-rays in data reference](images/x-ray-data-reference.png) +If you're already looking at an x-ray of a table or a segment, Metabase will also give you the option to compare the current table or segment to other segments of the table, if there are any. This is a very fast, powerful way to see, for example, how different segments of your users or orders compare to each other. -If you have a saved Raw Data question that uses one or more segments as filters, you can also x-ray one of those segments from the Action Menu in the bottom-right of the screen when viewing that question: +![Comparison](images/x-rays/x-ray-comparison-1.png) +![Comparison](images/x-rays/x-ray-comparison-2.png) -![X-ray action](images/x-ray-action.png) -An x-ray report for a segment called "Californians" looks like this, displaying a summary of the distribution of values for each field in the segment, and the maximal and minimal values if applicable: +### Table x-rays -![X-ray](images/x-ray.png) +Another great way to get to know your data is by x-raying your tables. 
From the home page, scroll to the bottom of the screen, click on one of your connected databases, and then click the bolt icon on a table to view an x-ray of it. -Clicking on the summary for any field will take you to the detailed x-ray report for that single field. +### X-raying question results -### Changing the fidelity of an x-ray +You can also see an x-ray of the results of a saved or unsaved question by clicking the blue compass button in the bottom-right of the question screen and selecting the x-ray action. This will show you an x-ray of the numbers and fields in your question's results. -X-rays can be a somewhat costly or slow operation for your database to run, so by default Metabase only does a quick sampling of the segment or field you're x-raying. You can increase the fidelity in the top-right of the x-ray page: +![X-ray results](images/x-rays/x-ray-action.png) -![X-ray fidelity](images/x-ray-fidelity.png) +## X-rays in the Data Reference -### Comparing a segment +You can also view an x-ray by navigating to a table, field, metric, or segment in the [Data Reference](./12-data-model-reference.md). Just click the x-ray link in the lefthand sidebar. -Segments are a subset of a larger table or list, so one thing you can do when viewing an x-ray of a segment is compare it to its "parent" table. 
For example, if I have a segment called "Californians," which is a subset of the "People" table, I can click on the button that says "Compare to all People" to see a comparison report: +![Data Reference x-ray](images/x-rays/data-reference.png) -![Compare](images/x-ray-compare-button.png) +### Browsing through x-rays -The comparison report shows how many rows there are in the segment versus the parent table, and also gives you a breakdown of how the fields in the segment differ from that of the parent table: +One fun and interesting thing you can do once you're looking at an x-ray is to click and browse through the list of suggested next x-rays that show up in the righthand column. -![Comparison report](images/x-ray-comparison.png) +Depending on the x-ray you're currently viewing, you'll see suggestions that will let you: -An example for where this can be especially useful is a scenario where you've defined many different segments for your users or customers, like "Repeat Customers," "Users between 18 and 35," or "Female customers in Kalamazoo who dislike cheese." You can open up the x-ray for any of these segments, and then compare them to the larger Users or Customers table to see if there are any interesting patterns or differences. +- compare the table or segment you're currently x-raying to another segment +- "zoom out" and view an x-ray of the table the current x-ray is based on +- "zoom in" to see a more detailed x-ray about a field or dimension of the current x-ray +- go to an x-ray of a related item, like a metric based on the current table, or a different table that's related to the current one + +### Exploring newly added datasets + +If you're an administrator, when you first connect a database to Metabase, Metabot will offer to show you some automated explorations of your newly-connected data. + +![X-ray example](images/x-rays/suggestions.png) + +Click on one of these to see an x-ray. 
+ +![X-ray example](images/x-rays/example.png) + +You can see more suggested x-rays over on the right-hand side of the screen. Browsing through x-rays like this is a pretty fun way of getting a quick overview of your data. + +### Saving x-rays + +If you come across an x-ray that's particularly interesting, you can save it as a dashboard by clicking the green Save button. Metabase will create a new dashboard and put it and all of its charts in a new collection, and will save this new collection wherever you choose. + +### Where did the old x-rays go? + +We're reworking the way we do things like time series growth analysis, which was present in past versions of x-rays. In the meantime, we've removed those previous x-rays, and will bring those features back in a more elegant and streamlined way in a future version of Metabase. + +### Need help? +If you still have questions about x-rays or comparisons, you can head over to our [discussion forum](http://discourse.metabase.com/). See you there! + +--- + +## Next: setting up alerts +Learn how to get notified when one of your questions meets a goal or has results with [alerts](15-alerts.md). diff --git a/docs/users-guide/15-alerts.md b/docs/users-guide/15-alerts.md new file mode 100644 index 000000000000..761fc45ba794 --- /dev/null +++ b/docs/users-guide/15-alerts.md @@ -0,0 +1,73 @@ + +## Getting alerts about questions +Whether you're keeping track of revenue, users, or negative reviews, there are often times when you want to be alerted about something. Metabase has a few different kinds of alerts you can set up, and you can choose to be notified via email or Slack. + +### Getting alerts +To start using alerts, someone on your team who's an administrator will need to make sure that [email integration](../administration-guide/02-setting-up-email.md) is set up first. + +### Types of alerts +There are three kinds of things you can get alerted about in Metabase: +1. When a time series crosses a goal line. +2.
When a progress bar reaches or goes below its goal. +3. When any other kind of question returns a result. + +We'll go through these one by one. + +### Goal line alerts +This kind of alert is useful when you're doing things like tracking daily active users and you want to know when you reach a certain number of them, or when you're tracking orders per week and you want to know whenever that number ever goes below a certain threshold. + +To start, you'll need a line, area, or bar chart displaying a number over time. (If you need help with that, check out the page on [asking questions](04-asking-questions.md).) + +Now we need to set up a goal line. To do that, open up the visualization settings by clicking the gear icon next to the dropdown where you chose your chart type. Then click on the Display tab, and turn on the "Show goal" setting. Choose a value for your goal and click Done. + +Save your question, then click on the menu button in the top right of the screen and click on "Get alerts about this." + +![Get alerts](./images/alerts/get-alerts-about-this.png) + +This is where you'll get to choose a few things: +- Whether you want to be alerted when the time series goes above the goal line or when it goes below it. +- Whether you want to be alerted every time this happens or only the first time. +- How often you want Metabase to check to see if the goal line has been crossed. + +![Goal line alert options](./images/alerts/goal-line-options.png) + +Click Done, and your alert will be all set up! You'll get an email confirmation, too. If you need to edit or unsubscribe from the alert you set up, just open up that same menu. It'll say "Alerts are on," so just click that, and you'll see the Edit and Unsubscribe buttons. This is also where you'll see alerts about this question that administrators might have added you to.
+ +![Edit menu](./images/alerts/edit-menu.png) + +### Progress bar alerts +Setting up this kind of alert is really similar to setting up a goal line alert. First, create a question that returns a single number as its result, then choose the Progress Bar chart type from the Visualization menu. Click the gear to select a goal value, click Done, then save your question. + +Next, open up the menu in the top-right, click "Get alerts about this," and you'll see that same screen of options for when you want to get alerts about this progress bar. + +### Results alerts +Lastly, you can get an alert when one of your saved questions returns any result. This kind of alert is the most useful if you have a question that doesn't *usually* return any results, but you just want to know when it *does*. For example, you might have a table called `Reviews`, and you want to know any time a customer leaves a bad review, which you consider to be anything below three stars. To set up an alert for this situation, you'd go and create a raw data question (i.e., a question that returns a list of reviews), and add a filter to only include results with one or two stars. + +![Bad reviews](./images/alerts/bad-reviews.png) + +You probably don't want to be alerted about all the bad reviews you've *ever* gotten, but just recent ones, so you'd probably also add a filter to only include results from yesterday or today, depending on how often you want to check for these bad reviews. At this point, when you check the results of this question, it probably won't return any results, which is a good thing. + +![No results](./images/alerts/no-results.png) + +Save the question, then click on "get alerts about this" from the menu in the top-right of the screen, and select how often you want Metabase to check this question for results. That's it!
+ +### Adding additional recipients to your alerts +If you're an administrator of your Metabase instance, you'll be able to see and edit every alert on all saved questions. You'll also see some additional options to add recipients to alerts, which look like this: + +![Recipients](./images/alerts/recipients.png) + +Just like with [Pulses](10-pulses.md), you can add any Metabase user, email address, or even a Slack channel as a recipient of an alert. Admins can add or remove recipients on any alert, even ones that they did not create themselves. + +Here's more information about [setting up email integration](../administration-guide/02-setting-up-email.md) and [setting up Slack integration](../administration-guide/09-setting-up-slack.md). + +### Stopping alerts +There are a few ways alerts can be stopped: +- Regular users can unsubscribe from any alert that they're a recipient of. +- Admins can edit any alert and delete it entirely. This can't be undone, so be careful! +- If a saved question that has an alert on it gets edited in such a way that the alert doesn't make sense anymore, the alert will get deleted. For example, if a saved question with a goal line alert on it gets edited, and the goal line is removed entirely, that alert will get deleted. +- If a question gets archived, any alerts on it will be deleted. + +--- + +## That’s it! +If you still have questions about using alerts, you can head over to our [discussion forum](http://discourse.metabase.com/). See you there! 
diff --git a/docs/users-guide/images/MultiSeriesFinished.png b/docs/users-guide/images/MultiSeriesFinished.png index 0b1b8cbfd051..6c3fa315523e 100644 Binary files a/docs/users-guide/images/MultiSeriesFinished.png and b/docs/users-guide/images/MultiSeriesFinished.png differ diff --git a/docs/users-guide/images/MultiSeriesQueryBuilder.png b/docs/users-guide/images/MultiSeriesQueryBuilder.png index 4ebc8b367f6e..f786b6fcbb15 100644 Binary files a/docs/users-guide/images/MultiSeriesQueryBuilder.png and b/docs/users-guide/images/MultiSeriesQueryBuilder.png differ diff --git a/docs/users-guide/images/MultiSeriesTrigger.png b/docs/users-guide/images/MultiSeriesTrigger.png index 87031ade0e1b..c3d1a92ab368 100644 Binary files a/docs/users-guide/images/MultiSeriesTrigger.png and b/docs/users-guide/images/MultiSeriesTrigger.png differ diff --git a/docs/users-guide/images/alerts/bad-reviews.png b/docs/users-guide/images/alerts/bad-reviews.png new file mode 100644 index 000000000000..c3fa0c784f72 Binary files /dev/null and b/docs/users-guide/images/alerts/bad-reviews.png differ diff --git a/docs/users-guide/images/alerts/edit-menu.png b/docs/users-guide/images/alerts/edit-menu.png new file mode 100644 index 000000000000..f9d5c9a3bd70 Binary files /dev/null and b/docs/users-guide/images/alerts/edit-menu.png differ diff --git a/docs/users-guide/images/alerts/get-alerts-about-this.png b/docs/users-guide/images/alerts/get-alerts-about-this.png new file mode 100644 index 000000000000..fa4e722a2147 Binary files /dev/null and b/docs/users-guide/images/alerts/get-alerts-about-this.png differ diff --git a/docs/users-guide/images/alerts/goal-line-options.png b/docs/users-guide/images/alerts/goal-line-options.png new file mode 100644 index 000000000000..dbc901da87e6 Binary files /dev/null and b/docs/users-guide/images/alerts/goal-line-options.png differ diff --git a/docs/users-guide/images/alerts/no-results.png b/docs/users-guide/images/alerts/no-results.png new file mode 100644 index 
000000000000..15a34e4c2068 Binary files /dev/null and b/docs/users-guide/images/alerts/no-results.png differ diff --git a/docs/users-guide/images/alerts/recipients.png b/docs/users-guide/images/alerts/recipients.png new file mode 100644 index 000000000000..cd68acf9a51d Binary files /dev/null and b/docs/users-guide/images/alerts/recipients.png differ diff --git a/docs/users-guide/images/browse-data.png b/docs/users-guide/images/browse-data.png new file mode 100644 index 000000000000..423205c7fdfd Binary files /dev/null and b/docs/users-guide/images/browse-data.png differ diff --git a/docs/users-guide/images/collection-detail.png b/docs/users-guide/images/collection-detail.png new file mode 100644 index 000000000000..5dd7fe927776 Binary files /dev/null and b/docs/users-guide/images/collection-detail.png differ diff --git a/docs/users-guide/images/create-menu.png b/docs/users-guide/images/create-menu.png new file mode 100644 index 000000000000..6c93c4798b0f Binary files /dev/null and b/docs/users-guide/images/create-menu.png differ diff --git a/docs/users-guide/images/dashboard-filters/02-filter-type.png b/docs/users-guide/images/dashboard-filters/02-filter-type.png index a4ca69c03825..3df4dc4db3b2 100644 Binary files a/docs/users-guide/images/dashboard-filters/02-filter-type.png and b/docs/users-guide/images/dashboard-filters/02-filter-type.png differ diff --git a/docs/users-guide/images/dashboard-filters/05-edit-label.png b/docs/users-guide/images/dashboard-filters/05-edit-label.png index d713a15d3b38..b758d9afcc65 100644 Binary files a/docs/users-guide/images/dashboard-filters/05-edit-label.png and b/docs/users-guide/images/dashboard-filters/05-edit-label.png differ diff --git a/docs/users-guide/images/dashboard-filters/06-edit-and-remove.png b/docs/users-guide/images/dashboard-filters/06-edit-and-remove.png index c516ae9bc7fa..0e3d9188e8ca 100644 Binary files a/docs/users-guide/images/dashboard-filters/06-edit-and-remove.png and 
b/docs/users-guide/images/dashboard-filters/06-edit-and-remove.png differ diff --git a/docs/users-guide/images/dashboard-filters/07-default-value.png b/docs/users-guide/images/dashboard-filters/07-default-value.png index 4c1cb8c42d85..cfa0024b0d08 100644 Binary files a/docs/users-guide/images/dashboard-filters/07-default-value.png and b/docs/users-guide/images/dashboard-filters/07-default-value.png differ diff --git a/docs/users-guide/images/dashboard-filters/08-use-filter.png b/docs/users-guide/images/dashboard-filters/08-use-filter.png index ffc895a3477c..eeac18bd60cc 100644 Binary files a/docs/users-guide/images/dashboard-filters/08-use-filter.png and b/docs/users-guide/images/dashboard-filters/08-use-filter.png differ diff --git a/docs/users-guide/images/dashboard-filters/autocomplete.png b/docs/users-guide/images/dashboard-filters/autocomplete.png new file mode 100644 index 000000000000..78355b601c7d Binary files /dev/null and b/docs/users-guide/images/dashboard-filters/autocomplete.png differ diff --git a/docs/users-guide/images/dashboard-filters/dashboard-filters.png b/docs/users-guide/images/dashboard-filters/dashboard-filters.png index 600cf366846d..a0c1f3fdbaff 100644 Binary files a/docs/users-guide/images/dashboard-filters/dashboard-filters.png and b/docs/users-guide/images/dashboard-filters/dashboard-filters.png differ diff --git a/docs/users-guide/images/dashboard-filters/list.png b/docs/users-guide/images/dashboard-filters/list.png new file mode 100644 index 000000000000..c514f64969fa Binary files /dev/null and b/docs/users-guide/images/dashboard-filters/list.png differ diff --git a/docs/users-guide/images/dashboard-filters/multi-select.png b/docs/users-guide/images/dashboard-filters/multi-select.png new file mode 100644 index 000000000000..f21b6e58c348 Binary files /dev/null and b/docs/users-guide/images/dashboard-filters/multi-select.png differ diff --git a/docs/users-guide/images/dashboard-filters/search-options.png 
b/docs/users-guide/images/dashboard-filters/search-options.png new file mode 100644 index 000000000000..ef9383957349 Binary files /dev/null and b/docs/users-guide/images/dashboard-filters/search-options.png differ diff --git a/docs/users-guide/images/dashboards/DashboardAutorefresh.png b/docs/users-guide/images/dashboards/DashboardAutorefresh.png index 575e3e7ede5c..181d45553fed 100644 Binary files a/docs/users-guide/images/dashboards/DashboardAutorefresh.png and b/docs/users-guide/images/dashboards/DashboardAutorefresh.png differ diff --git a/docs/users-guide/images/dashboards/DashboardCreate.png b/docs/users-guide/images/dashboards/DashboardCreate.png index d70d98746be1..c91a061360af 100644 Binary files a/docs/users-guide/images/dashboards/DashboardCreate.png and b/docs/users-guide/images/dashboards/DashboardCreate.png differ diff --git a/docs/users-guide/images/dashboards/FilterDashboards.png b/docs/users-guide/images/dashboards/FilterDashboards.png deleted file mode 100644 index 256a90cbd5b8..000000000000 Binary files a/docs/users-guide/images/dashboards/FilterDashboards.png and /dev/null differ diff --git a/docs/users-guide/images/dashboards/FirstDashboard.png b/docs/users-guide/images/dashboards/FirstDashboard.png index 0b4d9084dffb..a26f97c24747 100644 Binary files a/docs/users-guide/images/dashboards/FirstDashboard.png and b/docs/users-guide/images/dashboards/FirstDashboard.png differ diff --git a/docs/users-guide/images/dashboards/text-cards/markdown.png b/docs/users-guide/images/dashboards/text-cards/markdown.png new file mode 100644 index 000000000000..fb1d27fb1527 Binary files /dev/null and b/docs/users-guide/images/dashboards/text-cards/markdown.png differ diff --git a/docs/users-guide/images/dashboards/text-cards/new-text-card.png b/docs/users-guide/images/dashboards/text-cards/new-text-card.png new file mode 100644 index 000000000000..4447cfd80b32 Binary files /dev/null and b/docs/users-guide/images/dashboards/text-cards/new-text-card.png differ diff 
--git a/docs/users-guide/images/dashboards/text-cards/result.png b/docs/users-guide/images/dashboards/text-cards/result.png new file mode 100644 index 000000000000..b0e089fe9767 Binary files /dev/null and b/docs/users-guide/images/dashboards/text-cards/result.png differ diff --git a/docs/users-guide/images/dashboards/text-cards/text-card-button.png b/docs/users-guide/images/dashboards/text-cards/text-card-button.png new file mode 100644 index 000000000000..e623c3a31e22 Binary files /dev/null and b/docs/users-guide/images/dashboards/text-cards/text-card-button.png differ diff --git a/docs/users-guide/images/homepage-x-rays.png b/docs/users-guide/images/homepage-x-rays.png new file mode 100644 index 000000000000..800eaef33e9b Binary files /dev/null and b/docs/users-guide/images/homepage-x-rays.png differ diff --git a/docs/users-guide/images/insights.png b/docs/users-guide/images/insights.png new file mode 100644 index 000000000000..a7dd4dc24ed3 Binary files /dev/null and b/docs/users-guide/images/insights.png differ diff --git a/docs/users-guide/images/metabase-homepage.png b/docs/users-guide/images/metabase-homepage.png new file mode 100644 index 000000000000..2618c7204ea8 Binary files /dev/null and b/docs/users-guide/images/metabase-homepage.png differ diff --git a/docs/users-guide/images/metric-action-menu.png b/docs/users-guide/images/metric-action-menu.png index a43ca62351ab..c84407afa661 100644 Binary files a/docs/users-guide/images/metric-action-menu.png and b/docs/users-guide/images/metric-action-menu.png differ diff --git a/docs/users-guide/images/metric-drill-through.png b/docs/users-guide/images/metric-drill-through.png index 094d0a1df59a..9415a5f9c927 100644 Binary files a/docs/users-guide/images/metric-drill-through.png and b/docs/users-guide/images/metric-drill-through.png differ diff --git a/docs/users-guide/images/metrics-list.png b/docs/users-guide/images/metrics-list.png index 4abc90ff8d3b..95b21fb6b7e3 100644 Binary files 
a/docs/users-guide/images/metrics-list.png and b/docs/users-guide/images/metrics-list.png differ diff --git a/docs/users-guide/images/new-question-all-options.png b/docs/users-guide/images/new-question-all-options.png index d9abb89be58d..fa0a3c9c794e 100644 Binary files a/docs/users-guide/images/new-question-all-options.png and b/docs/users-guide/images/new-question-all-options.png differ diff --git a/docs/users-guide/images/our-analytics-page.png b/docs/users-guide/images/our-analytics-page.png new file mode 100644 index 000000000000..ccd13915992b Binary files /dev/null and b/docs/users-guide/images/our-analytics-page.png differ diff --git a/docs/users-guide/images/our-data.png b/docs/users-guide/images/our-data.png new file mode 100644 index 000000000000..b50515d710e0 Binary files /dev/null and b/docs/users-guide/images/our-data.png differ diff --git a/docs/users-guide/images/pinned-items.png b/docs/users-guide/images/pinned-items.png new file mode 100644 index 000000000000..0453c64cb496 Binary files /dev/null and b/docs/users-guide/images/pinned-items.png differ diff --git a/docs/users-guide/images/pulses/01-empty-state.png b/docs/users-guide/images/pulses/01-empty-state.png deleted file mode 100644 index 754ae15f99a5..000000000000 Binary files a/docs/users-guide/images/pulses/01-empty-state.png and /dev/null differ diff --git a/docs/users-guide/images/pulses/02-name-it.png b/docs/users-guide/images/pulses/02-name-it.png index d4402c280ae1..e213cd79e36a 100644 Binary files a/docs/users-guide/images/pulses/02-name-it.png and b/docs/users-guide/images/pulses/02-name-it.png differ diff --git a/docs/users-guide/images/pulses/03-pick-your-data.png b/docs/users-guide/images/pulses/03-pick-your-data.png index 6a3bb322f934..1e6d730059ee 100644 Binary files a/docs/users-guide/images/pulses/03-pick-your-data.png and b/docs/users-guide/images/pulses/03-pick-your-data.png differ diff --git a/docs/users-guide/images/pulses/06-created.png 
b/docs/users-guide/images/pulses/06-created.png deleted file mode 100644 index f28e570fbf9c..000000000000 Binary files a/docs/users-guide/images/pulses/06-created.png and /dev/null differ diff --git a/docs/users-guide/images/pulses/07-edit-button.png b/docs/users-guide/images/pulses/07-edit-button.png deleted file mode 100644 index 20e480ec3fb4..000000000000 Binary files a/docs/users-guide/images/pulses/07-edit-button.png and /dev/null differ diff --git a/docs/users-guide/images/pulses/attachments/attach-button.png b/docs/users-guide/images/pulses/attachments/attach-button.png new file mode 100644 index 000000000000..9e890576dbb5 Binary files /dev/null and b/docs/users-guide/images/pulses/attachments/attach-button.png differ diff --git a/docs/users-guide/images/pulses/attachments/attached.png b/docs/users-guide/images/pulses/attachments/attached.png new file mode 100644 index 000000000000..f5a331671e4c Binary files /dev/null and b/docs/users-guide/images/pulses/attachments/attached.png differ diff --git a/docs/users-guide/images/pulses/attachments/email.png b/docs/users-guide/images/pulses/attachments/email.png new file mode 100644 index 000000000000..e3d5c8e4a3a2 Binary files /dev/null and b/docs/users-guide/images/pulses/attachments/email.png differ diff --git a/docs/users-guide/images/pulses/table.png b/docs/users-guide/images/pulses/table.png new file mode 100644 index 000000000000..b859a009021d Binary files /dev/null and b/docs/users-guide/images/pulses/table.png differ diff --git a/docs/users-guide/images/saved-questions.png b/docs/users-guide/images/saved-questions.png deleted file mode 100644 index 91c771735e88..000000000000 Binary files a/docs/users-guide/images/saved-questions.png and /dev/null differ diff --git a/docs/users-guide/images/search-results.png b/docs/users-guide/images/search-results.png new file mode 100644 index 000000000000..1ecc3053cf32 Binary files /dev/null and b/docs/users-guide/images/search-results.png differ diff --git 
a/docs/users-guide/images/visualizations/add-fields.png b/docs/users-guide/images/visualizations/add-fields.png new file mode 100644 index 000000000000..988927d832ad Binary files /dev/null and b/docs/users-guide/images/visualizations/add-fields.png differ diff --git a/docs/users-guide/images/visualizations/conditional-formatting.png b/docs/users-guide/images/visualizations/conditional-formatting.png new file mode 100644 index 000000000000..5d60851d3df3 Binary files /dev/null and b/docs/users-guide/images/visualizations/conditional-formatting.png differ diff --git a/docs/users-guide/images/visualizations/row.png b/docs/users-guide/images/visualizations/row.png new file mode 100644 index 000000000000..28b451b60f9c Binary files /dev/null and b/docs/users-guide/images/visualizations/row.png differ diff --git a/docs/users-guide/images/x-ray-action-time-series.png b/docs/users-guide/images/x-ray-action-time-series.png deleted file mode 100644 index 63d87d366ba3..000000000000 Binary files a/docs/users-guide/images/x-ray-action-time-series.png and /dev/null differ diff --git a/docs/users-guide/images/x-ray-action.png b/docs/users-guide/images/x-ray-action.png deleted file mode 100644 index 71af33ef3215..000000000000 Binary files a/docs/users-guide/images/x-ray-action.png and /dev/null differ diff --git a/docs/users-guide/images/x-ray-compare-button.png b/docs/users-guide/images/x-ray-compare-button.png deleted file mode 100644 index 6d5706798541..000000000000 Binary files a/docs/users-guide/images/x-ray-compare-button.png and /dev/null differ diff --git a/docs/users-guide/images/x-ray-comparison.png b/docs/users-guide/images/x-ray-comparison.png deleted file mode 100644 index 16168753f55f..000000000000 Binary files a/docs/users-guide/images/x-ray-comparison.png and /dev/null differ diff --git a/docs/users-guide/images/x-ray-data-reference.png b/docs/users-guide/images/x-ray-data-reference.png deleted file mode 100644 index 2ffd2f11471c..000000000000 Binary files 
a/docs/users-guide/images/x-ray-data-reference.png and /dev/null differ diff --git a/docs/users-guide/images/x-ray-fidelity.png b/docs/users-guide/images/x-ray-fidelity.png deleted file mode 100644 index c69df7cf32ae..000000000000 Binary files a/docs/users-guide/images/x-ray-fidelity.png and /dev/null differ diff --git a/docs/users-guide/images/x-ray-time-series.png b/docs/users-guide/images/x-ray-time-series.png deleted file mode 100644 index b702da3c37bf..000000000000 Binary files a/docs/users-guide/images/x-ray-time-series.png and /dev/null differ diff --git a/docs/users-guide/images/x-ray.png b/docs/users-guide/images/x-ray.png deleted file mode 100644 index 5e37259405f7..000000000000 Binary files a/docs/users-guide/images/x-ray.png and /dev/null differ diff --git a/docs/users-guide/images/x-rays/data-reference.png b/docs/users-guide/images/x-rays/data-reference.png new file mode 100644 index 000000000000..e4521e8bdd94 Binary files /dev/null and b/docs/users-guide/images/x-rays/data-reference.png differ diff --git a/docs/users-guide/images/x-rays/drill-through.png b/docs/users-guide/images/x-rays/drill-through.png new file mode 100644 index 000000000000..749d9df5f11a Binary files /dev/null and b/docs/users-guide/images/x-rays/drill-through.png differ diff --git a/docs/users-guide/images/x-rays/example.png b/docs/users-guide/images/x-rays/example.png new file mode 100644 index 000000000000..4dde0fe6a443 Binary files /dev/null and b/docs/users-guide/images/x-rays/example.png differ diff --git a/docs/users-guide/images/x-rays/suggestions.png b/docs/users-guide/images/x-rays/suggestions.png new file mode 100644 index 000000000000..ee2e1f1c5e63 Binary files /dev/null and b/docs/users-guide/images/x-rays/suggestions.png differ diff --git a/docs/users-guide/images/x-rays/x-ray-action.png b/docs/users-guide/images/x-rays/x-ray-action.png new file mode 100644 index 000000000000..3dce83a3e075 Binary files /dev/null and b/docs/users-guide/images/x-rays/x-ray-action.png 
differ diff --git a/docs/users-guide/images/x-rays/x-ray-compare-popover.png b/docs/users-guide/images/x-rays/x-ray-compare-popover.png new file mode 100644 index 000000000000..c871ef88cb01 Binary files /dev/null and b/docs/users-guide/images/x-rays/x-ray-compare-popover.png differ diff --git a/docs/users-guide/images/x-rays/x-ray-comparison-1.png b/docs/users-guide/images/x-rays/x-ray-comparison-1.png new file mode 100644 index 000000000000..0d84e7327315 Binary files /dev/null and b/docs/users-guide/images/x-rays/x-ray-comparison-1.png differ diff --git a/docs/users-guide/images/x-rays/x-ray-comparison-2.png b/docs/users-guide/images/x-rays/x-ray-comparison-2.png new file mode 100644 index 000000000000..258ad9fd8e48 Binary files /dev/null and b/docs/users-guide/images/x-rays/x-ray-comparison-2.png differ diff --git a/docs/users-guide/start.md b/docs/users-guide/start.md index 8a57fe4c703d..a737d161e266 100644 --- a/docs/users-guide/start.md +++ b/docs/users-guide/start.md @@ -2,7 +2,7 @@ **This guide will teach you:** -* [What Metabase does](01-what-is-metabase.md) +* [An overview of Metabase](01-what-is-metabase.md) * [The basics of database terminology](02-database-basics.md) * [Basic exploration in Metabase](03-basic-exploration.md) * [Asking questions in Metabase](04-asking-questions.md) @@ -16,5 +16,6 @@ * [Some helpful tips on building your data model](12-data-model-reference.md) * [Creating SQL Templates](13-sql-parameters.md) * [Viewing X-ray reports](14-x-rays.md) +* [Getting alerts](15-alerts.md) Let's get started with an overview of [What Metabase does](01-what-is-metabase.md). 
diff --git a/flow-typed/styled-components.js b/flow-typed/styled-components.js new file mode 100644 index 000000000000..487dc36ecdf5 --- /dev/null +++ b/flow-typed/styled-components.js @@ -0,0 +1,5 @@ +/* For some reason this is needed to suppress flow errros */ + +declare module 'styled-components' { + declare module.exports: any +} diff --git a/frontend/interfaces/grid-styled.js b/frontend/interfaces/grid-styled.js new file mode 100644 index 000000000000..e5a24d142d42 --- /dev/null +++ b/frontend/interfaces/grid-styled.js @@ -0,0 +1,3 @@ +declare module "grid-styled" { + declare var exports: any; +} diff --git a/frontend/interfaces/icepick.js b/frontend/interfaces/icepick.js index 89eec22f31d3..941805b6a671 100644 --- a/frontend/interfaces/icepick.js +++ b/frontend/interfaces/icepick.js @@ -2,16 +2,42 @@ type Key = string | number; type Value = any; declare module icepick { - declare function assoc, K:Key, V:Value>(object: O, key: K, value: V): O; - declare function dissoc, K:Key, V:Value>(object: O, key: K): O; + declare function assoc, K: Key, V: Value>( + object: O, + key: K, + value: V, + ): O; + declare function dissoc, K: Key, V: Value>( + object: O, + key: K, + ): O; - declare function getIn, K:Key, V:Value>(object: ?O, path: Array): ?V; - declare function setIn, K:Key, V:Value>(object: O, path: Array, value: V): O; - declare function assocIn, K:Key, V:Value>(object: O, path: Array, value: V): O; - declare function updateIn, K:Key, V:Value>(object: O, path: Array, callback: ((value: V) => V)): O; + declare function getIn, K: Key, V: Value>( + object: ?O, + path: Array, + ): ?V; + declare function setIn, K: Key, V: Value>( + object: O, + path: Array, + value: V, + ): O; + declare function assocIn, K: Key, V: Value>( + object: O, + path: Array, + value: V, + ): O; + declare function dissocIn, K: Key>( + object: O, + path: Array, + ): O; + declare function updateIn, K: Key, V: Value>( + object: O, + path: Array, + callback: (value: V) => V, + ): O; - declare 
function merge>(object: O, other: O): O; + declare function merge>(object: O, other: O): O; - // TODO: improve this - declare function chain>(object: O): any; + // TODO: improve this + declare function chain>(object: O): any; } diff --git a/frontend/interfaces/redux-actions_v2.x.x.js b/frontend/interfaces/redux-actions_v2.x.x.js index 728652bd8c67..22056747b8de 100644 --- a/frontend/interfaces/redux-actions_v2.x.x.js +++ b/frontend/interfaces/redux-actions_v2.x.x.js @@ -1,7 +1,6 @@ // Origin: https://github.com/flowtype/flow-typed/blob/master/definitions/npm/redux-actions_v2.x.x/flow_v0.34.x-/redux-actions_v2.x.x.js -declare module 'redux-actions' { - +declare module "redux-actions" { /* * Use `ActionType` to get the type of the action created by a given action * creator. For example: @@ -17,7 +16,6 @@ declare module 'redux-actions' { declare type ActionType = _ActionType<*, ActionCreator>; declare type _ActionType R> = R; - /* * To get the most from Flow type checking use a `payloadCreator` argument * with `createAction`. 
Make sure that Flow can infer the argument type of the @@ -29,30 +27,39 @@ declare module 'redux-actions' { */ declare function createAction( type: T, - $?: empty // hack to force Flow to not use this signature when more than one argument is given + $?: empty, // hack to force Flow to not use this signature when more than one argument is given ): (payload: P, ...rest: any[]) => { type: T, payload: P, error?: boolean }; declare function createAction( type: T, payloadCreator: (_: P) => P2, - $?: empty + $?: empty, ): (payload: P, ...rest: any[]) => { type: T, payload: P2, error?: boolean }; declare function createAction( type: T, payloadCreator: (_: P) => P2, - metaCreator: (_: P) => M - ): (payload: P, ...rest: any[]) => { type: T, payload: P2, error?: boolean, meta: M }; + metaCreator: (_: P) => M, + ): ( + payload: P, + ...rest: any[] + ) => { type: T, payload: P2, error?: boolean, meta: M }; declare function createAction( type: T, payloadCreator: null | void, - metaCreator: (_: P) => M - ): (payload: P, ...rest: any[]) => { type: T, payload: P, error?: boolean, meta: M }; + metaCreator: (_: P) => M, + ): ( + payload: P, + ...rest: any[] + ) => { type: T, payload: P, error?: boolean, meta: M }; // `createActions` is quite difficult to write a type for. Maybe try not to // use this one? 
- declare function createActions(actionMap: Object, ...identityActions: string[]): Object; + declare function createActions( + actionMap: Object, + ...identityActions: string[] + ): Object; declare function createActions(...identityActions: string[]): Object; declare type Reducer = (state: S, action: A) => S; @@ -60,7 +67,7 @@ declare module 'redux-actions' { declare type ReducerMap = | { next: Reducer } | { throw: Reducer } - | { next: Reducer, throw: Reducer } + | { next: Reducer, throw: Reducer }; /* * To get full advantage from Flow, use a type annotation on the action @@ -79,14 +86,17 @@ declare module 'redux-actions' { declare function handleAction( type: Type, reducer: Reducer | ReducerMap, - defaultState: State + defaultState: State, ): Reducer; declare function handleActions( - reducers: { [key: string]: Reducer | ReducerMap }, - defaultState?: State + reducers: { + [key: string]: Reducer | ReducerMap, + }, + defaultState?: State, ): Reducer; - declare function combineActions(...types: (string | Symbol | Function)[]) : string; - + declare function combineActions( + ...types: (string | Symbol | Function)[] + ): string; } diff --git a/frontend/interfaces/underscore.js b/frontend/interfaces/underscore.js index 4c9edd5d120f..f62b30311960 100644 --- a/frontend/interfaces/underscore.js +++ b/frontend/interfaces/underscore.js @@ -1,9 +1,19 @@ // type definitions for (some of) underscore declare module "underscore" { - declare function find(list: ?T[], predicate: (val: T)=>boolean): ?T; - declare function findWhere(list: ?T[], properties: {[key:string]: any}): ?T; - declare function findIndex(list: ?T[], predicate: (val: T)=>boolean): number; + declare function find(list: ?(T[]), predicate: (val: T) => boolean): ?T; + declare function findWhere( + list: ?(T[]), + properties: { [key: string]: any }, + ): ?T; + declare function findIndex( + list: ?(T[]), + predicate: (val: T) => boolean, + ): number; + declare function findLastIndex( + list: ?(T[]), + predicate: 
(val: T) => boolean, + ): number; declare function clone(obj: T): T; @@ -15,59 +25,100 @@ declare module "underscore" { declare function flatten(a: Array>): S[]; - - declare function each(o: {[key:string]: T}, iteratee: (val: T, key: string)=>void): void; - declare function each(a: T[], iteratee: (val: T, key: string)=>void): void; - - declare function map(a: T[], iteratee: (val: T, n?: number)=>U): U[]; - declare function map(a: {[key:K]: T}, iteratee: (val: T, k?: K)=>U): U[]; + declare function each( + o: { [key: string]: T }, + iteratee: (val: T, key: string) => void, + ): void; + declare function each( + a: T[], + iteratee: (val: T, key: string) => void, + ): void; + + declare function map(a: T[], iteratee: (val: T, n?: number) => U): U[]; + declare function map( + a: { [key: K]: T }, + iteratee: (val: T, k?: K) => U, + ): U[]; declare function mapObject( - object: Object, - iteratee: (val: any, key: string) => Object, - context?: mixed + object: Object, + iteratee: (val: any, key: string) => Object, + context?: mixed, ): Object; - declare function object(a: Array<[string, T]>): {[key:string]: T}; + declare function object(a: Array<[string, T]>): { [key: string]: T }; - declare function every(a: Array, pred: (val: T)=>boolean): boolean; - declare function some(a: Array, pred: (val: T)=>boolean): boolean; - declare function all(a: Array, pred: (val: T)=>boolean): boolean; - declare function any(a: Array, pred: (val: T)=>boolean): boolean; + declare function every(a: Array, pred: (val: T) => boolean): boolean; + declare function some(a: Array, pred: (val: T) => boolean): boolean; + declare function all(a: Array, pred: (val: T) => boolean): boolean; + declare function any(a: Array, pred: (val: T) => boolean): boolean; declare function contains(a: Array, val: T): boolean; declare function initial(a: Array, n?: number): Array; declare function rest(a: Array, index?: number): Array; - declare function sortBy(a: T[], iteratee: string|(val: T)=>any): T[]; + declare 
function sortBy(a: T[], iteratee: string | ((val: T) => any)): T[]; - declare function filter(o: {[key:string]: T}, pred: (val: T, k: string)=>boolean): T[]; + declare function filter( + o: { [key: string]: T }, + pred: (val: T, k: string) => boolean, + ): T[]; declare function isEmpty(o: any): boolean; declare function isString(o: any): boolean; declare function isObject(o: any): boolean; declare function isArray(o: any): boolean; - declare function groupBy(a: Array, iteratee: string|(val: T, index: number)=>any): {[key:string]: T[]}; + declare function groupBy( + a: Array, + iteratee: string | ((val: T, index: number) => any), + ): { [key: string]: T[] }; - declare function min(a: Array|{[key:any]: T}): T; - declare function max(a: Array|{[key:any]: T}): T; + declare function min(a: Array | { [key: any]: T }): T; + declare function max(a: Array | { [key: any]: T }): T; declare function uniq(a: T[], iteratee?: (val: T) => boolean): T[]; - declare function uniq(a: T[], isSorted: boolean, iteratee?: (val: T) => boolean): T[]; - - declare function values(o: {[key: any]: T}): T[]; - declare function omit(o: {[key: any]: any}, ...properties: string[]): {[key: any]: any}; - declare function omit(o: {[key: any]: any}, predicate: (val: any, key: any, object: {[key: any]: any})=>boolean): {[key: any]: any}; - declare function pick(o: {[key: any]: any}, ...properties: string[]): {[key: any]: any}; - declare function pick(o: {[key: any]: any}, predicate: (val: any, key: any, object: {[key: any]: any})=>boolean): {[key: any]: any}; - declare function pluck(o: Array<{[key: any]: any}>, propertyNames: string): Array; - declare function has(o: {[key: any]: any}, ...properties: string[]): boolean; + declare function uniq( + a: T[], + isSorted: boolean, + iteratee?: (val: T) => boolean, + ): T[]; + + declare function values(o: { [key: any]: T }): T[]; + declare function omit( + o: { [key: any]: any }, + ...properties: string[] + ): { [key: any]: any }; + declare function omit( + 
o: { [key: any]: any }, + predicate: (val: any, key: any, object: { [key: any]: any }) => boolean, + ): { [key: any]: any }; + declare function pick( + o: { [key: any]: any }, + ...properties: string[] + ): { [key: any]: any }; + declare function pick( + o: { [key: any]: any }, + predicate: (val: any, key: any, object: { [key: any]: any }) => boolean, + ): { [key: any]: any }; + declare function pluck( + o: Array<{ [key: any]: any }>, + propertyNames: string, + ): Array; + declare function has( + o: { [key: any]: any }, + ...properties: string[] + ): boolean; declare function difference(array: T[], ...others: T[][]): T[]; declare function flatten(a: Array): Array; - declare function debounce any>(func: T): T; + declare function debounce any>(func: T): T; + + declare function partition( + array: T[], + pred: (val: T) => boolean, + ): [T[], T[]]; // TODO: improve this declare function chain(obj: S): any; diff --git a/frontend/lint/eslint-rules/no-color-literals.js b/frontend/lint/eslint-rules/no-color-literals.js new file mode 100644 index 000000000000..32f96d1e393b --- /dev/null +++ b/frontend/lint/eslint-rules/no-color-literals.js @@ -0,0 +1,39 @@ +/** + * @fileoverview Rule to disallow color literals + * @author Tom Robinson + */ + +"use strict"; + +//------------------------------------------------------------------------------ +// Rule Definition +//------------------------------------------------------------------------------ + +const COLOR_REGEX = /(?:#[a-fA-F0-9]{3}(?:[a-fA-F0-9]{3})?\b|(?:rgb|hsl)a?\(\s*\d+\s*(?:,\s*\d+(?:\.\d+)?%?\s*){2,3}\))/g; +const LINT_MESSAGE = + "Color literals forbidden. 
Import colors from 'metabase/lib/colors'."; + +module.exports = { + meta: { + docs: { + description: "disallow color literals", + category: "Possible Errors", + recommended: true, + }, + schema: [], // no options + }, + create: function(context) { + return { + Literal(node) { + if (typeof node.value === "string" && COLOR_REGEX.test(node.value)) { + context.report({ node, message: LINT_MESSAGE }); + } + }, + TemplateLiteral(node) { + if (node.quasis.filter(q => COLOR_REGEX.test(q.value.raw)).length > 0) { + context.report({ node, message: LINT_MESSAGE }); + } + }, + }; + }, +}; diff --git a/frontend/src/metabase-lib/lib/Action.js b/frontend/src/metabase-lib/lib/Action.js index c42268a3e582..c05d4fe336a9 100644 --- a/frontend/src/metabase-lib/lib/Action.js +++ b/frontend/src/metabase-lib/lib/Action.js @@ -1,7 +1,7 @@ /* @flow weak */ export default class Action { - perform() {} + perform() {} } export class ActionClick {} diff --git a/frontend/src/metabase-lib/lib/Alert.js b/frontend/src/metabase-lib/lib/Alert.js new file mode 100644 index 000000000000..508b5eacec82 --- /dev/null +++ b/frontend/src/metabase-lib/lib/Alert.js @@ -0,0 +1,37 @@ +export const ALERT_TYPE_ROWS = "alert-type-rows"; +export const ALERT_TYPE_TIMESERIES_GOAL = "alert-type-timeseries-goal"; +export const ALERT_TYPE_PROGRESS_BAR_GOAL = "alert-type-progress-bar-goal"; + +export type AlertType = + | ALERT_TYPE_ROWS + | ALERT_TYPE_TIMESERIES_GOAL + | ALERT_TYPE_PROGRESS_BAR_GOAL; + +export const getDefaultAlert = (question, user, visualizationSettings) => { + const alertType = question.alertType(visualizationSettings); + + const typeDependentAlertFields = + alertType === ALERT_TYPE_ROWS + ? 
{ alert_condition: "rows", alert_first_only: false } + : { + alert_condition: "goal", + alert_first_only: true, + alert_above_goal: true, + }; + + const defaultEmailChannel = { + enabled: true, + channel_type: "email", + recipients: [user], + schedule_day: "mon", + schedule_frame: null, + schedule_hour: 0, + schedule_type: "daily", + }; + + return { + card: { id: question.id(), include_csv: false, include_xls: false }, + channels: [defaultEmailChannel], + ...typeDependentAlertFields, + }; +}; diff --git a/frontend/src/metabase-lib/lib/Dashboard.js b/frontend/src/metabase-lib/lib/Dashboard.js index bc10bd91865d..d48812c10251 100644 --- a/frontend/src/metabase-lib/lib/Dashboard.js +++ b/frontend/src/metabase-lib/lib/Dashboard.js @@ -1,3 +1,3 @@ export default class Dashboard { - getParameters() {} + getParameters() {} } diff --git a/frontend/src/metabase-lib/lib/Dimension.js b/frontend/src/metabase-lib/lib/Dimension.js index 39ec3621b811..9154ba6a597c 100644 --- a/frontend/src/metabase-lib/lib/Dimension.js +++ b/frontend/src/metabase-lib/lib/Dimension.js @@ -1,8 +1,9 @@ import React from "react"; +import { t, ngettext, msgid } from "c-3po"; import Icon from "metabase/components/Icon"; -import { stripId, inflect } from "metabase/lib/formatting"; +import { stripId } from "metabase/lib/formatting"; import Query_DEPRECATED from "metabase/lib/query"; import { mbqlEq } from "metabase/lib/query/util"; import _ from "underscore"; @@ -11,12 +12,12 @@ import Field from "./metadata/Field"; import Metadata from "./metadata/Metadata"; import type { - ConcreteField, - LocalFieldReference, - ForeignFieldReference, - DatetimeField, - ExpressionReference, - DatetimeUnit + ConcreteField, + LocalFieldReference, + ForeignFieldReference, + DatetimeField, + ExpressionReference, + DatetimeUnit, } from "metabase/meta/types/Query"; import type { IconName } from "metabase/meta/types"; @@ -25,8 +26,8 @@ import type { IconName } from "metabase/meta/types"; * A dimension option returned by the 
query_metadata API */ type DimensionOption = { - mbql: any, - name?: string + mbql: any, + name?: string, }; /** @@ -38,250 +39,252 @@ type DimensionOption = { * @abstract */ export default class Dimension { - _parent: ?Dimension; - _args: any; - _metadata: ?Metadata; - - // Display names provided by the backend - _subDisplayName: ?String; - _subTriggerDisplayName: ?String; - - /** - * Dimension constructor - */ - constructor( - parent: ?Dimension, - args: any[], - metadata?: Metadata - ): Dimension { - this._parent = parent; - this._args = args; - this._metadata = metadata || (parent && parent._metadata); - } - - /** - * Parses an MBQL expression into an appropriate Dimension subclass, if possible. - * Metadata should be provided if you intend to use the display name or render methods. - */ - static parseMBQL(mbql: ConcreteField, metadata?: Metadata): ?Dimension { - for (const D of DIMENSION_TYPES) { - const dimension = D.parseMBQL(mbql, metadata); - if (dimension != null) { - return dimension; - } - } - return null; - } - - /** - * Returns true if these two dimensions are identical to one another. - */ - static isEqual(a: ?Dimension | ConcreteField, b: ?Dimension): boolean { - let dimensionA: ?Dimension = a instanceof Dimension - ? a - : // $FlowFixMe - Dimension.parseMBQL(a, this._metadata); - let dimensionB: ?Dimension = b instanceof Dimension - ? b - : // $FlowFixMe - Dimension.parseMBQL(b, this._metadata); - return !!dimensionA && !!dimensionB && dimensionA.isEqual(dimensionB); - } - - /** - * Sub-dimensions for the provided dimension of this type. - * @abstract - */ - // TODO Atte Keinänen 5/21/17: Rename either this or the instance method with the same name - // Also making it clear in the method name that we're working with sub-dimensions would be good - static dimensions(parent: Dimension): Dimension[] { - return []; - } - - /** - * The default sub-dimension for the provided dimension of this type, if any. 
- * @abstract - */ - static defaultDimension(parent: Dimension): ?Dimension { - return null; - } - - /** - * Returns "sub-dimensions" of this dimension. - * @abstract - */ - // TODO Atte Keinänen 5/21/17: Rename either this or the static method with the same name - // Also making it clear in the method name that we're working with sub-dimensions would be good - dimensions(DimensionTypes?: typeof Dimension[]): Dimension[] { - const dimensionOptions = this.field().dimension_options; - if (!DimensionTypes && dimensionOptions) { - return dimensionOptions.map(option => - this._dimensionForOption(option)); - } else { - return [].concat( - ...(DimensionTypes || []) - .map(DimensionType => DimensionType.dimensions(this)) - ); - } - } - - /** - * Returns the default sub-dimension of this dimension, if any. - * @abstract - */ - defaultDimension(DimensionTypes: any[] = DIMENSION_TYPES): ?Dimension { - const defaultDimensionOption = this.field().default_dimension_option; - if (defaultDimensionOption) { - return this._dimensionForOption(defaultDimensionOption); - } else { - for (const DimensionType of DimensionTypes) { - const defaultDimension = DimensionType.defaultDimension(this); - if (defaultDimension) { - return defaultDimension; - } - } - } - - return null; - } - - // Internal method gets a Dimension from a DimensionOption - _dimensionForOption(option: DimensionOption) { - // fill in the parent field ref - const fieldRef = this.baseDimension().mbql(); - let mbql = option.mbql; - if (mbql) { - mbql = [mbql[0], fieldRef, ...mbql.slice(2)]; - } else { - mbql = fieldRef; - } - let dimension = Dimension.parseMBQL(mbql, this._metadata); - if (option.name) { - dimension._subDisplayName = option.name; - dimension._subTriggerDisplayName = option.name; - } + _parent: ?Dimension; + _args: any; + _metadata: ?Metadata; + + // Display names provided by the backend + _subDisplayName: ?String; + _subTriggerDisplayName: ?String; + + /** + * Dimension constructor + */ + constructor(parent: 
?Dimension, args: any[], metadata?: Metadata): Dimension { + this._parent = parent; + this._args = args; + this._metadata = metadata || (parent && parent._metadata); + } + + /** + * Parses an MBQL expression into an appropriate Dimension subclass, if possible. + * Metadata should be provided if you intend to use the display name or render methods. + */ + static parseMBQL(mbql: ConcreteField, metadata?: Metadata): ?Dimension { + for (const D of DIMENSION_TYPES) { + const dimension = D.parseMBQL(mbql, metadata); + if (dimension != null) { return dimension; - } - - /** - * Is this dimension idential to another dimension or MBQL clause - */ - isEqual(other: ?Dimension | ConcreteField): boolean { - if (other == null) { - return false; - } - - let otherDimension: ?Dimension = other instanceof Dimension - ? other - : Dimension.parseMBQL(other, this._metadata); - if (!otherDimension) { - return false; + } + } + return null; + } + + /** + * Returns true if these two dimensions are identical to one another. + */ + static isEqual(a: ?Dimension | ConcreteField, b: ?Dimension): boolean { + let dimensionA: ?Dimension = + a instanceof Dimension + ? a + : // $FlowFixMe + Dimension.parseMBQL(a, this._metadata); + let dimensionB: ?Dimension = + b instanceof Dimension + ? b + : // $FlowFixMe + Dimension.parseMBQL(b, this._metadata); + return !!dimensionA && !!dimensionB && dimensionA.isEqual(dimensionB); + } + + /** + * Sub-dimensions for the provided dimension of this type. + * @abstract + */ + // TODO Atte Keinänen 5/21/17: Rename either this or the instance method with the same name + // Also making it clear in the method name that we're working with sub-dimensions would be good + static dimensions(parent: Dimension): Dimension[] { + return []; + } + + /** + * The default sub-dimension for the provided dimension of this type, if any. 
+ * @abstract + */ + static defaultDimension(parent: Dimension): ?Dimension { + return null; + } + + /** + * Returns "sub-dimensions" of this dimension. + * @abstract + */ + // TODO Atte Keinänen 5/21/17: Rename either this or the static method with the same name + // Also making it clear in the method name that we're working with sub-dimensions would be good + dimensions(DimensionTypes?: typeof Dimension[]): Dimension[] { + const dimensionOptions = this.field().dimension_options; + if (!DimensionTypes && dimensionOptions) { + return dimensionOptions.map(option => this._dimensionForOption(option)); + } else { + return [].concat( + ...(DimensionTypes || []).map(DimensionType => + DimensionType.dimensions(this), + ), + ); + } + } + + /** + * Returns the default sub-dimension of this dimension, if any. + * @abstract + */ + defaultDimension(DimensionTypes: any[] = DIMENSION_TYPES): ?Dimension { + const defaultDimensionOption = this.field().default_dimension_option; + if (defaultDimensionOption) { + return this._dimensionForOption(defaultDimensionOption); + } else { + for (const DimensionType of DimensionTypes) { + const defaultDimension = DimensionType.defaultDimension(this); + if (defaultDimension) { + return defaultDimension; } - // must be instace of the same class - if (this.constructor !== otherDimension.constructor) { - return false; - } - // must both or neither have a parent - if (!this._parent !== !otherDimension._parent) { - return false; - } - // parents must be equal - if (this._parent && !this._parent.isEqual(otherDimension._parent)) { - return false; - } - // args must be equal - if (!_.isEqual(this._args, otherDimension._args)) { - return false; - } - return true; - } - - /** - * Does this dimension have the same underlying base dimension, typically a field - */ - isSameBaseDimension(other: ?Dimension | ConcreteField): boolean { - if (other == null) { - return false; - } - - let otherDimension: ?Dimension = other instanceof Dimension - ? 
other - : Dimension.parseMBQL(other, this._metadata); - - const baseDimensionA = this.baseDimension(); - const baseDimensionB = otherDimension && otherDimension.baseDimension(); - - return !!baseDimensionA && - !!baseDimensionB && - baseDimensionA.isEqual(baseDimensionB); - } - - /** - * The base dimension of this dimension, typically a field. May return itself. - */ - baseDimension(): Dimension { - return this; - } - - /** - * The underlying field for this dimension - */ - field(): Field { - return new Field(); - } - - /** - * Valid operators on this dimension - */ - operators() { - return this.field().operators || []; - } - - /** - * The operator with the provided operator name (e.x. `=`, `<`, etc) - */ - operator(op) { - return this.field().operator(op); - } - - /** - * The display name of this dimension, e.x. the field's display_name - * @abstract - */ - displayName(): string { - return ""; - } - - /** - * The name to be shown when this dimension is being displayed as a sub-dimension of another - * @abstract - */ - subDisplayName(): string { - return this._subDisplayName || ""; - } - - /** - * A shorter version of subDisplayName, e.x. to be shown in the dimension picker trigger - * @abstract - */ - subTriggerDisplayName(): string { - return this._subTriggerDisplayName || ""; - } - - /** - * An icon name representing this dimension's type, to be used in the component. 
- * @abstract - */ - icon(): ?IconName { - return null; - } - - /** - * Renders a dimension to React - */ - render(): ?React$Element { - return [this.displayName()]; - } + } + } + + return null; + } + + // Internal method gets a Dimension from a DimensionOption + _dimensionForOption(option: DimensionOption) { + // fill in the parent field ref + const fieldRef = this.baseDimension().mbql(); + let mbql = option.mbql; + if (mbql) { + mbql = [mbql[0], fieldRef, ...mbql.slice(2)]; + } else { + mbql = fieldRef; + } + let dimension = Dimension.parseMBQL(mbql, this._metadata); + if (option.name) { + dimension._subDisplayName = option.name; + dimension._subTriggerDisplayName = option.name; + } + return dimension; + } + + /** + * Is this dimension idential to another dimension or MBQL clause + */ + isEqual(other: ?Dimension | ConcreteField): boolean { + if (other == null) { + return false; + } + + let otherDimension: ?Dimension = + other instanceof Dimension + ? other + : Dimension.parseMBQL(other, this._metadata); + if (!otherDimension) { + return false; + } + // must be instace of the same class + if (this.constructor !== otherDimension.constructor) { + return false; + } + // must both or neither have a parent + if (!this._parent !== !otherDimension._parent) { + return false; + } + // parents must be equal + if (this._parent && !this._parent.isEqual(otherDimension._parent)) { + return false; + } + // args must be equal + if (!_.isEqual(this._args, otherDimension._args)) { + return false; + } + return true; + } + + /** + * Does this dimension have the same underlying base dimension, typically a field + */ + isSameBaseDimension(other: ?Dimension | ConcreteField): boolean { + if (other == null) { + return false; + } + + let otherDimension: ?Dimension = + other instanceof Dimension + ? 
other + : Dimension.parseMBQL(other, this._metadata); + + const baseDimensionA = this.baseDimension(); + const baseDimensionB = otherDimension && otherDimension.baseDimension(); + + return ( + !!baseDimensionA && + !!baseDimensionB && + baseDimensionA.isEqual(baseDimensionB) + ); + } + + /** + * The base dimension of this dimension, typically a field. May return itself. + */ + baseDimension(): Dimension { + return this; + } + + /** + * The underlying field for this dimension + */ + field(): Field { + return new Field(); + } + + /** + * Valid operators on this dimension + */ + operators() { + return this.field().operators || []; + } + + /** + * The operator with the provided operator name (e.x. `=`, `<`, etc) + */ + operator(op) { + return this.field().operator(op); + } + + /** + * The display name of this dimension, e.x. the field's display_name + * @abstract + */ + displayName(): string { + return ""; + } + + /** + * The name to be shown when this dimension is being displayed as a sub-dimension of another + * @abstract + */ + subDisplayName(): string { + return this._subDisplayName || ""; + } + + /** + * A shorter version of subDisplayName, e.x. to be shown in the dimension picker trigger + * @abstract + */ + subTriggerDisplayName(): string { + return this._subTriggerDisplayName || ""; + } + + /** + * An icon name representing this dimension's type, to be used in the component. 
+ * @abstract + */ + icon(): ?IconName { + return null; + } + + /** + * Renders a dimension to React + */ + render(): ?React$Element { + return [this.displayName()]; + } } /** @@ -289,286 +292,286 @@ export default class Dimension { * @abstract */ export class FieldDimension extends Dimension { - field(): Field { - if (this._parent instanceof FieldDimension) { - return this._parent.field(); - } - return new Field(); - } - - displayName(): string { - return stripId( - Query_DEPRECATED.getFieldPathName( - this.field().id, - this.field().table - ) - ); - } - - subDisplayName(): string { - if (this._subDisplayName) { - return this._subTriggerDisplayName; - } else if (this._parent) { - // TODO Atte Keinänen 8/1/17: Is this used at all? - // foreign key, show the field name - return this.field().display_name; - } else { - // TODO Atte Keinänen 8/1/17: Is this used at all? - return "Default"; - } - } - - subTriggerDisplayName(): string { - if (this.defaultDimension() instanceof BinnedDimension) { - return "Unbinned"; - } else { - return ""; - } - } - - icon() { - return this.field().icon(); - } + field(): Field { + if (this._parent instanceof FieldDimension) { + return this._parent.field(); + } + return new Field(); + } + + displayName(): string { + return stripId( + Query_DEPRECATED.getFieldPathName(this.field().id, this.field().table), + ); + } + + subDisplayName(): string { + if (this._subDisplayName) { + return this._subTriggerDisplayName; + } else if (this._parent) { + // TODO Atte Keinänen 8/1/17: Is this used at all? + // foreign key, show the field name + return this.field().display_name; + } else { + // TODO Atte Keinänen 8/1/17: Is this used at all? 
+ return "Default"; + } + } + + subTriggerDisplayName(): string { + if (this.defaultDimension() instanceof BinnedDimension) { + return "Unbinned"; + } else { + return ""; + } + } + + icon() { + return this.field().icon(); + } } /** * Field ID-based dimension, `["field-id", field-id]` */ export class FieldIDDimension extends FieldDimension { - static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata) { - if (typeof mbql === "number") { - // DEPRECATED: bare field id - return new FieldIDDimension(null, [mbql], metadata); - } else if (Array.isArray(mbql) && mbqlEq(mbql[0], "field-id")) { - return new FieldIDDimension(null, mbql.slice(1), metadata); - } - return null; - } - - mbql(): LocalFieldReference { - return ["field-id", this._args[0]]; - } - - field() { - return (this._metadata && this._metadata.fields[this._args[0]]) || - new Field(); - } + static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata) { + if (typeof mbql === "number") { + // DEPRECATED: bare field id + return new FieldIDDimension(null, [mbql], metadata); + } else if (Array.isArray(mbql) && mbqlEq(mbql[0], "field-id")) { + return new FieldIDDimension(null, mbql.slice(1), metadata); + } + return null; + } + + mbql(): LocalFieldReference { + return ["field-id", this._args[0]]; + } + + field() { + return ( + (this._metadata && this._metadata.fields[this._args[0]]) || new Field() + ); + } } /** * Foreign key-based dimension, `["fk->", fk-field-id, dest-field-id]` */ export class FKDimension extends FieldDimension { - static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata): ?Dimension { - if (Array.isArray(mbql) && mbqlEq(mbql[0], "fk->")) { - // $FlowFixMe - const fkRef: ForeignFieldReference = mbql; - const parent = Dimension.parseMBQL(fkRef[1], metadata); - return new FKDimension(parent, fkRef.slice(2)); - } - return null; - } - - static dimensions(parent: Dimension): Dimension[] { - if (parent instanceof FieldDimension) { - const field = parent.field(); - if (field.target && 
field.target.table) { - return field.target.table.fields.map( - field => new FKDimension(parent, [field.id]) - ); - } - } - return []; - } - - mbql(): ForeignFieldReference { - // TODO: not sure `this._parent._args[0]` is the best way to handle this? - // we don't want the `["field-id", ...]` wrapper from the `this._parent.mbql()` - return ["fk->", this._parent._args[0], this._args[0]]; - } - - field() { - return (this._metadata && this._metadata.fields[this._args[0]]) || - new Field(); - } - - render() { - return [ - stripId(this._parent.field().display_name), - , - this.field().display_name - ]; - } + static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata): ?Dimension { + if (Array.isArray(mbql) && mbqlEq(mbql[0], "fk->")) { + // $FlowFixMe + const fkRef: ForeignFieldReference = mbql; + const parent = Dimension.parseMBQL(fkRef[1], metadata); + return new FKDimension(parent, fkRef.slice(2)); + } + return null; + } + + static dimensions(parent: Dimension): Dimension[] { + if (parent instanceof FieldDimension) { + const field = parent.field(); + if (field.target && field.target.table) { + return field.target.table.fields.map( + field => new FKDimension(parent, [field.id]), + ); + } + } + return []; + } + + mbql(): ForeignFieldReference { + // TODO: not sure `this._parent._args[0]` is the best way to handle this? 
+ // we don't want the `["field-id", ...]` wrapper from the `this._parent.mbql()` + return ["fk->", this._parent._args[0], this._args[0]]; + } + + field() { + return ( + (this._metadata && this._metadata.fields[this._args[0]]) || new Field() + ); + } + + render() { + return [ + stripId(this._parent.field().display_name), + , + this.field().display_name, + ]; + } } import { DATETIME_UNITS, formatBucketing } from "metabase/lib/query_time"; const isFieldDimension = dimension => - dimension instanceof FieldIDDimension || dimension instanceof FKDimension; + dimension instanceof FieldIDDimension || dimension instanceof FKDimension; /** * DatetimeField dimension, `["datetime-field", field-reference, datetime-unit]` */ export class DatetimeFieldDimension extends FieldDimension { - static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata): ?Dimension { - if (Array.isArray(mbql) && mbqlEq(mbql[0], "datetime-field")) { - const parent = Dimension.parseMBQL(mbql[1], metadata); - // DEPRECATED: ["datetime-field", id, "of", unit] - if (mbql.length === 4) { - return new DatetimeFieldDimension(parent, mbql.slice(3)); - } else { - return new DatetimeFieldDimension(parent, mbql.slice(2)); - } - } - return null; - } - - static dimensions(parent: Dimension): Dimension[] { - if (isFieldDimension(parent) && parent.field().isDate()) { - return DATETIME_UNITS.map( - unit => new DatetimeFieldDimension(parent, [unit]) - ); - } - return []; - } - - static defaultDimension(parent: Dimension): ?Dimension { - if (isFieldDimension(parent) && parent.field().isDate()) { - return new DatetimeFieldDimension(parent, ["day"]); - } - return null; - } - - mbql(): DatetimeField { - return ["datetime-field", this._parent.mbql(), this._args[0]]; - } - - baseDimension(): Dimension { - return this._parent.baseDimension(); - } - - bucketing(): DatetimeUnit { - return this._args[0]; - } - - subDisplayName(): string { - return formatBucketing(this._args[0]); - } - - subTriggerDisplayName(): string { - return 
"by " + formatBucketing(this._args[0]).toLowerCase(); - } - - render() { - return [...super.render(), ": ", this.subDisplayName()]; - } + static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata): ?Dimension { + if (Array.isArray(mbql) && mbqlEq(mbql[0], "datetime-field")) { + const parent = Dimension.parseMBQL(mbql[1], metadata); + // DEPRECATED: ["datetime-field", id, "of", unit] + if (mbql.length === 4) { + return new DatetimeFieldDimension(parent, mbql.slice(3)); + } else { + return new DatetimeFieldDimension(parent, mbql.slice(2)); + } + } + return null; + } + + static dimensions(parent: Dimension): Dimension[] { + if (isFieldDimension(parent) && parent.field().isDate()) { + return DATETIME_UNITS.map( + unit => new DatetimeFieldDimension(parent, [unit]), + ); + } + return []; + } + + static defaultDimension(parent: Dimension): ?Dimension { + if (isFieldDimension(parent) && parent.field().isDate()) { + return new DatetimeFieldDimension(parent, ["day"]); + } + return null; + } + + mbql(): DatetimeField { + return ["datetime-field", this._parent.mbql(), this._args[0]]; + } + + baseDimension(): Dimension { + return this._parent.baseDimension(); + } + + bucketing(): DatetimeUnit { + return this._args[0]; + } + + subDisplayName(): string { + return formatBucketing(this._args[0]); + } + + subTriggerDisplayName(): string { + return "by " + formatBucketing(this._args[0]).toLowerCase(); + } + + render() { + return [...super.render(), ": ", this.subDisplayName()]; + } } /** * Binned dimension, `["binning-strategy", field-reference, strategy, ...args]` */ export class BinnedDimension extends FieldDimension { - static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata) { - if (Array.isArray(mbql) && mbqlEq(mbql[0], "binning-strategy")) { - const parent = Dimension.parseMBQL(mbql[1], metadata); - return new BinnedDimension(parent, mbql.slice(2)); - } - return null; - } - - static dimensions(parent: Dimension): Dimension[] { - // Subdimensions are are provided by the 
backend through the dimension_options field property - return []; - } - - mbql() { - return ["binning-strategy", this._parent.mbql(), ...this._args]; - } - - baseDimension(): Dimension { - return this._parent.baseDimension(); - } - - subTriggerDisplayName(): string { - if (this._args[0] === "num-bins") { - return `${this._args[1]} ${inflect("bins", this._args[1])}`; - } else if (this._args[0] === "bin-width") { - const binWidth = this._args[1]; - const units = this.field().isCoordinate() ? "°" : ""; - return `${binWidth}${units}`; - } else { - return "Auto binned"; - } - } - - render() { - return [...super.render(), ": ", this.subTriggerDisplayName()]; - } + static parseMBQL(mbql: ConcreteField, metadata?: ?Metadata) { + if (Array.isArray(mbql) && mbqlEq(mbql[0], "binning-strategy")) { + const parent = Dimension.parseMBQL(mbql[1], metadata); + return new BinnedDimension(parent, mbql.slice(2)); + } + return null; + } + + static dimensions(parent: Dimension): Dimension[] { + // Subdimensions are are provided by the backend through the dimension_options field property + return []; + } + + mbql() { + return ["binning-strategy", this._parent.mbql(), ...this._args]; + } + + baseDimension(): Dimension { + return this._parent.baseDimension(); + } + + subTriggerDisplayName(): string { + if (this._args[0] === "num-bins") { + const n = this._args[1]; + return ngettext(msgid`${n} bin`, `${n} bins`, n); + } else if (this._args[0] === "bin-width") { + const binWidth = this._args[1]; + const units = this.field().isCoordinate() ? 
"°" : ""; + return `${binWidth}${units}`; + } else { + return t`Auto binned`; + } + } + + render() { + return [...super.render(), ": ", this.subTriggerDisplayName()]; + } } /** * Expression reference, `["expression", expression-name]` */ export class ExpressionDimension extends Dimension { - tag = "Custom"; + tag = "Custom"; - static parseMBQL(mbql: any, metadata?: ?Metadata): ?Dimension { - if (Array.isArray(mbql) && mbqlEq(mbql[0], "expression")) { - return new ExpressionDimension(null, mbql.slice(1)); - } + static parseMBQL(mbql: any, metadata?: ?Metadata): ?Dimension { + if (Array.isArray(mbql) && mbqlEq(mbql[0], "expression")) { + return new ExpressionDimension(null, mbql.slice(1)); } + } - mbql(): ExpressionReference { - return ["expression", this._args[0]]; - } + mbql(): ExpressionReference { + return ["expression", this._args[0]]; + } - displayName(): string { - return this._args[0]; - } + displayName(): string { + return this._args[0]; + } - icon(): IconName { - // TODO: eventually will need to get the type from the return type of the expression - return "int"; - } + icon(): IconName { + // TODO: eventually will need to get the type from the return type of the expression + return "int"; + } } /** * Aggregation reference, `["aggregation", aggregation-index]` */ export class AggregationDimension extends Dimension { - static parseMBQL(mbql: any, metadata?: ?Metadata): ?Dimension { - if (Array.isArray(mbql) && mbqlEq(mbql[0], "aggregation")) { - return new AggregationDimension(null, mbql.slice(1)); - } + static parseMBQL(mbql: any, metadata?: ?Metadata): ?Dimension { + if (Array.isArray(mbql) && mbqlEq(mbql[0], "aggregation")) { + return new AggregationDimension(null, mbql.slice(1)); } + } - constructor(parent, args, metadata, displayName) { - super(parent, args, metadata); - this._displayName = displayName; - } + constructor(parent, args, metadata, displayName) { + super(parent, args, metadata); + this._displayName = displayName; + } - displayName(): string { 
- return this._displayName; - } + displayName(): string { + return this._displayName; + } - aggregationIndex(): number { - return this._args[0]; - } + aggregationIndex(): number { + return this._args[0]; + } - mbql() { - return ["aggregation", this._args[0]]; - } + mbql() { + return ["aggregation", this._args[0]]; + } - icon() { - return "int"; - } + icon() { + return "int"; + } } const DIMENSION_TYPES: typeof Dimension[] = [ - FieldIDDimension, - FKDimension, - DatetimeFieldDimension, - ExpressionDimension, - BinnedDimension, - AggregationDimension + FieldIDDimension, + FKDimension, + DatetimeFieldDimension, + ExpressionDimension, + BinnedDimension, + AggregationDimension, ]; diff --git a/frontend/src/metabase-lib/lib/Mode.js b/frontend/src/metabase-lib/lib/Mode.js index 385a5ffba4be..badcebc266dd 100644 --- a/frontend/src/metabase-lib/lib/Mode.js +++ b/frontend/src/metabase-lib/lib/Mode.js @@ -4,52 +4,54 @@ import Question from "metabase-lib/lib/Question"; import { getMode } from "metabase/qb/lib/modes"; import type { - ClickAction, - ClickObject, - QueryMode + ClickAction, + ClickObject, + QueryMode, } from "metabase/meta/types/Visualization"; export default class Mode { - _question: Question; - _queryMode: QueryMode; - - constructor(question: Question, queryMode: QueryMode) { - this._question = question; - this._queryMode = queryMode; - } - - static forQuestion(question: Question): ?Mode { - // TODO Atte Keinänen 6/22/17: Move getMode here and refactor it after writing tests - const card = question.card(); - const tableMetadata = question.tableMetadata(); - const queryMode = getMode(card, tableMetadata); - - if (queryMode) { - return new Mode(question, queryMode); - } else { - return null; - } - } - - queryMode() { - return this._queryMode; - } - - name() { - return this._queryMode.name; - } - - actions(settings): ClickAction[] { - return _.flatten( - this._queryMode.actions.map(actionCreator => - actionCreator({ question: this._question, settings })) - ); - } 
- - actionsForClick(clicked: ?ClickObject, settings): ClickAction[] { - return _.flatten( - this._queryMode.drills.map(actionCreator => - actionCreator({ question: this._question, settings, clicked })) - ); + _question: Question; + _queryMode: QueryMode; + + constructor(question: Question, queryMode: QueryMode) { + this._question = question; + this._queryMode = queryMode; + } + + static forQuestion(question: Question): ?Mode { + // TODO Atte Keinänen 6/22/17: Move getMode here and refactor it after writing tests + const card = question.card(); + const tableMetadata = question.tableMetadata(); + const queryMode = getMode(card, tableMetadata); + + if (queryMode) { + return new Mode(question, queryMode); + } else { + return null; } + } + + queryMode() { + return this._queryMode; + } + + name() { + return this._queryMode.name; + } + + actions(settings): ClickAction[] { + return _.flatten( + this._queryMode.actions.map(actionCreator => + actionCreator({ question: this._question, settings }), + ), + ); + } + + actionsForClick(clicked: ?ClickObject, settings): ClickAction[] { + return _.flatten( + this._queryMode.drills.map(actionCreator => + actionCreator({ question: this._question, settings, clicked }), + ), + ); + } } diff --git a/frontend/src/metabase-lib/lib/Question.js b/frontend/src/metabase-lib/lib/Question.js index e9169b0e3e43..0ee2e88756a7 100644 --- a/frontend/src/metabase-lib/lib/Question.js +++ b/frontend/src/metabase-lib/lib/Question.js @@ -7,39 +7,41 @@ import Table from "./metadata/Table"; import Field from "./metadata/Field"; import StructuredQuery, { - STRUCTURED_QUERY_TEMPLATE + STRUCTURED_QUERY_TEMPLATE, } from "./queries/StructuredQuery"; import NativeQuery from "./queries/NativeQuery"; import { memoize } from "metabase-lib/lib/utils"; -import Utils from "metabase/lib/utils"; import * as Card_DEPRECATED from "metabase/lib/card"; -import Query_DEPRECATED from "metabase/lib/query"; -import { getParametersWithExtras } from "metabase/meta/Card"; +import 
{ getParametersWithExtras, isTransientId } from "metabase/meta/Card"; import { - summarize, - pivot, - filter, - breakout, - toUnderlyingRecords, - drillUnderlyingRecords + summarize, + pivot, + filter, + breakout, + distribution, + toUnderlyingRecords, + drillUnderlyingRecords, } from "metabase/qb/lib/actions"; import _ from "underscore"; import { chain, assoc } from "icepick"; import type { - Parameter as ParameterObject, - ParameterValues + Parameter as ParameterObject, + ParameterValues, } from "metabase/meta/types/Parameter"; import type { - DatasetQuery, - Card as CardObject + DatasetQuery, + Card as CardObject, + VisualizationSettings, } from "metabase/meta/types/Card"; import { MetabaseApi, CardApi } from "metabase/services"; +import Questions from "metabase/entities/questions"; + import AtomicQuery from "metabase-lib/lib/queries/AtomicQuery"; import type { Dataset } from "metabase/meta/types/Dataset"; @@ -47,403 +49,544 @@ import type { TableId } from "metabase/meta/types/Table"; import type { DatabaseId } from "metabase/meta/types/Database"; import * as Urls from "metabase/lib/urls"; import Mode from "metabase-lib/lib/Mode"; +import { + ALERT_TYPE_PROGRESS_BAR_GOAL, + ALERT_TYPE_ROWS, + ALERT_TYPE_TIMESERIES_GOAL, +} from "metabase-lib/lib/Alert"; /** * This is a wrapper around a question/card object, which may contain one or more Query objects */ export default class Question { - /** - * The Question wrapper requires a metadata object because the queries it contains (like {@link StructuredQuery)) - * need metadata for accessing databases, tables and metrics. - */ - _metadata: Metadata; - - /** - * The plain object presentation of this question, equal to the format that Metabase REST API understands. - * It is called `card` for both historical reasons and to make a clear distinction to this class. - */ - _card: CardObject; - - /** - * Parameter values mean either the current values of dashboard filters or SQL editor template parameters. 
- * They are in the grey area between UI state and question state, but having them in Question wrapper is convenient. - */ - _parameterValues: ParameterValues; - - /** - * Question constructor - */ - constructor( - metadata: Metadata, - card: CardObject, - parameterValues?: ParameterValues - ) { - this._metadata = metadata; - this._card = card; - this._parameterValues = parameterValues || {}; - } - - /** - * TODO Atte Keinänen 6/13/17: Discussed with Tom that we could use the default Question constructor instead, - * but it would require changing the constructor signature so that `card` is an optional parameter and has a default value - */ - static create( - { - databaseId, - tableId, - metadata, - parameterValues, - ...cardProps - }: { - databaseId?: DatabaseId, - tableId?: TableId, - metadata: Metadata, - parameterValues?: ParameterValues - } - ) { - // $FlowFixMe - const card: Card = { - name: cardProps.name || null, - display: cardProps.display || "table", - visualization_settings: cardProps.visualization_settings || {}, - dataset_query: STRUCTURED_QUERY_TEMPLATE // temporary placeholder - }; - - const initialQuestion = new Question(metadata, card, parameterValues); - const query = StructuredQuery.newStucturedQuery({ - question: initialQuestion, - databaseId, - tableId - }); - - return initialQuestion.setQuery(query); - } - - metadata(): Metadata { - return this._metadata; - } - - card() { - return this._card; - } - setCard(card: CardObject): Question { - return new Question(this._metadata, card, this._parameterValues); - } - - withoutNameAndId() { - return this.setCard( - chain(this.card()) - .dissoc("id") - .dissoc("name") - .dissoc("description") - .value() - ); - } - - /** - * A question contains either a: - * - StructuredQuery for queries written in MBQL - * - NativeQuery for queries written in data source's native query language - * - * This is just a wrapper object, the data is stored in `this._card.dataset_query` in a format specific to the query type. 
- */ - @memoize query(): Query { - const datasetQuery = this._card.dataset_query; - - for (const QueryClass of [StructuredQuery, NativeQuery]) { - if (QueryClass.isDatasetQueryType(datasetQuery)) { - return new QueryClass(this, datasetQuery); - } - } - - throw new Error("Unknown query type: " + datasetQuery.type); - } - - /** - * Returns a new Question object with an updated query. - * The query is saved to the `dataset_query` field of the Card object. - */ - setQuery(newQuery: Query): Question { - if (this._card.dataset_query !== newQuery.datasetQuery()) { - return this.setCard( - assoc(this.card(), "dataset_query", newQuery.datasetQuery()) - ); - } - return this; - } - - setDatasetQuery(newDatasetQuery: DatasetQuery): Question { - return this.setCard( - assoc(this.card(), "dataset_query", newDatasetQuery) - ); - } - - /** - * Returns a list of atomic queries (NativeQuery or StructuredQuery) contained in this question - */ - atomicQueries(): AtomicQuery[] { - const query = this.query(); - if (query instanceof AtomicQuery) return [query]; - return []; - } - - /** - * The visualization type of the question - */ - display(): string { - return this._card && this._card.display; - } - setDisplay(display) { - return this.setCard(assoc(this.card(), "display", display)); - } - - isEmpty(): boolean { - return this.query().isEmpty(); - } - /** - * Question is valid (as far as we know) and can be executed - */ - canRun(): boolean { - return this.query().canRun(); - } - - canWrite(): boolean { - return this._card && this._card.can_write; - } - - /** - * Visualization drill-through and action widget actions - * - * Although most of these are essentially a way to modify the current query, having them as a part - * of Question interface instead of Query interface makes it more convenient to also change the current visualization - */ - summarize(aggregation) { - const tableMetadata = this.tableMetadata(); - return this.setCard(summarize(this.card(), aggregation, tableMetadata)); - 
} - breakout(b) { - return this.setCard(breakout(this.card(), b)); - } - pivot(breakouts = [], dimensions = []) { - const tableMetadata = this.tableMetadata(); - return this.setCard( - // $FlowFixMe: tableMetadata could be null - pivot(this.card(), tableMetadata, breakouts, dimensions) - ); - } - filter(operator, column, value) { - return this.setCard(filter(this.card(), operator, column, value)); - } - drillUnderlyingRecords(dimensions) { - return this.setCard(drillUnderlyingRecords(this.card(), dimensions)); - } - toUnderlyingRecords(): ?Question { - const newCard = toUnderlyingRecords(this.card()); - if (newCard) { - return this.setCard(newCard); - } - } - toUnderlyingData(): Question { - return this.setDisplay("table"); - } - - composeThisQuery(): ?Question { - const SAVED_QUESTIONS_FAUX_DATABASE = -1337; - - if (this.id()) { - const card = { - display: "table", - dataset_query: { - type: "query", - database: SAVED_QUESTIONS_FAUX_DATABASE, - query: { - source_table: "card__" + this.id() - } - } - }; - return this.setCard(card); - } - } - - drillPK(field: Field, value: Value): ?Question { - const query = this.query(); - if (query instanceof StructuredQuery) { - return query - .reset() - .setTable(field.table) - .addFilter(["=", ["field-id", field.id], value]) - .question(); - } - } - - // deprecated - tableMetadata(): ?Table { - const query = this.query(); - if (query instanceof StructuredQuery) { - return query.table(); - } else { - return null; - } - } - - mode(): ?Mode { - return Mode.forQuestion(this); - } - - /** - * A user-defined name for the question - */ - displayName(): ?string { - return this._card && this._card.name; - } - - setDisplayName(name: String) { - return this.setCard(assoc(this.card(), "name", name)); - } - - id(): number { - return this._card && this._card.id; - } - - isSaved(): boolean { - return !!this.id(); - } - - publicUUID(): string { - return this._card && this._card.public_uuid; - } - - getUrl(originalQuestion?: Question): string { 
- const isDirty = !originalQuestion || - this.isDirtyComparedTo(originalQuestion); - - return isDirty - ? Urls.question(null, this._serializeForUrl()) - : Urls.question(this.id(), ""); - } - - /** - * Runs the query and returns an array containing results for each single query. - * - * If we have a saved and clean single-query question, we use `CardApi.query` instead of a ad-hoc dataset query. - * This way we benefit from caching and query optimizations done by Metabase backend. - */ - async getResults( - { cancelDeferred, isDirty = false, ignoreCache = false } = {} - ): Promise<[Dataset]> { - // TODO Atte Keinänen 7/5/17: Should we clean this query with Query.cleanQuery(query) before executing it? - - const canUseCardApiEndpoint = !isDirty && this.isSaved(); - - const parameters = this.parametersList() - // include only parameters that have a value applied - .filter(param => _.has(param, "value")) - // only the superset of parameters object that API expects - .map(param => _.pick(param, "type", "target", "value")); - - if (canUseCardApiEndpoint) { - const queryParams = { - cardId: this.id(), - ignore_cache: ignoreCache, - parameters - }; - - return [ - await CardApi.query(queryParams, { - cancelled: cancelDeferred.promise - }) - ]; - } else { - const getDatasetQueryResult = datasetQuery => { - const datasetQueryWithParameters = { - ...datasetQuery, - parameters - }; - - return MetabaseApi.dataset( - datasetQueryWithParameters, - cancelDeferred ? 
{ cancelled: cancelDeferred.promise } : {} - ); - }; - - const datasetQueries = this.atomicQueries().map(query => - query.datasetQuery()); - return Promise.all(datasetQueries.map(getDatasetQueryResult)); - } - } - - // TODO: Fix incorrect Flow signature - parameters(): ParameterObject[] { - return getParametersWithExtras(this.card(), this._parameterValues); - } - - parametersList(): ParameterObject[] { - // $FlowFixMe - return (Object.values(this.parameters()): ParameterObject[]); - } - - // predicate function that dermines if the question is "dirty" compared to the given question - isDirtyComparedTo(originalQuestion: Question) { - // TODO Atte Keinänen 6/8/17: Reconsider these rules because they don't completely match - // the current implementation which uses original_card_id for indicating that question has a lineage - - // The rules: - // - if it's new, then it's dirty when - // 1) there is a database/table chosen or - // 2) when there is any content on the native query - // - if it's saved, then it's dirty when - // 1) the current card doesn't match the last saved version - - if (!this._card) { - return false; - } else if (!this._card.id) { - if ( - this._card.dataset_query.query && - this._card.dataset_query.query.source_table - ) { - return true; - } else if ( - this._card.dataset_query.type === "native" && - !_.isEmpty(this._card.dataset_query.native.query) - ) { - return true; - } else { - return false; - } + /** + * The Question wrapper requires a metadata object because the queries it contains (like {@link StructuredQuery}) + * need metadata for accessing databases, tables and metrics. + */ + _metadata: Metadata; + + /** + * The plain object presentation of this question, equal to the format that Metabase REST API understands. + * It is called `card` for both historical reasons and to make a clear distinction to this class.
+ */ + _card: CardObject; + + /** + * Parameter values mean either the current values of dashboard filters or SQL editor template parameters. + * They are in the grey area between UI state and question state, but having them in Question wrapper is convenient. + */ + _parameterValues: ParameterValues; + + /** + * Question constructor + */ + constructor( + metadata: Metadata, + card: CardObject, + parameterValues?: ParameterValues, + ) { + this._metadata = metadata; + this._card = card; + this._parameterValues = parameterValues || {}; + } + + /** + * TODO Atte Keinänen 6/13/17: Discussed with Tom that we could use the default Question constructor instead, + * but it would require changing the constructor signature so that `card` is an optional parameter and has a default value + */ + static create({ + databaseId, + tableId, + metadata, + parameterValues, + ...cardProps + }: { + databaseId?: DatabaseId, + tableId?: TableId, + metadata: Metadata, + parameterValues?: ParameterValues, + } = {}) { + // $FlowFixMe + const card: Card = { + name: cardProps.name || null, + display: cardProps.display || "table", + visualization_settings: cardProps.visualization_settings || {}, + dataset_query: STRUCTURED_QUERY_TEMPLATE, // temporary placeholder + }; + + const initialQuestion = new Question(metadata, card, parameterValues); + const query = StructuredQuery.newStucturedQuery({ + question: initialQuestion, + databaseId, + tableId, + }); + + return initialQuestion.setQuery(query); + } + + metadata(): Metadata { + return this._metadata; + } + + card() { + return this._card; + } + setCard(card: CardObject): Question { + return new Question(this._metadata, card, this._parameterValues); + } + + withoutNameAndId() { + return this.setCard( + chain(this.card()) + .dissoc("id") + .dissoc("name") + .dissoc("description") + .value(), + ); + } + + /** + * A question contains either a: + * - StructuredQuery for queries written in MBQL + * - NativeQuery for queries written in data source's 
native query language + * + * This is just a wrapper object, the data is stored in `this._card.dataset_query` in a format specific to the query type. + */ + @memoize + query(): Query { + const datasetQuery = this._card.dataset_query; + + for (const QueryClass of [StructuredQuery, NativeQuery]) { + if (QueryClass.isDatasetQueryType(datasetQuery)) { + return new QueryClass(this, datasetQuery); + } + } + + throw new Error("Unknown query type: " + datasetQuery.type); + } + + isNative(): boolean { + return this.query() instanceof NativeQuery; + } + + /** + * Returns a new Question object with an updated query. + * The query is saved to the `dataset_query` field of the Card object. + */ + setQuery(newQuery: Query): Question { + if (this._card.dataset_query !== newQuery.datasetQuery()) { + return this.setCard( + assoc(this.card(), "dataset_query", newQuery.datasetQuery()), + ); + } + return this; + } + + setDatasetQuery(newDatasetQuery: DatasetQuery): Question { + return this.setCard(assoc(this.card(), "dataset_query", newDatasetQuery)); + } + + /** + * Returns a list of atomic queries (NativeQuery or StructuredQuery) contained in this question + */ + atomicQueries(): AtomicQuery[] { + const query = this.query(); + if (query instanceof AtomicQuery) { + return [query]; + } + return []; + } + + /** + * The visualization type of the question + */ + display(): string { + return this._card && this._card.display; + } + setDisplay(display) { + return this.setCard(assoc(this.card(), "display", display)); + } + + visualizationSettings(): VisualizationSettings { + return this._card && this._card.visualization_settings; + } + setVisualizationSettings(settings: VisualizationSettings) { + return this.setCard(assoc(this.card(), "visualization_settings", settings)); + } + + isEmpty(): boolean { + return this.query().isEmpty(); + } + /** + * Question is valid (as far as we know) and can be executed + */ + canRun(): boolean { + return this.query().canRun(); + } + + canWrite(): boolean { + 
return this._card && this._card.can_write; + } + + /** + * Returns the type of alert that current question supports + * + * The `visualization_settings` in card object doesn't contain default settings, + * so you can provide the complete visualization settings object to `alertType` + * for taking those into account + */ + alertType(visualizationSettings) { + const display = this.display(); + + if (!this.canRun()) { + return null; + } + + const isLineAreaBar = + display === "line" || display === "area" || display === "bar"; + + if (display === "progress") { + return ALERT_TYPE_PROGRESS_BAR_GOAL; + } else if (isLineAreaBar) { + const vizSettings = visualizationSettings + ? visualizationSettings + : this.card().visualization_settings; + + const goalEnabled = vizSettings["graph.show_goal"]; + const hasSingleYAxisColumn = + vizSettings["graph.metrics"] && + vizSettings["graph.metrics"].length === 1; + + // We don't currently support goal alerts for multiseries question + if (goalEnabled && hasSingleYAxisColumn) { + return ALERT_TYPE_TIMESERIES_GOAL; + } else { + return ALERT_TYPE_ROWS; + } + } else { + return ALERT_TYPE_ROWS; + } + } + + /** + * Visualization drill-through and action widget actions + * + * Although most of these are essentially a way to modify the current query, having them as a part + * of Question interface instead of Query interface makes it more convenient to also change the current visualization + */ + summarize(aggregation) { + const tableMetadata = this.tableMetadata(); + return this.setCard(summarize(this.card(), aggregation, tableMetadata)); + } + breakout(b) { + return this.setCard(breakout(this.card(), b)); + } + pivot(breakouts = [], dimensions = []) { + const tableMetadata = this.tableMetadata(); + return this.setCard( + // $FlowFixMe: tableMetadata could be null + pivot(this.card(), tableMetadata, breakouts, dimensions), + ); + } + filter(operator, column, value) { + return this.setCard(filter(this.card(), operator, column, value)); + } + 
drillUnderlyingRecords(dimensions) { + return this.setCard(drillUnderlyingRecords(this.card(), dimensions)); + } + toUnderlyingRecords(): ?Question { + const newCard = toUnderlyingRecords(this.card()); + if (newCard) { + return this.setCard(newCard); + } + } + toUnderlyingData(): Question { + return this.setDisplay("table"); + } + distribution(column) { + return this.setCard(distribution(this.card(), column)); + } + + composeThisQuery(): ?Question { + const SAVED_QUESTIONS_FAUX_DATABASE = -1337; + + if (this.id()) { + const card = { + display: "table", + dataset_query: { + type: "query", + database: SAVED_QUESTIONS_FAUX_DATABASE, + query: { + "source-table": "card__" + this.id(), + }, + }, + }; + return this.setCard(card); + } + } + + drillPK(field: Field, value: Value): ?Question { + const query = this.query(); + if (query instanceof StructuredQuery) { + return query + .reset() + .setTable(field.table) + .addFilter(["=", ["field-id", field.id], value]) + .question(); + } + } + + // deprecated + tableMetadata(): ?Table { + const query = this.query(); + if (query instanceof StructuredQuery) { + return query.table(); + } else { + return null; + } + } + + mode(): ?Mode { + return Mode.forQuestion(this); + } + + /** + * A user-defined name for the question + */ + displayName(): ?string { + return this._card && this._card.name; + } + + setDisplayName(name: String) { + return this.setCard(assoc(this.card(), "name", name)); + } + + collectionId(): ?number { + return this._card && this._card.collection_id; + } + + setCollectionId(collectionId: number) { + return this.setCard(assoc(this.card(), "collection_id", collectionId)); + } + + id(): number { + return this._card && this._card.id; + } + + isSaved(): boolean { + return !!this.id(); + } + + publicUUID(): string { + return this._card && this._card.public_uuid; + } + + getUrl(originalQuestion?: Question): string { + const isDirty = + !originalQuestion || this.isDirtyComparedTo(originalQuestion); + + return isDirty + ? 
Urls.question(null, this._serializeForUrl()) + : Urls.question(this.id(), ""); + } + + getAutomaticDashboardUrl(filters /*?: Filter[] = []*/) { + let cellQuery = ""; + if (filters.length > 0) { + const mbqlFilter = filters.length > 1 ? ["and", ...filters] : filters[0]; + cellQuery = `/cell/${Card_DEPRECATED.utf8_to_b64url( + JSON.stringify(mbqlFilter), + )}`; + } + const questionId = this.id(); + if (questionId != null && !isTransientId(questionId)) { + return `/auto/dashboard/question/${questionId}${cellQuery}`; + } else { + const adHocQuery = Card_DEPRECATED.utf8_to_b64url( + JSON.stringify(this.card().dataset_query), + ); + return `/auto/dashboard/adhoc/${adHocQuery}${cellQuery}`; + } + } + + getComparisonDashboardUrl(filters /*?: Filter[] = []*/) { + let cellQuery = ""; + if (filters.length > 0) { + const mbqlFilter = filters.length > 1 ? ["and", ...filters] : filters[0]; + cellQuery = `/cell/${Card_DEPRECATED.utf8_to_b64url( + JSON.stringify(mbqlFilter), + )}`; + } + const questionId = this.id(); + const query = this.query(); + if (query instanceof StructuredQuery) { + const tableId = query.tableId(); + if (tableId) { + if (questionId != null && !isTransientId(questionId)) { + return `/auto/dashboard/question/${questionId}${cellQuery}/compare/table/${tableId}`; } else { - const origCardSerialized = originalQuestion._serializeForUrl({ - includeOriginalCardId: false - }); - const currentCardSerialized = this._serializeForUrl({ - includeOriginalCardId: false - }); - return currentCardSerialized !== origCardSerialized; + const adHocQuery = Card_DEPRECATED.utf8_to_b64url( + JSON.stringify(this.card().dataset_query), + ); + return `/auto/dashboard/adhoc/${adHocQuery}${cellQuery}/compare/table/${tableId}`; } - } - - // Internal methods - - _serializeForUrl({ includeOriginalCardId = true } = {}) { - // TODO Atte Keinänen 5/31/17: Remove code mutation and unnecessary copying - const dataset_query = Utils.copy(this._card.dataset_query); - if (dataset_query.query) { - 
dataset_query.query = Query_DEPRECATED.cleanQuery( - dataset_query.query - ); - } - - const cardCopy = { - name: this._card.name, - description: this._card.description, - dataset_query: dataset_query, - display: this._card.display, - parameters: this._card.parameters, - visualization_settings: this._card.visualization_settings, - ...(includeOriginalCardId - ? { original_card_id: this._card.original_card_id } - : {}) + } + } + } + + setResultsMetadata(resultsMetadata) { + let metadataColumns = resultsMetadata && resultsMetadata.columns; + let metadataChecksum = resultsMetadata && resultsMetadata.checksum; + + return this.setCard({ + ...this.card(), + result_metadata: metadataColumns, + metadata_checksum: metadataChecksum, + }); + } + + /** + * Runs the query and returns an array containing results for each single query. + * + * If we have a saved and clean single-query question, we use `CardApi.query` instead of an ad-hoc dataset query. + * This way we benefit from caching and query optimizations done by Metabase backend. + */ + async apiGetResults({ + cancelDeferred, + isDirty = false, + ignoreCache = false, + } = {}): Promise<[Dataset]> { + // TODO Atte Keinänen 7/5/17: Should we clean this query with Query.cleanQuery(query) before executing it?
+ + const canUseCardApiEndpoint = !isDirty && this.isSaved(); + + const parameters = this.parametersList() + // include only parameters that have a value applied + .filter(param => _.has(param, "value")) + // only the superset of parameters object that API expects + .map(param => _.pick(param, "type", "target", "value")); + + if (canUseCardApiEndpoint) { + const queryParams = { + cardId: this.id(), + ignore_cache: ignoreCache, + parameters, + }; + + return [ + await CardApi.query(queryParams, { + cancelled: cancelDeferred.promise, + }), + ]; + } else { + const getDatasetQueryResult = datasetQuery => { + const datasetQueryWithParameters = { + ...datasetQuery, + parameters, }; - return Card_DEPRECATED.utf8_to_b64url(JSON.stringify(cardCopy)); - } + return MetabaseApi.dataset( + datasetQueryWithParameters, + cancelDeferred ? { cancelled: cancelDeferred.promise } : {}, + ); + }; + + const datasetQueries = this.atomicQueries().map(query => + query.datasetQuery(), + ); + return Promise.all(datasetQueries.map(getDatasetQueryResult)); + } + } + + // NOTE: prefer `reduxCreate` so the store is automatically updated + async apiCreate() { + const createdCard = await Questions.api.create(this.card()); + return this.setCard(createdCard); + } + + // NOTE: prefer `reduxUpdate` so the store is automatically updated + async apiUpdate() { + const updatedCard = await Questions.api.update(this.card()); + return this.setCard(updatedCard); + } + + async reduxCreate(dispatch) { + const action = await dispatch(Questions.actions.create(this.card())); + return this.setCard(Questions.HACK_getObjectFromAction(action)); + } + + async reduxUpdate(dispatch) { + const action = await dispatch( + Questions.actions.update({ id: this.id() }, this.card()), + ); + return this.setCard(Questions.HACK_getObjectFromAction(action)); + } + + // TODO: Fix incorrect Flow signature + parameters(): ParameterObject[] { + return getParametersWithExtras(this.card(), this._parameterValues); + } + + parametersList(): 
ParameterObject[] { + // $FlowFixMe + return (Object.values(this.parameters()): ParameterObject[]); + } + + // predicate function that determines if the question is "dirty" compared to the given question + isDirtyComparedTo(originalQuestion: Question) { + // TODO Atte Keinänen 6/8/17: Reconsider these rules because they don't completely match + // the current implementation which uses original_card_id for indicating that question has a lineage + + // The rules: + // - if it's new, then it's dirty when + // 1) there is a database/table chosen or + // 2) when there is any content on the native query + // - if it's saved, then it's dirty when + // 1) the current card doesn't match the last saved version + + if (!this._card) { + return false; + } else if (!this._card.id) { + if ( + this._card.dataset_query.query && + this._card.dataset_query.query["source-table"] + ) { + return true; + } else if ( + this._card.dataset_query.type === "native" && + !_.isEmpty(this._card.dataset_query.native.query) + ) { + return true; + } else { + return false; + } + } else { + const origCardSerialized = originalQuestion._serializeForUrl({ + includeOriginalCardId: false, + }); + const currentCardSerialized = this._serializeForUrl({ + includeOriginalCardId: false, + }); + return currentCardSerialized !== origCardSerialized; + } + } + + // Internal methods + _serializeForUrl({ includeOriginalCardId = true } = {}) { + const cleanedQuery = this.query().clean(); + + const cardCopy = { + name: this._card.name, + description: this._card.description, + dataset_query: cleanedQuery.datasetQuery(), + display: this._card.display, + parameters: this._card.parameters, + visualization_settings: this._card.visualization_settings, + ...(includeOriginalCardId + ?
{ original_card_id: this._card.original_card_id } + : {}), + }; + + return Card_DEPRECATED.utf8_to_b64url(JSON.stringify(cardCopy)); + } } diff --git a/frontend/src/metabase-lib/lib/metadata/AggregationOption.js b/frontend/src/metabase-lib/lib/metadata/AggregationOption.js index 81c32bf99635..e775b3b5aaf8 100644 --- a/frontend/src/metabase-lib/lib/metadata/AggregationOption.js +++ b/frontend/src/metabase-lib/lib/metadata/AggregationOption.js @@ -6,23 +6,23 @@ import type { Field } from "metabase/meta/types/Field"; * Wrapper class for an aggregation object */ export default class AggregationOption extends Base { - name: string; - short: string; - // TODO: Now just a plain object; wrap to a Field wrapper class - fields: Field[]; - validFieldsFilters: [(fields: Field[]) => Field[]]; + name: string; + short: string; + // TODO: Now just a plain object; wrap to a Field wrapper class + fields: Field[]; + validFieldsFilters: [(fields: Field[]) => Field[]]; - /** - * Aggregation has one or more required fields - */ - hasFields(): boolean { - return this.validFieldsFilters.length > 0; - } + /** + * Aggregation has one or more required fields + */ + hasFields(): boolean { + return this.validFieldsFilters.length > 0; + } - toAggregation(): AggregationWrapper { - return new AggregationWrapper( - null, - [this.short].concat(this.fields.map(field => null)) - ); - } + toAggregation(): AggregationWrapper { + return new AggregationWrapper( + null, + [this.short].concat(this.fields.map(field => null)), + ); + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Base.js b/frontend/src/metabase-lib/lib/metadata/Base.js index a4d7e9fda814..5222560f1f63 100644 --- a/frontend/src/metabase-lib/lib/metadata/Base.js +++ b/frontend/src/metabase-lib/lib/metadata/Base.js @@ -1,17 +1,17 @@ export default class Base { - _plainObject = null; - constructor(object = {}) { - this._plainObject = object; - for (const property in object) { - this[property] = object[property]; - } + _plainObject = 
null; + constructor(object = {}) { + this._plainObject = object; + for (const property in object) { + this[property] = object[property]; } + } - /** - * Get the plain metadata object without hydrated fields. - * Useful for situations where you want serialize the metadata object. - */ - getPlainObject() { - return this._plainObject; - } + /** + * Get the plain metadata object without hydrated fields. + * Useful for situations where you want to serialize the metadata object. + */ + getPlainObject() { + return this._plainObject; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Database.js b/frontend/src/metabase-lib/lib/metadata/Database.js index 874751022b05..ed54b686d701 100644 --- a/frontend/src/metabase-lib/lib/metadata/Database.js +++ b/frontend/src/metabase-lib/lib/metadata/Database.js @@ -16,28 +16,28 @@ import type { SchemaName } from "metabase/meta/types/Table"; * Backed by types/Database data structure which matches the backend API contract */ export default class Database extends Base { - // TODO Atte Keinänen 6/11/17: List all fields here (currently only in types/Database) - - displayName: string; - description: ?string; - - tables: Table[]; - schemas: Schema[]; - - tablesInSchema(schemaName: ?SchemaName) { - return this.tables.filter(table => table.schema === schemaName); - } - - schemaNames(): Array { - return _.uniq( - this.tables - .map(table => table.schema) - .filter(schemaName => schemaName != null) - ); - } - - newQuestion(): Question { - // $FlowFixMe - return new Question(); - } + // TODO Atte Keinänen 6/11/17: List all fields here (currently only in types/Database) + + displayName: string; + description: ?string; + + tables: Table[]; + schemas: Schema[]; + + tablesInSchema(schemaName: ?SchemaName) { + return this.tables.filter(table => table.schema === schemaName); + } + + schemaNames(): Array { + return _.uniq( + this.tables + .map(table => table.schema) + .filter(schemaName => schemaName != null), + ); + } + + newQuestion(): Question {
// $FlowFixMe + return new Question(); + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Field.js b/frontend/src/metabase-lib/lib/metadata/Field.js index 26e81cea30db..4e9e2960ae1d 100644 --- a/frontend/src/metabase-lib/lib/metadata/Field.js +++ b/frontend/src/metabase-lib/lib/metadata/Field.js @@ -7,20 +7,23 @@ import { FieldIDDimension } from "../Dimension"; import { getFieldValues } from "metabase/lib/query/field"; import { - isDate, - isNumber, - isNumeric, - isBoolean, - isString, - isSummable, - isCategory, - isDimension, - isMetric, - isPK, - isFK, - isCoordinate, - getIconForField, - getFieldType + isDate, + isTime, + isNumber, + isNumeric, + isBoolean, + isString, + isSummable, + isCategory, + isLocation, + isDimension, + isMetric, + isPK, + isFK, + isEntityName, + isCoordinate, + getIconForField, + getFieldType, } from "metabase/lib/schema_metadata"; import type { FieldValues } from "metabase/meta/types/Field"; @@ -29,98 +32,181 @@ import type { FieldValues } from "metabase/meta/types/Field"; * Wrapper class for field metadata objects. Belongs to a Table. 
*/ export default class Field extends Base { - displayName: string; - description: string; - - table: Table; - - fieldType() { - return getFieldType(this); - } - - isDate() { - return isDate(this); - } - isNumber() { - return isNumber(this); - } - isNumeric() { - return isNumeric(this); - } - isBoolean() { - return isBoolean(this); - } - isString() { - return isString(this); - } - isSummable() { - return isSummable(this); - } - isCategory() { - return isCategory(this); - } - isMetric() { - return isMetric(this); - } - - isCompatibleWith(field: Field) { - return this.isDate() === field.isDate() || - this.isNumeric() === field.isNumeric() || - this.id === field.id; - } - - /** - * Tells if this column can be used in a breakout - * Currently returns `true` for everything expect for aggregation columns - */ - isDimension() { - return isDimension(this); - } - isID() { - return isPK(this) || isFK(this); - } - isPK() { - return isPK(this); - } - isFK() { - return isFK(this); - } - - isCoordinate() { - return isCoordinate(this); - } - - fieldValues(): FieldValues { - return getFieldValues(this._object); - } - - icon() { - return getIconForField(this); - } - - dimension() { - return new FieldIDDimension(null, [this.id], this.metadata); - } - - operator(op) { - if (this.operators_lookup) { - return this.operators_lookup[op]; - } - } - - /** - * Returns a default breakout MBQL clause for this field - * - * Tries to look up a default subdimension (like "Created At: Day" for "Created At" field) - * and if it isn't found, uses the plain field id dimension (like "Product ID") as a fallback. 
- */ - getDefaultBreakout = () => { - const fieldIdDimension = this.dimension(); - const defaultSubDimension = fieldIdDimension.defaultDimension(); - if (defaultSubDimension) { - return defaultSubDimension.mbql(); - } else { - return fieldIdDimension.mbql(); - } - }; + displayName: string; + description: string; + + table: Table; + name_field: ?Field; + + fieldType() { + return getFieldType(this); + } + + isDate() { + return isDate(this); + } + isTime() { + return isTime(this); + } + isNumber() { + return isNumber(this); + } + isNumeric() { + return isNumeric(this); + } + isBoolean() { + return isBoolean(this); + } + isString() { + return isString(this); + } + isLocation() { + return isLocation(this); + } + isSummable() { + return isSummable(this); + } + isCategory() { + return isCategory(this); + } + isMetric() { + return isMetric(this); + } + + isCompatibleWith(field: Field) { + return ( + this.isDate() === field.isDate() || + this.isNumeric() === field.isNumeric() || + this.id === field.id + ); + } + + /** + * Tells if this column can be used in a breakout + * Currently returns `true` for everything expect for aggregation columns + */ + isDimension() { + return isDimension(this); + } + isID() { + return isPK(this) || isFK(this); + } + isPK() { + return isPK(this); + } + isFK() { + return isFK(this); + } + isEntityName() { + return isEntityName(this); + } + + isCoordinate() { + return isCoordinate(this); + } + + fieldValues(): FieldValues { + return getFieldValues(this._object); + } + + icon() { + return getIconForField(this); + } + + dimension() { + return new FieldIDDimension(null, [this.id], this.metadata); + } + + operator(op) { + if (this.operators_lookup) { + return this.operators_lookup[op]; + } + } + + /** + * Returns a default breakout MBQL clause for this field + * + * Tries to look up a default subdimension (like "Created At: Day" for "Created At" field) + * and if it isn't found, uses the plain field id dimension (like "Product ID") as a fallback. 
+ */ + getDefaultBreakout = () => { + const fieldIdDimension = this.dimension(); + const defaultSubDimension = fieldIdDimension.defaultDimension(); + if (defaultSubDimension) { + return defaultSubDimension.mbql(); + } else { + return fieldIdDimension.mbql(); + } + }; + + /** + * Returns the remapped field, if any + */ + remappedField(): ?Field { + const displayFieldId = + this.dimensions && this.dimensions.human_readable_field_id; + if (displayFieldId != null) { + return this.metadata.fields[displayFieldId]; + } + // this enables "implicit" remappings from type/PK to type/Name on the same table, + // used in FieldValuesWidget, but not table/object detail listings + if (this.name_field) { + return this.name_field; + } + return null; + } + + /** + * Returns the human readable remapped value, if any + */ + remappedValue(value): ?string { + // TODO: Ugh. Should this be handled further up by the parameter widget? + if (this.isNumeric() && typeof value !== "number") { + value = parseFloat(value); + } + return this.remapping && this.remapping.get(value); + } + + /** + * Returns whether the field has a human readable remapped value for this value + */ + hasRemappedValue(value): ?string { + // TODO: Ugh. Should this be handled further up by the parameter widget? + if (this.isNumeric() && typeof value !== "number") { + value = parseFloat(value); + } + return this.remapping && this.remapping.has(value); + } + + /** + * Returns true if this field can be searched, e.x. in filter or parameter widgets + */ + isSearchable(): boolean { + // TODO: ...? 
+ return this.isString(); + } + + /** + * Returns the field to be searched for this field, either the remapped field or itself + */ + parameterSearchField(): ?Field { + let remappedField = this.remappedField(); + if (remappedField && remappedField.isSearchable()) { + return remappedField; + } + if (this.isSearchable()) { + return this; + } + return null; + } + + filterSearchField(): ?Field { + if (this.isPK()) { + if (this.isSearchable()) { + return this; + } + } else { + return this.parameterSearchField(); + } + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Metadata.js b/frontend/src/metabase-lib/lib/metadata/Metadata.js index c4ccb3541063..f268e06cad2c 100644 --- a/frontend/src/metabase-lib/lib/metadata/Metadata.js +++ b/frontend/src/metabase-lib/lib/metadata/Metadata.js @@ -18,29 +18,49 @@ import type { SegmentId } from "metabase/meta/types/Segment"; * Wrapper class for the entire metadata store */ export default class Metadata extends Base { - databases: { [id: DatabaseId]: Database }; - tables: { [id: TableId]: Table }; - fields: { [id: FieldId]: Field }; - metrics: { [id: MetricId]: Metric }; - segments: { [id: SegmentId]: Segment }; - - databasesList(): Database[] { - // $FlowFixMe - return (Object.values(this.databases): Database[]); - } - - tablesList(): Database[] { - // $FlowFixMe - return (Object.values(this.tables): Database[]); - } - - metricsList(): Metric[] { - // $FlowFixMe - return (Object.values(this.metrics): Metric[]); - } - - segmentsList(): Metric[] { - // $FlowFixMe - return (Object.values(this.segments): Segment[]); - } + databases: { [id: DatabaseId]: Database }; + tables: { [id: TableId]: Table }; + fields: { [id: FieldId]: Field }; + metrics: { [id: MetricId]: Metric }; + segments: { [id: SegmentId]: Segment }; + + databasesList(): Database[] { + // $FlowFixMe + return (Object.values(this.databases): Database[]); + } + + tablesList(): Database[] { + // $FlowFixMe + return (Object.values(this.tables): Database[]); + } + + 
metricsList(): Metric[] { + // $FlowFixMe + return (Object.values(this.metrics): Metric[]); + } + + segmentsList(): Metric[] { + // $FlowFixMe + return (Object.values(this.segments): Segment[]); + } + + segment(segmentId): ?Segment { + return (segmentId != null && this.segments[segmentId]) || null; + } + + metric(metricId): ?Metric { + return (metricId != null && this.metrics[metricId]) || null; + } + + database(databaseId): ?Database { + return (databaseId != null && this.databases[databaseId]) || null; + } + + table(tableId): ?Table { + return (tableId != null && this.tables[tableId]) || null; + } + + field(fieldId): ?Field { + return (fieldId != null && this.fields[fieldId]) || null; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Metric.js b/frontend/src/metabase-lib/lib/metadata/Metric.js index 915b6c27dc65..33aa7f708ffb 100644 --- a/frontend/src/metabase-lib/lib/metadata/Metric.js +++ b/frontend/src/metabase-lib/lib/metadata/Metric.js @@ -9,17 +9,17 @@ import type { Aggregation } from "metabase/meta/types/Query"; * Wrapper class for a metric. Belongs to a {@link Database} and possibly a {@link Table} */ export default class Metric extends Base { - displayName: string; - description: string; + displayName: string; + description: string; - database: Database; - table: Table; + database: Database; + table: Table; - aggregationClause(): Aggregation { - return ["METRIC", this.id]; - } + aggregationClause(): Aggregation { + return ["metric", this.id]; + } - isActive(): boolean { - return !!this.is_active; - } + isActive(): boolean { + return !this.archived; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Schema.js b/frontend/src/metabase-lib/lib/metadata/Schema.js index 02f33ed01070..1b0af9a76d56 100644 --- a/frontend/src/metabase-lib/lib/metadata/Schema.js +++ b/frontend/src/metabase-lib/lib/metadata/Schema.js @@ -8,8 +8,8 @@ import Table from "./Table"; * Wrapper class for a {@link Database} schema. Contains {@link Table}s. 
*/ export default class Schema extends Base { - displayName: string; + displayName: string; - database: Database; - tables: Table[]; + database: Database; + tables: Table[]; } diff --git a/frontend/src/metabase-lib/lib/metadata/Segment.js b/frontend/src/metabase-lib/lib/metadata/Segment.js index 9c1bfad6c6b2..431ba1b0ea4c 100644 --- a/frontend/src/metabase-lib/lib/metadata/Segment.js +++ b/frontend/src/metabase-lib/lib/metadata/Segment.js @@ -9,17 +9,17 @@ import type { FilterClause } from "metabase/meta/types/Query"; * Wrapper class for a segment. Belongs to a {@link Database} and possibly a {@link Table} */ export default class Segment extends Base { - displayName: string; - description: string; + displayName: string; + description: string; - database: Database; - table: Table; + database: Database; + table: Table; - filterClause(): FilterClause { - return ["SEGMENT", this.id]; - } + filterClause(): FilterClause { + return ["segment", this.id]; + } - isActive(): boolean { - return !!this.is_active; - } + isActive(): boolean { + return !this.archived; + } } diff --git a/frontend/src/metabase-lib/lib/metadata/Table.js b/frontend/src/metabase-lib/lib/metadata/Table.js index e1421774b660..5d626e1cc3fd 100644 --- a/frontend/src/metabase-lib/lib/metadata/Table.js +++ b/frontend/src/metabase-lib/lib/metadata/Table.js @@ -1,5 +1,8 @@ /* @flow weak */ +// NOTE: this needs to be imported first due to some cyclical dependency nonsense +import Q_DEPRECATED from "metabase/lib/query"; + import Question from "../Question"; import Base from "./Base"; @@ -7,45 +10,52 @@ import Database from "./Database"; import Field from "./Field"; import type { SchemaName } from "metabase/meta/types/Table"; +import type { FieldMetadata } from "metabase/meta/types/Metadata"; +import type { ConcreteField, DatetimeUnit } from "metabase/meta/types/Query"; import Dimension from "../Dimension"; import _ from "underscore"; -import type { FieldMetadata } from "metabase/meta/types/Metadata"; /** This is 
the primary way people interact with tables */ export default class Table extends Base { - displayName: string; - description: string; + displayName: string; + description: string; + + schema: ?SchemaName; + db: Database; - schema: ?SchemaName; - db: Database; + fields: FieldMetadata[]; - fields: FieldMetadata[]; + // $FlowFixMe Could be replaced with hydrated database property in selectors/metadata.js (instead / in addition to `table.db`) + get database() { + return this.db; + } - // $FlowFixMe Could be replaced with hydrated database property in selectors/metadata.js (instead / in addition to `table.db`) - get database() { - return this.db; - } + newQuestion(): Question { + // $FlowFixMe + return new Question(); + } - newQuestion(): Question { - // $FlowFixMe - return new Question(); - } + dimensions(): Dimension[] { + return this.fields.map(field => field.dimension()); + } - dimensions(): Dimension[] { - return this.fields.map(field => field.dimension()); - } + dateFields(): Field[] { + return this.fields.filter(field => field.isDate()); + } - dateFields(): Field[] { - return this.fields.filter(field => field.isDate()); - } + aggregations() { + return this.aggregation_options || []; + } - aggregations() { - return this.aggregation_options || []; - } + aggregation(agg) { + return _.findWhere(this.aggregations(), { short: agg }); + } - aggregation(agg) { - return _.findWhere(this.aggregations(), { short: agg }); - } + fieldTarget( + fieldRef: ConcreteField, + ): { field: Field, table: Table, unit?: DatetimeUnit, path: Field[] } { + return Q_DEPRECATED.getFieldTarget(fieldRef, this); + } } diff --git a/frontend/src/metabase-lib/lib/queries/Aggregation.js b/frontend/src/metabase-lib/lib/queries/Aggregation.js index 8e07b96159f8..f32f90021e46 100644 --- a/frontend/src/metabase-lib/lib/queries/Aggregation.js +++ b/frontend/src/metabase-lib/lib/queries/Aggregation.js @@ -1,9 +1,5 @@ -import type { - Aggregation as AggregationObject -} from "metabase/meta/types/Query"; 
-import { - AggregationClause as AggregationClause_DEPRECATED -} from "metabase/lib/query"; +import type { Aggregation as AggregationObject } from "metabase/meta/types/Query"; +import { AggregationClause as AggregationClause_DEPRECATED } from "metabase/lib/query"; import { MetricId } from "metabase/meta/types/Metric"; import { AggregationOption, Operator } from "metabase/meta/types/Metadata"; import { FieldId } from "metabase/meta/types/Field"; @@ -13,104 +9,103 @@ import StructuredQuery from "metabase-lib/lib/queries/StructuredQuery"; * Wrapper for an aggregation contained by a {@link StructuredQuery} */ export default class Aggregation { - _query: ?StructuredQuery; + _query: ?StructuredQuery; - clause: AggregationObject; + clause: AggregationObject; - constructor( - query?: StructuredQuery, - clause: AggregationObject - ): Aggregation { - this._query = query; - this.clause = clause; - } - - /** - * Gets the aggregation option matching this aggregation - * Returns `null` if the clause isn't in a standard format - */ - getOption(): ?AggregationOption { - if (this._query == null) return null; + constructor(query?: StructuredQuery, clause: AggregationObject): Aggregation { + this._query = query; + this.clause = clause; + } - const operator = this.getOperator(); - return operator - ? this._query - .aggregationOptions() - .find(option => option.short === operator) - : null; + /** + * Gets the aggregation option matching this aggregation + * Returns `null` if the clause isn't in a standard format + */ + getOption(): ?AggregationOption { + if (this._query == null) { + return null; } - /** - * Predicate function to test if a given aggregation clause is fully formed - */ - isValid(): boolean { - return AggregationClause_DEPRECATED.isValid(this.clause); - } + const operator = this.getOperator(); + return operator + ? 
this._query + .aggregationOptions() + .find(option => option.short === operator) + : null; + } - /** - * Predicate function to test if the given aggregation clause represents a Bare Rows aggregation - */ - isBareRows(): boolean { - return AggregationClause_DEPRECATED.isBareRows(this.clause); - } + /** + * Predicate function to test if a given aggregation clause is fully formed + */ + isValid(): boolean { + return AggregationClause_DEPRECATED.isValid(this.clause); + } - /** - * Predicate function to test if a given aggregation clause represents a standard aggregation - */ - isStandard(): boolean { - return AggregationClause_DEPRECATED.isStandard(this.clause); - } + /** + * Predicate function to test if the given aggregation clause represents a Bare Rows aggregation + */ + isBareRows(): boolean { + return AggregationClause_DEPRECATED.isBareRows(this.clause); + } - getAggregation() { - return AggregationClause_DEPRECATED.getAggregation(this.clause); - } + /** + * Predicate function to test if a given aggregation clause represents a standard aggregation + */ + isStandard(): boolean { + return AggregationClause_DEPRECATED.isStandard(this.clause); + } - /** - * Predicate function to test if a given aggregation clause represents a metric - */ - isMetric(): boolean { - return AggregationClause_DEPRECATED.isMetric(this.clause); - } + getAggregation() { + return AggregationClause_DEPRECATED.getAggregation(this.clause); + } - /** - * Get metricId from a metric aggregation clause - * Returns `null` if the clause doesn't represent a metric - */ - getMetric(): ?MetricId { - return AggregationClause_DEPRECATED.getMetric(this.clause); - } + /** + * Predicate function to test if a given aggregation clause represents a metric + */ + isMetric(): boolean { + return AggregationClause_DEPRECATED.isMetric(this.clause); + } - /** - * Is a custom expression created with the expression editor - */ - isCustom(): boolean { - return AggregationClause_DEPRECATED.isCustom(this.clause); - } + /** 
+ * Get metricId from a metric aggregation clause + * Returns `null` if the clause doesn't represent a metric + */ + getMetric(): ?MetricId { + return AggregationClause_DEPRECATED.getMetric(this.clause); + } - /** - * Get the operator from a standard aggregation clause - * Returns `null` if the clause isn't in a standard format - */ - getOperator(): ?Operator { - return AggregationClause_DEPRECATED.getOperator(this.clause); - } + /** + * Is a custom expression created with the expression editor + */ + isCustom(): boolean { + return AggregationClause_DEPRECATED.isCustom(this.clause); + } - /** - * Get the fieldId from a standard aggregation clause - * Returns `null` if the clause isn't in a standard format - */ - getField(): ?FieldId { - return AggregationClause_DEPRECATED.getField(this.clause); - } + /** + * Get the operator from a standard aggregation clause + * Returns `null` if the clause isn't in a standard format + */ + getOperator(): ?Operator { + return AggregationClause_DEPRECATED.getOperator(this.clause); + } - /** - * Set the fieldId on a standard aggregation clause. - * If the clause isn't in a standard format, no modifications are done. - */ - setField(fieldId: FieldId): Aggregation { - return new Aggregation( - this._query, - AggregationClause_DEPRECATED.setField(this.clause, fieldId) - ); - } + /** + * Get the fieldId from a standard aggregation clause + * Returns `null` if the clause isn't in a standard format + */ + getField(): ?FieldId { + return AggregationClause_DEPRECATED.getField(this.clause); + } + + /** + * Set the fieldId on a standard aggregation clause. + * If the clause isn't in a standard format, no modifications are done. 
+ */ + setField(fieldId: FieldId): Aggregation { + return new Aggregation( + this._query, + AggregationClause_DEPRECATED.setField(this.clause, fieldId), + ); + } } diff --git a/frontend/src/metabase-lib/lib/queries/AtomicQuery.js b/frontend/src/metabase-lib/lib/queries/AtomicQuery.js index ad923902825d..d40ecb9e5b17 100644 --- a/frontend/src/metabase-lib/lib/queries/AtomicQuery.js +++ b/frontend/src/metabase-lib/lib/queries/AtomicQuery.js @@ -8,22 +8,22 @@ import type Database from "metabase-lib/lib/metadata/Database"; * and form a single MBQL / native query clause */ export default class AtomicQuery extends Query { - /** - * Tables this query could use, if the database is set - */ - tables(): ?(Table[]) { - return null; - } + /** + * Tables this query could use, if the database is set + */ + tables(): ?(Table[]) { + return null; + } - databaseId(): ?DatabaseId { - return null; - } + databaseId(): ?DatabaseId { + return null; + } - database(): ?Database { - return null; - } + database(): ?Database { + return null; + } - engine(): ?DatabaseEngine { - return null; - } + engine(): ?DatabaseEngine { + return null; + } } diff --git a/frontend/src/metabase-lib/lib/queries/NativeQuery.js b/frontend/src/metabase-lib/lib/queries/NativeQuery.js index 19690b60e46e..6cef33dd1585 100644 --- a/frontend/src/metabase-lib/lib/queries/NativeQuery.js +++ b/frontend/src/metabase-lib/lib/queries/NativeQuery.js @@ -11,256 +11,277 @@ import { humanize } from "metabase/lib/formatting"; import Utils from "metabase/lib/utils"; import { - getEngineNativeAceMode, - getEngineNativeType, - getEngineNativeRequiresTable + getEngineNativeAceMode, + getEngineNativeType, + getEngineNativeRequiresTable, } from "metabase/lib/engine"; -import { chain, assoc, getIn, assocIn } from "icepick"; +import { chain, assoc, getIn, assocIn, updateIn } from "icepick"; import _ from "underscore"; import type { - DatasetQuery, - NativeDatasetQuery + DatasetQuery, + NativeDatasetQuery, } from 
"metabase/meta/types/Card"; import type { TemplateTags, TemplateTag } from "metabase/meta/types/Query"; import type { DatabaseEngine, DatabaseId } from "metabase/meta/types/Database"; import AtomicQuery from "metabase-lib/lib/queries/AtomicQuery"; export const NATIVE_QUERY_TEMPLATE: NativeDatasetQuery = { - database: null, - type: "native", - native: { - query: "", - template_tags: {} - } + database: null, + type: "native", + native: { + query: "", + "template-tags": {}, + }, }; export default class NativeQuery extends AtomicQuery { - // For Flow type completion - _nativeDatasetQuery: NativeDatasetQuery; - - constructor( - question: Question, - datasetQuery: DatasetQuery = NATIVE_QUERY_TEMPLATE - ) { - super(question, datasetQuery); - - this._nativeDatasetQuery = (datasetQuery: NativeDatasetQuery); - } - - static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { - return datasetQuery.type === NATIVE_QUERY_TEMPLATE.type; - } - - /* Query superclass methods */ - - canRun() { - return this.databaseId() != null && - this.queryText().length > 0 && - (!this.requiresTable() || this.collection()); - } - - isEmpty() { - return this.databaseId() == null || this.queryText().length == 0; + // For Flow type completion + _nativeDatasetQuery: NativeDatasetQuery; + + constructor( + question: Question, + datasetQuery: DatasetQuery = NATIVE_QUERY_TEMPLATE, + ) { + super(question, datasetQuery); + + this._nativeDatasetQuery = (datasetQuery: NativeDatasetQuery); + } + + static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { + return datasetQuery.type === NATIVE_QUERY_TEMPLATE.type; + } + + /* Query superclass methods */ + + canRun() { + return ( + this.databaseId() != null && + this.queryText().length > 0 && + (!this.requiresTable() || this.collection()) + ); + } + + isEmpty() { + return this.databaseId() == null || this.queryText().length == 0; + } + + databases(): Database[] { + return super + .databases() + .filter(database => database.native_permissions === 
"write"); + } + + /* AtomicQuery superclass methods */ + + tables(): ?(Table[]) { + const database = this.database(); + return (database && database.tables) || null; + } + + databaseId(): ?DatabaseId { + // same for both structured and native + return this._nativeDatasetQuery.database; + } + database(): ?Database { + const databaseId = this.databaseId(); + return databaseId != null ? this._metadata.databases[databaseId] : null; + } + engine(): ?DatabaseEngine { + const database = this.database(); + return database && database.engine; + } + + /* Methods unique to this query type */ + + /** + * @returns a new query with the provided Database set. + */ + setDatabase(database: Database): NativeQuery { + if (database.id !== this.databaseId()) { + // TODO: this should reset the rest of the query? + return new NativeQuery( + this._originalQuestion, + assoc(this.datasetQuery(), "database", database.id), + ); + } else { + return this; } - - databases(): Database[] { - return super - .databases() - .filter(database => database.native_permissions === "write"); + } + + hasWritePermission(): boolean { + const database = this.database(); + return database != null && database.native_permissions === "write"; + } + + supportsNativeParameters(): boolean { + const database = this.database(); + return ( + database != null && _.contains(database.features, "native-parameters") + ); + } + + table(): ?Table { + const database = this.database(); + const collection = this.collection(); + if (!database || !collection) { + return null; } - - /* AtomicQuery superclass methods */ - - tables(): ?(Table[]) { - const database = this.database(); - return (database && database.tables) || null; - } - - databaseId(): ?DatabaseId { - // same for both structured and native - return this._nativeDatasetQuery.database; - } - database(): ?Database { - const databaseId = this.databaseId(); - return databaseId != null ? 
this._metadata.databases[databaseId] : null; - } - engine(): ?DatabaseEngine { - const database = this.database(); - return database && database.engine; - } - - /* Methods unique to this query type */ - - /** - * @returns a new query with the provided Database set. - */ - setDatabase(database: Database): NativeQuery { - if (database.id !== this.databaseId()) { - // TODO: this should reset the rest of the query? - return new NativeQuery( - this._originalQuestion, - assoc(this.datasetQuery(), "database", database.id) - ); + return _.findWhere(database.tables, { name: collection }) || null; + } + + queryText(): string { + return getIn(this.datasetQuery(), ["native", "query"]) || ""; + } + + updateQueryText(newQueryText: string): Query { + return new NativeQuery( + this._originalQuestion, + chain(this._datasetQuery) + .assocIn(["native", "query"], newQueryText) + .assocIn( + ["native", "template-tags"], + this._getUpdatedTemplateTags(newQueryText), + ) + .value(), + ); + } + + collection(): ?string { + return getIn(this.datasetQuery(), ["native", "collection"]); + } + + updateCollection(newCollection: string) { + return new NativeQuery( + this._originalQuestion, + assocIn(this._datasetQuery, ["native", "collection"], newCollection), + ); + } + + setParameterIndex(id: string, newIndex: number) { + // NOTE: currently all NativeQuery parameters are implicitly generated from + // template tags, and the order is determined by the key order + return new NativeQuery( + this._originalQuestion, + updateIn( + this._datasetQuery, + ["native", "template_tags"], + templateTags => { + const entries = Array.from(Object.entries(templateTags)); + const oldIndex = _.findIndex(entries, entry => entry[1].id === id); + entries.splice(newIndex, 0, entries.splice(oldIndex, 1)[0]); + return _.object(entries); + }, + ), + ); + } + + lineCount(): number { + const queryText = this.queryText(); + return queryText ? countLines(queryText) : 0; + } + + /** + * The ACE Editor mode name, e.g. 
'ace/mode/json' + */ + aceMode(): string { + return getEngineNativeAceMode(this.engine()); + } + + /** + * Name used to describe the text written in that mode, e.g. 'JSON'. Used to fill in the blank in 'This question is written in _______'. + */ + nativeQueryLanguage() { + return getEngineNativeType(this.engine()).toUpperCase(); + } + + /** + * Whether the DB selector should be a DB + Table selector. Mongo needs both DB + Table. + */ + requiresTable() { + return getEngineNativeRequiresTable(this.engine()); + } + + // $FlowFixMe + templateTags(): TemplateTag[] { + return Object.values(this.templateTagsMap()); + } + templateTagsMap(): TemplateTags { + return getIn(this.datasetQuery(), ["native", "template-tags"]) || {}; + } + + setDatasetQuery(datasetQuery: DatasetQuery): NativeQuery { + return new NativeQuery(this._originalQuestion, datasetQuery); + } + + /** + * special handling for NATIVE cards to automatically detect parameters ... {{varname}} + */ + _getUpdatedTemplateTags(queryText: string): TemplateTags { + if (queryText && this.supportsNativeParameters()) { + let tags = []; + + // look for variable usage in the query (like '{{varname}}'). 
we only allow alphanumeric characters for the variable name + // a variable name can optionally end with :start or :end which is not considered part of the actual variable name + // expected pattern is like mustache templates, so we are looking for something like {{category}} or {{date:start}} + // anything that doesn't match our rule is ignored, so {{&foo!}} would simply be ignored + let match, + re = /\{\{\s*([A-Za-z0-9_]+?)\s*\}\}/g; + while ((match = re.exec(queryText)) != null) { + tags.push(match[1]); + } + + // eliminate any duplicates since it's allowed for a user to reference the same variable multiple times + const existingTemplateTags = this.templateTagsMap(); + + tags = _.uniq(tags); + let existingTags = Object.keys(existingTemplateTags); + + // if we ended up with any variables in the query then update the card parameters list accordingly + if (tags.length > 0 || existingTags.length > 0) { + let newTags = _.difference(tags, existingTags); + let oldTags = _.difference(existingTags, tags); + + let templateTags = { ...existingTemplateTags }; + if (oldTags.length === 1 && newTags.length === 1) { + // renaming + templateTags[newTags[0]] = { ...templateTags[oldTags[0]] }; + + if (templateTags[newTags[0]].display_name === humanize(oldTags[0])) { + templateTags[newTags[0]].display_name = humanize(newTags[0]); + } + + templateTags[newTags[0]].name = newTags[0]; + delete templateTags[oldTags[0]]; } else { - return this; + // remove old vars + for (const name of oldTags) { + delete templateTags[name]; + } + + // create new vars + for (let tagName of newTags) { + templateTags[tagName] = { + id: Utils.uuid(), + name: tagName, + display_name: humanize(tagName), + type: null, + }; + } } - } - hasWritePermission(): boolean { - const database = this.database(); - return database != null && database.native_permissions === "write"; - } - - supportsNativeParameters(): boolean { - const database = this.database(); - return database != null && - 
_.contains(database.features, "native-parameters"); - } - - table(): ?Table { - const database = this.database(); - const collection = this.collection(); - if (!database || !collection) { - return null; + // ensure all tags have an id since we need it for parameter values to work + // $FlowFixMe + for (const tag: TemplateTag of Object.values(templateTags)) { + if (tag.id == undefined) { + tag.id = Utils.uuid(); + } } - return _.findWhere(database.tables, { name: collection }) || null; - } - - queryText(): string { - return getIn(this.datasetQuery(), ["native", "query"]) || ""; - } - - updateQueryText(newQueryText: string): Query { - return new NativeQuery( - this._originalQuestion, - chain(this._datasetQuery) - .assocIn(["native", "query"], newQueryText) - .assocIn( - ["native", "template_tags"], - this._getUpdatedTemplateTags(newQueryText) - ) - .value() - ); - } - - collection(): ?string { - return getIn(this.datasetQuery(), ["native", "collection"]); - } - - updateCollection(newCollection: string) { - return new NativeQuery( - this._originalQuestion, - assocIn(this._datasetQuery, ["native", "collection"], newCollection) - ); - } - - lineCount(): number { - const queryText = this.queryText(); - return queryText ? countLines(queryText) : 0; - } - - /** - * The ACE Editor mode name, e.g. 'ace/mode/json' - */ - aceMode(): string { - return getEngineNativeAceMode(this.engine()); - } - /** - * Name used to describe the text written in that mode, e.g. 'JSON'. Used to fill in the blank in 'This question is written in _______'. - */ - nativeQueryLanguage() { - return getEngineNativeType(this.engine()).toUpperCase(); - } - - /** - * Whether the DB selector should be a DB + Table selector. Mongo needs both DB + Table. 
- */ - requiresTable() { - return getEngineNativeRequiresTable(this.engine()); - } - - // $FlowFixMe - templateTags(): TemplateTag[] { - return Object.values(this.templateTagsMap()); - } - templateTagsMap(): TemplateTags { - return getIn(this.datasetQuery(), ["native", "template_tags"]) || {}; - } - - /** - * special handling for NATIVE cards to automatically detect parameters ... {{varname}} - */ - _getUpdatedTemplateTags(queryText: string): TemplateTags { - if (queryText && this.supportsNativeParameters()) { - let tags = []; - - // look for variable usage in the query (like '{{varname}}'). we only allow alphanumeric characters for the variable name - // a variable name can optionally end with :start or :end which is not considered part of the actual variable name - // expected pattern is like mustache templates, so we are looking for something like {{category}} or {{date:start}} - // anything that doesn't match our rule is ignored, so {{&foo!}} would simply be ignored - let match, re = /\{\{([A-Za-z0-9_]+?)\}\}/g; - while ((match = re.exec(queryText)) != null) { - tags.push(match[1]); - } - - // eliminate any duplicates since it's allowed for a user to reference the same variable multiple times - const existingTemplateTags = this.templateTagsMap(); - - tags = _.uniq(tags); - let existingTags = Object.keys(existingTemplateTags); - - // if we ended up with any variables in the query then update the card parameters list accordingly - if (tags.length > 0 || existingTags.length > 0) { - let newTags = _.difference(tags, existingTags); - let oldTags = _.difference(existingTags, tags); - - let templateTags = { ...existingTemplateTags }; - if (oldTags.length === 1 && newTags.length === 1) { - // renaming - templateTags[newTags[0]] = { ...templateTags[oldTags[0]] }; - - if ( - templateTags[newTags[0]].display_name === - humanize(oldTags[0]) - ) { - templateTags[newTags[0]].display_name = humanize( - newTags[0] - ); - } - - templateTags[newTags[0]].name = newTags[0]; - 
delete templateTags[oldTags[0]]; - } else { - // remove old vars - for (const name of oldTags) { - delete templateTags[name]; - } - - // create new vars - for (let tagName of newTags) { - templateTags[tagName] = { - id: Utils.uuid(), - name: tagName, - display_name: humanize(tagName), - type: null - }; - } - } - - // ensure all tags have an id since we need it for parameter values to work - // $FlowFixMe - for (const tag: TemplateTag of Object.values(templateTags)) { - if (tag.id == undefined) { - tag.id = Utils.uuid(); - } - } - - return templateTags; - } - } - return {}; + return templateTags; + } } + return {}; + } } diff --git a/frontend/src/metabase-lib/lib/queries/Query.js b/frontend/src/metabase-lib/lib/queries/Query.js index 8edc0521592d..6599c22c7422 100644 --- a/frontend/src/metabase-lib/lib/queries/Query.js +++ b/frontend/src/metabase-lib/lib/queries/Query.js @@ -11,84 +11,94 @@ import { memoize } from "metabase-lib/lib/utils"; * An abstract class for all query types (StructuredQuery & NativeQuery) */ export default class Query { - _metadata: Metadata; - - /** - * Note that Question is not always in sync with _datasetQuery, - * calling question() will always merge the latest _datasetQuery to the question object - */ - _originalQuestion: Question; - _datasetQuery: DatasetQuery; - - constructor(question: Question, datasetQuery: DatasetQuery) { - this._metadata = question._metadata; - this._datasetQuery = datasetQuery; - this._originalQuestion = question; - } + _metadata: Metadata; - /** - * Returns a question updated with the current dataset query. - * Can only be applied to query that is a direct child of the question. 
- */ - @memoize question(): Question { - const isDirectChildOfQuestion = typeof this._originalQuestion.query() === - typeof this; - - if (isDirectChildOfQuestion) { - return this._originalQuestion.setQuery(this); - } else { - throw new Error( - "Can't derive a question from a query that is a child of other query" - ); - } - } + /** + * Note that Question is not always in sync with _datasetQuery, + * calling question() will always merge the latest _datasetQuery to the question object + */ + _originalQuestion: Question; + _datasetQuery: DatasetQuery; - /** - * Convenience method for accessing the global metadata - */ - metadata() { - return this._metadata; - } + constructor(question: Question, datasetQuery: DatasetQuery) { + this._metadata = question._metadata; + this._datasetQuery = datasetQuery; + this._originalQuestion = question; + } - /** - * Does this query have the sufficient metadata for editing it? - */ - isEditable(): boolean { - return true; - } + /** + * Returns a question updated with the current dataset query. + * Can only be applied to query that is a direct child of the question. + */ + @memoize + question(): Question { + const isDirectChildOfQuestion = + typeof this._originalQuestion.query() === typeof this; - /** - * Returns the dataset_query object underlying this Query - */ - datasetQuery(): DatasetQuery { - return this._datasetQuery; + if (isDirectChildOfQuestion) { + return this._originalQuestion.setQuery(this); + } else { + throw new Error( + "Can't derive a question from a query that is a child of other query", + ); } + } - /** - * Query is considered empty, i.e. 
it is in a plain state with no properties / query clauses set - */ - isEmpty(): boolean { - return false; - } + clean(): Query { + return this; + } - /** - * Query is valid (as far as we know) and can be executed - */ - canRun(): boolean { - return false; - } + /** + * Convenience method for accessing the global metadata + */ + metadata() { + return this._metadata; + } - /** - * Databases this query could use - */ - databases(): Database[] { - return this._metadata.databasesList(); - } + /** + * Does this query have the sufficient metadata for editing it? + */ + isEditable(): boolean { + return true; + } - /** - * Helper for updating with functions that expect a DatasetQuery object - */ - update(fn: (datasetQuery: DatasetQuery) => void) { - return fn(this.datasetQuery()); - } + /** + * Returns the dataset_query object underlying this Query + */ + datasetQuery(): DatasetQuery { + return this._datasetQuery; + } + + setDatasetQuery(datasetQuery: DatasetQuery): Query { + return this; + } + + /** + * + * Query is considered empty, i.e. 
it is in a plain state with no properties / query clauses set + */ + isEmpty(): boolean { + return false; + } + + /** + * Query is valid (as far as we know) and can be executed + */ + canRun(): boolean { + return false; + } + + /** + * Databases this query could use + */ + databases(): Database[] { + return this._metadata.databasesList(); + } + + /** + * Helper for updating with functions that expect a DatasetQuery object + */ + update(fn: (datasetQuery: DatasetQuery) => void) { + return fn(this.datasetQuery()); + } } diff --git a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js index 12b78b08e846..bc342fe5b0ed 100644 --- a/frontend/src/metabase-lib/lib/queries/StructuredQuery.js +++ b/frontend/src/metabase-lib/lib/queries/StructuredQuery.js @@ -6,8 +6,8 @@ import * as Q from "metabase/lib/query/query"; import Q_deprecated, { - AggregationClause, - NamedClause + AggregationClause, + NamedClause, } from "metabase/lib/query"; import { format as formatExpression } from "metabase/lib/expressions/formatter"; import { getAggregator } from "metabase/lib/schema_metadata"; @@ -16,26 +16,26 @@ import _ from "underscore"; import { chain, assoc, updateIn } from "icepick"; import type { - StructuredQuery as StructuredQueryObject, - Aggregation, - Breakout, - Filter, - LimitClause, - OrderBy + StructuredQuery as StructuredQueryObject, + Aggregation, + Breakout, + Filter, + LimitClause, + OrderBy, } from "metabase/meta/types/Query"; import type { - DatasetQuery, - StructuredDatasetQuery + DatasetQuery, + StructuredDatasetQuery, } from "metabase/meta/types/Card"; import type { - TableMetadata, - DimensionOptions + TableMetadata, + DimensionOptions, } from "metabase/meta/types/Metadata"; import Dimension, { - FKDimension, - ExpressionDimension, - AggregationDimension + FKDimension, + ExpressionDimension, + AggregationDimension, } from "metabase-lib/lib/Dimension"; import type Table from "../metadata/Table"; @@ 
-47,689 +47,733 @@ import type { TableId } from "metabase/meta/types/Table"; import AtomicQuery from "./AtomicQuery"; import AggregationWrapper from "./Aggregation"; import AggregationOption from "metabase-lib/lib/metadata/AggregationOption"; +import Utils from "metabase/lib/utils"; + +import { isSegmentFilter } from "metabase/lib/query/filter"; export const STRUCTURED_QUERY_TEMPLATE = { - database: null, - type: "query", - query: { - source_table: null - } + database: null, + type: "query", + query: { + "source-table": null, + }, }; /** * A wrapper around an MBQL (`query` type @type {DatasetQuery}) object */ export default class StructuredQuery extends AtomicQuery { - static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { - return datasetQuery.type === STRUCTURED_QUERY_TEMPLATE.type; - } - - // For Flow type completion - _structuredDatasetQuery: StructuredDatasetQuery; - - /** - * Creates a new StructuredQuery based on the provided DatasetQuery object - */ - constructor( - question: Question, - datasetQuery: DatasetQuery = STRUCTURED_QUERY_TEMPLATE - ) { - super(question, datasetQuery); - - this._structuredDatasetQuery = (datasetQuery: StructuredDatasetQuery); - } - - static newStucturedQuery( - { - question, - databaseId, - tableId - }: { question: Question, databaseId?: DatabaseId, tableId?: TableId } - ) { - const datasetQuery = { - ...STRUCTURED_QUERY_TEMPLATE, - database: databaseId || null, - query: { - source_table: tableId || null - } - }; - - return new StructuredQuery(question, datasetQuery); - } - - /* Query superclass methods */ - - /** - * @returns true if this is new query that hasn't been modified yet. - */ - isEmpty() { - return !this.databaseId(); - } - - /** - * @returns true if this query is in a state where it can be run. - */ - canRun() { - return Q_deprecated.canRun(this.query()); - } - - /** - * @returns true if this query is in a state where it can be edited. Must have database and table set, and metadata for the table loaded. 
- */ - isEditable(): boolean { - return !!this.tableMetadata(); - } - - /* AtomicQuery superclass methods */ - - /** - * @returns all tables in the currently selected database that can be used. - */ - tables(): ?(Table[]) { - const database = this.database(); - return (database && database.tables) || null; - } - - /** - * @returns the currently selected database ID, if any is selected. - */ - databaseId(): ?DatabaseId { - // same for both structured and native - return this._structuredDatasetQuery.database; - } - - /** - * @returns the currently selected database metadata, if a database is selected and loaded. - */ - database(): ?Database { - const databaseId = this.databaseId(); - return databaseId != null ? this._metadata.databases[databaseId] : null; - } - - /** - * @returns the database engine object, if a database is selected and loaded. - */ - engine(): ?DatabaseEngine { - const database = this.database(); - return database && database.engine; - } - - /* Methods unique to this query type */ - - /** - * @returns a new reset @type {StructuredQuery} with the same parent @type {Question} - */ - reset(): StructuredQuery { - return new StructuredQuery(this._originalQuestion); - } - - /** - * @returns the underlying MBQL query object - */ - query(): StructuredQueryObject { - return this._structuredDatasetQuery.query; - } - - /** - * @returns a new query with the provided Database set. - */ - setDatabase(database: Database): StructuredQuery { - if (database.id !== this.databaseId()) { - // TODO: this should reset the rest of the query? - return new StructuredQuery( - this._originalQuestion, - assoc(this.datasetQuery(), "database", database.id) - ); - } else { - return this; - } - } - - /** - * @returns a new query with the provided Table set. 
- */ - setTable(table: Table): StructuredQuery { - if (table.id !== this.tableId()) { - return new StructuredQuery( - this._originalQuestion, - chain(this.datasetQuery()) - .assoc("database", table.database.id) - .assocIn(["query", "source_table"], table.id) - .value() - ); - } else { - return this; - } - } - - /** - * @returns the table ID, if a table is selected. - */ - tableId(): ?TableId { - return this.query().source_table; - } - - /** - * @returns the table object, if a table is selected and loaded. - * FIXME: actual return type should be `?Table` - */ - table(): Table { - return this._metadata.tables[this.tableId()]; - } - - /** - * @deprecated Alias of `table()`. Use only when partially porting old code that uses @type {TableMetadata} object. - */ - tableMetadata(): ?TableMetadata { - return this.table(); - } - - // AGGREGATIONS - - /** - * @returns an array of MBQL @type {Aggregation}s. - */ - aggregations(): Aggregation[] { - return Q.getAggregations(this.query()); - } - - /** - * @returns an array of aggregation wrapper objects - * TODO Atte Keinänen 6/11/17: Make the wrapper objects the standard format for aggregations - */ - aggregationsWrapped(): AggregationWrapper[] { - return this.aggregations().map( - agg => new AggregationWrapper(this, agg) - ); - } - - /** - * @returns an array of aggregation options for the currently selected table - */ - aggregationOptions(): AggregationOption[] { - // TODO Should `aggregation_options` be wrapped already in selectors/metadata.js? - const optionObjects = this.table() && this.table().aggregations(); - return optionObjects - ? 
optionObjects.map(agg => new AggregationOption(agg)) - : []; - } - - /** - * @returns an array of aggregation options for the currently selected table, excluding the "rows" pseudo-aggregation - */ - aggregationOptionsWithoutRows(): AggregationOption[] { - return this.aggregationOptions().filter( - option => option.short !== "rows" - ); - } - - /** - * @returns the field options for the provided aggregation - */ - aggregationFieldOptions(agg): DimensionOptions { - const aggregation = this.table().aggregation(agg); - if (aggregation) { - const fieldOptions = this.fieldOptions(field => { - return aggregation.validFieldsFilters[0]([field]).length === 1; - }); - - // HACK Atte Keinänen 6/18/17: Using `fieldOptions` with a field filter function - // ends up often omitting all expressions because the field object of ExpressionDimension is empty. - // Expressions can be applied to all aggregations so we can simply add all expressions to the - // dimensions list in this hack. - // - // A real solution would have a `dimensionOptions` method instead of `fieldOptions` which would - // enable filtering based on dimension properties. - return { - ...fieldOptions, - dimensions: _.uniq([ - ...this.expressionDimensions(), - ...fieldOptions.dimensions.filter( - d => !(d instanceof ExpressionDimension) - ) - ]) - }; - } else { - return { count: 0, fks: [], dimensions: [] }; - } - } - - /** - * @returns true if the aggregation can be removed - */ - canRemoveAggregation(): boolean { - return this.aggregations().length > 1; - } - - /** - * @returns true if the query has no aggregation - */ - isBareRows(): boolean { - return Q.isBareRows(this.query()); - } - - /** - * @returns the formatted named of the aggregation at the provided index. 
- */ - aggregationName(index: number = 0): ?string { - const aggregation = this.aggregations()[index]; - if (NamedClause.isNamed(aggregation)) { - return NamedClause.getName(aggregation); - } else if (AggregationClause.isCustom(aggregation)) { - return formatExpression(aggregation, { - tableMetadata: this.tableMetadata(), - customFields: this.expressions() - }); - } else if (AggregationClause.isMetric(aggregation)) { - const metricId = AggregationClause.getMetric(aggregation); - const metric = this._metadata.metrics[metricId]; - if (metric) { - return metric.name; - } - } else { - const selectedAggregation = getAggregator( - AggregationClause.getOperator(aggregation) - ); - if (selectedAggregation) { - let aggregationName = selectedAggregation.name.replace( - " of ...", - "" - ); - const fieldId = Q_deprecated.getFieldTargetId( - AggregationClause.getField(aggregation) - ); - const field = fieldId && this._metadata.fields[fieldId]; - if (field) { - aggregationName += " of " + field.display_name; - } - return aggregationName; - } - } - return null; - } - - /** - * @returns {StructuredQuery} new query with the provided MBQL @type {Aggregation} added. - */ - addAggregation(aggregation: Aggregation): StructuredQuery { - return this._updateQuery(Q.addAggregation, arguments); - } - - /** - * @returns {StructuredQuery} new query with the MBQL @type {Aggregation} updated at the provided index. - */ - updateAggregation( - index: number, - aggregation: Aggregation - ): StructuredQuery { - return this._updateQuery(Q.updateAggregation, arguments); - } - - /** - * @returns {StructuredQuery} new query with the aggregation at the provided index removed. - */ - removeAggregation(index: number): StructuredQuery { - return this._updateQuery(Q.removeAggregation, arguments); - } - - /** - * @returns {StructuredQuery} new query with all aggregations removed. 
- */ - clearAggregations(): StructuredQuery { - return this._updateQuery(Q.clearAggregations, arguments); - } - - // BREAKOUTS - - /** - * @returns An array of MBQL @type {Breakout}s. - */ - breakouts(): Breakout[] { - return Q.getBreakouts(this.query()); - } - - /** - * @param includedBreakout The breakout to include even if it's already used - * @param fieldFilter An option @type {Field} predicate to filter out options - * @returns @type {DimensionOptions} that can be used as breakouts, excluding used breakouts, unless @param {breakout} is provided. - */ - breakoutOptions(includedBreakout?: any, fieldFilter = () => true) { - // the set of field ids being used by other breakouts - const usedFields = new Set( - this.breakouts() - .filter(b => !_.isEqual(b, includedBreakout)) - .map(b => Q_deprecated.getFieldTargetId(b)) - ); - - return this.fieldOptions( - field => fieldFilter(field) && !usedFields.has(field.id) + static isDatasetQueryType(datasetQuery: DatasetQuery): boolean { + return datasetQuery.type === STRUCTURED_QUERY_TEMPLATE.type; + } + + // For Flow type completion + _structuredDatasetQuery: StructuredDatasetQuery; + + /** + * Creates a new StructuredQuery based on the provided DatasetQuery object + */ + constructor( + question: Question, + datasetQuery: DatasetQuery = STRUCTURED_QUERY_TEMPLATE, + ) { + super(question, datasetQuery); + + this._structuredDatasetQuery = (datasetQuery: StructuredDatasetQuery); + } + + static newStucturedQuery({ + question, + databaseId, + tableId, + }: { + question: Question, + databaseId?: DatabaseId, + tableId?: TableId, + }) { + const datasetQuery = { + ...STRUCTURED_QUERY_TEMPLATE, + database: databaseId || null, + query: { + "source-table": tableId || null, + }, + }; + + return new StructuredQuery(question, datasetQuery); + } + + /* Query superclass methods */ + + /** + * @returns true if this is new query that hasn't been modified yet. 
+ */ + isEmpty() { + return !this.databaseId(); + } + + /** + * @returns true if this query is in a state where it can be run. + */ + canRun() { + return Q_deprecated.canRun(this.query()); + } + + /** + * @returns true if this query is in a state where it can be edited. Must have database and table set, and metadata for the table loaded. + */ + isEditable(): boolean { + return !!this.tableMetadata(); + } + + /* AtomicQuery superclass methods */ + + /** + * @returns all tables in the currently selected database that can be used. + */ + tables(): ?(Table[]) { + const database = this.database(); + return (database && database.tables) || null; + } + + /** + * @returns the currently selected database ID, if any is selected. + */ + databaseId(): ?DatabaseId { + // same for both structured and native + return this._structuredDatasetQuery.database; + } + + /** + * @returns the currently selected database metadata, if a database is selected and loaded. + */ + database(): ?Database { + const databaseId = this.databaseId(); + return databaseId != null ? this._metadata.databases[databaseId] : null; + } + + /** + * @returns the database engine object, if a database is selected and loaded. + */ + engine(): ?DatabaseEngine { + const database = this.database(); + return database && database.engine; + } + + /* Methods unique to this query type */ + + /** + * @returns a new reset @type {StructuredQuery} with the same parent @type {Question} + */ + reset(): StructuredQuery { + return new StructuredQuery(this._originalQuestion); + } + + /** + * @returns the underlying MBQL query object + */ + query(): StructuredQueryObject { + return this._structuredDatasetQuery.query; + } + + setQuery(query: StructuredQueryObject): StructuredQuery { + return this._updateQuery(() => query, []); + } + + updateQuery( + fn: (q: StructuredQueryObject) => StructuredQueryObject, + ): StructuredQuery { + return this._updateQuery(fn, []); + } + + /** + * @returns a new query with the provided Database set. 
+ */ + setDatabase(database: Database): StructuredQuery { + if (database.id !== this.databaseId()) { + // TODO: this should reset the rest of the query? + return new StructuredQuery( + this._originalQuestion, + assoc(this.datasetQuery(), "database", database.id), + ); + } else { + return this; + } + } + + /** + * @returns a new query with the provided Table set. + */ + setTable(table: Table): StructuredQuery { + if (table.id !== this.tableId()) { + return new StructuredQuery( + this._originalQuestion, + chain(this.datasetQuery()) + .assoc("database", table.database.id) + .assocIn(["query", "source-table"], table.id) + .value(), + ); + } else { + return this; + } + } + + /** + * @returns the table ID, if a table is selected. + */ + tableId(): ?TableId { + return this.query()["source-table"]; + } + + /** + * @returns the table object, if a table is selected and loaded. + * FIXME: actual return type should be `?Table` + */ + table(): Table { + return this._metadata.tables[this.tableId()]; + } + + /** + * @deprecated Alias of `table()`. Use only when partially porting old code that uses @type {TableMetadata} object. + */ + tableMetadata(): ?TableMetadata { + return this.table(); + } + + clean() { + const datasetQuery = this.datasetQuery(); + if (datasetQuery.query) { + const query = Utils.copy(datasetQuery.query); + + return this.setDatasetQuery({ + ...datasetQuery, + query: Q_deprecated.cleanQuery(query), + }); + } else { + return this; + } + } + + // AGGREGATIONS + + /** + * @returns an array of MBQL @type {Aggregation}s. 
+ */ + aggregations(): Aggregation[] { + return Q.getAggregations(this.query()); + } + + /** + * @returns an array of aggregation wrapper objects + * TODO Atte Keinänen 6/11/17: Make the wrapper objects the standard format for aggregations + */ + aggregationsWrapped(): AggregationWrapper[] { + return this.aggregations().map(agg => new AggregationWrapper(this, agg)); + } + + /** + * @returns an array of aggregation options for the currently selected table + */ + aggregationOptions(): AggregationOption[] { + // TODO Should `aggregation_options` be wrapped already in selectors/metadata.js? + const optionObjects = this.table() && this.table().aggregations(); + return optionObjects + ? optionObjects.map(agg => new AggregationOption(agg)) + : []; + } + + /** + * @returns an array of aggregation options for the currently selected table + */ + aggregationOptionsWithoutRows(): AggregationOption[] { + return this.aggregationOptions().filter(option => option.short !== "rows"); + } + + /** + * @returns the field options for the provided aggregation + */ + aggregationFieldOptions(agg): DimensionOptions { + const aggregation = this.table().aggregation(agg); + if (aggregation) { + const fieldOptions = this.fieldOptions(field => { + return aggregation.validFieldsFilters[0]([field]).length === 1; + }); + + // HACK Atte Keinänen 6/18/17: Using `fieldOptions` with a field filter function + // ends up often omitting all expressions because the field object of ExpressionDimension is empty. + // Expressions can be applied to all aggregations so we can simply add all expressions to the + // dimensions list in this hack. + // + // A real solution would have a `dimensionOptions` method instead of `fieldOptions` which would + // enable filtering based on dimension properties. 
+ return { + ...fieldOptions, + dimensions: _.uniq([ + ...this.expressionDimensions(), + ...fieldOptions.dimensions.filter( + d => !(d instanceof ExpressionDimension), + ), + ]), + }; + } else { + return { count: 0, fks: [], dimensions: [] }; + } + } + + /** + * @returns true if the aggregation can be removed + */ + canRemoveAggregation(): boolean { + return this.aggregations().length > 1; + } + + /** + * @returns true if the query has no aggregation + */ + isBareRows(): boolean { + return Q.isBareRows(this.query()); + } + + /** + * @returns the formatted named of the aggregation at the provided index. + */ + aggregationName(index: number = 0): ?string { + const aggregation = this.aggregations()[index]; + if (NamedClause.isNamed(aggregation)) { + return NamedClause.getName(aggregation); + } else if (AggregationClause.isCustom(aggregation)) { + return formatExpression(aggregation, { + tableMetadata: this.tableMetadata(), + customFields: this.expressions(), + }); + } else if (AggregationClause.isMetric(aggregation)) { + const metricId = AggregationClause.getMetric(aggregation); + const metric = this._metadata.metrics[metricId]; + if (metric) { + return metric.name; + } + } else { + const selectedAggregation = getAggregator( + AggregationClause.getOperator(aggregation), + ); + if (selectedAggregation) { + let aggregationName = selectedAggregation.name.replace(" of ...", ""); + const fieldId = Q_deprecated.getFieldTargetId( + AggregationClause.getField(aggregation), ); - } - - /** - * @returns whether a new breakout can be added or not - */ - canAddBreakout(): boolean { - return this.breakoutOptions().count > 0; - } - - /** - * @returns whether the current query has a valid breakout - */ - hasValidBreakout(): boolean { - return Q_deprecated.hasValidBreakout(this.query()); - } - - /** - * @returns {StructuredQuery} new query with the provided MBQL @type {Breakout} added. 
- */ - addBreakout(breakout: Breakout) { - return this._updateQuery(Q.addBreakout, arguments); - } - - /** - * @returns {StructuredQuery} new query with the MBQL @type {Breakout} updated at the provided index. - */ - updateBreakout(index: number, breakout: Breakout) { - return this._updateQuery(Q.updateBreakout, arguments); - } - - /** - * @returns {StructuredQuery} new query with the breakout at the provided index removed. - */ - removeBreakout(index: number) { - return this._updateQuery(Q.removeBreakout, arguments); - } - /** - * @returns {StructuredQuery} new query with all breakouts removed. - */ - clearBreakouts() { - return this._updateQuery(Q.clearBreakouts, arguments); - } - - // FILTERS - - /** - * @returns An array of MBQL @type {Filter}s. - */ - filters(): Filter[] { - return Q.getFilters(this.query()); - } - - /** - * @returns @type {DimensionOptions} that can be used in filters. - */ - filterFieldOptions(): DimensionOptions { - return this.fieldOptions(); - } - - /** - * @returns @type {Segment}s that can be used as filters. - * TODO: exclude used segments - */ - filterSegmentOptions(): Segment[] { - return this.table().segments.filter(sgmt => sgmt.is_active === true); - } - - /** - * @returns whether a new filter can be added or not - */ - canAddFilter(): boolean { - return Q.canAddFilter(this.query()) && - (this.filterFieldOptions().count > 0 || - this.filterSegmentOptions().length > 0); - } - - /** - * @returns {StructuredQuery} new query with the provided MBQL @type {Filter} added. - */ - addFilter(filter: Filter) { - return this._updateQuery(Q.addFilter, arguments); - } - - /** - * @returns {StructuredQuery} new query with the MBQL @type {Filter} updated at the provided index. - */ - updateFilter(index: number, filter: Filter) { - return this._updateQuery(Q.updateFilter, arguments); - } - - /** - * @returns {StructuredQuery} new query with the filter at the provided index removed. 
- */ - removeFilter(index: number) { - return this._updateQuery(Q.removeFilter, arguments); - } - - /** - * @returns {StructuredQuery} new query with all filters removed. - */ - clearFilters() { - return this._updateQuery(Q.clearFilters, arguments); - } - - // SORTS - - // TODO: standardize SORT vs ORDER_BY terminology - - sorts(): OrderBy[] { - return Q.getOrderBys(this.query()); - } - sortOptions(sort): DimensionOptions { - let sortOptions = { count: 0, dimensions: [], fks: [] }; - // in bare rows all fields are sortable, otherwise we only sort by our breakout columns - if (this.isBareRows()) { - const usedFields = new Set( - this.sorts() - .filter(b => !_.isEqual(b, sort)) - .map(b => Q_deprecated.getFieldTargetId(b[0])) - ); - - return this.fieldOptions(field => !usedFields.has(field.id)); - } else if (this.hasValidBreakout()) { - for (const breakout of this.breakouts()) { - sortOptions.dimensions.push( - Dimension.parseMBQL(breakout, this._metadata) - ); - sortOptions.count++; - } - for (const [index, aggregation] of this.aggregations().entries()) { - if (Q_deprecated.canSortByAggregateField(this.query(), index)) { - sortOptions.dimensions.push( - new AggregationDimension( - null, - [index], - this._metadata, - aggregation[0] - ) - ); - sortOptions.count++; - } - } + const field = fieldId && this._metadata.fields[fieldId]; + if (field) { + aggregationName += " of " + field.display_name; } - return sortOptions; - } - canAddSort(): boolean { - const sorts = this.sorts(); - return this.sortOptions().count > 0 && - (sorts.length === 0 || sorts[sorts.length - 1][0] != null); - } - - addSort(order_by: OrderBy) { - return this._updateQuery(Q.addOrderBy, arguments); - } - updateSort(index: number, order_by: OrderBy) { - return this._updateQuery(Q.updateOrderBy, arguments); - } - removeSort(index: number) { - return this._updateQuery(Q.removeOrderBy, arguments); - } - clearSort() { - return this._updateQuery(Q.clearOrderBy, arguments); - } - replaceSort(order_by: 
OrderBy) { - return this.clearSort().addSort(order_by); - } - - // LIMIT - - limit(): ?number { - return Q.getLimit(this.query()); - } - updateLimit(limit: LimitClause) { - return this._updateQuery(Q.updateLimit, arguments); - } - clearLimit() { - return this._updateQuery(Q.clearLimit, arguments); - } - - // EXPRESSIONS - - expressions(): { [key: string]: any } { - return Q.getExpressions(this.query()); - } - - updateExpression(name, expression, oldName) { - return this._updateQuery(Q.updateExpression, arguments); - } - - removeExpression(name) { - return this._updateQuery(Q.removeExpression, arguments); - } - - // FIELD OPTIONS - - // TODO Atte Keinänen 6/18/17: Refactor to dimensionOptions which takes a dimensionFilter - // See aggregationFieldOptions for an explanation why that covers more use cases - fieldOptions(fieldFilter = () => true): DimensionOptions { - const fieldOptions = { - count: 0, - fks: [], - dimensions: [] - }; - - const table = this.tableMetadata(); - if (table) { - const dimensionFilter = dimension => { - const field = dimension.field && dimension.field(); - return !field || (field.isDimension() && fieldFilter(field)); - }; - - const dimensionIsFKReference = dimension => - dimension.field && - dimension.field() && - dimension.field().isFK(); - - const filteredNonFKDimensions = this.dimensions() - .filter(dimensionFilter) - .filter(d => !dimensionIsFKReference(d)); - - for (const dimension of filteredNonFKDimensions) { - fieldOptions.count++; - fieldOptions.dimensions.push(dimension); - } - - const fkDimensions = this.dimensions().filter( - dimensionIsFKReference - ); - for (const dimension of fkDimensions) { - const fkDimensions = dimension - .dimensions([FKDimension]) - .filter(dimensionFilter); - - if (fkDimensions.length > 0) { - fieldOptions.count += fkDimensions.length; - fieldOptions.fks.push({ - field: dimension.field(), - dimension: dimension, - dimensions: fkDimensions - }); - } - } - } - - return fieldOptions; - } - - // DIMENSIONS - 
- dimensions(): Dimension[] { - return [...this.expressionDimensions(), ...this.tableDimensions()]; - } - - tableDimensions(): Dimension[] { - const table: Table = this.table(); - return table ? table.dimensions() : []; - } - - expressionDimensions(): Dimension[] { - return Object.entries(this.expressions()).map(([ - expressionName, - expression - ]) => { - return new ExpressionDimension(null, [expressionName]); - }); - } - - aggregationDimensions() { - return this.breakouts().map(breakout => - Dimension.parseMBQL(breakout, this._metadata)); - } - - metricDimensions() { - return this.aggregations().map( - (aggregation, index) => - new AggregationDimension( - null, - [index], - this._metadata, - aggregation[0] - ) + return aggregationName; + } + } + return null; + } + + /** + * @returns {StructuredQuery} new query with the provided MBQL @type {Aggregation} added. + */ + addAggregation(aggregation: Aggregation): StructuredQuery { + return this._updateQuery(Q.addAggregation, arguments); + } + + /** + * @returns {StructuredQuery} new query with the MBQL @type {Aggregation} updated at the provided index. + */ + updateAggregation(index: number, aggregation: Aggregation): StructuredQuery { + return this._updateQuery(Q.updateAggregation, arguments); + } + + /** + * @returns {StructuredQuery} new query with the aggregation at the provided index removed. + */ + removeAggregation(index: number): StructuredQuery { + return this._updateQuery(Q.removeAggregation, arguments); + } + + /** + * @returns {StructuredQuery} new query with all aggregations removed. + */ + clearAggregations(): StructuredQuery { + return this._updateQuery(Q.clearAggregations, arguments); + } + + // BREAKOUTS + + /** + * @returns An array of MBQL @type {Breakout}s. 
+ */ + breakouts(): Breakout[] { + return Q.getBreakouts(this.query()); + } + + /** + * @param includedBreakout The breakout to include even if it's already used + * @param fieldFilter An option @type {Field} predicate to filter out options + * @returns @type {DimensionOptions} that can be used as breakouts, excluding used breakouts, unless @param {breakout} is provided. + */ + breakoutOptions(includedBreakout?: any, fieldFilter = () => true) { + // the set of field ids being used by other breakouts + const usedFields = new Set( + this.breakouts() + .filter(b => !_.isEqual(b, includedBreakout)) + .map(b => Q_deprecated.getFieldTargetId(b)), + ); + + return this.fieldOptions( + field => fieldFilter(field) && !usedFields.has(field.id), + ); + } + + /** + * @returns whether a new breakout can be added or not + */ + canAddBreakout(): boolean { + return this.breakoutOptions().count > 0; + } + + /** + * @returns whether the current query has a valid breakout + */ + hasValidBreakout(): boolean { + return Q_deprecated.hasValidBreakout(this.query()); + } + + /** + * @returns {StructuredQuery} new query with the provided MBQL @type {Breakout} added. + */ + addBreakout(breakout: Breakout) { + return this._updateQuery(Q.addBreakout, arguments); + } + + /** + * @returns {StructuredQuery} new query with the MBQL @type {Breakout} updated at the provided index. + */ + updateBreakout(index: number, breakout: Breakout) { + return this._updateQuery(Q.updateBreakout, arguments); + } + + /** + * @returns {StructuredQuery} new query with the breakout at the provided index removed. + */ + removeBreakout(index: number) { + return this._updateQuery(Q.removeBreakout, arguments); + } + /** + * @returns {StructuredQuery} new query with all breakouts removed. + */ + clearBreakouts() { + return this._updateQuery(Q.clearBreakouts, arguments); + } + + // FILTERS + + /** + * @returns An array of MBQL @type {Filter}s. 
+ */ + filters(): Filter[] { + return Q.getFilters(this.query()); + } + + /** + * @returns @type {DimensionOptions} that can be used in filters. + */ + filterFieldOptions(): DimensionOptions { + return this.fieldOptions(); + } + + /** + * @returns @type {Segment}s that can be used as filters. + */ + filterSegmentOptions(): Segment[] { + return this.table().segments.filter( + sgmt => sgmt.archived === false && !this.segments().includes(sgmt), + ); + } + + /** + * @returns @type {Segment}s that are currently applied to the question + */ + segments() { + return this.filters() + .filter(f => isSegmentFilter(f)) + .map(segmentFilter => { + // segment id is stored as the second part of the filter clause + // e.x. ["segment", 1] + const segmentId = segmentFilter[1]; + return this.metadata().segment(segmentId); + }); + } + + /** + * @returns whether a new filter can be added or not + */ + canAddFilter(): boolean { + return ( + Q.canAddFilter(this.query()) && + (this.filterFieldOptions().count > 0 || + this.filterSegmentOptions().length > 0) + ); + } + + /** + * @returns {StructuredQuery} new query with the provided MBQL @type {Filter} added. + */ + addFilter(filter: Filter) { + return this._updateQuery(Q.addFilter, arguments); + } + + /** + * @returns {StructuredQuery} new query with the MBQL @type {Filter} updated at the provided index. + */ + updateFilter(index: number, filter: Filter) { + return this._updateQuery(Q.updateFilter, arguments); + } + + /** + * @returns {StructuredQuery} new query with the filter at the provided index removed. + */ + removeFilter(index: number) { + return this._updateQuery(Q.removeFilter, arguments); + } + + /** + * @returns {StructuredQuery} new query with all filters removed. 
+ */ + clearFilters() { + return this._updateQuery(Q.clearFilters, arguments); + } + + // SORTS + + // TODO: standardize SORT vs ORDER_BY terminology + + sorts(): OrderBy[] { + return Q.getOrderBys(this.query()); + } + sortOptions(sort): DimensionOptions { + let sortOptions = { count: 0, dimensions: [], fks: [] }; + // in bare rows all fields are sortable, otherwise we only sort by our breakout columns + if (this.isBareRows()) { + const usedFields = new Set( + this.sorts() + .filter(b => !_.isEqual(b, sort)) + .map(b => Q_deprecated.getFieldTargetId(b[1])), + ); + + return this.fieldOptions(field => !usedFields.has(field.id)); + } else if (this.hasValidBreakout()) { + for (const breakout of this.breakouts()) { + sortOptions.dimensions.push( + Dimension.parseMBQL(breakout, this._metadata), ); - } - - fieldReferenceForColumn(column) { - if (column.fk_field_id != null) { - return ["fk->", column.fk_field_id, column.id]; - } else if (column.id != null) { - return ["field-id", column.id]; - } else if (column["expression-name"] != null) { - return ["expression", column["expression-name"]]; - } else if (column.source === "aggregation") { - // FIXME: aggregations > 0? 
- return ["aggregation", 0]; + sortOptions.count++; + } + for (const [index, aggregation] of this.aggregations().entries()) { + if (Q_deprecated.canSortByAggregateField(this.query(), index)) { + sortOptions.dimensions.push( + new AggregationDimension( + null, + [index], + this._metadata, + aggregation[0], + ), + ); + sortOptions.count++; } - } - - parseFieldReference(fieldRef): ?Dimension { - const dimension = Dimension.parseMBQL(fieldRef, this._metadata); - if (dimension) { - // HACK - if (dimension instanceof AggregationDimension) { - dimension._displayName = this.aggregations()[ - dimension._args[0] - ][0]; - } - return dimension; + } + } + return sortOptions; + } + canAddSort(): boolean { + const sorts = this.sorts(); + return ( + this.sortOptions().count > 0 && + (sorts.length === 0 || sorts[sorts.length - 1][0] != null) + ); + } + + addSort(orderBy: OrderBy) { + return this._updateQuery(Q.addOrderBy, arguments); + } + updateSort(index: number, orderBy: OrderBy) { + return this._updateQuery(Q.updateOrderBy, arguments); + } + removeSort(index: number) { + return this._updateQuery(Q.removeOrderBy, arguments); + } + clearSort() { + return this._updateQuery(Q.clearOrderBy, arguments); + } + replaceSort(orderBy: OrderBy) { + return this.clearSort().addSort(orderBy); + } + + // LIMIT + + limit(): ?number { + return Q.getLimit(this.query()); + } + updateLimit(limit: LimitClause) { + return this._updateQuery(Q.updateLimit, arguments); + } + clearLimit() { + return this._updateQuery(Q.clearLimit, arguments); + } + + // EXPRESSIONS + + expressions(): { [key: string]: any } { + return Q.getExpressions(this.query()); + } + + updateExpression(name, expression, oldName) { + return this._updateQuery(Q.updateExpression, arguments); + } + + removeExpression(name) { + return this._updateQuery(Q.removeExpression, arguments); + } + + // FIELDS + /** + * Returns dimension options that can appear in the `fields` clause + */ + fieldsOptions(dimensionFilter = () => true): 
DimensionOptions { + if (this.isBareRows() && this.breakouts().length === 0) { + return this.dimensionOptions(dimensionFilter); + } + // TODO: allow adding fields connected by broken out PKs? + return { count: 0, dimensions: [], fks: [] }; + } + + // DIMENSION OPTIONS + + // TODO Atte Keinänen 6/18/17: Refactor to dimensionOptions which takes a dimensionFilter + // See aggregationFieldOptions for an explanation why that covers more use cases + dimensionOptions(dimensionFilter = () => true): DimensionOptions { + const dimensionOptions = { + count: 0, + fks: [], + dimensions: [], + }; + + const table = this.tableMetadata(); + if (table) { + const dimensionIsFKReference = dimension => + dimension.field && dimension.field() && dimension.field().isFK(); + + const filteredNonFKDimensions = this.dimensions().filter(dimensionFilter); + // .filter(d => !dimensionIsFKReference(d)); + + for (const dimension of filteredNonFKDimensions) { + dimensionOptions.count++; + dimensionOptions.dimensions.push(dimension); + } + + const fkDimensions = this.dimensions().filter(dimensionIsFKReference); + for (const dimension of fkDimensions) { + const fkDimensions = dimension + .dimensions([FKDimension]) + .filter(dimensionFilter); + + if (fkDimensions.length > 0) { + dimensionOptions.count += fkDimensions.length; + dimensionOptions.fks.push({ + field: dimension.field(), + dimension: dimension, + dimensions: fkDimensions, + }); } - } - - setDatasetQuery(datasetQuery: DatasetQuery): StructuredQuery { - return new StructuredQuery(this._originalQuestion, datasetQuery); - } - - // INTERNAL - - _updateQuery( - updateFunction: ( - query: StructuredQueryObject, - ...args: any[] - ) => StructuredQueryObject, - args: any[] - ): StructuredQuery { - return this.setDatasetQuery( - updateIn(this._datasetQuery, ["query"], query => - updateFunction(query, ...args)) - ); - } + } + } + + return dimensionOptions; + } + + // FIELD OPTIONS + + fieldOptions(fieldFilter = () => true) { + const dimensionFilter = 
dimension => { + const field = dimension.field && dimension.field(); + return !field || (field.isDimension() && fieldFilter(field)); + }; + return this.dimensionOptions(dimensionFilter); + } + + // DIMENSIONS + + dimensions(): Dimension[] { + return [...this.expressionDimensions(), ...this.tableDimensions()]; + } + + tableDimensions(): Dimension[] { + const table: Table = this.table(); + return table ? table.dimensions() : []; + } + + expressionDimensions(): Dimension[] { + return Object.entries(this.expressions()).map( + ([expressionName, expression]) => { + return new ExpressionDimension(null, [expressionName]); + }, + ); + } + + aggregationDimensions() { + return this.breakouts().map(breakout => + Dimension.parseMBQL(breakout, this._metadata), + ); + } + + metricDimensions() { + return this.aggregations().map( + (aggregation, index) => + new AggregationDimension(null, [index], this._metadata, aggregation[0]), + ); + } + + fieldReferenceForColumn(column) { + if (column.fk_field_id != null) { + return ["fk->", column.fk_field_id, column.id]; + } else if (column.id != null) { + return ["field-id", column.id]; + } else if (column["expression-name"] != null) { + return ["expression", column["expression-name"]]; + } else if (column.source === "aggregation") { + // FIXME: aggregations > 0? 
+ return ["aggregation", 0]; + } + } + + parseFieldReference(fieldRef): ?Dimension { + const dimension = Dimension.parseMBQL(fieldRef, this._metadata); + if (dimension) { + // HACK + if (dimension instanceof AggregationDimension) { + dimension._displayName = this.aggregations()[dimension._args[0]][0]; + } + return dimension; + } + } + + setDatasetQuery(datasetQuery: DatasetQuery): StructuredQuery { + return new StructuredQuery(this._originalQuestion, datasetQuery); + } + + // INTERNAL + + _updateQuery( + updateFunction: ( + query: StructuredQueryObject, + ...args: any[] + ) => StructuredQueryObject, + args: any[], + ): StructuredQuery { + return this.setDatasetQuery( + updateIn(this._datasetQuery, ["query"], query => + updateFunction(query, ...args), + ), + ); + } } diff --git a/frontend/src/metabase-lib/lib/utils.js b/frontend/src/metabase-lib/lib/utils.js index d8fceab79d85..0faf4f700324 100644 --- a/frontend/src/metabase-lib/lib/utils.js +++ b/frontend/src/metabase-lib/lib/utils.js @@ -1,35 +1,32 @@ export function nyi(target, key, descriptor) { - let method = descriptor.value; - descriptor.value = function() { - console.warn( - "Method not yet implemented: " + - target.constructor.name + - "::" + - key - ); - return method.apply(this, arguments); - }; - return descriptor; + let method = descriptor.value; + descriptor.value = function() { + console.warn( + "Method not yet implemented: " + target.constructor.name + "::" + key, + ); + return method.apply(this, arguments); + }; + return descriptor; } let memoized = new WeakMap(); function getWithFallback(map, key, fallback) { - if (!map.has(key)) { - map.set(key, fallback()); - } - return map.get(key); + if (!map.has(key)) { + map.set(key, fallback()); + } + return map.get(key); } export function memoize(target, name, descriptor) { - let method = target[name]; - descriptor.value = function(...args) { - const path = [this, method, ...args]; - const last = path.pop(); - const map = path.reduce( - (map, key) => 
getWithFallback(map, key, () => new Map()), - memoized - ); - return getWithFallback(map, last, () => method.apply(this, args)); - }; + let method = target[name]; + descriptor.value = function(...args) { + const path = [this, method, ...args]; + const last = path.pop(); + const map = path.reduce( + (map, key) => getWithFallback(map, key, () => new Map()), + memoized, + ); + return getWithFallback(map, last, () => method.apply(this, args)); + }; } diff --git a/frontend/src/metabase-shared/color_selector.js b/frontend/src/metabase-shared/color_selector.js new file mode 100644 index 000000000000..f995d006af6f --- /dev/null +++ b/frontend/src/metabase-shared/color_selector.js @@ -0,0 +1,45 @@ +// This runs in the Nashorn JavaScript engine and there are some limitations +// +// 1. This is not currently automatically built with the rest of the application, please run `yarn build-shared` after modifying +// 2. Avoid including unecessary libraries as the JS engine takes a long time to parse and execute them +// 3. Related to #2, we aren't currently including `babel-polyfill` so don't use features that require it, e.x. 
iterables / for-of + +import { makeCellBackgroundGetter } from "metabase/visualizations/lib/table_format"; + +global.console = { + log: print, + warn: print, + error: print, +}; + +global.makeCellBackgroundGetter = function( + rowsJavaList, + colsJSON, + settingsJSON, +) { + const rows = rowsJavaList; + const cols = JSON.parse(colsJSON); + const settings = JSON.parse(settingsJSON); + try { + const getter = makeCellBackgroundGetter(rows, cols, settings); + return (value, rowIndex, colName) => { + const color = getter(value, rowIndex, colName); + if (color) { + return roundColor(color); + } + return null; + }; + } catch (e) { + print("ERROR", e); + return () => null; + } +}; + +// HACK: d3 may return rgb values with decimals but the rendering engine used for pulses doesn't support that +function roundColor(color) { + return color.replace( + /rgba\((\d+(?:\.\d+)),\s*(\d+(?:\.\d+)),\s*(\d+(?:\.\d+)),\s*(\d+\.\d+)\)/, + (_, r, g, b, a) => + `rgba(${Math.round(r)},${Math.round(g)},${Math.round(b)},${a})`, + ); +} diff --git a/frontend/src/metabase-shared/dependencies/d3.js b/frontend/src/metabase-shared/dependencies/d3.js new file mode 100644 index 000000000000..39d2b264bf9d --- /dev/null +++ b/frontend/src/metabase-shared/dependencies/d3.js @@ -0,0 +1,9 @@ +// minimal set of d3 functions needed for color_selector.js + +import { scaleLinear } from "d3-scale"; + +export default { + scale: { + linear: scaleLinear, + }, +}; diff --git a/frontend/src/metabase/App.jsx b/frontend/src/metabase/App.jsx index 389ac21c4902..32d4d78077e6 100644 --- a/frontend/src/metabase/App.jsx +++ b/frontend/src/metabase/App.jsx @@ -1,43 +1,72 @@ /* @flow weak */ -import React, {Component} from "react"; -import {connect} from "react-redux"; - +import React, { Component } from "react"; +import { connect } from "react-redux"; +import ScrollToTop from "metabase/hoc/ScrollToTop"; import Navbar from "metabase/nav/containers/Navbar.jsx"; import UndoListing from "metabase/containers/UndoListing"; 
-import NotFound from "metabase/components/NotFound.jsx"; -import Unauthorized from "metabase/components/Unauthorized.jsx"; -import Archived from "metabase/components/Archived.jsx"; +import { + Archived, + NotFound, + GenericError, + Unauthorized, +} from "metabase/containers/ErrorPages"; const mapStateToProps = (state, props) => ({ - errorPage: state.app.errorPage + errorPage: state.app.errorPage, + currentUser: state.currentUser, }); -const getErrorComponent = ({status, data, context}) => { - if (status === 403) { - return - } else if (data && data.error_code === "archived" && context === "dashboard") { - return - } else if (data && data.error_code === "archived" && context === "query-builder") { - return - } else { - return - } -} +const getErrorComponent = ({ status, data, context }) => { + if (status === 403) { + return ; + } else if (status === 404) { + return ; + } else if ( + data && + data.error_code === "archived" && + context === "dashboard" + ) { + return ; + } else if ( + data && + data.error_code === "archived" && + context === "query-builder" + ) { + return ; + } else { + return ; + } +}; @connect(mapStateToProps) export default class App extends Component { - render() { - const { children, location, errorPage } = this.props; - - return ( -
- - { errorPage ? getErrorComponent(errorPage) : children } - -
- ) + state = { + hasError: false, + }; + + componentDidCatch(error, info) { + console.error("Error caught in ", error, info); + this.setState({ hasError: true }); + } + + render() { + const { children, currentUser, location, errorPage } = this.props; + + if (this.state.hasError) { + return
😢
; } + + return ( + +
+ {currentUser && } + {errorPage ? getErrorComponent(errorPage) : children} + +
+
+ ); + } } diff --git a/frontend/src/metabase/admin/admin.js b/frontend/src/metabase/admin/admin.js index 3fafa57f7edc..58e29ac79938 100644 --- a/frontend/src/metabase/admin/admin.js +++ b/frontend/src/metabase/admin/admin.js @@ -11,9 +11,9 @@ import settings from "metabase/admin/settings/settings"; import { combineReducers } from "metabase/lib/redux"; export default combineReducers({ - databases, - datamodel, - people, - permissions, - settings -}) + databases, + datamodel, + people, + permissions, + settings, +}); diff --git a/frontend/src/metabase/admin/databases/components/CreatedDatabaseModal.jsx b/frontend/src/metabase/admin/databases/components/CreatedDatabaseModal.jsx index 7db4fd4a3abf..7ac6ee83bcd0 100644 --- a/frontend/src/metabase/admin/databases/components/CreatedDatabaseModal.jsx +++ b/frontend/src/metabase/admin/databases/components/CreatedDatabaseModal.jsx @@ -1,39 +1,38 @@ +/* @flow */ import React, { Component } from "react"; -import PropTypes from "prop-types"; import { Link } from "react-router"; +import { t } from "c-3po"; import ModalContent from "metabase/components/ModalContent.jsx"; -import * as Urls from "metabase/lib/urls"; - +type Props = { + databaseId: number, + onClose: () => void, + onDone: () => void, +}; export default class CreatedDatabaseModal extends Component { - static propTypes = { - databaseId: PropTypes.number.isRequired, - onClose: PropTypes.func.isRequired, - onDone: PropTypes.func.isRequired - }; + props: Props; - render() { - const { onClose, onDone, databaseId } = this.props; - return ( - -
-

- We're analyzing its schema now to make some educated guesses about its - metadata. View this - database in the Data Model section to see what we've found and to - make edits, or ask a question about - this database. -

-
+ render() { + const { onClose, onDone, databaseId } = this.props; + return ( + +
+

+ {t`We took a look at your data, and we have some automated explorations that we can show you!`} +

+
-
- -
-
- ); - } +
+ {t`I'm good thanks`} + + {t`Explore this data`} + +
+
+ ); + } } diff --git a/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx b/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx index e49cb834bac9..c05c94b2f865 100644 --- a/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx +++ b/frontend/src/metabase/admin/databases/components/DatabaseEditForms.jsx @@ -2,55 +2,81 @@ import React, { Component } from "react"; import PropTypes from "prop-types"; import cx from "classnames"; import DatabaseDetailsForm from "metabase/components/DatabaseDetailsForm.jsx"; +import { t } from "c-3po"; export default class DatabaseEditForms extends Component { - static propTypes = { - database: PropTypes.object, - details: PropTypes.object, - engines: PropTypes.object.isRequired, - hiddenFields: PropTypes.object, - selectEngine: PropTypes.func.isRequired, - save: PropTypes.func.isRequired, - formState: PropTypes.object - }; + static propTypes = { + database: PropTypes.object, + details: PropTypes.object, + engines: PropTypes.object.isRequired, + hiddenFields: PropTypes.object, + selectEngine: PropTypes.func.isRequired, + save: PropTypes.func.isRequired, + formState: PropTypes.object, + }; - render() { - let { database, details, hiddenFields, engines, formState: { formError, formSuccess, isSubmitting } } = this.props; + render() { + let { + database, + details, + hiddenFields, + engines, + formState: { formError, formSuccess, isSubmitting }, + } = this.props; - let errors = {}; - return ( -
-
- - -
- { database.engine ? - this.props.save({ - ...database, - id: this.props.database.id - }, database.details)} - isNewDatabase={!database.id} - submitButtonText={'Save'} - submitting={isSubmitting}> - - : null - } -
- ); - } + let errors = {}; + return ( +
+
+ + +
+ {database.engine ? ( + + this.props.save( + { + ...database, + id: this.props.database.id, + }, + database.details, + ) + } + isNewDatabase={!database.id} + submitButtonText={t`Save`} + submitting={isSubmitting} + /> + ) : null} +
+ ); + } } diff --git a/frontend/src/metabase/admin/databases/components/DatabaseSchedulingForm.jsx b/frontend/src/metabase/admin/databases/components/DatabaseSchedulingForm.jsx index 405fdb9a0460..48e973828284 100644 --- a/frontend/src/metabase/admin/databases/components/DatabaseSchedulingForm.jsx +++ b/frontend/src/metabase/admin/databases/components/DatabaseSchedulingForm.jsx @@ -2,187 +2,230 @@ import React, { Component } from "react"; import cx from "classnames"; import _ from "underscore"; import { assocIn } from "icepick"; +import { t } from "c-3po"; import FormMessage from "metabase/components/form/FormMessage"; - import SchedulePicker from "metabase/components/SchedulePicker"; -import MetabaseAnalytics from "metabase/lib/analytics"; import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper"; -export const SyncOption = ({ selected, name, children, select }) => -
select(name.toLowerCase()) }> +import MetabaseAnalytics from "metabase/lib/analytics"; +import colors from "metabase/lib/colors"; + +export const SyncOption = ({ selected, name, children, select }) => ( +
select(name.toLowerCase())} + > +
+ {selected && (
- { selected && -
- } -
-
-
-

{name}

-
- { selected && children &&
{children}
} -
+ className="circle" + style={{ + width: 8, + height: 8, + backgroundColor: selected ? colors["brand"] : colors["text-light"], + }} + /> + )}
- +
+
+

{name}

+
+ {selected && children &&
{children}
} +
+
+); export default class DatabaseSchedulingForm extends Component { - constructor(props) { - super(); - - this.state = { - unsavedDatabase: props.database - } - } - - updateSchemaSyncSchedule = (newSchedule, changedProp) => { - MetabaseAnalytics.trackEvent( - "DatabaseSyncEdit", - "SchemaSyncSchedule:" + changedProp.name, - changedProp.value - ); - - this.setState(assocIn(this.state, ["unsavedDatabase", "schedules", "metadata_sync"], newSchedule)); - } - - updateFieldScanSchedule = (newSchedule, changedProp) => { - MetabaseAnalytics.trackEvent( - "DatabaseSyncEdit", - "FieldScanSchedule:" + changedProp.name, - changedProp.value - ); - - this.setState(assocIn(this.state, ["unsavedDatabase", "schedules", "cache_field_values"], newSchedule)); - } - - setIsFullSyncIsOnDemand = (isFullSync, isOnDemand) => { - // TODO: Add event tracking - let state = assocIn(this.state, ["unsavedDatabase", "is_full_sync"], isFullSync); - state = assocIn(state, ["unsavedDatabase", "is_on_demand"], isOnDemand); - this.setState(state); - } - - onSubmitForm = (event) => { - event.preventDefault(); - - const { unsavedDatabase } = this.state - this.props.save(unsavedDatabase, unsavedDatabase.details); - } - - render() { - const { submitButtonText, formState: { formError, formSuccess, isSubmitting } } = this.props - const { unsavedDatabase } = this.state - - return ( - - { () => -
- -
-
-

- To do some of its magic, Metabase needs to scan your database. We will also rescan it periodically to keep the metadata up-to-date. You can control when the periodic rescans happen below. -

-
- -
-

Database syncing

-

This is a lightweight process that checks for + constructor(props) { + super(); + + this.state = { + unsavedDatabase: props.database, + }; + } + + updateSchemaSyncSchedule = (newSchedule, changedProp) => { + MetabaseAnalytics.trackEvent( + "DatabaseSyncEdit", + "SchemaSyncSchedule:" + changedProp.name, + changedProp.value, + ); + + this.setState( + assocIn( + this.state, + ["unsavedDatabase", "schedules", "metadata_sync"], + newSchedule, + ), + ); + }; + + updateFieldScanSchedule = (newSchedule, changedProp) => { + MetabaseAnalytics.trackEvent( + "DatabaseSyncEdit", + "FieldScanSchedule:" + changedProp.name, + changedProp.value, + ); + + this.setState( + assocIn( + this.state, + ["unsavedDatabase", "schedules", "cache_field_values"], + newSchedule, + ), + ); + }; + + setIsFullSyncIsOnDemand = (isFullSync, isOnDemand) => { + // TODO: Add event tracking + let state = assocIn( + this.state, + ["unsavedDatabase", "is_full_sync"], + isFullSync, + ); + state = assocIn(state, ["unsavedDatabase", "is_on_demand"], isOnDemand); + this.setState(state); + }; + + onSubmitForm = event => { + event.preventDefault(); + + const { unsavedDatabase } = this.state; + this.props.save(unsavedDatabase, unsavedDatabase.details); + }; + + render() { + const { + submitButtonText, + formState: { formError, formSuccess, isSubmitting }, + } = this.props; + const { unsavedDatabase } = this.state; + + return ( + + {() => ( + +

+
+

+ {t`To do some of its magic, Metabase needs to scan your database. We will also rescan it periodically to keep the metadata up-to-date. You can control when the periodic rescans happen below.`} +

+
+ +
+

{t`Database syncing`}

+

{t`This is a lightweight process that checks for updates to this database’s schema. In most cases, you should be fine leaving this - set to sync hourly.

- -
- -
-

Scanning for Filter Values

-

Metabase can scan the values present in each + set to sync hourly.`}

+ +
+ +
+

{t`Scanning for Filter Values`}

+

{t`Metabase can scan the values present in each field in this database to enable checkbox filters in dashboards and questions. This can be a somewhat resource-intensive process, particularly if you have a very large - database.

- -

When should Metabase automatically scan and cache field values?

-
    -
  1. - this.setIsFullSyncIsOnDemand(true, false)} - > - -
    - -
    -
    -
  2. -
  3. - this.setIsFullSyncIsOnDemand(false, true)} - > -

    - When a user adds a new filter to a dashboard or a SQL question, Metabase will - scan the field(s) mapped to that filter in order to show the list of selectable values. -

    -
    -
  4. -
  5. - this.setIsFullSyncIsOnDemand(false, false)} - /> -
  6. -
-
- -
-
- - -
- - } - - ) - } + database.`}

+ +

{t`When should Metabase automatically scan and cache field values?`}

+
    +
  1. + this.setIsFullSyncIsOnDemand(true, false)} + > +
    + +
    +
    +
  2. +
  3. + this.setIsFullSyncIsOnDemand(false, true)} + > +

    + {t`When a user adds a new filter to a dashboard or a SQL question, Metabase will + scan the field(s) mapped to that filter in order to show the list of selectable values.`} +

    +
    +
  4. +
  5. + this.setIsFullSyncIsOnDemand(false, false)} + /> +
  6. +
+
+
+
+ + +
+ + )} +
+ ); + } } diff --git a/frontend/src/metabase/admin/databases/components/DeleteDatabaseModal.jsx b/frontend/src/metabase/admin/databases/components/DeleteDatabaseModal.jsx index 60c6e7a4c43d..ef008e5455fa 100644 --- a/frontend/src/metabase/admin/databases/components/DeleteDatabaseModal.jsx +++ b/frontend/src/metabase/admin/databases/components/DeleteDatabaseModal.jsx @@ -1,80 +1,94 @@ import React, { Component } from "react"; import PropTypes from "prop-types"; +import { t, jt } from "c-3po"; +import Button from "metabase/components/Button"; import ModalContent from "metabase/components/ModalContent.jsx"; -import cx from "classnames"; - export default class DeleteDatabaseModal extends Component { - constructor(props, context) { - super(props, context); - this.state = { - confirmValue: "", - error: null - }; - } - - static propTypes = { - database: PropTypes.object.isRequired, - onClose: PropTypes.func, - onDelete: PropTypes.func + constructor(props, context) { + super(props, context); + this.state = { + confirmValue: "", + error: null, }; + } - async deleteDatabase() { - try { - this.props.onDelete(this.props.database); - // immediately call on close because database deletion should be non blocking - this.props.onClose() - } catch (error) { - this.setState({ error }); - } + static propTypes = { + database: PropTypes.object.isRequired, + onClose: PropTypes.func, + onDelete: PropTypes.func, + }; + + async deleteDatabase() { + try { + this.props.onDelete(this.props.database); + // immediately call on close because database deletion should be non blocking + this.props.onClose(); + } catch (error) { + this.setState({ error }); } + } - render() { - const { database } = this.props; + render() { + const { database } = this.props; + const { confirmValue } = this.state; - var formError; - if (this.state.error) { - var errorMessage = "Server error encountered"; - if (this.state.error.data && - this.state.error.data.message) { - errorMessage = this.state.error.data.message; - 
} else { - errorMessage = this.state.error.message; - } + let formError; + if (this.state.error) { + let errorMessage = t`Server error encountered`; + if (this.state.error.data && this.state.error.data.message) { + errorMessage = this.state.error.data.message; + } else { + errorMessage = this.state.error.message; + } - // TODO: timeout display? - formError = ( - {errorMessage} - ); - } + // TODO: timeout display? + formError = {errorMessage}; + } - let confirmed = this.state.confirmValue.toUpperCase() === "DELETE"; + // allow English or localized + let confirmed = + confirmValue.toUpperCase() === "DELETE" || + confirmValue.toUpperCase() === t`DELETE`; - return ( - -
- { database.is_sample && -

Just a heads up: without the Sample Dataset, the Query Builder tutorial won't work. You can always restore the Sample Dataset, but any questions you've saved using this data will be lost.

- } -

- All saved questions, metrics, and segments that rely on this database will be lost. This cannot be undone. -

-

- If you're sure, please type DELETE in this box: -

- this.setState({ confirmValue: e.target.value })} autoFocus /> -
+ const headsUp = {t`Just a heads up:`}; + return ( + +
+ {database.is_sample && ( +

{jt`${headsUp} without the Sample Dataset, the Query Builder tutorial won't work. You can always restore the Sample Dataset, but any questions you've saved using this data will be lost.`}

+ )} +

+ {t`All saved questions, metrics, and segments that rely on this database will be lost.`}{" "} + {t`This cannot be undone.`} +

+

+ {t`If you're sure, please type`} {t`DELETE`}{" "} + {t`in this box:`} +

+ this.setState({ confirmValue: e.target.value })} + autoFocus + /> +
-
- - - {formError} -
-
- ); - } +
+ + + {formError} +
+
+ ); + } } diff --git a/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx index ee4d48b20e68..61557e48bf82 100644 --- a/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx +++ b/frontend/src/metabase/admin/databases/containers/DatabaseEditApp.jsx @@ -1,252 +1,259 @@ +/* @flow weak */ + import React, { Component } from "react"; import PropTypes from "prop-types"; import { connect } from "react-redux"; import title from "metabase/hoc/Title"; -import cx from "classnames"; +import { t } from "c-3po"; import MetabaseSettings from "metabase/lib/settings"; import DeleteDatabaseModal from "../components/DeleteDatabaseModal.jsx"; import DatabaseEditForms from "../components/DatabaseEditForms.jsx"; import DatabaseSchedulingForm from "../components/DatabaseSchedulingForm"; - import ActionButton from "metabase/components/ActionButton.jsx"; -import Breadcrumbs from "metabase/components/Breadcrumbs.jsx" +import Breadcrumbs from "metabase/components/Breadcrumbs.jsx"; +import Radio from "metabase/components/Radio.jsx"; import ModalWithTrigger from "metabase/components/ModalWithTrigger.jsx"; import { - getEditingDatabase, - getFormState, - getDatabaseCreationStep + getEditingDatabase, + getFormState, + getDatabaseCreationStep, } from "../selectors"; import { - reset, - initializeDatabase, - proceedWithDbCreation, - saveDatabase, - syncDatabaseSchema, - rescanDatabaseFields, - discardSavedFieldValues, - deleteDatabase, - selectEngine + reset, + initializeDatabase, + proceedWithDbCreation, + saveDatabase, + syncDatabaseSchema, + rescanDatabaseFields, + discardSavedFieldValues, + deleteDatabase, + selectEngine, } from "../database"; import ConfirmContent from "metabase/components/ConfirmContent"; import LoadingAndErrorWrapper from "metabase/components/LoadingAndErrorWrapper"; const mapStateToProps = (state, props) => ({ - database: getEditingDatabase(state), - 
databaseCreationStep: getDatabaseCreationStep(state), - formState: getFormState(state) + database: getEditingDatabase(state), + databaseCreationStep: getDatabaseCreationStep(state), + formState: getFormState(state), }); -export const Tab = ({ name, setTab, currentTab }) => { - const isCurrentTab = currentTab === name.toLowerCase() - - return ( -
setTab(name)}> -

{name}

-
- ) -} - -export const Tabs = ({ tabs, currentTab, setTab }) => -
-
    - {tabs.map((tab, index) => -
  1. - -
  2. - )} -
-
- const mapDispatchToProps = { - reset, - initializeDatabase, - proceedWithDbCreation, - saveDatabase, - syncDatabaseSchema, - rescanDatabaseFields, - discardSavedFieldValues, - deleteDatabase, - selectEngine + reset, + initializeDatabase, + proceedWithDbCreation, + saveDatabase, + syncDatabaseSchema, + rescanDatabaseFields, + discardSavedFieldValues, + deleteDatabase, + selectEngine, }; +type TabName = "connection" | "scheduling"; +type TabOption = { name: string, value: TabName }; + +const TABS: TabOption[] = [ + { name: t`Connection`, value: "connection" }, + { name: t`Scheduling`, value: "scheduling" }, +]; + @connect(mapStateToProps, mapDispatchToProps) @title(({ database }) => database && database.name) export default class DatabaseEditApp extends Component { - constructor(props, context) { - super(props, context); + state: { + currentTab: TabName, + }; - this.state = { - currentTab: 'connection' - }; - } + constructor(props, context) { + super(props, context); - static propTypes = { - database: PropTypes.object, - databaseCreationStep: PropTypes.string, - formState: PropTypes.object.isRequired, - params: PropTypes.object.isRequired, - reset: PropTypes.func.isRequired, - initializeDatabase: PropTypes.func.isRequired, - syncDatabaseSchema: PropTypes.func.isRequired, - rescanDatabaseFields: PropTypes.func.isRequired, - discardSavedFieldValues: PropTypes.func.isRequired, - proceedWithDbCreation: PropTypes.func.isRequired, - deleteDatabase: PropTypes.func.isRequired, - saveDatabase: PropTypes.func.isRequired, - selectEngine: PropTypes.func.isRequired, - location: PropTypes.object + this.state = { + currentTab: TABS[0].value, }; + } - async componentWillMount() { - await this.props.reset(); - await this.props.initializeDatabase(this.props.params.databaseId); - } + static propTypes = { + database: PropTypes.object, + databaseCreationStep: PropTypes.string, + formState: PropTypes.object.isRequired, + params: PropTypes.object.isRequired, + reset: 
PropTypes.func.isRequired, + initializeDatabase: PropTypes.func.isRequired, + syncDatabaseSchema: PropTypes.func.isRequired, + rescanDatabaseFields: PropTypes.func.isRequired, + discardSavedFieldValues: PropTypes.func.isRequired, + proceedWithDbCreation: PropTypes.func.isRequired, + deleteDatabase: PropTypes.func.isRequired, + saveDatabase: PropTypes.func.isRequired, + selectEngine: PropTypes.func.isRequired, + location: PropTypes.object, + }; - componentWillReceiveProps(nextProps) { - const addingNewDatabase = !nextProps.database || !nextProps.database.id + async componentWillMount() { + await this.props.reset(); + await this.props.initializeDatabase(this.props.params.databaseId); + } - if (addingNewDatabase) { - // Update the current creation step (= active tab) if adding a new database - this.setState({ currentTab: nextProps.databaseCreationStep }); - } + componentWillReceiveProps(nextProps) { + const addingNewDatabase = !nextProps.database || !nextProps.database.id; + + if (addingNewDatabase) { + // Update the current creation step (= active tab) if adding a new database + this.setState({ currentTab: nextProps.databaseCreationStep }); } + } + + render() { + let { database, formState } = this.props; + const { currentTab } = this.state; + + const editingExistingDatabase = database && database.id != null; + const addingNewDatabase = !editingExistingDatabase; + + const letUserControlScheduling = + database && + database.details && + database.details["let-user-control-scheduling"]; + const showTabs = editingExistingDatabase && letUserControlScheduling; + + return ( +
+ +
+
+
+ {showTabs && ( +
+ this.setState({ currentTab })} + underlined + /> +
+ )} + + {() => ( +
+ {currentTab === "connection" && ( + + )} + {currentTab === "scheduling" && ( + + )} +
+ )} +
+
+
+ + {/* Sidebar Actions */} + {editingExistingDatabase && ( +
+
+
+ +
    +
  1. + + this.props.syncDatabaseSchema(database.id) + } + className="Button Button--syncDbSchema" + normalText={t`Sync database schema now`} + activeText={t`Starting…`} + failedText={t`Failed to sync`} + successText={t`Sync triggered!`} + /> +
  2. +
  3. + + this.props.rescanDatabaseFields(database.id) + } + className="Button Button--rescanFieldValues" + normalText={t`Re-scan field values now`} + activeText={t`Starting…`} + failedText={t`Failed to start scan`} + successText={t`Scan triggered!`} + /> +
  4. +
+
- render() { - let { database, formState } = this.props; - const { currentTab } = this.state; - - const editingExistingDatabase = database && database.id != null - const addingNewDatabase = !editingExistingDatabase - - const letUserControlScheduling = database && database.details && database.details["let-user-control-scheduling"] - const showTabs = editingExistingDatabase && letUserControlScheduling - - return ( -
- -
-
-
- { showTabs && - this.setState({currentTab: tab.toLowerCase()})} - /> - } - - { () => -
- { currentTab === 'connection' && - - } - { currentTab === 'scheduling' && - - } -
- } -
-
-
- - { /* Sidebar Actions */ } - { editingExistingDatabase && -
-
-
- -
    -
  1. - this.props.syncDatabaseSchema(database.id)} - className="Button Button--syncDbSchema" - normalText="Sync database schema now" - activeText="Starting…" - failedText="Failed to sync" - successText="Sync triggered!" - /> -
  2. -
  3. - this.props.rescanDatabaseFields(database.id)} - className="Button Button--rescanFieldValues" - normalText="Re-scan field values now" - activeText="Starting…" - failedText="Failed to start scan" - successText="Scan triggered!" - /> -
  4. -
-
- -
- -
    -
  1. - - this.refs.discardSavedFieldValuesModal.toggle()} - onAction={() => this.props.discardSavedFieldValues(database.id)} - /> - -
  2. - -
  3. - - this.refs.deleteDatabaseModal.toggle()} - onDelete={() => this.props.deleteDatabase(database.id, true)} - /> - -
  4. -
-
-
-
- } -
+
+ +
    +
  1. + + + this.refs.discardSavedFieldValuesModal.toggle() + } + onAction={() => + this.props.discardSavedFieldValues(database.id) + } + /> + +
  2. + +
  3. + + this.refs.deleteDatabaseModal.toggle()} + onDelete={() => + this.props.deleteDatabase(database.id, true) + } + /> + +
  4. +
+
+
- ); - } + )} +
+
+ ); + } } diff --git a/frontend/src/metabase/admin/databases/containers/DatabaseListApp.jsx b/frontend/src/metabase/admin/databases/containers/DatabaseListApp.jsx index 2fd3a3d75fff..9f52b35ddeb8 100644 --- a/frontend/src/metabase/admin/databases/containers/DatabaseListApp.jsx +++ b/frontend/src/metabase/admin/databases/containers/DatabaseListApp.jsx @@ -1,150 +1,178 @@ +/* @flow weak */ + import React, { Component } from "react"; import PropTypes from "prop-types"; import { connect } from "react-redux"; import { Link } from "react-router"; +import { t } from "c-3po"; import cx from "classnames"; import MetabaseSettings from "metabase/lib/settings"; + import ModalWithTrigger from "metabase/components/ModalWithTrigger.jsx"; import LoadingSpinner from "metabase/components/LoadingSpinner.jsx"; +import FormMessage from "metabase/components/form/FormMessage"; import CreatedDatabaseModal from "../components/CreatedDatabaseModal.jsx"; import DeleteDatabaseModal from "../components/DeleteDatabaseModal.jsx"; -import { - getDatabasesSorted, - hasSampleDataset, - getDeletes, - getDeletionError -} from "../selectors"; -import * as databaseActions from "../database"; -import FormMessage from "metabase/components/form/FormMessage"; +import Databases from "metabase/entities/databases"; +import { entityListLoader } from "metabase/entities/containers/EntityListLoader"; -const mapStateToProps = (state, props) => { - return { - created: props.location.query.created, - databases: getDatabasesSorted(state), - hasSampleDataset: hasSampleDataset(state), - engines: MetabaseSettings.get('engines'), - deletes: getDeletes(state), - deletionError: getDeletionError(state) - } -} +import { getDeletes, getDeletionError } from "../selectors"; +import { deleteDatabase, addSampleDataset } from "../database"; + +const mapStateToProps = (state, props) => ({ + hasSampleDataset: Databases.selectors.getHasSampleDataset(state), + + created: props.location.query.created, + engines: 
MetabaseSettings.get("engines"), + + deletes: getDeletes(state), + deletionError: getDeletionError(state), +}); const mapDispatchToProps = { - ...databaseActions -} + fetchDatabases: Databases.actions.fetchList, + // NOTE: still uses deleteDatabase from metabaseadmin/databases/databases.js + // rather than metabase/entities/databases since it updates deletes/deletionError + deleteDatabase: deleteDatabase, + addSampleDataset: addSampleDataset, +}; +@entityListLoader({ entityType: "databases" }) @connect(mapStateToProps, mapDispatchToProps) export default class DatabaseList extends Component { - static propTypes = { - databases: PropTypes.array, - hasSampleDataset: PropTypes.bool, - engines: PropTypes.object, - deletes: PropTypes.array, - deletionError: PropTypes.object - }; + static propTypes = { + databases: PropTypes.array, + hasSampleDataset: PropTypes.bool, + engines: PropTypes.object, + deletes: PropTypes.array, + deletionError: PropTypes.object, + }; - componentWillMount() { - this.props.fetchDatabases(); + componentWillReceiveProps(newProps) { + if (!this.props.created && newProps.created) { + this.refs.createdDatabaseModal.open(); } + } - componentWillReceiveProps(newProps) { - if (!this.props.created && newProps.created) { - this.refs.createdDatabaseModal.open() - } - } - - render() { - let { databases, hasSampleDataset, created, engines, deletionError } = this.props; + render() { + let { + databases, + hasSampleDataset, + created, + engines, + deletionError, + } = this.props; - return ( -
-
- Add database -

Databases

-
- { deletionError && -
- -
- } -
- - - - - - - - - - { databases ? - [ databases.map(database => { - const isDeleting = this.props.deletes.indexOf(database.id) !== -1 - return ( - - - - { isDeleting - ? () - : ( - - ) - } - - )}), - ] - : - - - - } - -
NameEngine
- - {database.name} - - - {engines && engines[database.engine] ? engines[database.engine]['driver-name'] : database.engine} - Deleting... - - this.refs["deleteDatabaseModal_"+database.id].close()} - onDelete={() => this.props.deleteDatabase(database.id)} - /> - -
- -

Loading ...

-
- { !hasSampleDataset ? - - : null } -
- - this.refs.createdDatabaseModal.toggle() } - onClose={() => this.refs.createdDatabaseModal.toggle() } - /> - + return ( +
+
+ {t`Add database`} +

{t`Databases`}

+
+ {deletionError && ( +
+ +
+ )} +
+ + + + + + + + + {databases ? ( + [ + databases.map(database => { + const isDeleting = + this.props.deletes.indexOf(database.id) !== -1; + return ( + + + + {isDeleting ? ( + + ) : ( + + )} + + ); + }), + ] + ) : ( + + + + )} + +
{t`Name`}{t`Engine`} +
+ + {database.name} + + + {engines && engines[database.engine] + ? engines[database.engine]["driver-name"] + : database.engine} + {t`Deleting...`} + + + this.refs[ + "deleteDatabaseModal_" + database.id + ].close() + } + onDelete={() => + this.props.deleteDatabase(database.id) + } + /> + +
+ +

{t`Loading ...`}

+
+ {!hasSampleDataset ? ( + - ); - } + ) : null} +
+ + this.refs.createdDatabaseModal.toggle()} + onClose={() => this.refs.createdDatabaseModal.toggle()} + /> + +
+ ); + } } diff --git a/frontend/src/metabase/admin/databases/database.js b/frontend/src/metabase/admin/databases/database.js index ff730c2703fe..ccaab74beb62 100644 --- a/frontend/src/metabase/admin/databases/database.js +++ b/frontend/src/metabase/admin/databases/database.js @@ -1,29 +1,34 @@ -import _ from "underscore"; +/* @flow weak */ import { createAction } from "redux-actions"; -import { handleActions, combineReducers, createThunkAction } from "metabase/lib/redux"; +import { + handleActions, + combineReducers, + createThunkAction, +} from "metabase/lib/redux"; import { push } from "react-router-redux"; - +import { t } from "c-3po"; import MetabaseAnalytics from "metabase/lib/analytics"; import MetabaseSettings from "metabase/lib/settings"; import { MetabaseApi } from "metabase/services"; +import Databases from "metabase/entities/databases"; // Default schedules for db sync and deep analysis export const DEFAULT_SCHEDULES = { - "cache_field_values": { - "schedule_day": null, - "schedule_frame": null, - "schedule_hour": 0, - "schedule_type": "daily" - }, - "metadata_sync": { - "schedule_day": null, - "schedule_frame": null, - "schedule_hour": null, - "schedule_type": "hourly" - } -} + cache_field_values: { + schedule_day: null, + schedule_frame: null, + schedule_hour: 0, + schedule_type: "daily", + }, + metadata_sync: { + schedule_day: null, + schedule_frame: null, + schedule_hour: null, + schedule_type: "hourly", + }, +}; export const DB_EDIT_FORM_CONNECTION_TAB = "connection"; export const DB_EDIT_FORM_SCHEDULING_TAB = "scheduling"; @@ -31,310 +36,374 @@ export const DB_EDIT_FORM_SCHEDULING_TAB = "scheduling"; export const RESET = "metabase/admin/databases/RESET"; export const SELECT_ENGINE = "metabase/admin/databases/SELECT_ENGINE"; export const FETCH_DATABASES = "metabase/admin/databases/FETCH_DATABASES"; -export const INITIALIZE_DATABASE = "metabase/admin/databases/INITIALIZE_DATABASE"; +export const INITIALIZE_DATABASE = + 
"metabase/admin/databases/INITIALIZE_DATABASE"; export const ADD_SAMPLE_DATASET = "metabase/admin/databases/ADD_SAMPLE_DATASET"; export const DELETE_DATABASE = "metabase/admin/databases/DELETE_DATABASE"; -export const SYNC_DATABASE_SCHEMA = "metabase/admin/databases/SYNC_DATABASE_SCHEMA"; -export const RESCAN_DATABASE_FIELDS = "metabase/admin/databases/RESCAN_DATABASE_FIELDS"; -export const DISCARD_SAVED_FIELD_VALUES = "metabase/admin/databases/DISCARD_SAVED_FIELD_VALUES"; -export const UPDATE_DATABASE = 'metabase/admin/databases/UPDATE_DATABASE' -export const UPDATE_DATABASE_STARTED = 'metabase/admin/databases/UPDATE_DATABASE_STARTED' -export const UPDATE_DATABASE_FAILED = 'metabase/admin/databases/UPDATE_DATABASE_FAILED' -export const SET_DATABASE_CREATION_STEP = 'metabase/admin/databases/SET_DATABASE_CREATION_STEP' -export const CREATE_DATABASE = 'metabase/admin/databases/CREATE_DATABASE' -export const CREATE_DATABASE_STARTED = 'metabase/admin/databases/CREATE_DATABASE_STARTED' -export const VALIDATE_DATABASE_STARTED = 'metabase/admin/databases/VALIDATE_DATABASE_STARTED' -export const VALIDATE_DATABASE_FAILED = 'metabase/admin/databases/VALIDATE_DATABASE_FAILED' -export const CREATE_DATABASE_FAILED = 'metabase/admin/databases/CREATE_DATABASE_FAILED' -export const DELETE_DATABASE_STARTED = 'metabase/admin/databases/DELETE_DATABASE_STARTED' -export const DELETE_DATABASE_FAILED = "metabase/admin/databases/DELETE_DATABASE_FAILED"; -export const CLEAR_FORM_STATE = 'metabase/admin/databases/CLEAR_FORM_STATE' -export const MIGRATE_TO_NEW_SCHEDULING_SETTINGS = 'metabase/admin/databases/MIGRATE_TO_NEW_SCHEDULING_SETTINGS' +export const SYNC_DATABASE_SCHEMA = + "metabase/admin/databases/SYNC_DATABASE_SCHEMA"; +export const RESCAN_DATABASE_FIELDS = + "metabase/admin/databases/RESCAN_DATABASE_FIELDS"; +export const DISCARD_SAVED_FIELD_VALUES = + "metabase/admin/databases/DISCARD_SAVED_FIELD_VALUES"; +export const UPDATE_DATABASE = "metabase/admin/databases/UPDATE_DATABASE"; 
+export const UPDATE_DATABASE_STARTED = + "metabase/admin/databases/UPDATE_DATABASE_STARTED"; +export const UPDATE_DATABASE_FAILED = + "metabase/admin/databases/UPDATE_DATABASE_FAILED"; +export const SET_DATABASE_CREATION_STEP = + "metabase/admin/databases/SET_DATABASE_CREATION_STEP"; +export const CREATE_DATABASE = "metabase/admin/databases/CREATE_DATABASE"; +export const CREATE_DATABASE_STARTED = + "metabase/admin/databases/CREATE_DATABASE_STARTED"; +export const VALIDATE_DATABASE_STARTED = + "metabase/admin/databases/VALIDATE_DATABASE_STARTED"; +export const VALIDATE_DATABASE_FAILED = + "metabase/admin/databases/VALIDATE_DATABASE_FAILED"; +export const CREATE_DATABASE_FAILED = + "metabase/admin/databases/CREATE_DATABASE_FAILED"; +export const DELETE_DATABASE_STARTED = + "metabase/admin/databases/DELETE_DATABASE_STARTED"; +export const DELETE_DATABASE_FAILED = + "metabase/admin/databases/DELETE_DATABASE_FAILED"; +export const CLEAR_FORM_STATE = "metabase/admin/databases/CLEAR_FORM_STATE"; +export const MIGRATE_TO_NEW_SCHEDULING_SETTINGS = + "metabase/admin/databases/MIGRATE_TO_NEW_SCHEDULING_SETTINGS"; + +// NOTE: some but not all of these actions have been migrated to use metabase/entities/databases export const reset = createAction(RESET); // selectEngine (uiControl) export const selectEngine = createAction(SELECT_ENGINE); -// fetchDatabases -export const fetchDatabases = createThunkAction(FETCH_DATABASES, function() { - return async function(dispatch, getState) { - try { - return await MetabaseApi.db_list(); - } catch(error) { - console.error("error fetching databases", error); - } - }; -}); - // Migrates old "Enable in-depth database analysis" option to new "Let me choose when Metabase syncs and scans" option // Migration is run as a separate action because that makes it easy to track in tests -const migrateDatabaseToNewSchedulingSettings = (database) => { - return async function(dispatch, getState) { - if (database.details["let-user-control-scheduling"] == 
undefined) { - dispatch.action(MIGRATE_TO_NEW_SCHEDULING_SETTINGS, { - ...database, - details: { - ...database.details, - // if user has enabled in-depth analysis already, we will run sync&scan in default schedule anyway - // otherwise let the user control scheduling - "let-user-control-scheduling": !database.is_full_sync - } - }) - } else { - console.log(`${MIGRATE_TO_NEW_SCHEDULING_SETTINGS} is no-op as scheduling settings are already set`) - } +const migrateDatabaseToNewSchedulingSettings = database => { + return async function(dispatch, getState) { + if (database.details["let-user-control-scheduling"] == undefined) { + dispatch.action(MIGRATE_TO_NEW_SCHEDULING_SETTINGS, { + ...database, + details: { + ...database.details, + // if user has enabled in-depth analysis already, we will run sync&scan in default schedule anyway + // otherwise let the user control scheduling + "let-user-control-scheduling": !database.is_full_sync, + }, + }); + } else { + console.log( + `${MIGRATE_TO_NEW_SCHEDULING_SETTINGS} is no-op as scheduling settings are already set`, + ); } -} + }; +}; // initializeDatabase export const initializeDatabase = function(databaseId) { - return async function(dispatch, getState) { - if (databaseId) { - try { - const database = await MetabaseApi.db_get({"dbId": databaseId}); - dispatch.action(INITIALIZE_DATABASE, database) - - // If the new scheduling toggle isn't set, run the migration - if (database.details["let-user-control-scheduling"] == undefined) { - dispatch(migrateDatabaseToNewSchedulingSettings(database)) - } - } catch (error) { - if (error.status == 404) { - //$location.path('/admin/databases/'); - } else { - console.error("error fetching database", databaseId, error); - } - } + return async function(dispatch, getState) { + if (databaseId) { + try { + const action = await dispatch( + Databases.actions.fetch({ id: databaseId }, { reload: true }), + ); + const database = Databases.HACK_getObjectFromAction(action); + 
dispatch.action(INITIALIZE_DATABASE, database); + + // If the new scheduling toggle isn't set, run the migration + if (database.details["let-user-control-scheduling"] == undefined) { + dispatch(migrateDatabaseToNewSchedulingSettings(database)); + } + } catch (error) { + if (error.status == 404) { + //$location.path('/admin/databases/'); } else { - const newDatabase = { - name: '', - engine: Object.keys(MetabaseSettings.get('engines'))[0], - details: {}, - created: false - } - dispatch.action(INITIALIZE_DATABASE, newDatabase); + console.error("error fetching database", databaseId, error); } + } + } else { + const newDatabase = { + name: "", + engine: Object.keys(MetabaseSettings.get("engines"))[0], + details: {}, + created: false, + }; + dispatch.action(INITIALIZE_DATABASE, newDatabase); } -} - + }; +}; // addSampleDataset -export const addSampleDataset = createThunkAction(ADD_SAMPLE_DATASET, function() { +export const addSampleDataset = createThunkAction( + ADD_SAMPLE_DATASET, + function() { return async function(dispatch, getState) { - try { - let sampleDataset = await MetabaseApi.db_add_sample_dataset(); - MetabaseAnalytics.trackEvent("Databases", "Add Sample Data"); - return sampleDataset; - } catch(error) { - console.error("error adding sample dataset", error); - return error; - } + try { + let sampleDataset = await MetabaseApi.db_add_sample_dataset(); + MetabaseAnalytics.trackEvent("Databases", "Add Sample Data"); + return sampleDataset; + } catch (error) { + console.error("error adding sample dataset", error); + return error; + } }; -}); - -export const proceedWithDbCreation = function (database) { - return async function (dispatch, getState) { - if (database.details["let-user-control-scheduling"]) { - try { - dispatch.action(VALIDATE_DATABASE_STARTED); - const { valid } = await MetabaseApi.db_validate({ details: database }); - - if (valid) { - dispatch.action(SET_DATABASE_CREATION_STEP, { - // NOTE Atte Keinänen: DatabaseSchedulingForm needs 
`editingDatabase` with `schedules` so I decided that - // it makes sense to set the value of editingDatabase as part of SET_DATABASE_CREATION_STEP - database: { - ...database, - is_full_sync: true, - schedules: DEFAULT_SCHEDULES - }, - step: DB_EDIT_FORM_SCHEDULING_TAB - }); - } else { - dispatch.action(VALIDATE_DATABASE_FAILED, { error: { data: { message: "Couldn't connect to the database. Please check the connection details." } } }); - } - } catch(error) { - dispatch.action(VALIDATE_DATABASE_FAILED, { error }); - } + }, +); + +export const proceedWithDbCreation = function(database) { + return async function(dispatch, getState) { + if (database.details["let-user-control-scheduling"]) { + try { + dispatch.action(VALIDATE_DATABASE_STARTED); + const { valid } = await MetabaseApi.db_validate({ details: database }); + + if (valid) { + dispatch.action(SET_DATABASE_CREATION_STEP, { + // NOTE Atte Keinänen: DatabaseSchedulingForm needs `editingDatabase` with `schedules` so I decided that + // it makes sense to set the value of editingDatabase as part of SET_DATABASE_CREATION_STEP + database: { + ...database, + is_full_sync: true, + schedules: DEFAULT_SCHEDULES, + }, + step: DB_EDIT_FORM_SCHEDULING_TAB, + }); } else { - // Skip the scheduling step if user doesn't need precise control over sync and scan - dispatch(createDatabase(database)); + dispatch.action(VALIDATE_DATABASE_FAILED, { + error: { + data: { + message: t`Couldn't connect to the database. 
Please check the connection details.`, + }, + }, + }); } + } catch (error) { + dispatch.action(VALIDATE_DATABASE_FAILED, { error }); + } + } else { + // Skip the scheduling step if user doesn't need precise control over sync and scan + dispatch(createDatabase(database)); } -} - -export const createDatabase = function (database) { - return async function (dispatch, getState) { - try { - dispatch.action(CREATE_DATABASE_STARTED, {}) - const createdDatabase = await MetabaseApi.db_create(database); - MetabaseAnalytics.trackEvent("Databases", "Create", database.engine); - - // update the db metadata already here because otherwise there will be a gap between "Adding..." status - // and seeing the db that was just added - await dispatch(fetchDatabases()) - - dispatch.action(CREATE_DATABASE) - dispatch(push('/admin/databases?created=' + createdDatabase.id)); - } catch (error) { - console.error("error creating a database", error); - MetabaseAnalytics.trackEvent("Databases", "Create Failed", database.engine); - dispatch.action(CREATE_DATABASE_FAILED, { error }) - } - }; -} + }; +}; + +export const createDatabase = function(database) { + return async function(dispatch, getState) { + try { + dispatch.action(CREATE_DATABASE_STARTED, {}); + const action = await dispatch(Databases.actions.create(database)); + const createdDatabase = Databases.HACK_getObjectFromAction(action); + MetabaseAnalytics.trackEvent("Databases", "Create", database.engine); + + dispatch.action(CREATE_DATABASE); + dispatch(push("/admin/databases?created=" + createdDatabase.id)); + } catch (error) { + console.error("error creating a database", error); + MetabaseAnalytics.trackEvent( + "Databases", + "Create Failed", + database.engine, + ); + dispatch.action(CREATE_DATABASE_FAILED, { error }); + } + }; +}; export const updateDatabase = function(database) { - return async function(dispatch, getState) { - try { - dispatch.action(UPDATE_DATABASE_STARTED, { database }) - const savedDatabase = await 
MetabaseApi.db_update(database); - MetabaseAnalytics.trackEvent("Databases", "Update", database.engine); - - dispatch.action(UPDATE_DATABASE, { database: savedDatabase }) - setTimeout(() => dispatch.action(CLEAR_FORM_STATE), 3000); - } catch (error) { - MetabaseAnalytics.trackEvent("Databases", "Update Failed", database.engine); - dispatch.action(UPDATE_DATABASE_FAILED, { error }); - } - }; + return async function(dispatch, getState) { + try { + dispatch.action(UPDATE_DATABASE_STARTED, { database }); + const action = await dispatch(Databases.actions.update(database)); + const savedDatabase = Databases.HACK_getObjectFromAction(action); + MetabaseAnalytics.trackEvent("Databases", "Update", database.engine); + + dispatch.action(UPDATE_DATABASE, { database: savedDatabase }); + setTimeout(() => dispatch.action(CLEAR_FORM_STATE), 3000); + } catch (error) { + MetabaseAnalytics.trackEvent( + "Databases", + "Update Failed", + database.engine, + ); + dispatch.action(UPDATE_DATABASE_FAILED, { error }); + } + }; }; // NOTE Atte Keinänen 7/26/17: Original monolithic saveDatabase was broken out to smaller actions // but `saveDatabase` action creator is still left here for keeping the interface for React components unchanged export const saveDatabase = function(database, details) { - // If we don't let user control the scheduling settings, let's override them with Metabase defaults - // TODO Atte Keinänen 8/15/17: Implement engine-specific scheduling defaults - const letUserControlScheduling = details["let-user-control-scheduling"]; - const overridesIfNoUserControl = letUserControlScheduling ? {} : { + // If we don't let user control the scheduling settings, let's override them with Metabase defaults + // TODO Atte Keinänen 8/15/17: Implement engine-specific scheduling defaults + const letUserControlScheduling = details["let-user-control-scheduling"]; + const overridesIfNoUserControl = letUserControlScheduling + ? 
{} + : { is_full_sync: true, - schedules: DEFAULT_SCHEDULES - } - - return async function(dispatch, getState) { - const databaseWithDetails = { - ...database, - details, - ...overridesIfNoUserControl - }; - const isUnsavedDatabase = !databaseWithDetails.id - if (isUnsavedDatabase) { - dispatch(createDatabase(databaseWithDetails)) - } else { - dispatch(updateDatabase(databaseWithDetails)) - } + schedules: DEFAULT_SCHEDULES, + }; + + return async function(dispatch, getState) { + const databaseWithDetails = { + ...database, + details, + ...overridesIfNoUserControl, }; + const isUnsavedDatabase = !databaseWithDetails.id; + if (isUnsavedDatabase) { + dispatch(createDatabase(databaseWithDetails)); + } else { + dispatch(updateDatabase(databaseWithDetails)); + } + }; }; export const deleteDatabase = function(databaseId, isDetailView = true) { - return async function(dispatch, getState) { - try { - dispatch.action(DELETE_DATABASE_STARTED, { databaseId }) - dispatch(push('/admin/databases/')); - await MetabaseApi.db_delete({"dbId": databaseId}); - MetabaseAnalytics.trackEvent("Databases", "Delete", isDetailView ? "Using Detail" : "Using List"); - dispatch.action(DELETE_DATABASE, { databaseId }) - } catch(error) { - console.log('error deleting database', error); - dispatch.action(DELETE_DATABASE_FAILED, { databaseId, error }) - } - }; -} + return async function(dispatch, getState) { + try { + dispatch.action(DELETE_DATABASE_STARTED, { databaseId }); + dispatch(push("/admin/databases/")); + await dispatch(Databases.actions.delete({ id: databaseId })); + MetabaseAnalytics.trackEvent( + "Databases", + "Delete", + isDetailView ? 
"Using Detail" : "Using List", + ); + dispatch.action(DELETE_DATABASE, { databaseId }); + } catch (error) { + console.log("error deleting database", error); + dispatch.action(DELETE_DATABASE_FAILED, { databaseId, error }); + } + }; +}; // syncDatabaseSchema -export const syncDatabaseSchema = createThunkAction(SYNC_DATABASE_SCHEMA, function(databaseId) { +export const syncDatabaseSchema = createThunkAction( + SYNC_DATABASE_SCHEMA, + function(databaseId) { return async function(dispatch, getState) { - try { - let call = await MetabaseApi.db_sync_schema({"dbId": databaseId}); - MetabaseAnalytics.trackEvent("Databases", "Manual Sync"); - return call; - } catch(error) { - console.log('error syncing database', error); - } + try { + let call = await MetabaseApi.db_sync_schema({ dbId: databaseId }); + MetabaseAnalytics.trackEvent("Databases", "Manual Sync"); + return call; + } catch (error) { + console.log("error syncing database", error); + } }; -}); + }, +); // rescanDatabaseFields -export const rescanDatabaseFields = createThunkAction(RESCAN_DATABASE_FIELDS, function(databaseId) { +export const rescanDatabaseFields = createThunkAction( + RESCAN_DATABASE_FIELDS, + function(databaseId) { return async function(dispatch, getState) { - try { - let call = await MetabaseApi.db_rescan_values({"dbId": databaseId}); - MetabaseAnalytics.trackEvent("Databases", "Manual Sync"); - return call; - } catch(error) { - console.log('error syncing database', error); - } + try { + let call = await MetabaseApi.db_rescan_values({ dbId: databaseId }); + MetabaseAnalytics.trackEvent("Databases", "Manual Sync"); + return call; + } catch (error) { + console.log("error syncing database", error); + } }; -}); + }, +); // discardSavedFieldValues -export const discardSavedFieldValues = createThunkAction(DISCARD_SAVED_FIELD_VALUES, function(databaseId) { +export const discardSavedFieldValues = createThunkAction( + DISCARD_SAVED_FIELD_VALUES, + function(databaseId) { return async function(dispatch, 
getState) { - try { - let call = await MetabaseApi.db_discard_values({"dbId": databaseId}); - MetabaseAnalytics.trackEvent("Databases", "Manual Sync"); - return call; - } catch(error) { - console.log('error syncing database', error); - } + try { + let call = await MetabaseApi.db_discard_values({ dbId: databaseId }); + MetabaseAnalytics.trackEvent("Databases", "Manual Sync"); + return call; + } catch (error) { + console.log("error syncing database", error); + } }; -}); + }, +); // reducers -const databases = handleActions({ - [FETCH_DATABASES]: { next: (state, { payload }) => payload }, - [ADD_SAMPLE_DATASET]: { next: (state, { payload }) => payload ? [...state, payload] : state }, - [DELETE_DATABASE]: (state, { payload: { databaseId} }) => - databaseId ? _.reject(state, (d) => d.id === databaseId) : state -}, null); - -const editingDatabase = handleActions({ +const editingDatabase = handleActions( + { [RESET]: () => null, [INITIALIZE_DATABASE]: (state, { payload }) => payload, [MIGRATE_TO_NEW_SCHEDULING_SETTINGS]: (state, { payload }) => payload, [UPDATE_DATABASE]: (state, { payload }) => payload.database || state, [DELETE_DATABASE]: (state, { payload }) => null, - [SELECT_ENGINE]: (state, { payload }) => ({...state, engine: payload }), - [SET_DATABASE_CREATION_STEP]: (state, { payload: { database } }) => database -}, null); - -const deletes = handleActions({ - [DELETE_DATABASE_STARTED]: (state, { payload: { databaseId } }) => state.concat([databaseId]), - [DELETE_DATABASE_FAILED]: (state, { payload: { databaseId, error } }) => state.filter((dbId) => dbId !== databaseId), - [DELETE_DATABASE]: (state, { payload: { databaseId } }) => state.filter((dbId) => dbId !== databaseId) -}, []); - -const deletionError = handleActions({ + [SELECT_ENGINE]: (state, { payload }) => ({ ...state, engine: payload }), + [SET_DATABASE_CREATION_STEP]: (state, { payload: { database } }) => + database, + }, + null, +); + +const deletes = handleActions( + { + [DELETE_DATABASE_STARTED]: 
(state, { payload: { databaseId } }) => + state.concat([databaseId]), + [DELETE_DATABASE_FAILED]: (state, { payload: { databaseId, error } }) => + state.filter(dbId => dbId !== databaseId), + [DELETE_DATABASE]: (state, { payload: { databaseId } }) => + state.filter(dbId => dbId !== databaseId), + }, + [], +); + +const deletionError = handleActions( + { [DELETE_DATABASE_FAILED]: (state, { payload: { error } }) => error, -}, null) + }, + null, +); -const databaseCreationStep = handleActions({ +const databaseCreationStep = handleActions( + { [RESET]: () => DB_EDIT_FORM_CONNECTION_TAB, - [SET_DATABASE_CREATION_STEP] : (state, { payload: { step } }) => step -}, DB_EDIT_FORM_CONNECTION_TAB) - -const DEFAULT_FORM_STATE = { formSuccess: null, formError: null, isSubmitting: false }; + [SET_DATABASE_CREATION_STEP]: (state, { payload: { step } }) => step, + }, + DB_EDIT_FORM_CONNECTION_TAB, +); + +const DEFAULT_FORM_STATE = { + formSuccess: null, + formError: null, + isSubmitting: false, +}; -const formState = handleActions({ +const formState = handleActions( + { [RESET]: { next: () => DEFAULT_FORM_STATE }, [CREATE_DATABASE_STARTED]: () => ({ isSubmitting: true }), // not necessarily needed as the page is immediately redirected after db creation - [CREATE_DATABASE]: () => ({ formSuccess: { data: { message: "Successfully created!" } } }), - [VALIDATE_DATABASE_FAILED]: (state, { payload: { error } }) => ({ formError: error }), - [CREATE_DATABASE_FAILED]: (state, { payload: { error } }) => ({ formError: error }), + [CREATE_DATABASE]: () => ({ + formSuccess: { data: { message: t`Successfully created!` } }, + }), + [VALIDATE_DATABASE_FAILED]: (state, { payload: { error } }) => ({ + formError: error, + }), + [CREATE_DATABASE_FAILED]: (state, { payload: { error } }) => ({ + formError: error, + }), [UPDATE_DATABASE_STARTED]: () => ({ isSubmitting: true }), - [UPDATE_DATABASE]: () => ({ formSuccess: { data: { message: "Successfully saved!" 
} } }), - [UPDATE_DATABASE_FAILED]: (state, { payload: { error } }) => ({ formError: error }), - [CLEAR_FORM_STATE]: () => DEFAULT_FORM_STATE -}, DEFAULT_FORM_STATE); + [UPDATE_DATABASE]: () => ({ + formSuccess: { data: { message: t`Successfully saved!` } }, + }), + [UPDATE_DATABASE_FAILED]: (state, { payload: { error } }) => ({ + formError: error, + }), + [CLEAR_FORM_STATE]: () => DEFAULT_FORM_STATE, + }, + DEFAULT_FORM_STATE, +); export default combineReducers({ - databases, - editingDatabase, - deletionError, - databaseCreationStep, - formState, - deletes + editingDatabase, + deletionError, + databaseCreationStep, + formState, + deletes, }); diff --git a/frontend/src/metabase/admin/databases/selectors.js b/frontend/src/metabase/admin/databases/selectors.js index c9d0bbabaee0..acfc858f52fd 100644 --- a/frontend/src/metabase/admin/databases/selectors.js +++ b/frontend/src/metabase/admin/databases/selectors.js @@ -1,27 +1,12 @@ /* @flow weak */ -import _ from "underscore"; -import { createSelector } from 'reselect'; - - -// Database List -export const databases = state => state.admin.databases.databases; - -export const getDatabasesSorted = createSelector( - [databases], - (databases) => _.sortBy(databases, 'name') -); - -export const hasSampleDataset = createSelector( - [databases], - (databases) => _.some(databases, (d) => d.is_sample) -); - - // Database Edit -export const getEditingDatabase = state => state.admin.databases.editingDatabase; -export const getFormState = state => state.admin.databases.formState; -export const getDatabaseCreationStep = state => state.admin.databases.databaseCreationStep; +export const getEditingDatabase = state => + state.admin.databases.editingDatabase; +export const getFormState = state => state.admin.databases.formState; +export const getDatabaseCreationStep = state => + state.admin.databases.databaseCreationStep; -export const getDeletes = state => state.admin.databases.deletes; -export const getDeletionError = state => 
state.admin.databases.deletionError; +// Database List +export const getDeletes = state => state.admin.databases.deletes; +export const getDeletionError = state => state.admin.databases.deletionError; diff --git a/frontend/src/metabase/admin/datamodel/components/FormInput.jsx b/frontend/src/metabase/admin/datamodel/components/FormInput.jsx index bfce1383e6f2..775fad44e80e 100644 --- a/frontend/src/metabase/admin/datamodel/components/FormInput.jsx +++ b/frontend/src/metabase/admin/datamodel/components/FormInput.jsx @@ -4,17 +4,21 @@ import cx from "classnames"; import { formDomOnlyProps } from "metabase/lib/redux"; export default class FormInput extends Component { - static propTypes = {}; + static propTypes = {}; - render() { - const { field, className, placeholder } = this.props; - return ( - - ); - } + render() { + const { field, className, placeholder } = this.props; + return ( + + ); + } } diff --git a/frontend/src/metabase/admin/datamodel/components/FormLabel.jsx b/frontend/src/metabase/admin/datamodel/components/FormLabel.jsx index c45ee4b1208e..ca5acb143ce0 100644 --- a/frontend/src/metabase/admin/datamodel/components/FormLabel.jsx +++ b/frontend/src/metabase/admin/datamodel/components/FormLabel.jsx @@ -2,26 +2,28 @@ import React, { Component } from "react"; import PropTypes from "prop-types"; export default class FormLabel extends Component { - static propTypes = { - title: PropTypes.string, - description: PropTypes.string, - }; + static propTypes = { + title: PropTypes.string, + description: PropTypes.string, + }; - static defaultProps = { - title: "", - description: "" - }; + static defaultProps = { + title: "", + description: "", + }; - render() { - let { title, description, children } = this.props; - return ( -
-
- { title && } - { description &&

{description}

} -
- {children} -
- ); - } + render() { + let { title, description, children } = this.props; + return ( +
+
+ {title && ( + + )} + {description &&

{description}

} +
+ {children} +
+ ); + } } diff --git a/frontend/src/metabase/admin/datamodel/components/FormTextArea.jsx b/frontend/src/metabase/admin/datamodel/components/FormTextArea.jsx index a2119223ab14..f304d5e7a3d7 100644 --- a/frontend/src/metabase/admin/datamodel/components/FormTextArea.jsx +++ b/frontend/src/metabase/admin/datamodel/components/FormTextArea.jsx @@ -5,16 +5,20 @@ import cx from "classnames"; import { formDomOnlyProps } from "metabase/lib/redux"; export default class FormTextArea extends Component { - static propTypes = {}; + static propTypes = {}; - render() { - const { field, className, placeholder } = this.props; - return ( -