Merge branch 'develop' into Improvements-to-UI-translations-#1033

* develop: (67 commits)
  [#1464] Lint
  [#1464] Lint
  [#1464] Fix invalid filter bugs
  [#1381] Add translations (#1465)
  [#1462] Add proptype
  [#1462] Fix scatter tooltip issues
  [#1458] Fix bug where cannot switch viz type
  [#1459] Show nonzero origin ticks for scatter chart
  [#1456] Add rounding heuristic for chart display values
  [#1454] Pie % labels
  [#1381] Add translations
  Deploy develop to test environment
  [#1391] Return bad request on aggregation error
  [#1381] Remove vega
  [#1381] Fix agg formatting
  Tidy up test logging output
  [#1381] Fix line agg corner case
  Use proper ns
  Remove indirection lib.aggregation.pivot bis
  Remove indirection in lib.aggregation.pivot
  ...

# Conflicts:
#	client/package-lock.json
#	client/src/components/visualisation/configMenu/FilterMenu.jsx
#	client/src/components/visualisation/configMenu/LayerConfigMenu.jsx
#	client/src/translations/en.json
#	client/src/translations/es.json
#	client/src/translations/fr.json
Showing 48 changed files with 3,486 additions and 3,530 deletions.
@@ -0,0 +1,133 @@
(ns akvo.lumen.lib.aggregation.bar
  (:require [akvo.lumen.lib :as lib]
            [akvo.lumen.lib.aggregation.filter :as filter]
            [akvo.lumen.lib.aggregation.utils :as utils]
            [clojure.java.jdbc :as jdbc]))

(defn- run-query [tenant-conn table-name sql-text column-x-name column-y-name filter-sql aggregation-method truncate-size column-subbucket-name]
  (rest (jdbc/query tenant-conn
                    [(format sql-text
                             column-x-name column-y-name table-name filter-sql aggregation-method truncate-size column-subbucket-name)]
                    {:as-arrays? true})))

(defn query
  [tenant-conn {:keys [columns table-name]} query]
  (let [filter-sql (filter/sql-str columns (get query "filters"))
        max-elements 200
        column-x (utils/find-column columns (get query "bucketColumn"))
        column-x-type (get column-x "type")
        column-x-name (get column-x "columnName")
        column-x-title (get column-x "title")
        column-y (utils/find-column columns (get query "metricColumnY"))
        column-y-name (get column-y "columnName")
        column-y-title (get column-y "title")

        column-subbucket (utils/find-column columns (get query "subBucketColumn"))
        column-subbucket-name (get column-subbucket "columnName")
        column-subbucket-title (get column-subbucket "title")

        aggregation-method (get query "metricAggregation")
        truncate-size (or (get query "truncateSize") "ALL")
        sql-sort-subquery (case (get query "sort")
                            nil "ORDER BY x ASC"
                            "asc" "ORDER BY z.y ASC NULLS FIRST"
                            "dsc" "ORDER BY z.y DESC NULLS LAST")
        sql-sort-subbucket-subquery (case (get query "sort")
                                      nil "ORDER BY x ASC"
                                      "asc" "ORDER BY sort_value ASC NULLS FIRST"
                                      "dsc" "ORDER BY sort_value DESC NULLS LAST")
        sql-aggregation-subquery (case aggregation-method
                                   nil "NULL"
                                   ("min" "max" "count" "sum") (str aggregation-method "(%2$s)")
                                   "mean" "avg(%2$s)"
                                   "median" "percentile_cont(0.5) WITHIN GROUP (ORDER BY %2$s)"
                                   "distinct" "COUNT(DISTINCT %2$s)"
                                   "q1" "percentile_cont(0.25) WITHIN GROUP (ORDER BY %2$s)"
                                   "q3" "percentile_cont(0.75) WITHIN GROUP (ORDER BY %2$s)")
        sql-text-without-subbucket (str "SELECT * FROM (SELECT %1$s as x, " sql-aggregation-subquery " as y FROM %3$s WHERE %4$s GROUP BY %1$s)z " sql-sort-subquery " LIMIT %6$s")

        sql-text-with-subbucket (str "
          WITH
            sort_table
          AS
            (SELECT %1$s AS x, " sql-aggregation-subquery " AS sort_value, TRUE as include_value FROM %3$s WHERE %4$s GROUP BY %1$s " sql-sort-subbucket-subquery " LIMIT %6$s)
          ,
            data_table
          AS
            ( SELECT %1$s as x, " sql-aggregation-subquery " as y,
              %7$s as s
              FROM %3$s
              WHERE %4$s
              GROUP BY %1$s, %7$s
            )
          SELECT
            data_table.x AS x,
            data_table.y,
            data_table.s,
            sort_table.sort_value,
            sort_table.include_value
          FROM
            data_table
          LEFT JOIN
            sort_table
          ON
            COALESCE(sort_table.x::text, '@@@MISSINGDATA@@@') = COALESCE(data_table.x::text, '@@@MISSINGDATA@@@')
          WHERE
            sort_table.include_value IS NOT NULL
          " sql-sort-subbucket-subquery)

        valid-spec (boolean (and column-x column-y))
        sql-text (if valid-spec (if column-subbucket sql-text-with-subbucket sql-text-without-subbucket) "SELECT NULL")
        sql-response (run-query tenant-conn table-name sql-text column-x-name column-y-name filter-sql aggregation-method truncate-size column-subbucket-name)
        bucket-values (distinct
                       (mapv
                        (fn [[x-value y-value s-value]] x-value)
                        sql-response))
        subbucket-values (distinct
                          (mapv
                           (fn [[x-value y-value s-value]] s-value)
                           sql-response))]
    (if (> (count sql-response) max-elements)
      (lib/bad-request
       {"error" true
        "reason" "too-many"
        "max" max-elements
        "count" (count sql-response)})

      (lib/ok
       (if (not column-subbucket)
         {"series" [{"key" column-y-title
                     "label" column-y-title
                     "data" (mapv (fn [[x-value y-value]]
                                    {"value" y-value})
                                  sql-response)}]
          "common" {"metadata" {"type" column-x-type}
                    "data" (mapv (fn [[x-value y-value]]
                                   {"label" x-value "key" x-value})
                                 sql-response)}}
         {"series"
          (mapv
           (fn [s-value]
             {"key" s-value
              "label" s-value
              "data"
              (map
               (fn
                 [bucket-value]
                 {"value"
                  (or (nth
                       (first
                        (filter
                         (fn [o] (and (= (nth o 0) bucket-value) (= (nth o 2) s-value)))
                         sql-response))
                       1
                       0) 0)})
               bucket-values)})
           subbucket-values)

          "common"
          {"metadata"
           {"type" column-x-type}
           "data" (mapv
                   (fn [bucket] {"label" bucket "key" bucket})
                   bucket-values)}})))))
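For orientation, here is a minimal invocation sketch for the new bar aggregation namespace. It is not part of this commit: the tenant connection, table name, and column ids ("c1", "c2") are hypothetical placeholders; only the query keys ("bucketColumn", "metricColumnY", "subBucketColumn", "metricAggregation", "truncateSize", "sort", "filters") and the 200-bucket limit come from the code above.

;; Hypothetical usage sketch -- assumes a JDBC tenant connection and a dataset
;; whose columns include a text column "c1" and a number column "c2".
(require '[akvo.lumen.lib.aggregation.bar :as bar])

(bar/query tenant-conn
           {:table-name "ds_example"                      ; placeholder table name
            :columns    [{"columnName" "c1" "type" "text"   "title" "Region"}
                         {"columnName" "c2" "type" "number" "title" "Amount"}]}
           {"bucketColumn"      "c1"    ; x-axis buckets
            "metricColumnY"     "c2"    ; value aggregated per bucket
            "metricAggregation" "sum"   ; min/max/count/sum/mean/median/distinct/q1/q3
            "truncateSize"      "10"    ; LIMIT applied to buckets; defaults to ALL
            "sort"              "dsc"   ; nil, "asc" or "dsc"
            "filters"           []})
;; Returns (lib/ok {"series" [...] "common" {...}}), or (lib/bad-request ...)
;; when the bucket/sub-bucket combination yields more than max-elements (200) rows.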
@@ -0,0 +1,52 @@
(ns akvo.lumen.lib.aggregation.line
  (:require [akvo.lumen.lib :as lib]
            [akvo.lumen.lib.aggregation.filter :as filter]
            [akvo.lumen.lib.aggregation.utils :as utils]
            [clojure.java.jdbc :as jdbc]))

(defn- run-query [tenant-conn table-name sql-text column-x-name column-y-name filter-sql aggregation-method max-points]
  (rest (jdbc/query tenant-conn
                    [(format sql-text
                             column-x-name column-y-name table-name filter-sql aggregation-method max-points)]
                    {:as-arrays? true})))

(defn query
  [tenant-conn {:keys [columns table-name]} query]
  (let [filter-sql (filter/sql-str columns (get query "filters"))
        column-x (utils/find-column columns (get query "metricColumnX"))
        column-x-type (get column-x "type")
        column-x-name (get column-x "columnName")
        column-x-title (get column-x "title")
        column-y (utils/find-column columns (get query "metricColumnY"))
        column-y-type (get column-y "type")
        column-y-name (get column-y "columnName")
        column-y-title (get column-y "title")
        max-points 2500
        aggregation-method (get query "metricAggregation")
        aggregation-method (if (= column-y-type "text") "count" aggregation-method)
        sql-aggregation-subquery (case aggregation-method
                                   nil ""
                                   ("min" "max" "count" "sum") (str aggregation-method "(%2$s)")
                                   "mean" "avg(%2$s)"
                                   "median" "percentile_cont(0.5) WITHIN GROUP (ORDER BY %2$s)"
                                   "distinct" "COUNT(DISTINCT %2$s)"
                                   "q1" "percentile_cont(0.25) WITHIN GROUP (ORDER BY %2$s)"
                                   "q3" "percentile_cont(0.75) WITHIN GROUP (ORDER BY %2$s)")
        sql-text-with-aggregation (str "SELECT * FROM (SELECT * FROM (SELECT %1$s, " sql-aggregation-subquery " FROM %3$s WHERE %4$s GROUP BY %1$s)z ORDER BY random() LIMIT %6$s)zz ORDER BY zz.%1$s")
        sql-text-without-aggregation "SELECT * FROM (SELECT * FROM (SELECT %1$s AS x, %2$s AS y FROM %3$s WHERE %4$s)z ORDER BY random() LIMIT %6$s)zz ORDER BY zz.x"
        sql-text-no-x (str "SELECT row_number() over() AS x, " (if (= column-y-type "text") "COUNT(%2$s) AS y " "%2$s AS y ") "FROM %3$s WHERE %4$s GROUP BY %2$s")
        sql-text (cond
                   (not column-x) sql-text-no-x
                   aggregation-method sql-text-with-aggregation
                   :else sql-text-without-aggregation)
        sql-response (run-query tenant-conn table-name sql-text column-x-name column-y-name filter-sql aggregation-method max-points)]
    (lib/ok
     {"series" [{"key" column-y-title
                 "label" column-y-title
                 "data" (mapv (fn [[x-value y-value]]
                                {"value" y-value})
                              sql-response)}]
      "common" {"metadata" {"type" column-x-type "sampled" (= (count sql-response) max-points)}
                "data" (mapv (fn [[x-value y-value]]
                               {"timestamp" x-value})
                             sql-response)}})))
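Likewise, a hedged sketch of calling the line aggregation. The connection and column ids are again placeholders; the query keys ("metricColumnX", "metricColumnY", "metricAggregation", "filters"), the 2500-point cap, and the "sampled" flag are taken from the code above.

;; Hypothetical usage sketch -- placeholders only, not part of this commit.
(require '[akvo.lumen.lib.aggregation.line :as line])

(line/query tenant-conn
            {:table-name "ds_example"
             :columns    [{"columnName" "c1" "type" "date"   "title" "Submitted at"}
                          {"columnName" "c2" "type" "number" "title" "Amount"}]}
            {"metricColumnX"     "c1"    ; optional; row_number() is used as x when absent
             "metricColumnY"     "c2"
             "metricAggregation" "mean"  ; forced to "count" when the y column is text
             "filters"           []})
;; Returns (lib/ok {"series" [...] "common" {"metadata" {"type" "date" "sampled" ...} ...}});
;; at most 2500 points are returned, and "sampled" is true when that cap was hit.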