Postgres Data Source (#9475)

* add postgresql datasource

* add rest of files for postgres datasource

* fix timeseries query, remove unused code

* consistent naming, refactoring

* s/mysql/postgres/

* s/mysql/postgres/

* couple more tests

* tests for more datatypes

* fix macros for postgres

* add __timeSec macro
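
For context, macros such as `$__time(column)` and `$__timeSec(column)` are placeholders that get rewritten into plain SQL before the query is sent to Postgres. A minimal sketch of that substitution idea (the expansions shown here are illustrative, not the plugin's exact output):

```go
package main

import (
	"fmt"
	"regexp"
)

// Illustrative only: rewrite $__time(col) / $__timeSec(col) style macros into
// plain SQL. The real macro engine and its exact expansions differ.
var macroRe = regexp.MustCompile(`\$__(\w+)\(([^)]*)\)`)

func interpolate(sql string) string {
	return macroRe.ReplaceAllStringFunc(sql, func(m string) string {
		groups := macroRe.FindStringSubmatch(m)
		name, arg := groups[1], groups[2]
		switch name {
		case "time":
			return fmt.Sprintf("%s AS time", arg)
		case "timeSec":
			return fmt.Sprintf("extract(epoch from %s) AS time", arg)
		default:
			return m // leave unknown macros untouched
		}
	})
}

func main() {
	fmt.Println(interpolate("SELECT $__timeSec(created_at), value FROM metrics"))
}
```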

* add frontend for postgres datasource

* adjust documentation

* fix formatting

* add proper plugin description

* merge editor changes from mysql

* port changes from mysql datasource

* set proper defaultQuery for postgres

* add time_sec to timeseries query
accept int values for the value column in timeseries queries

* revert allowing time_sec and handle int or float values as unix
timestamp for "time" column
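
The gist of the change is that whatever lands in the "time" column is treated as a unix timestamp in seconds, whether it arrives as an integer or a float. A rough sketch of that conversion (the helper name `toTime` is made up for illustration):

```go
package main

import (
	"fmt"
	"time"
)

// Sketch only: accept int64, float64 or time.Time for the "time" column and
// normalize to time.Time, treating numbers as unix seconds.
func toTime(v interface{}) (time.Time, bool) {
	switch t := v.(type) {
	case int64:
		return time.Unix(t, 0), true
	case float64:
		sec := int64(t)
		nsec := int64((t - float64(sec)) * float64(time.Second))
		return time.Unix(sec, nsec), true
	case time.Time:
		return t, true
	default:
		return time.Time{}, false
	}
}

func main() {
	fmt.Println(toTime(int64(1432288355)))
	fmt.Println(toTime(1432288355.25))
}
```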

* fix tslint error

* handle decimal values in timeseries query

* allow setting sslmode for postgres datasource
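
A sketch of how an sslmode setting could be threaded from the data source settings into the lib/pq connection string; the `open` helper and its parameters are assumptions, not the plugin's actual settings fields:

```go
package main

import (
	"database/sql"
	"log"
	"net/url"

	_ "github.com/lib/pq"
)

// Illustrative only: carry the sslmode chosen on the config page through to
// the connection string handed to lib/pq.
func open(host, dbname, user, password, sslmode string) (*sql.DB, error) {
	if sslmode == "" {
		sslmode = "require" // assumed fallback; the config page default may differ
	}
	u := url.URL{
		Scheme:   "postgres",
		User:     url.UserPassword(user, password),
		Host:     host,
		Path:     "/" + dbname,
		RawQuery: "sslmode=" + sslmode,
	}
	return sql.Open("postgres", u.String())
}

func main() {
	db, err := open("localhost:5432", "grafana", "grafana", "secret", "disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	log.Println("connection configured") // sql.Open is lazy; nothing is dialed yet
}
```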

* use type switch for handling data types

* fix value for timeseries query

* refactor timeseries queries to make them more flexible

* remove debug statement from inner loop in type conversion

* use plain for loop in getTypedRowData

* fix timeseries queries

* adjust postgres datasource to tsdb refactoring

* adjust postgres datasource to frontend changes

* update lib/pq to latest version

* move type conversion to getTypedRowData

* handle address types cidr, inet and macaddr

* adjust response parser and docs for annotations

* convert unknown types to string
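
Types the driver has no native Go mapping for (which is how cidr, inet and macaddr typically come back) arrive as raw bytes, so the safe fallback is to turn them into strings. A small sketch of that idea, not the plugin's actual getTypedRowData code:

```go
package main

import "fmt"

// Sketch only: normalize a scanned row value into something the frontend can
// render, stringifying anything that is not already a basic type.
func normalize(v interface{}) interface{} {
	switch t := v.(type) {
	case []byte:
		return string(t) // covers inet, cidr, macaddr and other textual types
	case nil, bool, int64, float64, string:
		return t // already fine as-is
	default:
		return fmt.Sprintf("%v", t) // last resort for anything unknown
	}
}

func main() {
	fmt.Println(normalize([]byte("192.168.0.0/24"))) // "192.168.0.0/24"
	fmt.Println(normalize(int64(42)))                // 42
}
```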

* add documentation for postgres datasource

* add another example query with metric column

* set more helpful default query

* update help text in query editor

* handle NULL in value column of timeseries query
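
The intent is that a NULL in the value column becomes a null data point rather than failing the whole query. A sketch using database/sql's NullFloat64 (the `point` struct is a simplified stand-in, not the plugin's type):

```go
package main

import (
	"database/sql"
	"fmt"
)

// Sketch only: scan the value column into sql.NullFloat64 so NULL rows turn
// into null points instead of errors.
type point struct {
	Time  int64
	Value *float64
}

func toPoint(ts int64, v sql.NullFloat64) point {
	p := point{Time: ts}
	if v.Valid {
		val := v.Float64
		p.Value = &val
	}
	return p
}

func main() {
	withValue := toPoint(1432288355, sql.NullFloat64{Float64: 3.14, Valid: true})
	nullValue := toPoint(1432288390, sql.NullFloat64{})
	fmt.Println(*withValue.Value, nullValue.Value) // 3.14 <nil>
}
```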

* add __timeGroup macro
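
The macro groups rows into fixed-width time buckets so they can be aggregated per interval. The exact SQL it expands to is not reproduced here; this sketch only shows the bucketing arithmetic behind a `$__timeGroup(column, interval)` style macro:

```go
package main

import (
	"fmt"
	"time"
)

// Sketch only: floor a timestamp onto an interval boundary, which is the
// arithmetic a time-grouping macro relies on.
func bucket(t time.Time, interval time.Duration) int64 {
	sec := int64(interval.Seconds())
	return (t.Unix() / sec) * sec
}

func main() {
	ts := time.Date(2017, 10, 10, 15, 19, 14, 0, time.UTC)
	fmt.Println(bucket(ts, 5*time.Minute)) // epoch floored to a 5 minute boundary
}
```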

* add test for __timeGroup macro

* document __timeGroup and set proper default query for annotations

* fix typos in docs

* add postgres to list of datasources

* add postgres to builtInPlugins

* mysql: refactoring as prep for merging postgres

Refactors out the initialization of the xorm engine and the query logic
for an SQL data source.
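
A rough sketch of the shape of that refactoring: the shared engine owns the connection and query plumbing, and each SQL data source plugs in only what differs. None of the type or method names below are the actual Grafana interfaces; they are stand-ins for illustration:

```go
package main

import "fmt"

// Illustrative stand-ins only.
type SqlDialect interface {
	DriverName() string
	ConnectionString() string
	Interpolate(sql string) string
}

type SqlEngine struct {
	dialect SqlDialect
}

func (e *SqlEngine) Query(rawSQL string) {
	// Shared logic would live here: interpolate macros, open or reuse the
	// connection, run the query and convert rows into tables / time series.
	fmt.Printf("driver=%s dsn=%s sql=%q\n",
		e.dialect.DriverName(), e.dialect.ConnectionString(), e.dialect.Interpolate(rawSQL))
}

type postgresDialect struct{ connStr string }

func (postgresDialect) DriverName() string            { return "postgres" }
func (d postgresDialect) ConnectionString() string    { return d.connStr }
func (postgresDialect) Interpolate(sql string) string { return sql } // macro handling omitted

func main() {
	e := &SqlEngine{dialect: postgresDialect{connStr: "postgres://localhost/grafana"}}
	e.Query("SELECT 1")
}
```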

* mysql: rename refactoring + test update

* postgres: refactor to use SqlEngine (same as mysql)

Refactored to use a common base class with the MySQL data source.

Other changes from the original PR:
- Changed time column to be time_sec to allow other time units in the
future and to be the same as MySQL
- Changed integration test to test the main Query method rather than
the private transformToTable method
- Changed the __timeSec macro name to __timeEpoch
- Renamed PostgresExecutor to PostgresQueryEndpoint

Fixes #9209 (the original PR)

* postgres: encrypt password on config page

With some other cosmetic changes to the config page:
- placeholder texts
- a reset button for the password after it has been encrypted
- a default value for the sslmode field

* postgres: change back col name to time from time_sec

* postgres mysql: remove annotation title

Title has been removed from annotations

* postgres: fix images for docs page

* postgres mysql: fix specs

Author: Daniel Lee (committed by GitHub)
Date: 2017-10-10 15:19:14 +02:00
Parent: 630e6f5da6
Commit: d1c9760fa8
36 changed files with 2231 additions and 478 deletions

@@ -0,0 +1,196 @@
import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common';
import moment from 'moment';
import helpers from 'test/specs/helpers';
import {PostgresDatasource} from '../datasource';

describe('PostgreSQLDatasource', function() {
  var ctx = new helpers.ServiceTestContext();
  var instanceSettings = {name: 'postgresql'};

  beforeEach(angularMocks.module('grafana.core'));
  beforeEach(angularMocks.module('grafana.services'));
  beforeEach(ctx.providePhase(['backendSrv']));

  beforeEach(angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    ctx.$q = $q;
    ctx.$httpBackend = $httpBackend;
    ctx.$rootScope = $rootScope;
    ctx.ds = $injector.instantiate(PostgresDatasource, {instanceSettings: instanceSettings});
    $httpBackend.when('GET', /\.html$/).respond('');
  }));

  describe('When performing annotationQuery', function() {
    let results;
    const annotationName = 'MyAnno';
    const options = {
      annotation: {
        name: annotationName,
        rawQuery: 'select time, title, text, tags from table;'
      },
      range: {
        from: moment(1432288354),
        to: moment(1432288401)
      }
    };
    const response = {
      results: {
        MyAnno: {
          refId: annotationName,
          tables: [
            {
              columns: [{text: 'time'}, {text: 'text'}, {text: 'tags'}],
              rows: [
                [1432288355, 'some text', 'TagA,TagB'],
                [1432288390, 'some text2', ' TagB , TagC'],
                [1432288400, 'some text3']
              ]
            }
          ]
        }
      }
    };

    beforeEach(function() {
      ctx.backendSrv.datasourceRequest = function(options) {
        return ctx.$q.when({data: response, status: 200});
      };
      ctx.ds.annotationQuery(options).then(function(data) { results = data; });
      ctx.$rootScope.$apply();
    });

    it('should return annotation list', function() {
      expect(results.length).to.be(3);
      expect(results[0].text).to.be('some text');
      expect(results[0].tags[0]).to.be('TagA');
      expect(results[0].tags[1]).to.be('TagB');
      expect(results[1].tags[0]).to.be('TagB');
      expect(results[1].tags[1]).to.be('TagC');
      expect(results[2].tags.length).to.be(0);
    });
  });

  describe('When performing metricFindQuery', function() {
    let results;
    const query = 'select * from atable';
    const response = {
      results: {
        tempvar: {
          meta: {
            rowCount: 3
          },
          refId: 'tempvar',
          tables: [
            {
              columns: [{text: 'title'}, {text: 'text'}],
              rows: [
                ['aTitle', 'some text'],
                ['aTitle2', 'some text2'],
                ['aTitle3', 'some text3']
              ]
            }
          ]
        }
      }
    };

    beforeEach(function() {
      ctx.backendSrv.datasourceRequest = function(options) {
        return ctx.$q.when({data: response, status: 200});
      };
      ctx.ds.metricFindQuery(query).then(function(data) { results = data; });
      ctx.$rootScope.$apply();
    });

    it('should return list of all column values', function() {
      expect(results.length).to.be(6);
      expect(results[0].text).to.be('aTitle');
      expect(results[5].text).to.be('some text3');
    });
  });

  describe('When performing metricFindQuery with key, value columns', function() {
    let results;
    const query = 'select * from atable';
    const response = {
      results: {
        tempvar: {
          meta: {
            rowCount: 3
          },
          refId: 'tempvar',
          tables: [
            {
              columns: [{text: '__value'}, {text: '__text'}],
              rows: [
                ['value1', 'aTitle'],
                ['value2', 'aTitle2'],
                ['value3', 'aTitle3']
              ]
            }
          ]
        }
      }
    };

    beforeEach(function() {
      ctx.backendSrv.datasourceRequest = function(options) {
        return ctx.$q.when({data: response, status: 200});
      };
      ctx.ds.metricFindQuery(query).then(function(data) { results = data; });
      ctx.$rootScope.$apply();
    });

    it('should return list of as text, value', function() {
      expect(results.length).to.be(3);
      expect(results[0].text).to.be('aTitle');
      expect(results[0].value).to.be('value1');
      expect(results[2].text).to.be('aTitle3');
      expect(results[2].value).to.be('value3');
    });
  });

  describe('When performing metricFindQuery with key, value columns and with duplicate keys', function() {
    let results;
    const query = 'select * from atable';
    const response = {
      results: {
        tempvar: {
          meta: {
            rowCount: 3
          },
          refId: 'tempvar',
          tables: [
            {
              columns: [{text: '__text'}, {text: '__value'}],
              rows: [
                ['aTitle', 'same'],
                ['aTitle', 'same'],
                ['aTitle', 'diff']
              ]
            }
          ]
        }
      }
    };

    beforeEach(function() {
      ctx.backendSrv.datasourceRequest = function(options) {
        return ctx.$q.when({data: response, status: 200});
      };
      ctx.ds.metricFindQuery(query).then(function(data) { results = data; });
      ctx.$rootScope.$apply();
    });

    it('should return list of unique keys', function() {
      expect(results.length).to.be(1);
      expect(results[0].text).to.be('aTitle');
      expect(results[0].value).to.be('same');
    });
  });
});