@@ -4,10 +4,16 @@ import com.alibaba.fastjson.JSONArray;
 import com.alibaba.fastjson.JSONObject;
 import com.clickhouse.client.api.Client;
 import com.clickhouse.client.api.data_formats.ClickHouseBinaryFormatReader;
+import com.clickhouse.client.api.data_formats.internal.SerializerUtils;
 import com.clickhouse.client.api.enums.Protocol;
 import com.clickhouse.client.api.metadata.TableSchema;
 import com.clickhouse.client.api.query.QueryResponse;
 import com.clickhouse.client.api.query.QuerySettings;
+import com.clickhouse.data.ClickHouseColumn;
+import com.clickhouse.data.ClickHouseFormat;
+import com.clickhouse.data.format.BinaryStreamUtils;
+import com.clickhouse.data.format.ClickHouseBinaryFormatProcessor;
+import com.clickhouse.data.format.ClickHouseRowBinaryProcessor;
 import db.dto.StckPointVal;
 import db.page.MultiPointRangePagingQuery;
 import db.page.RangePagingQuery;
@@ -519,8 +525,46 @@ ENGINE = Distributed('cluster_3S_1R', 'db1', 'tsdb_cpp', rand())
 
         pointVals.getPoint().ensureValid();
 
-        long benchTimeBegin = System.currentTimeMillis();
-        // Convert to ClickHousePointVal for insertion
+        long benchTimeBegin = System.currentTimeMillis(), benchTimeTemp = 0;
+        // NOTE: the ClickHouse Java client's insert APIs are a poor fit here, so we serialize the rows manually
+        try {
+            // TODO: Maybe also use BinaryStreamUtils?
+            ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            int count = pointVals.getCount();
+            HashMap<String, String> tags = new HashMap<>();
+            for (String key : pointVals.getTags().keySet()) {
+                if (StringUtils.equals(key, "pointName")) {
+                    continue;
+                }
+                tags.put(key, pointVals.getTags().get(key));
+            }
+            ClickHouseColumn colMetricName = ClickHouseColumn.of("metric_name", "String");
+            ClickHouseColumn colPointName = ClickHouseColumn.of("point_name", "String");
+            ClickHouseColumn colTags = ClickHouseColumn.of("tags", "Map(String, String)");
+            ClickHouseColumn colValue = ClickHouseColumn.of("value", "Float64");
+            ClickHouseColumn colNanoseconds = ClickHouseColumn.of("nanoseconds", "Int64");
+            for (int i = 0; i < count; i++) {
+                SerializerUtils.serializeData(baos, pointVals.getValue(i), colValue);
+                SerializerUtils.serializeData(baos, pointVals.getUtcTime(i), colNanoseconds);
+            }
+            // The ClickHouse Java client does not escape the table name, so we can inject an INSERT ... SELECT here
+            String tableName = makeTableName() + " (metric_name, point_name, tags, value, nanoseconds)" +
+                    " SELECT " + Util.ToSqlLiteral(pointVals.getMetricName()) +
+                    ", " + Util.ToSqlLiteral(pointVals.getPointName()) +
+                    ", " + Util.ToSqlLiteral(tags) +
+                    ", col1, col2" +
+                    " FROM input('col1 Float64, col2 Int64')";
+            benchTimeTemp = System.currentTimeMillis();
+            LOG.debug("insertPointValsToDB: prepare insert took {}ms", benchTimeTemp - benchTimeBegin);
+            benchTimeBegin = benchTimeTemp;
+            client.insert(tableName, new ByteArrayInputStream(baos.toByteArray()), ClickHouseFormat.RowBinary).get();
+            benchTimeTemp = System.currentTimeMillis();
+            LOG.debug("insertPointValsToDB: insert took {}ms", benchTimeTemp - benchTimeBegin);
+        } catch (Exception e) {
+            LOG.error("insertPointValsToDb error: ", e);
+        }
+        /*
+        // Convert to StckPointVal for insertion
         List<StckPointVal> pointValsList = new LinkedList<>();
         String metricName = pointVals.getMetricName();
         String pointName = pointVals.getPointName();
@@ -547,6 +591,7 @@ ENGINE = Distributed('cluster_3S_1R', 'db1', 'tsdb_cpp', rand())
         } catch (Exception e) {
             LOG.error("insertPointValsToDb error: ", e);
         }
+        */
     }
 
     /// Write to ClickHouse
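For reference, the statement assembled above expands to roughly this shape (the table name, literals, and tag-map rendering are illustrative; the real values come from makeTableName(), Util.ToSqlLiteral(), and the incoming pointVals):

    INSERT INTO <makeTableName()> (metric_name, point_name, tags, value, nanoseconds)
    SELECT '<metric name>', '<point name>', <tags literal>, col1, col2
    FROM input('col1 Float64, col2 Int64')

The RowBinary payload streamed through client.insert() supplies one (col1, col2) pair per row, in the same order as the input() schema, so only value and nanoseconds travel in the binary stream while the constant columns ride along in the SELECT list. A minimal standalone sketch of that pairing, assuming an already configured com.clickhouse.client.api.Client named client and a hypothetical target table tsdb_demo with the same five columns:

    import com.clickhouse.client.api.Client;
    import com.clickhouse.client.api.data_formats.internal.SerializerUtils;
    import com.clickhouse.data.ClickHouseColumn;
    import com.clickhouse.data.ClickHouseFormat;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    class RowBinaryInputInsertSketch {
        // Writes two (Float64, Int64) rows as RowBinary and feeds them to an
        // INSERT ... SELECT ... FROM input(...) statement, mirroring insertPointValsToDb above.
        static void insertTwoRows(Client client) throws Exception {
            ClickHouseColumn colValue = ClickHouseColumn.of("value", "Float64");
            ClickHouseColumn colNanoseconds = ClickHouseColumn.of("nanoseconds", "Int64");

            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            double[] values = {1.5, 2.5};
            long[] nanos = {1700000000000000000L, 1700000001000000000L};
            for (int i = 0; i < values.length; i++) {
                // Per-row column order must match input('col1 Float64, col2 Int64').
                SerializerUtils.serializeData(baos, values[i], colValue);
                SerializerUtils.serializeData(baos, nanos[i], colNanoseconds);
            }

            // The "table name" argument carries the column list plus the injected SELECT clause.
            String target = "tsdb_demo (metric_name, point_name, tags, value, nanoseconds)" +
                    " SELECT 'metric.a', 'point.a', map('host', 'h1'), col1, col2" +
                    " FROM input('col1 Float64, col2 Int64')";
            client.insert(target, new ByteArrayInputStream(baos.toByteArray()), ClickHouseFormat.RowBinary).get();
        }
    }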
@@ -1482,6 +1527,14 @@ INNER JOIN
         // return dbVal;
     }
 
+    public List<DBVal> calAggOverTimeForManyPts(List<Point> points, String aggMethod, long startTime, long endTime) {
+        List<DBVal> result = new ArrayList<>();
+        for (Point point : points) {
+            result.add(calAggOverTime(point, aggMethod, startTime, endTime));
+        }
+        return result;
+    }
+
     public Long countOverTime(Point point, long start, long end) {
         double result = this.selectPointAggValueFromDB(point, start, end, "count");
         if (Double.isNaN(result)) {