-
插入数据
-
删除数据
只能按 time的时间范围删除所有数据
不支持 field字段的条件删除
-
更新数据
把不符合要求的数据的 field 修改成其他值,这样查询 SQL 就检索不到了——曲线救国,在业务层面间接实现了数据的删除。
-
package com.example.demo.other;
import org.influxdb.BatchOptions;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class xx {
public static void main(String[] args) {
updat();
* 插入数据
public static void in() {
InfluxDB influxDB = getInfluxDB();
String database = "ins_message";
String retentionPolicy = "autogen";
influxDB.createDatabase(database);
influxDB.setDatabase(database);
influxDB.createRetentionPolicy(retentionPolicy, database, "30d", 1, true);
String measurement = "my_user_message";
String eventCode = "0";
String eventType = "2";
Instant now = Instant.now();
Instant sevenDaysAgo = now.minus(7, ChronoUnit.DAYS);
for (int i = 0; i < 10; i++) {
Instant timestamp = now.minus(i, ChronoUnit.DAYS);
Point point = Point.measurement(measurement)
.time(timestamp.toEpochMilli(), TimeUnit.MILLISECONDS)
.addField("eventCode", eventCode)
.addField("eventType", eventType)
.addField("source", "ins_8k")
.tag("userId", String.valueOf(111111111))
.tag("pointId", String.valueOf(222222222))
.tag("machineId", String.valueOf(333333333))
.build();
influxDB.write(database, retentionPolicy, point);
influxDB.close();
* influxdb 只支持 按time的时间范围删除 。
* 不支持 field的条件删除
public static void delete() {
InfluxDB influxDB = getInfluxDB();
String database = "ins_message";
String measurement = "my_user_message";
String whereClause = "time > now() - 17d";
String deleteQuery = String.format("DELETE FROM %s WHERE %s", measurement, whereClause);
Query query = new Query(deleteQuery, database);
QueryResult queryResult = influxDB.query(query);
System.out.println(deleteQuery);
* influxdb 修改数据
public static void updat() {
InfluxDB influxDB = getInfluxDB();
Instant now = Instant.now();
Instant sevenDaysAgo = now.minus(8, ChronoUnit.DAYS);
String database = "ins_message";
String measurement = "my_user_message";
String updateQuery = String.format("SELECT * FROM \"%s\" WHERE time >= '%s' AND time <= '%s' AND eventCode = '0' AND eventType = '2'", measurement, sevenDaysAgo, now);
Query query = new Query(updateQuery, database);
QueryResult queryResult = influxDB.query(query);
if (queryResult.hasError()) {
System.out.println("查询操作执行失败:" + queryResult.getError());
} else {
BatchPoints batchPoints = BatchPoints
.database(database)
.build();
if (queryResult != null && queryResult.getResults() != null) {
for (QueryResult.Result result : queryResult.getResults()) {
List<QueryResult.Series> series = result.getSeries();
if (series != null && series.size() > 0) {
for (QueryResult.Series serie : series) {
List<List<Object>> values = serie.getValues();
List<String> colums = serie.getColumns();
for (List<Object> value : values) {
Float eventType = 0f;
Float eventCode = 0f;
long time = 0;
String userId = "";
String pointId = "";
String machineId = "";
for (int j = 0; j < colums.size(); j++) {
if ("time".equals(colums.get(j))) {
time = Instant.parse((String) value.get(j)).toEpochMilli();
} else if ("eventCode".equals(colums.get(j))) {
eventCode = Float.parseFloat((String) value.get(j));
} else if ("eventType".equals(colums.get(j))) {
eventType = Float.parseFloat((String) value.get(j));
} else if ("userId".equals(colums.get(j))) {
userId = (String) value.get(j);
} else if ("pointId".equals(colums.get(j))) {
pointId = (String) value.get(j);
} else if ("machineId".equals(colums.get(j))) {
machineId = (String) value.get(j);
if (eventType == 2.0f && eventCode == 0.0f) {
Point point = Point.measurement(measurement)
.time(time, TimeUnit.MILLISECONDS)
.tag("userId", userId)
.tag("pointId", pointId)
.tag("machineId", machineId)
.addField("source", "errorData")
.build();
batchPoints.point(point);
BatchOptions options = BatchOptions.DEFAULTS.bufferLimit(10000).actions(100);
influxDB.enableBatch(options);
influxDB.write(batchPoints);
System.out.println("数据更新成功");
influxDB.close();
public static InfluxDB getInfluxDB() {
String influxDbUrl = "http://127.0.0.1:8086";
String username = "xxxx";
String password = "xxxxxx";
return InfluxDBFactory.connect(influxDbUrl, username, password);
<dependency>
    <groupId>org.influxdb</groupId>
    <artifactId>influxdb-java</artifactId>
    <version>2.23</version>
</dependency>
为了支持大量的时间序列,即数据库存储的唯一时间序列数量的基数非常高,InfluxData 添加了新的时间序列索引(TSI)。InfluxData 支持客户使用 InfluxDB 数以千万计的时间序列。然而,InfluxData 的目标是扩大到数亿,最终达到数十亿。使用 InfluxData 的 TSI 存储引擎,用户应该能够拥有数百万个独特的时间序列。目标是系列的数量应该不受服务器硬件上的内存量的限制。重要的是,数据库中存在的系列数量对数据库启动时间的影响可以忽略不计。
1、下载influxdb1.8安装包。3、启动并查看服务状态。4、添加influxdb账户。5、打开连接认证开关(编辑/etc/influxdb/influxdb.conf文件中)。
Point:代表一行的数据,由时间戳(time)、数据(field)和标签(tags)组成
tag sets: tags在InfluxDB中会按照字典序排序,不管是tag key还是tag value均如此。
专为时间序列数据编写的自定义高性能数据存储。 TSM引擎允许高摄取速度和数据压缩
完全用 Go 语言编写。 它编译成单个二进制文件,没有外部依赖项
简单,高性能的写入和查询HTTP API
专为类似SQL的查询语言量身定制,可轻松查询聚合数据