Common Flume problems


3. What happens when the disk backing Flume's file channel fills up

Data loss is possible, because the channel's checkpoint can no longer be written to disk. It is best not to stop Flume manually in this state; once the disk is expanded, the problem resolves itself.
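For reference, the disk paths in question are the file channel's checkpoint and data directories. A minimal configuration sketch is shown below; the agent/channel names and paths are hypothetical, not taken from this article:

a1.channels.c1.type = file
a1.channels.c1.checkpointDir = /data/flume/checkpoint
a1.channels.c1.dataDirs = /data/flume/data
a1.channels.c1.capacity = 1000000

If checkpointDir and dataDirs sit on the same full volume, both event writes and checkpoint writes fail until space is freed.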

4. Adding monitoring to Flume

Add the following parameters to the startup script:

-Dflume.monitoring.type=http -Dflume.monitoring.port=34545
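For example, a complete launch command carrying these flags might look like the sketch below; the config paths and the agent name a1 are assumptions to adapt to your deployment:

bin/flume-ng agent --conf conf --conf-file conf/flume.conf --name a1 \
  -Dflume.monitoring.type=http -Dflume.monitoring.port=34545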

You can then fetch monitoring information from http://ip:34545/metrics; the response looks like this:

{
	"SINK.k1": {
		"ConnectionCreatedCount": "5",
		"BatchCompleteCount": "0",
		"EventWriteFail": "0",
		"BatchEmptyCount": "0",
		"EventDrainAttemptCount": "373596",
		"StartTime": "1632377071683",
		"BatchUnderflowCount": "0",
		"ChannelReadFail": "0",
		"ConnectionFailedCount": "0",
		"ConnectionClosedCount": "4",
		"Type": "SINK",
		"EventDrainSuccessCount": "373496",
		"StopTime": "0"
	},
	"CHANNEL.c1": {
		"Unhealthy": "0",
		"ChannelSize": "3500",
		"EventTakeAttemptCount": "373668",
		"StartTime": "1632377071321",
		"Open": "true",
		"CheckpointWriteErrorCount": "0",
		"ChannelCapacity": "1000000",
		"ChannelFillPercentage": "0.35000000000000003",
		"EventTakeErrorCount": "0",
		"Type": "CHANNEL",
		"EventTakeSuccessCount": "373496",
		"Closed": "0",
		"CheckpointBackupWriteErrorCount": "0",
		"EventPutAttemptCount": "377083",
		"EventPutSuccessCount": "376996",
		"EventPutErrorCount": "0",
		"StopTime": "0"
	},
	"SOURCE.s1": {
		"KafkaEventGetTimer": "41085",
		"AppendBatchAcceptedCount": "0",
		"EventAcceptedCount": "377200",
		"AppendReceivedCount": "0",
		"StartTime": "1632377071840",
		"AppendBatchReceivedCount": "0",
		"KafkaCommitTimer": "18612",
		"EventReceivedCount": "377300",
		"Type": "SOURCE",
		"AppendAcceptedCount": "0",
		"KafkaEmptyCount": "1",
		"OpenConnectionCount": "0",
		"StopTime": "0"
	}
}
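These counters make it straightforward to alert on a backlogged channel, e.g. when ChannelFillPercentage keeps climbing because the sink is falling behind the source. A minimal polling sketch in Python; the URL, the channel name CHANNEL.c1, and the threshold are assumptions, not values prescribed by the article:

#!/usr/bin/env python3
"""Poll the Flume HTTP metrics endpoint and warn when the channel fills up.

A minimal sketch: the metrics URL, channel name, and threshold below
are assumptions to adapt to your own agent.
"""
import json
import urllib.request

METRICS_URL = "http://ip:34545/metrics"   # replace "ip" with the agent host
FILL_THRESHOLD = 80.0                     # percent; pick what suits your setup

def main():
    with urllib.request.urlopen(METRICS_URL, timeout=5) as resp:
        metrics = json.load(resp)
    # Flume reports every metric value as a string, so convert before comparing.
    channel = metrics["CHANNEL.c1"]
    fill = float(channel["ChannelFillPercentage"])
    size = int(channel["ChannelSize"])
    print(f"channel c1: size={size}, fill={fill:.2f}%")
    if fill > FILL_THRESHOLD:
        print("WARNING: channel is filling up; the sink may be falling behind")

if __name__ == "__main__":
    main()

Run it from cron or a monitoring agent; comparing EventDrainSuccessCount (sink) against EventPutSuccessCount (channel) over time gives a similar signal.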