Tags: for, sp, file, on, cti, log, amp, ad, ef
Writing to HDFS (Flume agent configuration)
# Flume agent "a1": receives events over Avro and writes them to HDFS.

# Use an in-memory channel c1 (fast, but buffered events are lost if the agent dies).
a1.channels.c1.type = memory
# Define an Avro source called r1 on a1 and tell it
# to bind to 0.0.0.0:41414. Connect it to channel c1.
a1.sources.r1.channels = c1
a1.sources.r1.type = avro
a1.sources.r1.bind = 0.0.0.0
a1.sources.r1.port = 41414
# HDFS sink k1: drains channel c1 into the HDFS path below.
a1.sinks.k1.type = hdfs
a1.sinks.k1.channel = c1
a1.sinks.k1.hdfs.path = hdfs://192.168.0.200:9000/user/hadoop/input/web
# Output file names are built as <filePrefix>.<counter><fileSuffix>,
# e.g. access-2014-10-31.1414000000000.log
a1.sinks.k1.hdfs.filePrefix = access-%Y-%m-%d
a1.sinks.k1.hdfs.fileSuffix = .log
# Use the agent host's local clock to resolve the %Y-%m-%d escapes,
# so events do not need a "timestamp" header.
a1.sinks.k1.hdfs.useLocalTimeStamp = true
# Write plain text (DataStream) rather than the default SequenceFile.
a1.sinks.k1.hdfs.fileType = DataStream
a1.sinks.k1.hdfs.writeFormat = Text
# All three roll triggers are 0 = never roll by size, by time, or by event count.
# NOTE(review): combined with idleTimeout = 0 below, open files are only closed
# when the agent shuts down — confirm this is intended.
a1.sinks.k1.hdfs.rollSize = 0
a1.sinks.k1.hdfs.rollInterval= 0
a1.sinks.k1.hdfs.rollCount = 0
# Seconds of inactivity after which an idle file is closed (0 = never auto-close).
a1.sinks.k1.hdfs.idleTimeout = 0
#
# Finally, now that we've defined all of our components, tell
# a1 which ones we want to activate.
a1.channels = c1
a1.sources = r1
a1.sinks = k1
# Start the agent (run from the Flume installation directory):
#   -c conf/          configuration directory
#   -f conf/server.conf  this agent's configuration file
#   -n a1             agent name; must match the "a1" property prefix
#   -Dflume.root.logger=DEBUG,console  log at DEBUG level to the console
bin/flume-ng agent -c conf/ -f conf/server.conf -n a1 -Dflume.root.logger=DEBUG,console
Tags: for, sp, file, on, cti, log, amp, ad, ef
Original article: http://www.cnblogs.com/DDSys/p/4032294.html