scala实现KafkaProducer 1.0读取文件发送到kafka

pom.xml文件

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <dependencies>
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.11.8</version>
        </dependency>

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>1.0.0</version>
        </dependency>

    </dependencies>

    <build>
        <plugins>
            <!-- 打包插件, 否则 scala 类不会编译并打包进去 -->
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.4.6</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>

            <plugin>
                <artifactId>maven-assembly-plugin</artifactId>
                <configuration>
                    <appendAssemblyId>false</appendAssemblyId>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                    <archive>
                        <manifest>
                            <mainClass>com.lbtt.onlineeducation.RegisterProducer</mainClass> <!-- 主类名称 -->
                        </manifest>
                    </archive>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>

scala源码

package com.lbtt.onlineeducation

import java.io.{BufferedReader, File, FileReader}
import java.util.Properties

import scala.util.control.NonFatal

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

/**
* author hneratsz
* date 2020/6/27
*/
/**
 * Reads a text file line by line and sends each line as a string record
 * to the Kafka topic "test", keyed by its 0-based line number.
 *
 * Usage: the source file path may be supplied as the first command-line
 * argument; otherwise the hard-coded placeholder path is used.
 */
object RegisterProducer {
  def main(args: Array[String]): Unit = {

    val props = new Properties()
    // 9092 is the Kafka broker listener port used by producers
    props.put("bootstrap.servers", "node01:9092,node02:9092,node03:9092")
    props.put("acks", "1")                  // wait for leader acknowledgement only
    props.put("batch.size", "16384")        // max bytes per batch
    props.put("linger.ms", "10")            // small delay to allow batching
    props.put("buffer.memory", "33554432")  // 32 MB producer-side buffer
    props.put("key.serializer",
      "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer",
      "org.apache.kafka.common.serialization.StringSerializer")
    val producer = new KafkaProducer[String, String](props)

    // Source log file; allow override via the first CLI argument so the
    // path is no longer hard-coded (backward compatible: the old default
    // placeholder is kept when no argument is given).
    val srcPath = if (args.nonEmpty) args(0) else "路径"

    // The producer is created above, so it must be closed in the outer
    // finally even when opening the file throws (the original code leaked
    // the producer if FileReader's constructor failed before the try).
    try {
      // NOTE(review): FileReader uses the platform default charset — confirm
      // the log files match it, or switch to an explicit-charset reader.
      val srcBufferedReader = new BufferedReader(new FileReader(new File(srcPath)))
      try {
        val topic = "test"
        var line = srcBufferedReader.readLine()
        var count = 0
        while (line != null) {
          // Key = line number as a string; value = raw line text.
          producer.send(
            new ProducerRecord[String, String](topic, Integer.toString(count), line)
          )
          println(s"发送${topic}的${count}条数据>>>" + line)
          line = srcBufferedReader.readLine()
          count += 1
        }
      } finally {
        // Closing the BufferedReader also closes the wrapped FileReader.
        srcBufferedReader.close()
      }
    } catch {
      // NonFatal instead of Exception: let OutOfMemoryError etc. propagate.
      case NonFatal(e) => println(e.getMessage)
    } finally {
      // close() flushes any buffered records before releasing resources.
      producer.close()
    }
  }
}

参考

kafkaProducer 1.1 读取文件目录文件,发送到kafka Topic

posted @ 2020-06-28 16:28  bitbitbyte  阅读(350)  评论(0编辑  收藏  举报