How to create logs on HDFS using a custom log4j appender?

Problem description

Overview

We want to log Spark job activity using log4j and write the log files to HDFS.

  • Java 8, Spark 2.4.6, Scala 2.12, Hadoop 3.2.1

We could not find a native Apache log4j appender that writes to HDFS (Apache Flume is not an option), so we started writing our own.

The Scala logic mirrors the Java version; however, logging through the custom appender always fails with a stack overflow error.

We cannot spot any obvious mistake. Are we missing something?

Implementation:

Here is a simple hello-world project that reproduces the issue:

  • a Scala / Java project (Maven build) containing a simple main class, plus:
  • an HDFSAppender class (extending log4j's AppenderSkeleton)
  • a LoggingTest unit-test class
  • a log4j configuration file for the tests

HDFSAppender.scala - the appender class

```
package com.obfuscated.test.spark.log4j

import java.io.BufferedOutputStream
import java.io.IOException
import java.net.URI
import java.net.URISyntaxException

import scala.beans.BeanProperty

import org.apache.hadoop.fs.FSDataOutputStream
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.conf.Configuration
import org.apache.log4j.AppenderSkeleton
import org.apache.log4j.Level
import org.apache.log4j.Logger
import org.apache.log4j.MDC
import org.apache.log4j.spi.LoggingEvent
import org.apache.logging.log4j.scala.Logging
import org.apache.spark.SparkContext

object HDFSAppender {
  def createAndRegister(): HDFSAppender = {
    val hdfsAppender: HDFSAppender = new HDFSAppender
    Logger.getRootLogger.addAppender(hdfsAppender)
    hdfsAppender
  }

  def setClassLoggerLevel(clazz: Class[_], logLevel: Level): Level = {
    val logger = Logger.getLogger(clazz)
    val previousLevel = logger.getLevel
    logger.setLevel(logLevel)
    previousLevel
  }

  def unregister(hdfsAppender: HDFSAppender): Unit = {
    Logger.getRootLogger.removeAppender(hdfsAppender)
  }
}


class HDFSAppender extends AppenderSkeleton with Logging {

  var sc: SparkContext = _
  
  @BeanProperty
  var file: String = null

  @BeanProperty
  var URI: String = null

  override def close(): Unit = {}

  override def requiresLayout(): Boolean = true

  override protected def append(event: LoggingEvent): Unit = {
    val log: String = this.layout.format(event)
    write(this.URI, file, log)
  }
    
  def write(uriString: String, file: String, log: String): Unit = {
    val conf: Configuration = new Configuration()
    MDC.put("eventName", "HDFSLogWriterWrite")

    val uri: URI = new URI(uriString)
    val logPath: Path = new Path(uriString + file)

    val fs = getFS(uri, conf)
    try {
      // Append to the log file if it already exists, otherwise create it
      val outputStream: FSDataOutputStream =
        if (fs.exists(logPath) && fs.getFileStatus(logPath).isFile) {
          fs.append(logPath, 512)
        } else {
          fs.create(logPath, true, 512)
        }
      val bufferedOutput = new BufferedOutputStream(outputStream)
      try {
        bufferedOutput.write(log.getBytes("UTF-8"))
      } finally {
        bufferedOutput.close()
      }
    } catch {
      case e: IOException        => e.printStackTrace()
      case e: URISyntaxException => e.printStackTrace()
    }
  }
  /**
   * Returns a FileSystem for the given URI.
   */
  private def getFS(uri: URI, conf: Configuration): FileSystem = {
    var fs: FileSystem = null
    try {
      fs = FileSystem.get(uri, conf)
    } catch {
      case e: Exception =>
        e.printStackTrace()
    }
    fs
  }
}

```

LoggingTest.scala - the test class

```
package com.obfuscated.test.spark.log4j.test

import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.apache.log4j.Level
import org.apache.log4j.LogManager
import org.apache.log4j.Logger
import org.junit.Assert.assertTrue
import org.junit.BeforeClass
import org.junit.Test
import org.scalatest.junit.JUnitSuite

import com.obfuscated.test.spark.log4j.CustomJsonEventLayout
import com.obfuscated.test.spark.log4j.HDFSAppender

import LoggingTest.fsHDFS
import LoggingTest.testHDFSHost
import LoggingTest.testOutputLogPath


object LoggingTest {
  
  var fsHDFS: FileSystem = _
  
  val testHDFSHost: String = 
    "hdfs://localhost:9000"
  
  val testOutputLogPath: String =
    "/test/ut/logging/log-test-hdfs-appender_scala.txt"

  @BeforeClass
  def setupFsRemovePreviousTestFiles(): Unit = {
    
    val conf: Configuration = new Configuration()
    conf.set("fs.defaultFS", testHDFSHost)
    fsHDFS = FileSystem.get(new URI(testHDFSHost), conf)
    val outputPath: Path = new Path(testOutputLogPath)
    if (fsHDFS.exists(outputPath)) {
      fsHDFS.delete(outputPath, true)
    }
  }

}

/**
 * Programmatically builds and tests logging events with log4j.
 *
 */
class LoggingTest extends JUnitSuite {
  val conf: Configuration = new Configuration()

  @Test
  def testHDFSAppender(): Unit = {
    val hdfsAppender: HDFSAppender = new HDFSAppender()
    val rootlogger: Logger = LogManager.getRootLogger
   
    hdfsAppender.setFile(testOutputLogPath)
    hdfsAppender.setURI(testHDFSHost)
    hdfsAppender.setThreshold(Level.ALL)
    hdfsAppender.setLayout(new CustomJsonEventLayout())
    rootlogger.addAppender(hdfsAppender)
    val logger: Logger = Logger.getLogger(this.getClass)

    try {
      logger.info("Test info message")
      logger.warn("Test warn message")
      logger.error("Test error message")
      logger.debug("Test debug message")
    } finally {
      rootlogger.removeAppender(hdfsAppender)
    }

    val logFile: Path = new Path(testHDFSHost + testOutputLogPath)
    assertTrue(fsHDFS.exists(logFile))
    // TODO: further assertions to check log contents
  }
}

```

log4j configuration file (used by the tests at runtime)

```
log4j.rootLogger=DEBUG, console, HDFSAppender

log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.threshold=DEBUG
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSS} [%-5p] [%X{eventName}] %m %c%n    
log4j.appender.HDFSAppender=com.obfuscated.test.spark.log4j.HDFSAppender
log4j.appender.HDFSAppender.layout=com.obfuscated.test.spark.log4j.CustomJsonEventLayout
log4j.appender.HDFSAppender.file=/test/ut/logging/log-test-hdfs-appender_scala.json
log4j.appender.HDFSAppender.URI=hdfs://localhost:9000
log4j.appender.HDFSAppender.threshold=DEBUG

```

Any logging event crashes the program with a stack overflow error:

java.util.concurrent.ExecutionException: java.lang.StackOverflowError
 at java.util.concurrent.FutureTask.report(FutureTask.java:122)
 at java.util.concurrent.FutureTask.get(FutureTask.java:206)
 at org.apache.hadoop.util.ShutdownHookManager.executeShutdown(ShutdownHookManager.java:124)
 at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:95)
Caused by: java.lang.StackOverflowError
 at org.apache.log4j.PatternLayout.format(PatternLayout.java:506)
 at org.apache.log4j.WriterAppender.subAppend(WriterAppender.java:310)
 at org.apache.log4j.WriterAppender.append(WriterAppender.java:162)

This is followed by frames that hint at some kind of loop / race condition: the appender's write path (HDFSLogWriter.write) builds a SparkSession, whose construction logs a warning through the root logger and so re-enters the appender:

 at org.apache.log4j.AppenderSkeleton.doAppend(AppenderSkeleton.java:251)
   at org.apache.log4j.helpers.AppenderAttachableImpl.appendLoopOnAppenders(AppenderAttachableImpl.java:66)
   at org.apache.log4j.Category.callAppenders(Category.java:206)
   at org.apache.log4j.Category.forcedLog(Category.java:391)
   at org.apache.log4j.Category.log(Category.java:856)
   at org.slf4j.impl.Log4jLoggerAdapter.warn(Log4jLoggerAdapter.java:401)
   at org.apache.spark.internal.Logging.logWarning(Logging.scala:66)
   at org.apache.spark.internal.Logging.logWarning$(Logging.scala:65)
   at org.apache.spark.SparkContext$.logWarning(SparkContext.scala:2442)
   at org.apache.spark.SparkContext$.$anonfun$assertNoOtherContextIsRunning$5(SparkContext.scala:2500)
   at org.apache.spark.SparkContext$.$anonfun$assertNoOtherContextIsRunning$5$adapted(SparkContext.scala:2491)
   at scala.Option.foreach(Option.scala:274)
   at org.apache.spark.SparkContext$.assertNoOtherContextIsRunning(SparkContext.scala:2491)
   at org.apache.spark.SparkContext$.markPartiallyConstructed(SparkContext.scala:2568)
   at org.apache.spark.SparkContext.<init>(SparkContext.scala:85)
   at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
   at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$5(SparkSession.scala:935)
   at scala.Option.getOrElse(Option.scala:138)
   at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
   at com.obfuscated.test.spark.log4j.HDFSLogWriter.write(HDFSLogWriter.scala:23)
   at com.obfuscated.test.spark.log4j.HDFSAppender.append(HDFSAppender.scala:63)
(repeats 48 times...)

Maven dependencies

[INFO] 
[INFO] ----------< spark-hello-world-scala:spark-hello-world-scala >-----------
[INFO] Building spark-hello-world-scala 0.0.1-SNAPSHOT
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-dependency-plugin:2.8:tree (default-cli) @ spark-hello-world-scala ---
[INFO] spark-hello-world-scala:spark-hello-world-scala:jar:0.0.1-SNAPSHOT
[INFO] +- org.apache.spark:spark-core_2.12:jar:3.0.0:compile
[INFO] |  +- com.thoughtworks.paranamer:paranamer:jar:2.8:compile
[INFO] |  +- org.apache.avro:avro:jar:1.8.2:compile
[INFO] |  |  +- org.codehaus.jackson:jackson-core-asl:jar:1.9.13:compile
[INFO] |  |  +- org.codehaus.jackson:jackson-mapper-asl:jar:1.9.13:compile
[INFO] |  |  +- org.apache.commons:commons-compress:jar:1.8.1:compile
[INFO] |  |  \- org.tukaani:xz:jar:1.5:compile
[INFO] |  +- org.apache.avro:avro-mapred:jar:hadoop2:1.8.2:compile
[INFO] |  |  +- org.apache.avro:avro-ipc:jar:1.8.2:compile
[INFO] |  |  \- commons-codec:commons-codec:jar:1.9:compile
[INFO] |  +- com.twitter:chill_2.12:jar:0.9.5:compile
[INFO] |  |  \- com.esotericsoftware:kryo-shaded:jar:4.0.2:compile
[INFO] |  |     +- com.esotericsoftware:minlog:jar:1.3.0:compile
[INFO] |  |     \- org.objenesis:objenesis:jar:2.5.1:compile
[INFO] |  +- com.twitter:chill-java:jar:0.9.5:compile
[INFO] |  +- org.apache.xbean:xbean-asm7-shaded:jar:4.15:compile
[INFO] |  +- org.apache.hadoop:hadoop-client:jar:2.7.4:compile
[INFO] |  |  +- org.apache.hadoop:hadoop-common:jar:2.7.4:compile
[INFO] |  |  |  +- commons-cli:commons-cli:jar:1.2:compile
[INFO] |  |  |  +- xmlenc:xmlenc:jar:0.52:compile
[INFO] |  |  |  +- commons-httpclient:commons-httpclient:jar:3.1:compile
[INFO] |  |  |  +- commons-collections:commons-collections:jar:3.2.2:compile
[INFO] |  |  |  +- org.mortbay.jetty:jetty-sslengine:jar:6.1.26:compile
[INFO] |  |  |  +- javax.servlet.jsp:jsp-api:jar:2.1:runtime
[INFO] |  |  |  +- commons-configuration:commons-configuration:jar:1.6:compile
[INFO] |  |  |  |  \- commons-digester:commons-digester:jar:1.8:compile
[INFO] |  |  |  |     \- commons-beanutils:commons-beanutils:jar:1.7.0:compile
[INFO] |  |  |  +- com.google.code.gson:gson:jar:2.2.4:compile
[INFO] |  |  |  +- org.apache.hadoop:hadoop-auth:jar:2.7.4:compile
[INFO] |  |  |  |  +- org.apache.httpcomponents:httpclient:jar:4.2.5:compile
[INFO] |  |  |  |  |  \- org.apache.httpcomponents:httpcore:jar:4.2.4:compile
[INFO] |  |  |  |  \- org.apache.directory.server:apacheds-kerberos-codec:jar:2.0.0-M15:compile
[INFO] |  |  |  |     +- org.apache.directory.server:apacheds-i18n:jar:2.0.0-M15:compile
[INFO] |  |  |  |     +- org.apache.directory.api:api-asn1-api:jar:1.0.0-M20:compile
[INFO] |  |  |  |     \- org.apache.directory.api:api-util:jar:1.0.0-M20:compile
[INFO] |  |  |  +- org.apache.curator:curator-client:jar:2.7.1:compile
[INFO] |  |  |  \- org.apache.htrace:htrace-core:jar:3.1.0-incubating:compile
[INFO] |  |  +- org.apache.hadoop:hadoop-hdfs:jar:2.7.4:compile
[INFO] |  |  |  +- org.mortbay.jetty:jetty-util:jar:6.1.26:compile
[INFO] |  |  |  \- xerces:xercesImpl:jar:2.9.1:compile
[INFO] |  |  |     \- xml-apis:xml-apis:jar:1.3.04:compile
[INFO] |  |  +- org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile
[INFO] |  |  |  +- org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile
[INFO] |  |  |  |  +- org.apache.hadoop:hadoop-yarn-client:jar:2.7.4:compile
[INFO] |  |  |  |  \- org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile
[INFO] |  |  |  \- org.apache.hadoop:hadoop-mapreduce-client-shuffle:jar:2.7.4:compile
[INFO] |  |  +- org.apache.hadoop:hadoop-yarn-api:jar:2.7.4:compile
[INFO] |  |  +- org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.7.4:compile
[INFO] |  |  |  \- org.apache.hadoop:hadoop-yarn-common:jar:2.7.4:compile
[INFO] |  |  |     +- javax.xml.bind:jaxb-api:jar:2.2.2:compile
[INFO] |  |  |     |  \- javax.xml.stream:stax-api:jar:1.0-2:compile
[INFO] |  |  |     +- org.codehaus.jackson:jackson-jaxrs:jar:1.9.13:compile
[INFO] |  |  |     \- org.codehaus.jackson:jackson-xc:jar:1.9.13:compile
[INFO] |  |  +- org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar:2.7.4:compile
[INFO] |  |  \- org.apache.hadoop:hadoop-annotations:jar:2.7.4:compile
[INFO] |  +- org.apache.spark:spark-launcher_2.12:jar:3.0.0:compile
[INFO] |  +- org.apache.spark:spark-kvstore_2.12:jar:3.0.0:compile
[INFO] |  |  +- org.fusesource.leveldbjni:leveldbjni-all:jar:1.8:compile
[INFO] |  |  +- com.fasterxml.jackson.core:jackson-core:jar:2.10.0:compile
[INFO] |  |  \- com.fasterxml.jackson.core:jackson-annotations:jar:2.10.0:compile
[INFO] |  +- org.apache.spark:spark-network-common_2.12:jar:3.0.0:compile
[INFO] |  +- org.apache.spark:spark-network-shuffle_2.12:jar:3.0.0:compile
[INFO] |  +- org.apache.spark:spark-unsafe_2.12:jar:3.0.0:compile
[INFO] |  +- javax.activation:activation:jar:1.1.1:compile
[INFO] |  +- org.apache.curator:curator-recipes:jar:2.7.1:compile
[INFO] |  |  +- org.apache.curator:curator-framework:jar:2.7.1:compile
[INFO] |  |  \- com.google.guava:guava:jar:16.0.1:compile
[INFO] |  +- org.apache.zookeeper:zookeeper:jar:3.4.14:compile
[INFO] |  |  \- org.apache.yetus:audience-annotations:jar:0.5.0:compile
[INFO] |  +- javax.servlet:javax.servlet-api:jar:3.1.0:compile
[INFO] |  +- org.apache.commons:commons-lang3:jar:3.9:compile
[INFO] |  +- org.apache.commons:commons-math3:jar:3.4.1:compile
[INFO] |  +- org.apache.commons:commons-text:jar:1.6:compile
[INFO] |  +- com.google.code.findbugs:jsr305:jar:3.0.0:compile
[INFO] |  +- org.slf4j:slf4j-api:jar:1.7.30:compile
[INFO] |  +- org.slf4j:jul-to-slf4j:jar:1.7.30:compile
[INFO] |  +- org.slf4j:jcl-over-slf4j:jar:1.7.30:compile
[INFO] |  +- log4j:log4j:jar:1.2.17:compile
[INFO] |  +- org.slf4j:slf4j-log4j12:jar:1.7.30:compile
[INFO] |  +- com.ning:compress-lzf:jar:1.0.3:compile
[INFO] |  +- org.xerial.snappy:snappy-java:jar:1.1.7.5:compile
[INFO] |  +- org.lz4:lz4-java:jar:1.7.1:compile
[INFO] |  +- com.github.luben:zstd-jni:jar:1.4.4-3:compile
[INFO] |  +- org.roaringbitmap:RoaringBitmap:jar:0.7.45:compile
[INFO] |  |  \- org.roaringbitmap:shims:jar:0.7.45:compile
[INFO] |  +- commons-net:commons-net:jar:3.1:compile
[INFO] |  +- org.scala-lang.modules:scala-xml_2.12:jar:1.2.0:compile
[INFO] |  +- org.scala-lang:scala-reflect:jar:2.12.10:compile
[INFO] |  +- org.json4s:json4s-jackson_2.12:jar:3.6.6:compile
[INFO] |  |  \- org.json4s:json4s-core_2.12:jar:3.6.6:compile
[INFO] |  |     +- org.json4s:json4s-ast_2.12:jar:3.6.6:compile
[INFO] |  |     \- org.json4s:json4s-scalap_2.12:jar:3.6.6:compile
[INFO] |  +- org.glassfish.jersey.core:jersey-client:jar:2.30:compile
[INFO] |  |  +- jakarta.ws.rs:jakarta.ws.rs-api:jar:2.1.6:compile
[INFO] |  |  \- org.glassfish.hk2.external:jakarta.inject:jar:2.6.1:compile
[INFO] |  +- org.glassfish.jersey.core:jersey-common:jar:2.30:compile
[INFO] |  |  +- jakarta.annotation:jakarta.annotation-api:jar:1.3.5:compile
[INFO] |  |  \- org.glassfish.hk2:osgi-resource-locator:jar:1.0.3:compile
[INFO] |  +- org.glassfish.jersey.core:jersey-server:jar:2.30:compile
[INFO] |  |  +- org.glassfish.jersey.media:jersey-media-jaxb:jar:2.30:compile
[INFO] |  |  \- jakarta.validation:jakarta.validation-api:jar:2.0.2:compile
[INFO] |  +- org.glassfish.jersey.containers:jersey-container-servlet:jar:2.30:compile
[INFO] |  +- org.glassfish.jersey.containers:jersey-container-servlet-core:jar:2.30:compile
[INFO] |  +- org.glassfish.jersey.inject:jersey-hk2:jar:2.30:compile
[INFO] |  |  +- org.glassfish.hk2:hk2-locator:jar:2.6.1:compile
[INFO] |  |  |  +- org.glassfish.hk2.external:aopalliance-repackaged:jar:2.6.1:compile
[INFO] |  |  |  +- org.glassfish.hk2:hk2-api:jar:2.6.1:compile
[INFO] |  |  |  \- org.glassfish.hk2:hk2-utils:jar:2.6.1:compile
[INFO] |  |  \- org.javassist:javassist:jar:3.25.0-GA:compile
[INFO] |  +- io.netty:netty-all:jar:4.1.47.Final:compile
[INFO] |  +- com.clearspring.analytics:stream:jar:2.9.6:compile
[INFO] |  +- io.dropwizard.metrics:metrics-core:jar:4.1.1:compile
[INFO] |  +- io.dropwizard.metrics:metrics-jvm:jar:4.1.1:compile
[INFO] |  +- io.dropwizard.metrics:metrics-json:jar:4.1.1:compile
[INFO] |  +- io.dropwizard.metrics:metrics-graphite:jar:4.1.1:compile
[INFO] |  +- io.dropwizard.metrics:metrics-jmx:jar:4.1.1:compile
[INFO] |  +- com.fasterxml.jackson.core:jackson-databind:jar:2.10.0:compile
[INFO] |  +- com.fasterxml.jackson.module:jackson-module-scala_2.12:jar:2.10.0:compile
[INFO] |  |  \- com.fasterxml.jackson.module:jackson-module-paranamer:jar:2.10.0:compile
[INFO] |  +- org.apache.ivy:ivy:jar:2.4.0:compile
[INFO] |  +- oro:oro:jar:2.0.8:compile
[INFO] |  +- net.razorvine:pyrolite:jar:4.30:compile
[INFO] |  +- net.sf.py4j:py4j:jar:0.10.9:compile
[INFO] |  +- org.apache.spark:spark-tags_2.12:jar:3.0.0:compile
[INFO] |  +- org.apache.commons:commons-crypto:jar:1.0.0:compile
[INFO] |  \- org.spark-project.spark:unused:jar:1.0.0:compile
[INFO] +- org.apache.spark:spark-mllib_2.12:jar:3.0.0:runtime
[INFO] |  +- org.scala-lang.modules:scala-parser-combinators_2.12:jar:1.1.2:compile
[INFO] |  +- org.apache.spark:spark-streaming_2.12:jar:3.0.0:runtime
[INFO] |  +- org.apache.spark:spark-graphx_2.12:jar:3.0.0:runtime
[INFO] |  |  +- com.github.fommil.netlib:core:jar:1.1.2:runtime
[INFO] |  |  \- net.sourceforge.f2j:arpack_combined_all:jar:0.1:runtime
[INFO] |  +- org.apache.spark:spark-mllib-local_2.12:jar:3.0.0:runtime
[INFO] |  +- org.scalanlp:breeze_2.12:jar:1.0:runtime
[INFO] |  |  +- org.scalanlp:breeze-macros_2.12:jar:1.0:runtime
[INFO] |  |  +- net.sf.opencsv:opencsv:jar:2.3:runtime
[INFO] |  |  +- com.github.wendykierp:JTransforms:jar:3.1:runtime
[INFO] |  |  |  \- pl.edu.icm:JLargeArrays:jar:1.5:runtime
[INFO] |  |  +- com.chuusai:shapeless_2.12:jar:2.3.3:runtime
[INFO] |  |  |  \- org.typelevel:macro-compat_2.12:jar:1.1.1:runtime
[INFO] |  |  +- org.typelevel:spire_2.12:jar:0.17.0-M1:runtime
[INFO] |  |  |  +- org.typelevel:spire-macros_2.12:jar:0.17.0-M1:runtime
[INFO] |  |  |  +- org.typelevel:spire-platform_2.12:jar:0.17.0-M1:runtime
[INFO] |  |  |  +- org.typelevel:spire-util_2.12:jar:0.17.0-M1:runtime
[INFO] |  |  |  +- org.typelevel:machinist_2.12:jar:0.6.8:runtime
[INFO] |  |  |  \- org.typelevel:algebra_2.12:jar:2.0.0-M2:runtime
[INFO] |  |  |     \- org.typelevel:cats-kernel_2.12:jar:2.0.0-M4:runtime
[INFO] |  |  \- org.scala-lang.modules:scala-collection-compat_2.12:jar:2.1.1:runtime
[INFO] |  \- org.glassfish.jaxb:jaxb-runtime:jar:2.3.2:runtime
[INFO] |     +- jakarta.xml.bind:jakarta.xml.bind-api:jar:2.3.2:runtime
[INFO] |     \- com.sun.istack:istack-commons-runtime:jar:3.0.8:runtime
[INFO] +- org.scala-lang:scala-library:jar:2.12.8:compile
[INFO] +- org.apache.spark:spark-sql_2.12:jar:3.0.0:compile
[INFO] |  +- com.univocity:univocity-parsers:jar:2.8.3:compile
[INFO] |  +- org.apache.spark:spark-sketch_2.12:jar:3.0.0:compile
[INFO] |  +- org.apache.spark:spark-catalyst_2.12:jar:3.0.0:compile
[INFO] |  |  +- org.codehaus.janino:janino:jar:3.0.16:compile
[INFO] |  |  +- org.codehaus.janino:commons-compiler:jar:3.0.16:compile
[INFO] |  |  +- org.antlr:antlr4-runtime:jar:4.7.1:compile
[INFO] |  |  \- org.apache.arrow:arrow-vector:jar:0.15.1:compile
[INFO] |  |     +- org.apache.arrow:arrow-format:jar:0.15.1:compile
[INFO] |  |     +- org.apache.arrow:arrow-memory:jar:0.15.1:compile
[INFO] |  |     \- com.google.flatbuffers:flatbuffers-java:jar:1.9.0:compile
[INFO] |  +- org.apache.orc:orc-core:jar:1.5.10:compile
[INFO] |  |  +- org.apache.orc:orc-shims:jar:1.5.10:compile
[INFO] |  |  +- com.google.protobuf:protobuf-java:jar:2.5.0:compile
[INFO] |  |  +- commons-lang:commons-lang:jar:2.6:compile
[INFO] |  |  +- io.airlift:aircompressor:jar:0.10:compile
[INFO] |  |  \- org.threeten:threeten-extra:jar:1.5.0:compile
[INFO] |  +- org.apache.orc:orc-mapreduce:jar:1.5.10:compile
[INFO] |  +- org.apache.hive:hive-storage-api:jar:2.7.1:compile
[INFO] |  +- org.apache.parquet:parquet-column:jar:1.10.1:compile
[INFO] |  |  +- org.apache.parquet:parquet-common:jar:1.10.1:compile
[INFO] |  |  \- org.apache.parquet:parquet-encoding:jar:1.10.1:compile
[INFO] |  \- org.apache.parquet:parquet-hadoop:jar:1.10.1:compile
[INFO] |     +- org.apache.parquet:parquet-format:jar:2.4.0:compile
[INFO] |     \- org.apache.parquet:parquet-jackson:jar:1.10.1:compile
[INFO] +- com.databricks:spark-xml_2.12:jar:0.9.0:compile
[INFO] |  +- commons-io:commons-io:jar:2.6:compile
[INFO] |  \- org.glassfish.jaxb:txw2:jar:2.3.2:compile
[INFO] +- org.apache.spark:spark-avro_2.12:jar:3.0.0:compile
[INFO] +- org.scalatest:scalatest_2.12:jar:3.0.5:test
[INFO] |  \- org.scalactic:scalactic_2.12:jar:3.0.5:test
[INFO] +- org.specs2:specs2-core_2.12:jar:4.2.0:test
[INFO] |  +- org.specs2:specs2-matcher_2.12:jar:4.2.0:test
[INFO] |  |  \- org.specs2:specs2-common_2.12:jar:4.2.0:test
[INFO] |  |     \- org.specs2:specs2-fp_2.12:jar:4.2.0:test
[INFO] |  \- org.scala-sbt:test-interface:jar:1.0:test
[INFO] +- org.specs2:specs2-junit_2.12:jar:4.2.0:test
[INFO] |  \- junit:junit:jar:4.12:test
[INFO] |     \- org.hamcrest:hamcrest-core:jar:1.3:test
[INFO] +- org.apache.logging.log4j:log4j-api:jar:2.13.3:compile
[INFO] +- org.apache.logging.log4j:log4j-core:jar:2.13.3:compile
[INFO] +- org.apache.logging.log4j:log4j-api-scala_2.12:jar:12.0:compile
[INFO] +- org.apache.logging.log4j:log4j-api:test-jar:tests:2.13.3:test
[INFO] \- org.apache.logging.log4j:log4j-to-slf4j:jar:2.13.3:compile
Tags: java, scala, apache-spark, hadoop, log4j

1 Answer:

The stack overflow is expected, given that your code does not implement a ThreadLocal flag to guard against re-entrant calls. Internally, any of the HDFS implementation classes may call `log.info()`, which re-enters your appender and creates a recursive loop that ends in a stack overflow.
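A minimal sketch of such a guard, assuming the HDFS write logic from `HDFSAppender.write` above is factored into a helper (`GuardedHDFSAppender` and `writeToHDFS` are hypothetical names, not part of the original code):

```
package com.obfuscated.test.spark.log4j

import org.apache.log4j.AppenderSkeleton
import org.apache.log4j.spi.LoggingEvent

class GuardedHDFSAppender extends AppenderSkeleton {

  // True while the current thread is already inside append(), so any log
  // event emitted by the HDFS client during the write is detected here.
  private val inAppend = new ThreadLocal[Boolean] {
    override def initialValue(): Boolean = false
  }

  override def close(): Unit = {}

  override def requiresLayout(): Boolean = true

  override protected def append(event: LoggingEvent): Unit = {
    if (inAppend.get()) {
      // Re-entrant call triggered by our own write path:
      // drop the event instead of recursing until the stack overflows.
      return
    }
    inAppend.set(true)
    try {
      writeToHDFS(this.layout.format(event))
    } finally {
      inAppend.set(false)
    }
  }

  // Placeholder for the HDFS write logic (fs.append / fs.create)
  // shown in HDFSAppender.write above.
  private def writeToHDFS(log: String): Unit = {}
}
```

Note that the guard only breaks same-thread recursion: events logged by the HDFS client classes during a write are silently dropped, which is usually acceptable for an appender that must not log through itself. Your stack trace also shows the write path building a SparkSession (HDFSLogWriter.write), whose construction itself logs warnings through the root logger; keeping all Spark calls out of the appender removes that second source of re-entrancy.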
