Commit a6c8816

[query] use log4j-api-scala for string interpolation
1 parent: 6fdd548

82 files changed: +350 −403 lines changed

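Most of the 82 files change mechanically: classes drop Hail's old package-level log/info/warn helpers from is.hail.utils in favor of mixing in a Logging trait and calling methods on its logger member. A minimal sketch of the adopted pattern, assuming is.hail.utils.Logging delegates to log4j-api-scala's org.apache.logging.log4j.scala.Logging (the wrapper itself is not shown in this commit):

import org.apache.logging.log4j.scala.Logging

// Hypothetical stand-in for a Hail class adopting the new pattern.
class RegionPoolSketch(threadName: String, threadID: Long) extends Logging {
  // log4j-api-scala's logger methods are macros: the interpolated message is
  // only built when INFO is enabled, so disabled log statements cost nothing.
  logger.info(s"initialized for thread $threadID: $threadName")
}

That lazy evaluation of s"..." arguments is presumably the "string interpolation" benefit the commit title refers to.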

hail/build.mill

Lines changed: 5 additions & 2 deletions
@@ -52,13 +52,15 @@ object Deps {
     def core: Task[Dep] = Task.Anon(mvn"org.apache.spark::spark-core:${Env.sparkVersion()}")
     def mllib: Task[Dep] = Task.Anon(mvn"org.apache.spark::spark-mllib:${Env.sparkVersion()}")
   }
-
+
   object Log4j {
     // have to forceVersion for reasons unknown?
     val bom = mvn"org.apache.logging.log4j:log4j-bom:2.25.2".forceVersion()
     val api = mvn"org.apache.logging.log4j:log4j-api"
     val core = mvn"org.apache.logging.log4j:log4j-core"
     val slf4j = mvn"org.apache.logging.log4j:log4j-slf4j2-impl"
+
+    val scala = mvn"org.apache.logging.log4j::log4j-api-scala:13.1.0"
   }
 
   val samtools = mvn"com.github.samtools:htsjdk:3.0.5"
@@ -270,6 +272,7 @@ trait RootHailModule extends CrossScalaModule with HailModule { outer =>
       Deps.Asm.analysis,
       Deps.Asm.util,
       Deps.Log4j.core,
+      Deps.Log4j.scala,
       Deps.samtools.excludeOrg("*"),
       Deps.jdistlib.excludeOrg("*"),
       Deps.freemarker,
@@ -306,7 +309,7 @@ trait RootHailModule extends CrossScalaModule with HailModule { outer =>
 
   override def assemblyRules: Seq[Rule] = super.assemblyRules ++ Seq(
     Rule.Exclude("META-INF/INDEX.LIST"),
-    Rule.ExcludePattern("scala/(?!collection/compat).*"),
+    Rule.ExcludePattern("^scala/(?!collection/compat).*"),
     Rule.AppendPattern("META-INF/services/.*", "\n"),
     Rule.Relocate("breeze.**", "is.hail.relocated.@0"),
     Rule.Relocate("com.google.cloud.**", "is.hail.relocated.@0"),

hail/hail/src/is/hail/annotations/BroadcastValue.scala

Lines changed: 3 additions & 3 deletions
@@ -6,7 +6,7 @@ import is.hail.expr.ir.defs.EncodedLiteral
 import is.hail.io.{BufferSpec, Decoder, TypedCodecSpec}
 import is.hail.types.physical.{PArray, PStruct, PType}
 import is.hail.types.virtual.{TBaseStruct, TStruct}
-import is.hail.utils.{formatSpace, log, ArrayOfByteArrayOutputStream}
+import is.hail.utils.{formatSpace, ArrayOfByteArrayOutputStream, Logging}
 import is.hail.utils.prettyPrint.ArrayOfByteArrayInputStream
 
 import java.io.InputStream
@@ -36,7 +36,7 @@ object BroadcastRow {
   }
 }
 
-trait BroadcastRegionValue {
+trait BroadcastRegionValue extends Logging {
   def ctx: ExecuteContext
 
   def value: RegionValue
@@ -72,7 +72,7 @@ trait BroadcastRegionValue {
     if (broadcasted == null) {
       val arrays = encodeToByteArrays(theHailClassLoader)
       val totalSize = arrays.map(_.length).sum
-      log.info(
+      logger.info(
         s"BroadcastRegionValue.broadcast: broadcasting ${arrays.length} byte arrays of total size $totalSize (${formatSpace(totalSize.toLong)}"
       )
       val srv = SerializableRegionValue(arrays, decodedPType, makeDec)

hail/hail/src/is/hail/annotations/Region.scala

Lines changed: 3 additions & 3 deletions
@@ -3,7 +3,7 @@ package is.hail.annotations
 import is.hail.asm4s
 import is.hail.asm4s.Code
 import is.hail.types.physical._
-import is.hail.utils._
+import is.hail.utils.Logging
 
 object Region {
   type Size = Int
@@ -475,7 +475,7 @@ final class Region protected[annotations] (
   def totalManagedBytes(): Long = memory.totalManagedBytes()
 }
 
-object RegionUtils {
+object RegionUtils extends Logging {
   def printAddr(off: Long, name: String): String = s"$name: ${"%016x".format(off)}"
 
   def printAddr(off: Code[Long], name: String): Code[String] =
@@ -512,7 +512,7 @@ object RegionUtils {
 
     val nReferenced = region.nReferencedRegions()
 
-    info(
+    logger.info(
       s"""
          |$header:
          | block size: $size

hail/hail/src/is/hail/annotations/RegionPool.scala

Lines changed: 8 additions & 8 deletions
@@ -22,8 +22,8 @@ object RegionPool {
 }
 
 final class RegionPool private (strictMemoryCheck: Boolean, threadName: String, threadID: Long)
-    extends AutoCloseable {
-  log.info(s"RegionPool: initialized for thread $threadID: $threadName")
+    extends AutoCloseable with Logging {
+  logger.info(s"initialized for thread $threadID: $threadName")
 
   protected[annotations] val freeBlocks: Array[LongArrayBuilder] =
     Array.fill[LongArrayBuilder](4)(new LongArrayBuilder(8))
@@ -140,7 +140,7 @@ final class RegionPool private (strictMemoryCheck: Boolean, threadName: String,
 
     val freeBlockCounts = freeBlocks.map(_.size)
     val usedBlockCounts = blocks.zip(freeBlockCounts).map { case (tot, free) => tot - free }
-    info(
+    logger.info(
       s"""Region count for $context
          | regions: $nRegions active, $nFree free
          | blocks: $nBlocks
@@ -152,15 +152,15 @@ final class RegionPool private (strictMemoryCheck: Boolean, threadName: String,
   def report(context: String): Unit = {
     val inBlocks = bytesInBlocks()
 
-    log.info(
+    logger.info(
       s"RegionPool: $context: ${readableBytes(totalAllocatedBytes)} allocated (${readableBytes(inBlocks)} blocks / " +
         s"${readableBytes(totalAllocatedBytes - inBlocks)} chunks), regions.size = ${regions.size}, " +
         s"$numJavaObjects current java objects, thread $threadID: $threadName"
     )
-    // log.info("-----------STACK_TRACES---------")
+    // logger.info("-----------STACK_TRACES---------")
     // val stacks: String = regions.result().toIndexedSeq.flatMap(r => r.stackTrace.map((r.getTotalChunkMemory(), _))).foldLeft("")((a: String, b) => a + "\n" + b.toString())
-    // log.info(stacks)
-    // log.info("---------------END--------------")
+    // logger.info(stacks)
+    // logger.info("---------------END--------------")
   }
 
   def scopedRegion[T](f: Region => T): T = using(Region(pool = this))(f)
@@ -202,7 +202,7 @@ final class RegionPool private (strictMemoryCheck: Boolean, threadName: String,
       if (strictMemoryCheck)
         fatal(msg)
       else
-        warn(msg)
+        logger.warn(msg)
     }
   }
 }
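One side effect visible in the hunk above: the hand-written "RegionPool: " prefix disappears from the initialization message. With a per-class logger the class name is available to the log layout anyway, so, assuming a layout pattern that prints the logger name (e.g. %c), the output stays just as identifiable:

// Hypothetical layout "%c: %m%n"; the mixed-in trait's logger is presumably
// named after the enclosing class, e.g. is.hail.annotations.RegionPool.
logger.info(s"initialized for thread $threadID: $threadName")
// → is.hail.annotations.RegionPool: initialized for thread 12: main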

hail/hail/src/is/hail/asm4s/ClassBuilder.scala

Lines changed: 2 additions & 2 deletions
@@ -66,7 +66,7 @@ case class StaticField[T] private (lf: lir.StaticField) extends AnyVal {
   }
 }
 
-class ClassesBytes(classesBytes: Array[(String, Array[Byte])]) extends Serializable {
+class ClassesBytes(classesBytes: Array[(String, Array[Byte])]) extends Serializable with Logging {
   @transient @volatile var loaded: Boolean = false
 
   def load(hcl: HailClassLoader): Unit = {
@@ -81,7 +81,7 @@ class ClassesBytes(classesBytes: Array[(String, Array[Byte])]) extends Serializable {
         val buffer = new ByteArrayOutputStream()
         FunctionBuilder.bytesToBytecodeString(bytes, buffer)
         val classJVMByteCodeAsEscapedStr = buffer.toString(StandardCharsets.UTF_8.name())
-        log.error(s"Failed to load bytecode $e:\n" + classJVMByteCodeAsEscapedStr)
+        logger.error(s"Failed to load bytecode $e:\n" + classJVMByteCodeAsEscapedStr)
         throw e
       }
   }

hail/hail/src/is/hail/backend/BackendUtils.scala

Lines changed: 4 additions & 4 deletions
@@ -13,7 +13,7 @@ object BackendUtils {
 
 class BackendUtils(
   mods: Array[(String, (HailClassLoader, FS, HailTaskContext, Region) => BackendUtils.F)]
-) {
+) extends Logging {
 
   import BackendUtils.F
 
@@ -46,7 +46,7 @@
   ): Array[Array[Byte]] = {
 
     val cachedResults = ctx.executionCache.lookup(semhash)
-    log.info(s"$stageName: found ${cachedResults.length} entries for $semhash.")
+    logger.info(s"$stageName: found ${cachedResults.length} entries for $semhash.")
 
     val todo =
       contexts
@@ -65,7 +65,7 @@
     val results = merge[(Array[Byte], Int)](cachedResults, successes, _._2 < _._2)
 
     ctx.executionCache.put(semhash, results)
-    log.info(s"$stageName: cached ${results.length} entries for $semhash.")
+    logger.info(s"$stageName: cached ${results.length} entries for $semhash.")
 
     failureOpt.foreach(throw _)
     Array.tabulate[Array[Byte]](results.length)(results(_)._1)
@@ -98,7 +98,7 @@
 
     val elapsed = System.nanoTime() - start
     val nTasks = partitions.map(_.length).getOrElse(contexts.length)
-    log.info(s"$stageName: executed $nTasks tasks in ${formatTime(elapsed)}")
+    logger.info(s"$stageName: executed $nTasks tasks in ${formatTime(elapsed)}")
 
     r
   }

hail/hail/src/is/hail/backend/ExecutionCache.scala

Lines changed: 1 addition & 1 deletion
@@ -75,7 +75,7 @@ private case class FSExecutionCache(fs: FS, cacheDir: String) extends ExecutionCache {
       IndexedSeq.empty
 
     case NonFatal(t) =>
-      log.warn(s"Failed to read cache entry for $s", t)
+      logger.warn(s"Failed to read cache entry for $s", t)
       IndexedSeq.empty
   }

hail/hail/src/is/hail/backend/HailTaskContext.scala

Lines changed: 2 additions & 2 deletions
@@ -18,7 +18,7 @@ class TaskFinalizer {
     (0 until closeables.size).foreach(i => closeables(i).close())
 }
 
-abstract class HailTaskContext extends AutoCloseable {
+abstract class HailTaskContext extends AutoCloseable with Logging {
   def stageId(): Int
 
   def partitionId(): Int
@@ -44,7 +44,7 @@ abstract class HailTaskContext extends AutoCloseable {
   }
 
   def close(): Unit = {
-    log.info(
+    logger.info(
       s"TaskReport: stage=${stageId()}, partition=${partitionId()}, attempt=${attemptNumber()}, " +
         s"peakBytes=${thePool.getHighestTotalUsage}, peakBytesReadable=${formatSpace(thePool.getHighestTotalUsage)}, " +
         s"chunks requested=${thePool.getUsage._1}, cache hits=${thePool.getUsage._2}"

hail/hail/src/is/hail/backend/driver/BatchQueryDriver.scala

Lines changed: 3 additions & 3 deletions
@@ -71,13 +71,13 @@ object BatchQueryDriver extends HttpLikeRpc with Logging {
     val (shortMessage, expandedMessage, errorId) =
       t match {
         case t: HailWorkerException =>
-          log.error(
+          logger.error(
             "A worker failed. The exception was written for Python but we will also throw an exception to fail this driver job.",
             t,
           )
           (t.shortMessage, t.expandedMessage, t.errorId)
         case _ =>
-          log.error(
+          logger.error(
             "An exception occurred in the driver. The exception was written for Python but we will re-throw to fail this driver job.",
             t,
           )
@@ -134,7 +134,7 @@
     val inputURL = argv(5)
     val outputURL = argv(6)
 
-    log.info(f"${getClass.getName} $HAIL_PRETTY_VERSION")
+    logger.info(f"${getClass.getName} $HAIL_PRETTY_VERSION")
 
     sys.env.get("HAIL_SSL_CONFIG_DIR").foreach(tls.setSSLConfigFromDir)
hail/hail/src/is/hail/backend/driver/Py4JQueryDriver.scala

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ import org.json4s._
3636
import org.json4s.jackson.{JsonMethods, Serialization}
3737
import sourcecode.Enclosing
3838

39-
final class Py4JQueryDriver(backend: Backend) extends Closeable {
39+
final class Py4JQueryDriver(backend: Backend) extends Closeable with Logging {
4040

4141
private[this] val flags: HailFeatureFlags = HailFeatureFlags.fromEnv()
4242
private[this] val hcl = new HailClassLoader(getClass.getClassLoader)
@@ -77,7 +77,7 @@ final class Py4JQueryDriver(backend: Backend) extends Closeable {
7777
localTmpdir = tmp
7878
backend match {
7979
case s: SparkBackend if tmp != "file://" + s.sc.getConf.get("spark.local.dir", "") =>
80-
log.warn(
80+
logger.warn(
8181
"Cannot modify Spark's local directory at runtime. " +
8282
"Please stop and re-initialize hail with 'spark.local.dir' " +
8383
"in your Spark configuration."
@@ -234,7 +234,7 @@ final class Py4JQueryDriver(backend: Backend) extends Closeable {
234234
def pyReadMultipleMatrixTables(jsonQuery: String): util.List[MatrixIR] =
235235
withExecuteContext(selfContainedExecution = false) { ctx =>
236236
implicit val fmts: Formats = DefaultFormats
237-
log.info("pyReadMultipleMatrixTables: got query")
237+
logger.info("pyReadMultipleMatrixTables: got query")
238238

239239
val kvs = JsonMethods.parse(jsonQuery).extract[Map[String, JValue]]
240240
val paths = kvs("paths").extract[IndexedSeq[String]]
@@ -246,12 +246,12 @@ final class Py4JQueryDriver(backend: Backend) extends Closeable {
246246
val opts = NativeReaderOptions(intervalObjects, intervalPointType)
247247
val matrixReaders: util.List[MatrixIR] =
248248
paths.map { p =>
249-
log.info(s"creating MatrixRead node for $p")
249+
logger.info(s"creating MatrixRead node for $p")
250250
val mnr = MatrixNativeReader(ctx.fs, p, Some(opts))
251251
MatrixRead(mnr.fullMatrixTypeWithoutUIDs, false, false, mnr): MatrixIR
252252
}.asJava
253253

254-
log.info("pyReadMultipleMatrixTables: returning N matrix tables")
254+
logger.info("pyReadMultipleMatrixTables: returning N matrix tables")
255255
matrixReaders
256256
}._1
257257

@@ -293,10 +293,10 @@ final class Py4JQueryDriver(backend: Backend) extends Closeable {
293293

294294
def pyGrepPrint(regex: String, files: Seq[String], maxLines: Int): Unit =
295295
fileAndLineCounts(regex, files, maxLines).foreach { case (file, lines) =>
296-
info(s"$file: ${lines.length} ${plural(lines.length, "match", "matches")}:")
296+
logger.info(s"$file: ${lines.length} ${plural(lines.length, "match", "matches")}:")
297297
lines.map(_.value).foreach { line =>
298298
val (screen, logged) = line.truncatable().strings
299-
log.info("\t" + logged)
299+
logger.info("\t" + logged)
300300
println(s"\t$screen")
301301
}
302302
}
