import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import java.net.URI

/**
 * Scans every table directory under `/hbase` on the given HDFS cluster and
 * accumulates, per table, the number of bytes stored on SSD media
 * (replica count on SSD × block length). Prints periodic progress roughly
 * every 10 seconds of wall-clock time, then a final per-table summary.
 *
 * Side effects: connects to HDFS as user "hdfs", reads file metadata only,
 * writes results to stdout.
 */
fun main() {
    val config = Configuration()
    config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem")

    // FileSystem is Closeable; `use` releases the connection even if the scan
    // below throws (the original leaked it).
    FileSystem.get(URI("hdfs://host:8020"), config, "hdfs").use { fs ->
        // Bytes of SSD-resident replica data accumulated per table name.
        val ssdBytesPerTable = mutableMapOf<String, Long>()
        // Edge-trigger flag so the periodic progress dump prints at most once
        // per 10-second wall-clock window.
        var printedThisWindow = false
        // Exclusion list of table names to skip. Empty here, so every
        // directory is scanned. NOTE(review): confirm whether names were
        // meant to be listed — as written the filter is a no-op.
        val excludedTables = listOf<String>()

        fs.listStatus(Path("/hbase")).forEach { dir ->
            // NOTE(review): index 8 assumes a fixed URI depth
            // (hdfs://host:8020/seg1/.../seg6) and throws
            // IndexOutOfBoundsException for shallower paths — verify against
            // the actual cluster layout.
            val tableName = dir.path.toString().split("/")[8]
            if (tableName !in excludedTables) {
                // Recursive listing of every file under this table directory.
                // (Renamed from `files` to avoid shadowing the outer listing.)
                val fileIter = fs.listFiles(Path(dir.path.toUri().path), true)
                while (fileIter.hasNext()) {
                    val fileStatus = fileIter.next()
                    fileStatus.blockLocations.forEach { block ->
                        // One storage-type entry per replica; count the SSD
                        // ones and weight by the block's length in bytes.
                        val ssdReplicas = block.storageTypes.count { it.name == "SSD" }
                        ssdBytesPerTable.merge(tableName, ssdReplicas * block.length) { a, b -> a + b }

                        // Progress report: print the running totals at most
                        // once whenever the epoch-second lands on a multiple
                        // of 10.
                        if (System.currentTimeMillis() / 1000 % 10 == 0L) {
                            if (!printedThisWindow) {
                                println(ssdBytesPerTable.map { "${it.key}:${it.value}:${it.value / 1024 / 1024 / 1024}GB" })
                                printedThisWindow = true
                            }
                        } else {
                            printedThisWindow = false
                        }
                    }
                }
            }
        }

        println("==done=======================================================")
        ssdBytesPerTable.forEach { (table, bytes) ->
            println("$table:$bytes:${bytes / 1024 / 1024 / 1024} GB")
        }
    }
}

/*
 * (Non-code residue from the web page this snippet was copied from:
 * like/donate buttons, an empty comment list, and a WeChat "scan to follow"
 * prompt. Commented out so the file remains valid Kotlin.)
 */