import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import java.net.URI
/**
 * Reports per-table SSD usage for HBase data stored in HDFS.
 *
 * Walks every directory under /hbase, recursively lists its files, and for
 * each block sums `(# of SSD-typed replicas) * blockLength` into a per-table
 * total. Prints a progress snapshot at most once per 10-second window while
 * scanning, then a final table:bytes:GB summary.
 *
 * NOTE(review): assumes each child of /hbase is one table's directory —
 * confirm against the cluster's HBase layout (some layouts nest tables
 * under /hbase/data/<namespace>/<table>).
 */
fun main() {
    val config = Configuration()
    config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem")
    // Bytes of SSD-resident block replicas, keyed by table name.
    val ssdBytesByTable = mutableMapOf<String, Long>()
    // FileSystem is Closeable; `use` guarantees release even if listing throws.
    FileSystem.get(URI("hdfs://host:8020"), config, "hdfs").use { fs ->
        // Tables already processed. (The original checked membership in an
        // always-empty immutable list, so the dedup check was dead code.)
        val seenTables = mutableSetOf<String>()
        // True once we have printed inside the current 10-second window.
        var printedThisWindow = false
        fs.listStatus(Path("/hbase")).forEach { dir ->
            // Table name = the directory's own name (last path component).
            // The original hard-coded split("/")[8], which throws for any
            // path depth other than exactly nine components.
            val tableName = dir.path.name
            if (seenTables.add(tableName)) {
                val fileIter = fs.listFiles(Path(dir.path.toUri().path), true)
                while (fileIter.hasNext()) {
                    val fileStatus = fileIter.next()
                    fileStatus.blockLocations.forEach { block ->
                        // Charge the block length once per SSD-typed replica.
                        val ssdReplicas = block.storageTypes.count { it.name == "SSD" }
                        ssdBytesByTable[tableName] =
                            (ssdBytesByTable[tableName] ?: 0L) + ssdReplicas * block.length
                        // Progress heartbeat: print at most once whenever the
                        // epoch-second lands on a multiple of 10.
                        if (System.currentTimeMillis() / 1000 % 10 == 0L) {
                            if (!printedThisWindow) {
                                println(ssdBytesByTable.map { "${it.key}:${it.value}:${it.value / 1024 / 1024 / 1024}GB" })
                                printedThisWindow = true
                            }
                        } else {
                            printedThisWindow = false
                        }
                    }
                }
            }
        }
    }
    println("==done=======================================================")
    ssdBytesByTable.forEach { (table, bytes) ->
        println("$table:$bytes:${bytes / 1024 / 1024 / 1024} GB")
    }
}
// (removed: unrelated website footer text left over from the page this snippet was scraped from)