Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions LICENSE-binary
Original file line number Diff line number Diff line change
Expand Up @@ -281,6 +281,7 @@ MIT License
------------
org.slf4j:slf4j-api:2.0.9
com.bugsnag:bugsnag:3.7.2
com.github.oshi:oshi-core:6.4.0


EPL 1.0
Expand Down
1 change: 1 addition & 0 deletions dependencies.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
"com.github.ben-manes.caffeine:caffeine",
"com.github.luben:zstd-jni",
"com.github.moquette-io.moquette:moquette-broker",
"com.github.oshi:oshi-core",
"com.github.stephenc.jcip:jcip-annotations",
"com.github.wendykierp:JTransforms",
"com.google.code.findbugs:jsr305",
Expand Down
4 changes: 4 additions & 0 deletions iotdb-core/datanode/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -371,6 +371,10 @@
<version>1.3.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;

import java.io.BufferedReader;
import java.io.File;
Expand All @@ -52,7 +53,9 @@ public SystemRelatedFileMetrics() {

@Override
public void bindTo(AbstractMetricService metricService) {
if ((CONFIG.getSystemType() == SystemType.LINUX || CONFIG.getSystemType() == SystemType.MAC)
if ((CONFIG.getSystemType() == SystemType.LINUX
|| CONFIG.getSystemType() == SystemType.MAC
|| CONFIG.getSystemType() == SystemType.WINDOWS)
&& !CONFIG.getPid().isEmpty()) {
this.getOpenFileNumberCommand =
new String[] {"/bin/sh", "-c", String.format("lsof -p %s | wc -l", CONFIG.getPid())};
Expand Down Expand Up @@ -88,6 +91,9 @@ private long getOpenFileHandlersNumber() {
}
}
fdCount = Long.parseLong(result.toString().trim());
} else if (CONFIG.getSystemType() == SystemType.WINDOWS) {
SystemInfo systemInfo = new SystemInfo();
return systemInfo.getOperatingSystem().getCurrentProcess().getOpenFiles();
}
} catch (IOException e) {
LOGGER.warn("Failed to get open file number, because ", e);
Expand Down
4 changes: 4 additions & 0 deletions iotdb-core/metrics/interface/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,10 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iotdb.metrics.metricsets.disk;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * Base class for per-platform disk metrics managers.
 *
 * <p>It keeps, for every known disk id, the most recent raw counter sample ({@code last*} maps)
 * and the delta between the two most recent samples ({@code increment*} maps). Subclasses are
 * responsible for discovering disk ids ({@link #collectDiskId()}) and for feeding fresh samples
 * through {@link #updateSingleDiskInfo(String, long, Map, Map)} from their {@link #updateInfo()}
 * override. Updates are rate-limited by {@link #checkUpdate()}.
 *
 * <p>NOTE(review): this class is not thread-safe; callers are assumed to invoke the metric
 * getters from a single collection thread — confirm against the metric service's threading model.
 */
public abstract class AbstractDiskMetricsManager implements IDiskMetricsManager {

  public static final double BYTES_PER_KB = 1024.0;

  // Disk IO status structure.
  // "last*" maps: latest raw counter value per disk id.
  // "increment*" maps: difference between the two latest samples per disk id.
  protected Map<String, Long> lastReadOperationCountForDisk;
  protected Map<String, Long> lastWriteOperationCountForDisk;
  protected Map<String, Long> lastReadSizeForDisk;
  protected Map<String, Long> lastWriteSizeForDisk;
  protected Map<String, Long> lastReadTimeCostForDisk;
  protected Map<String, Long> lastWriteTimeCostForDisk;
  protected Map<String, Long> lastMergedReadCountForDisk;
  protected Map<String, Long> lastMergedWriteCountForDisk;
  protected Map<String, Long> lastReadSectorCountForDisk;
  protected Map<String, Long> lastWriteSectorCountForDisk;
  protected Map<String, Long> lastIoBusyTimeForDisk;
  protected Map<String, Long> lastTimeInQueueForDisk;
  protected Map<String, Long> incrementReadOperationCountForDisk;
  protected Map<String, Long> incrementWriteOperationCountForDisk;
  protected Map<String, Long> incrementReadSizeForDisk;
  protected Map<String, Long> incrementWriteSizeForDisk;
  protected Map<String, Long> incrementMergedReadOperationCountForDisk;
  protected Map<String, Long> incrementMergedWriteOperationCountForDisk;
  protected Map<String, Long> incrementReadTimeCostForDisk;
  protected Map<String, Long> incrementWriteTimeCostForDisk;
  protected Map<String, Long> incrementReadSectorCountForDisk;
  protected Map<String, Long> incrementWriteSectorCountForDisk;
  protected Map<String, Long> incrementIoBusyTimeForDisk;
  protected Map<String, Long> incrementTimeInQueueForDisk;

  // Timestamp (ms) of the last successful updateInfo() run; 0 means "never updated".
  protected long lastUpdateTime = 0L;
  // Milliseconds between the two latest updates; starts at 1 so that ratio
  // computations (e.g. getIoUtilsPercentage) never divide by zero.
  protected long updateInterval = 1L;
  protected Set<String> diskIdSet;

  public AbstractDiskMetricsManager() {}

  /**
   * Discovers the disk ids and allocates every sample/delta map, pre-sized so they never
   * resize. Must be called by subclasses before the first {@link #updateInfo()}.
   */
  protected void init() {
    collectDiskId();
    lastReadOperationCountForDisk = newDiskMap();
    lastWriteOperationCountForDisk = newDiskMap();
    lastReadSizeForDisk = newDiskMap();
    lastWriteSizeForDisk = newDiskMap();
    lastReadTimeCostForDisk = newDiskMap();
    lastWriteTimeCostForDisk = newDiskMap();
    lastMergedReadCountForDisk = newDiskMap();
    lastMergedWriteCountForDisk = newDiskMap();
    lastReadSectorCountForDisk = newDiskMap();
    lastWriteSectorCountForDisk = newDiskMap();
    lastIoBusyTimeForDisk = newDiskMap();
    lastTimeInQueueForDisk = newDiskMap();
    incrementReadOperationCountForDisk = newDiskMap();
    incrementWriteOperationCountForDisk = newDiskMap();
    incrementReadSizeForDisk = newDiskMap();
    incrementWriteSizeForDisk = newDiskMap();
    incrementMergedReadOperationCountForDisk = newDiskMap();
    incrementMergedWriteOperationCountForDisk = newDiskMap();
    incrementReadTimeCostForDisk = newDiskMap();
    incrementWriteTimeCostForDisk = newDiskMap();
    incrementReadSectorCountForDisk = newDiskMap();
    incrementWriteSectorCountForDisk = newDiskMap();
    incrementIoBusyTimeForDisk = newDiskMap();
    incrementTimeInQueueForDisk = newDiskMap();
  }

  /**
   * Creates a map pre-sized to hold one entry per known disk. The extra slot together with a
   * load factor of 1 guarantees the map never rehashes while fully populated.
   */
  private <V> Map<String, V> newDiskMap() {
    return new HashMap<>(diskIdSet.size() + 1, 1);
  }

  /** Refreshes the cached samples, but at most once per {@code UPDATE_SMALLEST_INTERVAL} ms. */
  protected void checkUpdate() {
    if (System.currentTimeMillis() - lastUpdateTime
        > IDiskMetricsManager.UPDATE_SMALLEST_INTERVAL) {
      updateInfo();
    }
  }

  /**
   * Returns, per disk id, the ratio of IO-busy time to wall-clock time over the latest update
   * interval. Assumes the busy-time counters are in the same unit (ms) as {@code updateInterval}
   * — TODO confirm for each platform-specific subclass.
   */
  @Override
  public Map<String, Double> getIoUtilsPercentage() {
    checkUpdate();
    Map<String, Double> utilsMap = new HashMap<>(incrementIoBusyTimeForDisk.size());
    for (Map.Entry<String, Long> entry : incrementIoBusyTimeForDisk.entrySet()) {
      utilsMap.put(entry.getKey(), ((double) entry.getValue()) / updateInterval);
    }
    return utilsMap;
  }

  /**
   * Records the new update timestamp and the elapsed interval. Subclasses override this, call
   * {@code super.updateInfo()}, then refresh their own counters.
   */
  protected void updateInfo() {
    long currentTime = System.currentTimeMillis();
    updateInterval = currentTime - lastUpdateTime;
    lastUpdateTime = currentTime;
  }

  /**
   * Stores a new raw counter sample for {@code diskId} in {@code lastMap} and, when
   * {@code incrementMap} is non-null, the delta against the previous sample. The first sample
   * (previous value 0) yields a delta of 0 so a cold start does not report the whole counter
   * history as one spike.
   *
   * @param diskId disk identifier the sample belongs to
   * @param currentValue freshly read raw counter value
   * @param lastMap map holding the previous raw sample per disk
   * @param incrementMap map receiving the delta per disk; may be null if no delta is tracked
   */
  protected void updateSingleDiskInfo(
      String diskId, long currentValue, Map<String, Long> lastMap, Map<String, Long> incrementMap) {
    if (incrementMap != null) {
      long lastValue = lastMap.getOrDefault(diskId, 0L);
      if (lastValue != 0) {
        incrementMap.put(diskId, currentValue - lastValue);
      } else {
        incrementMap.put(diskId, 0L);
      }
    }
    lastMap.put(diskId, currentValue);
  }

  /** Returns the set of disk ids discovered by {@link #collectDiskId()}. */
  @Override
  public Set<String> getDiskIds() {
    return diskIdSet;
  }

  /** Discovers the platform's disk ids and populates {@link #diskIdSet}. */
  protected abstract void collectDiskId();
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@
import java.util.Set;

public interface IDiskMetricsManager {

long UPDATE_SMALLEST_INTERVAL = 10000L;

default Map<String, Double> getReadDataSizeForDisk() {
return Collections.emptyMap();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
import java.util.Map;
import java.util.Objects;
import java.util.Scanner;
import java.util.Set;
import java.util.stream.Collectors;

/**
Expand All @@ -49,7 +48,7 @@
* system call count, write system call count, byte attempt to read, byte attempt to write,
* cancelled write byte.
*/
public class LinuxDiskMetricsManager implements IDiskMetricsManager {
public class LinuxDiskMetricsManager extends AbstractDiskMetricsManager {
private static final Logger LOGGER = LoggerFactory.getLogger(LinuxDiskMetricsManager.class);

@SuppressWarnings("squid:S1075")
Expand All @@ -74,34 +73,7 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
private static final int DISK_IO_TOTAL_TIME_OFFSET = 13;
private static final int DISK_TIME_IN_QUEUE_OFFSET = 14;
private static final int DEFAULT_SECTOR_SIZE = 512;
private static final double BYTES_PER_KB = 1024.0;
private static final long UPDATE_SMALLEST_INTERVAL = 10000L;
private Set<String> diskIdSet;
private final Map<String, Integer> diskSectorSizeMap;
private long lastUpdateTime = 0L;
private long updateInterval = 1L;

// Disk IO status structure
private final Map<String, Long> lastReadOperationCountForDisk;
private final Map<String, Long> lastWriteOperationCountForDisk;
private final Map<String, Long> lastReadTimeCostForDisk;
private final Map<String, Long> lastWriteTimeCostForDisk;
private final Map<String, Long> lastMergedReadCountForDisk;
private final Map<String, Long> lastMergedWriteCountForDisk;
private final Map<String, Long> lastReadSectorCountForDisk;
private final Map<String, Long> lastWriteSectorCountForDisk;
private final Map<String, Long> lastIoBusyTimeForDisk;
private final Map<String, Long> lastTimeInQueueForDisk;
private final Map<String, Long> incrementReadOperationCountForDisk;
private final Map<String, Long> incrementWriteOperationCountForDisk;
private final Map<String, Long> incrementMergedReadOperationCountForDisk;
private final Map<String, Long> incrementMergedWriteOperationCountForDisk;
private final Map<String, Long> incrementReadTimeCostForDisk;
private final Map<String, Long> incrementWriteTimeCostForDisk;
private final Map<String, Long> incrementReadSectorCountForDisk;
private final Map<String, Long> incrementWriteSectorCountForDisk;
private final Map<String, Long> incrementIoBusyTimeForDisk;
private final Map<String, Long> incrementTimeInQueueForDisk;

// Process IO status structure
private long lastReallyReadSizeForProcess = 0L;
Expand All @@ -112,33 +84,13 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
private long lastWriteOpsCountForProcess = 0L;

public LinuxDiskMetricsManager() {
init();
processIoStatusPath =
String.format(
"/proc/%s/io", MetricConfigDescriptor.getInstance().getMetricConfig().getPid());
collectDiskId();
// leave one entry to avoid hashmap resizing
diskSectorSizeMap = new HashMap<>(diskIdSet.size() + 1, 1);
collectDiskInfo();
lastReadOperationCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastWriteOperationCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastReadTimeCostForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastWriteTimeCostForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastMergedReadCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastMergedWriteCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastReadSectorCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastWriteSectorCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastIoBusyTimeForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
lastTimeInQueueForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementReadOperationCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementWriteOperationCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementMergedReadOperationCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementMergedWriteOperationCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementReadTimeCostForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementWriteTimeCostForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementReadSectorCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementWriteSectorCountForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementIoBusyTimeForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
incrementTimeInQueueForDisk = new HashMap<>(diskIdSet.size() + 1, 1);
}

@Override
Expand Down Expand Up @@ -184,15 +136,6 @@ public Map<String, Long> getWriteCostTimeForDisk() {
return lastWriteTimeCostForDisk;
}

@Override
public Map<String, Double> getIoUtilsPercentage() {
Map<String, Double> utilsMap = new HashMap<>(diskIdSet.size());
for (Map.Entry<String, Long> entry : incrementIoBusyTimeForDisk.entrySet()) {
utilsMap.put(entry.getKey(), ((double) entry.getValue()) / updateInterval);
}
return utilsMap;
}

@Override
public Map<String, Double> getAvgReadCostTimeOfEachOpsForDisk() {
Map<String, Double> avgReadTimeCostMap = new HashMap<>(diskIdSet.size());
Expand Down Expand Up @@ -318,11 +261,7 @@ public double getAttemptWriteSizeForProcess() {
}

@Override
public Set<String> getDiskIds() {
return diskIdSet;
}

private void collectDiskId() {
protected void collectDiskId() {
File diskIdFolder = new File(DISK_ID_PATH);
if (!diskIdFolder.exists()) {
return;
Expand Down Expand Up @@ -355,10 +294,9 @@ private void collectDiskInfo() {
}
}

private void updateInfo() {
long currentTime = System.currentTimeMillis();
updateInterval = currentTime - lastUpdateTime;
lastUpdateTime = currentTime;
@Override
protected void updateInfo() {
super.updateInfo();
updateDiskInfo();
updateProcessInfo();
}
Expand Down Expand Up @@ -430,15 +368,7 @@ private void updateSingleDiskInfo(
Map<String, Long> lastMap,
Map<String, Long> incrementMap) {
long currentValue = Long.parseLong(diskInfo[offset]);
if (incrementMap != null) {
long lastValue = lastMap.getOrDefault(diskId, 0L);
if (lastValue != 0) {
incrementMap.put(diskId, currentValue - lastValue);
} else {
incrementMap.put(diskId, 0L);
}
}
lastMap.put(diskId, currentValue);
updateSingleDiskInfo(diskId, currentValue, lastMap, incrementMap);
}

private void updateProcessInfo() {
Expand Down Expand Up @@ -469,10 +399,4 @@ private void updateProcessInfo() {
LOGGER.error("Meets error while updating process io info", e);
}
}

private void checkUpdate() {
if (System.currentTimeMillis() - lastUpdateTime > UPDATE_SMALLEST_INTERVAL) {
updateInfo();
}
}
}
Loading
Loading