延庆+山东项目

This commit is contained in:
zeaslity
2024-11-20 16:04:33 +08:00
parent 8c34f9018c
commit a0792c50cb
59 changed files with 16411 additions and 209 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,66 @@
#!/bin/bash
# Disk I/O benchmark: sequential large-file throughput plus a small-file
# pass in TEST_DIR, measured with dd (fdatasync'd writes) and bc.
TEST_DIR="/var/lib/docker/"
# Sizes actually written — kept in variables so the dd commands and the
# speed math below cannot drift apart (the original hard-coded 1024 in
# the small-file speed formula although only 3 MB were transferred).
LARGE_FILE_MB=1024      # large file: 1024 x 1 MiB blocks = 1 GiB
SMALL_FILE_MB=1         # each small file: 1 MiB
NUM_SMALL_FILES=3       # number of small files written and read back
TOTAL_SMALL_MB=$((NUM_SMALL_FILES * SMALL_FILE_MB))
LARGE_FILE="${TEST_DIR}/large_test_file"
# Create the test directory if it does not exist.
mkdir -p "$TEST_DIR"
# ---- Large-file test ----
echo "开始大文件读写测试..."
# Write the large file and time it; conv=fdatasync forces the data to disk
# before dd exits, so the timing includes the actual flush.
start_time=$(date +%s.%N)
dd if=/dev/zero of="$LARGE_FILE" bs=1M count="$LARGE_FILE_MB" conv=fdatasync
end_time=$(date +%s.%N)
write_time=$(echo "$end_time - $start_time" | bc)
write_speed=$(echo "scale=2; $LARGE_FILE_MB / $write_time" | bc)
echo "大文件写入速度: ${write_speed} MB/s"
# Read the large file back and time it.
start_time=$(date +%s.%N)
dd if="$LARGE_FILE" of=/dev/null bs=1M
end_time=$(date +%s.%N)
read_time=$(echo "$end_time - $start_time" | bc)
read_speed=$(echo "scale=2; $LARGE_FILE_MB / $read_time" | bc)
echo "大文件读取速度: ${read_speed} MB/s"
# Remove the large test file.
rm -f "$LARGE_FILE"
# ---- Small-file test ----
echo "开始小文件读写测试..."
# Write the small files and time the whole batch.
start_time=$(date +%s.%N)
for i in $(seq 1 "$NUM_SMALL_FILES"); do
    dd if=/dev/zero of="${TEST_DIR}/small_test_file_$i" bs=1M count="$SMALL_FILE_MB" conv=fdatasync
done
end_time=$(date +%s.%N)
write_time=$(echo "$end_time - $start_time" | bc)
# Divide by the megabytes actually written, not a stale constant.
write_speed=$(echo "scale=2; $TOTAL_SMALL_MB / $write_time" | bc)
echo "小文件写入速度: ${write_speed} MB/s"
# Read the small files back and time the whole batch.
start_time=$(date +%s.%N)
for i in $(seq 1 "$NUM_SMALL_FILES"); do
    dd if="${TEST_DIR}/small_test_file_$i" of=/dev/null bs=1M
done
end_time=$(date +%s.%N)
read_time=$(echo "$end_time - $start_time" | bc)
read_speed=$(echo "scale=2; $TOTAL_SMALL_MB / $read_time" | bc)
echo "小文件读取速度: ${read_speed} MB/s"
# Remove the small test files.
rm -f "${TEST_DIR}/small_test_file_"*
echo "测试完成!"

View File

@@ -0,0 +1,67 @@
import os
import time
def write_large_file(file_path, size_mb):
    """Write *size_mb* MiB of random data to *file_path*.

    The data is generated and written in 1 MiB chunks so memory use stays
    constant regardless of file size. (The original built the entire
    payload with one ``os.urandom`` call, which needs *size_mb* MiB of RAM
    at once — ~10 GiB with the defaults used by ``main``.)
    """
    chunk_size = 1024 * 1024  # 1 MiB per write
    with open(file_path, 'wb') as f:
        for _ in range(size_mb):
            f.write(os.urandom(chunk_size))
def read_large_file(file_path):
    """Read *file_path* to completion in fixed-size chunks.

    Streaming keeps memory use constant. (The original ``f.read()`` loaded
    the whole file into memory — up to several GiB for this benchmark.)
    """
    chunk_size = 1024 * 1024  # 1 MiB per read
    with open(file_path, 'rb') as f:
        while f.read(chunk_size):
            pass
def write_small_files(dir_path, num_files, file_size):
    """Create *num_files* files of *file_size* random bytes each in *dir_path*."""
    for index in range(num_files):
        target = os.path.join(dir_path, f'small_file_{index}.txt')
        payload = os.urandom(file_size)  # random data of the requested size
        with open(target, 'wb') as handle:
            handle.write(payload)
def read_small_files(dir_path, num_files):
    """Read back the *num_files* small test files in *dir_path*."""
    for index in range(num_files):
        target = os.path.join(dir_path, f'small_file_{index}.txt')
        with open(target, 'rb') as handle:
            handle.read()  # drain the file; content is discarded
def measure_io(test_type, *args):
    """Run the benchmark named by *test_type* with *args* and return the
    elapsed wall-clock time in seconds.

    Unknown *test_type* values run nothing and return a near-zero duration,
    matching the original if/elif chain's fall-through behavior.
    """
    runners = {
        'large_write': write_large_file,
        'large_read': read_large_file,
        'small_write': write_small_files,
        'small_read': read_small_files,
    }
    started = time.time()
    runner = runners.get(test_type)
    if runner is not None:
        runner(*args)
    return time.time() - started
def main():
    """Drive the large-file and small-file I/O benchmarks and print timings.

    Test files are deleted after each phase so the benchmark does not leave
    ~10 GiB of data behind in the test directory (the original never
    cleaned up after itself).
    """
    dir_path = '/var/lib/docker/'  # change to the directory under test
    large_file_size_mb = 10240     # large-file size in MB
    small_file_size = 1024         # small-file size in bytes
    num_small_files = 100          # number of small files
    large_file_path = os.path.join(dir_path, 'large_file.bin')
    # Large-file phase.
    print("开始大文件写入测试...")
    large_write_time = measure_io('large_write', large_file_path, large_file_size_mb)
    print(f"大文件写入时间: {large_write_time:.2f}")
    print("开始大文件读取测试...")
    large_read_time = measure_io('large_read', large_file_path)
    print(f"大文件读取时间: {large_read_time:.2f}")
    os.remove(large_file_path)  # reclaim the ~10 GiB test file
    # Small-file phase.
    print("开始小文件写入测试...")
    small_write_time = measure_io('small_write', dir_path, num_small_files, small_file_size)
    print(f"小文件写入时间: {small_write_time:.2f}")
    print("开始小文件读取测试...")
    small_read_time = measure_io('small_read', dir_path, num_small_files)
    print(f"小文件读取时间: {small_read_time:.2f}")
    # Remove the small test files created by write_small_files.
    for i in range(num_small_files):
        os.remove(os.path.join(dir_path, f'small_file_{i}.txt'))
if __name__ == '__main__':
    main()