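# Basic functional test for a dispersed (erasure coded) volume: creation,
# reads and checksums with a brick down, truncation to several sizes, and
# removal of files and directories, verified both on the mount and on the
# backend bricks.
#
# Assumption: DISPERSE and REDUNDANCY are set by the including test script,
# which is also expected to have sourced the GlusterFS test harness
# (include.rc / volume.rc) that provides TEST, EXPECT, EXPECT_WITHIN, cleanup,
# kill_brick, force_umount and the $CLI/$GFS/$V0/$B0/$H0/$M0 variables.
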
SIZE_LIST="1048576 1000 12345 0"
LAST_BRICK=$(($DISPERSE - 1))
CHUNK_SIZE=512
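
# fragment_size: expected on-brick size of a file of the given logical size.
# The size is rounded up to a whole stripe (CHUNK_SIZE bytes on each of the
# DISPERSE - REDUNDANCY data fragments) and divided by the number of data
# fragments.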
function fragment_size
{
    local fragments=$(($DISPERSE - $REDUNDANCY))
    local block_size=$(($CHUNK_SIZE * $fragments))
    local size=$(($1 + $block_size - 1))

    echo $((($size - $size % $block_size) / $fragments))
}
cleanup
tmp=`mktemp -d -t ${0##*/}.XXXXXX`
if [ ! -d $tmp ]; then
    exit 1
fi
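
# Start glusterd, create a dispersed volume from DISPERSE bricks with
# REDUNDANCY redundancy fragments, start it and mount it on $M0.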
TEST glusterd
TEST pidof glusterd
TEST $CLI volume create $V0 redundancy $REDUNDANCY $H0:$B0/${V0}{0..$LAST_BRICK}
TEST $CLI volume start $V0
TEST $GFS --volfile-id=/$V0 --volfile-server=$H0 $M0;
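
# Create 1KiB ("small") and 4MiB ("big") reference files of random data and
# record their checksums, plus the checksums expected after truncating each
# of them to every size in SIZE_LIST.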
TEST dd if=/dev/urandom of=$tmp/small bs=1024 count=1
TEST dd if=/dev/urandom of=$tmp/big bs=1024 count=4096
cs_small=$(sha1sum $tmp/small | awk '{ print $1 }')
cs_big=$(sha1sum $tmp/big | awk '{ print $1 }')
cp $tmp/small $tmp/small1
for size in $SIZE_LIST; do
    truncate -s $size $tmp/small1
    eval cs_small_truncate[$size]=$(sha1sum $tmp/small1 | awk '{ print $1 }')
done
cp $tmp/big $tmp/big1
for size in $SIZE_LIST; do
    truncate -s $size $tmp/big1
    eval cs_big_truncate[$size]=$(sha1sum $tmp/big1 | awk '{ print $1 }')
done
TEST df -h
TEST stat $M0
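
# Record a backend path for each brick so file presence and fragment sizes
# can be checked directly on the bricks.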
for idx in `seq 0 $LAST_BRICK`; do
    brick[$idx]=$(gf_get_gfid_backend_file_path $B0/$V0$idx)
done
cd $M0
TEST stat .
TEST mkdir dir1
TEST [ -d dir1 ]
TEST touch file1
TEST [ -f file1 ]
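
# Run the same set of checks in the root of the mount and in a subdirectory.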
for dir in . dir1; do
    TEST cp $tmp/small $dir/small
    TEST [ -f $dir/small ]
    fsize=$(fragment_size 1024)
    EXPECT "1024" stat -c "%s" $dir/small
    for idx in `seq 0 $LAST_BRICK`; do
        EXPECT "$fsize" stat -c "%s" ${brick[$idx]}/$dir/small
    done
    EXPECT "$cs_small" echo $(sha1sum $dir/small | awk '{ print $1 }')

    TEST cp $tmp/big $dir/big
    TEST [ -f $dir/big ]
    fsize=$(fragment_size 4194304)
    EXPECT "4194304" stat -c "%s" $dir/big
    for idx in `seq 0 $LAST_BRICK`; do
        EXPECT "$fsize" stat -c "%s" ${brick[$idx]}/$dir/big
    done
    EXPECT "$cs_big" echo $(sha1sum $dir/big | awk '{ print $1 }')
    for idx in `seq 0 $LAST_BRICK`; do
        TEST kill_brick $V0 $H0 $B0/$V0$idx
        EXPECT "1024" stat -c "%s" $dir/small
        EXPECT "4194304" stat -c "%s" $dir/big
        EXPECT "$cs_small" echo $(sha1sum $dir/small | awk '{ print $1 }')
        EXPECT "$cs_big" echo $(sha1sum $dir/big | awk '{ print $1 }')

        cd
        EXPECT_WITHIN $UMOUNT_TIMEOUT "Y" force_umount $M0
        TEST $CLI volume stop $V0 force
        TEST $CLI volume start $V0
        TEST $GFS --volfile-id=/$V0 --volfile-server=$H0 $M0;
        cd $M0
    done
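
    # Truncate both files to every size in SIZE_LIST and verify the size seen
    # through the mount, the fragment size on every brick and the contents
    # against the precomputed checksums.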
    for size in $SIZE_LIST; do
        TEST truncate -s $size $dir/small
        TEST [ -f $dir/small ]
        fsize=$(fragment_size $size)
        EXPECT "$size" stat -c "%s" $dir/small
        for idx in `seq 0 $LAST_BRICK`; do
            EXPECT "$fsize" stat -c "%s" ${brick[$idx]}/$dir/small
        done
        EXPECT "${cs_small_truncate[$size]}" echo $(sha1sum $dir/small | awk '{ print $1 }')

        TEST truncate -s $size $dir/big
        TEST [ -f $dir/big ]
        EXPECT "$size" stat -c "%s" $dir/big
        for idx in `seq 0 $LAST_BRICK`; do
            EXPECT "$fsize" stat -c "%s" ${brick[$idx]}/$dir/big
        done
        EXPECT "${cs_big_truncate[$size]}" echo $(sha1sum $dir/big | awk '{ print $1 }')
    done
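
    # Remove both files and verify they are gone from the mount and from
    # every brick.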
    TEST rm -f $dir/small
    TEST [ ! -e $dir/small ]
    for idx in `seq 0 $LAST_BRICK`; do
        TEST [ ! -e ${brick[$idx]}/$dir/small ]
    done

    TEST rm -f $dir/big
    TEST [ ! -e $dir/big ]
    for idx in `seq 0 $LAST_BRICK`; do
        TEST [ ! -e ${brick[$idx]}/$dir/big ]
    done
done
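
# Finally remove the top-level directory and file and verify they are gone
# from every brick as well.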
TEST rmdir dir1
TEST [ ! -e dir1 ]
for idx in `seq 0 $LAST_BRICK`; do
    TEST [ ! -e ${brick[$idx]}/dir1 ]
done
TEST rm -f file1
TEST [ ! -e file1 ]
for idx in `seq 0 $LAST_BRICK`; do
    TEST [ ! -e ${brick[$idx]}/file1 ]
done
rm -rf $tmp
cleanup