@@ -13,9 +13,11 @@ queue_config =
   :table => 'test_stress',
   :disable_resource_limit => true, # TODO backend-specific test cases
   :cleanup_interval => 200,
+  alive_time: 60,
   insert_processes: 0,
   worker_processes: 0,
 }
+multiple_queues = nil
 opt = OptionParser.new
 
 opt.on('--url URL', 'database url') {|v| queue_config[:url] = v }
@@ -25,29 +27,31 @@ opt.on('--cleanup_interval SECOND', Integer, 'cleanup interval') {|v| queue_conf
 opt.on('--insert_processes NUM', Integer, 'inserts') {|v| queue_config[:insert_processes] = v }
 opt.on('--worker_processes NUM', Integer, 'workers') {|v| queue_config[:worker_processes] = v }
 opt.on('--retention-time SECOND', Integer, 'retention time') {|v| queue_config[:retention_time] = v }
+opt.on('--task-prefetch COUNT', Integer, 'task prefetch') {|v| queue_config[:task_prefetch] = v }
+opt.on('--multi-queues COUNT', Integer, 'multiple queues; spawn NUM processes for each queue') {|v| multiple_queues = v }
 opt.parse!(ARGV)
 
 module PerfectQueue
   class Queue
-    def submit1000(data)
-      @client.submit1000(data)
+    def submit10000(data)
+      @client.submit10000(data)
     end
   end
   class Client
-    def submit1000(data)
-      @backend.submit1000(data)
+    def submit10000(data)
+      @backend.submit10000(data)
     end
   end
   module Backend
     class RDBCompatBackend
-      def submit1000(h)
+      def submit10000(h)
         rd = Random.new
         i = 0
         connect {
           begin
             begin
               n = Process.clock_gettime(Process::CLOCK_REALTIME, :second)
-              submit0("import.1/main.action_#{n}_#{rd.hex(80)}", 'user02', h, now: n)
+              submit0("import.1/main.action_#{rd.hex(20)}", 'user02', h, now: n)
             end while (i+=1) < 10000
           end
         }
@@ -91,8 +95,9 @@ module PerfectQueue
 end
 
 
-def insert1000(queue)
+def insert10000(queue)
   Process.setproctitle 'bin/stress/insert'
+  rd = Random.new
   while 1
     t0 = t = Process.clock_gettime(Process::CLOCK_REALTIME, :second)
     h = {"path":"in/1/main.action.e9f070b5bfea96442af13ce6acc36699_0f7ad8aee859867aae303190e372ec1e.msgpack.gz",
@@ -113,13 +118,13 @@ def insert1000(queue)
          "params":{}},
         "params":{}}
     begin
-      queue.submit1000(h)
+      queue.submit10000(h)
     rescue Sequel::DatabaseError
       p $!
       sleep 5
     end
     t = Process.clock_gettime(Process::CLOCK_REALTIME, :second)
-    puts "#{__method__} #{Process.pid}: #{t-t0} sec for 1000 inserts\n"
+    puts "#{__method__} #{Process.pid}: #{t-t0} sec for 10000 inserts\n"
   end
 rescue Interrupt
   exit
@@ -149,7 +154,7 @@ def insert(queue)
         "params":{}}
     begin
       n = Process.clock_gettime(Process::CLOCK_REALTIME, :second)
-      queue.submit("import.1/main.action_#{n}_#{rd.hex(20)}", 'user02', h, now: t)
+      queue.submit("import.1/main.action_#{rd.hex(20)}", 'user02', h, now: t)
     rescue
       p $!
       sleep 1
@@ -164,9 +169,10 @@ def worker(queue)
   i = 0
   t0 = t = Process.clock_gettime(Process::CLOCK_REALTIME, :second)
   begin
-    ary = queue.poll_multi(max_acquire: 11, now: t)
-    sleep 1
+    ary = queue.poll_multi(max_acquire: queue.config[:task_prefetch].to_i+1, now: t)
     ary.each do |x|
+      x.heartbeat!
+      sleep 2+rand
       x.finish!({})
     end if ary
     t = Process.clock_gettime(Process::CLOCK_REALTIME, :second)
@@ -180,17 +186,29 @@ rescue Interrupt
   exit
 end
 
-pids = []
-queue = PerfectQueue.open(queue_config)
-#queue.client.init_database(:force => true)
-queue.config[:insert_processes].times do
-  pids << fork { insert1000(queue) }
+def fork_processes(pids, queue_config)
+  queue = PerfectQueue.open(queue_config)
+  #queue.client.init_database(:force => true)
+  queue.config[:insert_processes].times do
+    pids << fork { insert10000(queue) }
+  end
+  queue.config[:worker_processes].times do
+    #queue.client.backend.instance_variable_set(:@cleanup_interval_count, rand(200))
+    pids << fork { worker(queue) }
+  end
+  queue.close
 end
-queue.config[:worker_processes].times do
-  #queue.client.backend.instance_variable_set(:@cleanup_interval_count, rand(200))
-  pids << fork { worker(queue) }
+
+pids = []
+if multiple_queues
+  multiple_queues.times do |i|
+    config = queue_config.dup
+    config[:table] += i.to_s
+    fork_processes(pids, config)
+  end
+else
+  fork_processes(pids, queue_config)
 end
-queue.close
 
 trap(:INT) do
   pids.each do |pid|