Lines matching references to "call" in net/rxrpc/call_event.c (the rxrpc call event processor); the leading number on each line is its line number within that file.

23 static void rxrpc_propose_ping(struct rxrpc_call *call,
28 !test_and_set_bit(RXRPC_CALL_EV_PING, &call->events))
29 rxrpc_queue_call(call);
34 if (time_before(ping_at, call->ping_at)) {
35 WRITE_ONCE(call->ping_at, ping_at);
36 rxrpc_reduce_call_timer(call, ping_at, now,
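
The fragments above (file lines 23-36) come from rxrpc_propose_ping(): an immediate ping queues the call for its event processor, otherwise the ping deadline is pulled forward only if the new time is earlier than the stored one, with time_before() keeping the comparison safe across jiffies wraparound. A minimal userspace sketch of that "reduce-only" deadline update follows; time_before() is reproduced from its kernel definition, but the struct and reduce_deadline() are illustrative names, not rxrpc API.

    #include <stdio.h>

    /* Same trick as the kernel's time_before(): the signed difference
     * keeps the comparison correct even after the counter wraps. */
    #define time_before(a, b)  ((long)((a) - (b)) < 0)

    struct soft_timer {                /* illustrative stand-in for the call timer */
        unsigned long expires_at;      /* absolute deadline, in "jiffies" */
    };

    /* Pull the deadline earlier only if the proposed time beats it;
     * the real code then calls rxrpc_reduce_call_timer() to re-arm. */
    static void reduce_deadline(struct soft_timer *t, unsigned long proposed)
    {
        if (time_before(proposed, t->expires_at))
            t->expires_at = proposed;  /* the kernel uses WRITE_ONCE() here */
    }

    int main(void)
    {
        struct soft_timer t = { .expires_at = 1000 };
        reduce_deadline(&t, 900);      /* accepted: earlier */
        reduce_deadline(&t, 1200);     /* ignored: would push the timer back */
        printf("deadline = %lu\n", t.expires_at);   /* prints 900 */
        return 0;
    }
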
45 static void __rxrpc_propose_ACK(struct rxrpc_call *call, u8 ack_reason,
57 rxrpc_propose_ping(call, immediate, background);
66 call->ackr_reason, rxrpc_ack_priority[call->ackr_reason]);
67 if (ack_reason == call->ackr_reason) {
70 call->ackr_serial = serial;
74 } else if (prior > rxrpc_ack_priority[call->ackr_reason]) {
75 call->ackr_reason = ack_reason;
76 call->ackr_serial = serial;
104 if (test_bit(RXRPC_CALL_EV_ACK, &call->events)) {
107 _debug("immediate ACK %lx", call->events);
108 if (!test_and_set_bit(RXRPC_CALL_EV_ACK, &call->events) &&
110 rxrpc_queue_call(call);
114 if (call->peer->srtt_us != 0)
115 ack_at = usecs_to_jiffies(call->peer->srtt_us >> 3);
119 ack_at += READ_ONCE(call->tx_backoff);
121 if (time_before(ack_at, call->ack_at)) {
122 WRITE_ONCE(call->ack_at, ack_at);
123 rxrpc_reduce_call_timer(call, ack_at, now,
129 trace_rxrpc_propose_ack(call, why, ack_reason, serial, immediate,
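
File lines 45-129 above are from __rxrpc_propose_ACK(). Lines 66-76 show how a new proposal competes with a pending ACK: the same reason just refreshes the recorded serial, a reason with a higher entry in rxrpc_ack_priority[] displaces the pending one, and anything else is dropped unless it asked for an immediate send; lines 104-123 then either queue the call for an immediate ACK or derive a delayed-ACK deadline from srtt and tx_backoff. A small self-contained sketch of the arbitration step, with an invented priority table and field names rather than the rxrpc ones:

    #include <stdio.h>

    enum ack_reason { ACK_NONE, ACK_IDLE, ACK_DELAY, ACK_PING, ACK_DUPLICATE };

    /* Illustrative priority table: a larger value wins.  The kernel keys
     * rxrpc_ack_priority[] by the RXRPC_ACK_* codes in the same way. */
    static const unsigned char ack_priority[] = {
        [ACK_NONE]      = 0,
        [ACK_IDLE]      = 1,
        [ACK_DELAY]     = 2,
        [ACK_PING]      = 3,
        [ACK_DUPLICATE] = 4,
    };

    struct pending_ack {
        enum ack_reason reason;        /* ACK_NONE means nothing is pending */
        unsigned int    serial;        /* serial of the packet being acked */
    };

    static void propose_ack(struct pending_ack *p, enum ack_reason reason,
                            unsigned int serial)
    {
        if (reason == p->reason) {
            /* Same reason already queued: just refresh the serial (the
             * kernel additionally checks that this reason's serial is
             * one that may be updated). */
            p->serial = serial;
        } else if (ack_priority[reason] > ack_priority[p->reason]) {
            /* A higher-priority reason displaces the pending one. */
            p->reason = reason;
            p->serial = serial;
        }
        /* Otherwise the proposal is dropped; the real code still honours
         * an "immediate" request via a separate path. */
    }

    int main(void)
    {
        struct pending_ack p = { .reason = ACK_IDLE, .serial = 1 };
        propose_ack(&p, ACK_PING, 42);    /* displaces IDLE */
        propose_ack(&p, ACK_DELAY, 43);   /* ignored: lower priority */
        printf("reason=%d serial=%u\n", p.reason, p.serial);   /* 3 42 */
        return 0;
    }
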
134 * propose an ACK be sent, locking the call structure
136 void rxrpc_propose_ACK(struct rxrpc_call *call, u8 ack_reason,
140 spin_lock_bh(&call->lock);
141 __rxrpc_propose_ACK(call, ack_reason, serial,
143 spin_unlock_bh(&call->lock);
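
File lines 134-143 are the locked wrapper: rxrpc_propose_ACK() takes call->lock with spin_lock_bh() (which also disables softirqs locally, something a userspace analogue cannot show) and delegates to the double-underscore variant that expects the lock to be held. The split is a common kernel convention; a pthread-based analogue with invented names:

    #include <pthread.h>
    #include <stdio.h>

    struct call {                      /* illustrative, not struct rxrpc_call */
        pthread_mutex_t lock;
        unsigned int    ackr_serial;
    };

    /* The "__" variant: caller must already hold call->lock. */
    static void __propose_ack(struct call *call, unsigned int serial)
    {
        call->ackr_serial = serial;
    }

    /* Public entry point: wraps the locked variant in lock/unlock. */
    static void propose_ack(struct call *call, unsigned int serial)
    {
        pthread_mutex_lock(&call->lock);
        __propose_ack(call, serial);
        pthread_mutex_unlock(&call->lock);
    }

    int main(void)
    {
        struct call c = { .lock = PTHREAD_MUTEX_INITIALIZER, .ackr_serial = 0 };
        propose_ack(&c, 7);
        printf("%u\n", c.ackr_serial);   /* prints 7 */
        return 0;
    }
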
149 static void rxrpc_congestion_timeout(struct rxrpc_call *call)
151 set_bit(RXRPC_CALL_RETRANS_TIMEOUT, &call->flags);
157 static void rxrpc_resend(struct rxrpc_call *call, unsigned long now_j)
166 _enter("{%d,%d}", call->tx_hard_ack, call->tx_top);
169 max_age = ktime_sub_us(now, jiffies_to_usecs(call->peer->rto_j));
171 spin_lock_bh(&call->lock);
173 cursor = call->tx_hard_ack;
174 top = call->tx_top;
183 trace_rxrpc_resend(call, (cursor + 1) & RXRPC_RXTX_BUFF_MASK);
187 annotation = call->rxtx_annotations[ix];
193 skb = call->rxtx_buffer[ix];
207 call->rxtx_annotations[ix] = RXRPC_TX_ANNO_RETRANS | annotation;
209 trace_rxrpc_retransmit(call, seq, annotation | anno_type,
214 resend_at += jiffies + rxrpc_get_rto_backoff(call->peer, retrans);
215 WRITE_ONCE(call->resend_at, resend_at);
218 rxrpc_congestion_timeout(call);
225 rxrpc_reduce_call_timer(call, resend_at, now_j,
227 spin_unlock_bh(&call->lock);
228 ack_ts = ktime_sub(now, call->acks_latest_ts);
229 if (ktime_to_us(ack_ts) < (call->peer->srtt_us >> 3))
231 rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, false,
233 rxrpc_send_ack_packet(call, true, NULL);
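
File lines 157-233 are the first half of rxrpc_resend(): under call->lock it walks the Tx ring between tx_hard_ack and tx_top (173-174), inspects each slot's annotation and skb (187, 193), marks anything sent longer than an RTO ago with RXRPC_TX_ANNO_RETRANS (207), derives the next resend deadline from the oldest surviving packet plus the RTO backoff (214-215), and, if the most recent ACK is older than about an eighth of the smoothed RTT (228-229), proposes and sends a ping ACK to probe the peer (231-233). A self-contained sketch of the scan, mirroring its shape but not the exact rxrpc bookkeeping; all names here are invented:

    #include <stdint.h>
    #include <stdio.h>

    #define RING_SIZE 64                    /* illustrative, not RXRPC_RXTX_BUFF_SIZE */
    #define RING_MASK (RING_SIZE - 1)

    enum { ANNO_RETRANS = 0x01 };           /* "needs retransmission" mark */

    struct tx_ring {
        uint64_t sent_at[RING_SIZE];        /* per-packet send time (us) */
        uint8_t  anno[RING_SIZE];           /* per-packet annotation bits */
        uint32_t hard_ack;                  /* highest hard-acked sequence */
        uint32_t top;                       /* highest sequence queued */
    };

    /* Mark every unacked packet sent before max_age for retransmission and
     * return when the resend timer should next fire, based on the oldest
     * packet that is still within its allowance. */
    static uint64_t scan_for_resend(struct tx_ring *r, uint64_t now,
                                    uint64_t max_age, uint64_t rto_us)
    {
        uint64_t oldest = now;

        for (uint32_t seq = r->hard_ack + 1; (int32_t)(seq - r->top) <= 0; seq++) {
            uint32_t ix = seq & RING_MASK;

            if (r->anno[ix] & ANNO_RETRANS)
                continue;                   /* already marked */
            if (r->sent_at[ix] >= max_age) {
                if (r->sent_at[ix] < oldest)
                    oldest = r->sent_at[ix];   /* oldest packet still in budget */
                continue;
            }
            r->anno[ix] |= ANNO_RETRANS;    /* too old: retransmit it */
        }
        return oldest + rto_us;             /* next time to re-check */
    }

    int main(void)
    {
        struct tx_ring r = { .hard_ack = 0, .top = 2 };
        r.sent_at[1] = 100; r.sent_at[2] = 900;
        printf("resend check at %llu\n",
               (unsigned long long)scan_for_resend(&r, 1000, 500, 500));
        return 0;
    }
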
244 annotation = call->rxtx_annotations[ix];
255 call->rxtx_annotations[ix] = annotation;
257 skb = call->rxtx_buffer[ix];
262 spin_unlock_bh(&call->lock);
264 if (rxrpc_send_data_packet(call, skb, true) < 0) {
269 if (rxrpc_is_client_call(call))
270 rxrpc_expose_client_call(call);
273 spin_lock_bh(&call->lock);
274 if (after(call->tx_hard_ack, seq))
275 seq = call->tx_hard_ack;
279 spin_unlock_bh(&call->lock);
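
File lines 244-279 are the second half of rxrpc_resend(): packets marked for retransmission have the mark cleared in their annotation (244-255), the skb is picked up (257), the call lock is dropped before rxrpc_send_data_packet() runs (262-264) because transmission may block, and once the lock is re-taken the loop skips forward past anything that became hard-acked in the meantime (273-275). A pthread sketch of that "drop the lock around I/O, then re-validate" pattern; transmit_one() is a stand-in stub, not an rxrpc function:

    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    struct tx_state {                       /* illustrative stand-in for the call */
        pthread_mutex_t lock;
        uint32_t hard_ack;                  /* may advance while we transmit */
    };

    /* Stand-in for the actual transmit path, which may block. */
    static void transmit_one(uint32_t seq)
    {
        printf("retransmit seq %u\n", seq);
    }

    static void retransmit_window(struct tx_state *s, uint32_t from, uint32_t to)
    {
        pthread_mutex_lock(&s->lock);
        for (uint32_t seq = from; (int32_t)(seq - to) <= 0; seq++) {
            /* ...pick the packet for 'seq' under the lock... */

            pthread_mutex_unlock(&s->lock); /* never transmit under the lock */
            transmit_one(seq);
            pthread_mutex_lock(&s->lock);

            /* An ACK may have landed while we were unlocked: jump past
             * anything now hard-acked, as file lines 274-275 do. */
            if ((int32_t)(s->hard_ack - seq) > 0)
                seq = s->hard_ack;
        }
        pthread_mutex_unlock(&s->lock);
    }

    int main(void)
    {
        struct tx_state s = { .lock = PTHREAD_MUTEX_INITIALIZER, .hard_ack = 0 };
        retransmit_window(&s, 1, 3);        /* retransmits seqs 1..3 */
        return 0;
    }
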
289 struct rxrpc_call *call =
295 rxrpc_see_call(call);
299 call->debug_id, rxrpc_call_states[call->state], call->events);
307 if (test_and_clear_bit(RXRPC_CALL_EV_ABORT, &call->events)) {
308 rxrpc_send_abort_packet(call);
312 if (call->state == RXRPC_CALL_COMPLETE) {
313 rxrpc_delete_call_timer(call);
319 t = READ_ONCE(call->expect_rx_by);
321 trace_rxrpc_timer(call, rxrpc_timer_exp_normal, now);
322 set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
325 t = READ_ONCE(call->expect_req_by);
326 if (call->state == RXRPC_CALL_SERVER_RECV_REQUEST &&
328 trace_rxrpc_timer(call, rxrpc_timer_exp_idle, now);
329 set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
332 t = READ_ONCE(call->expect_term_by);
334 trace_rxrpc_timer(call, rxrpc_timer_exp_hard, now);
335 set_bit(RXRPC_CALL_EV_EXPIRED, &call->events);
338 t = READ_ONCE(call->ack_at);
340 trace_rxrpc_timer(call, rxrpc_timer_exp_ack, now);
341 cmpxchg(&call->ack_at, t, now + MAX_JIFFY_OFFSET);
342 set_bit(RXRPC_CALL_EV_ACK, &call->events);
345 t = READ_ONCE(call->ack_lost_at);
347 trace_rxrpc_timer(call, rxrpc_timer_exp_lost_ack, now);
348 cmpxchg(&call->ack_lost_at, t, now + MAX_JIFFY_OFFSET);
349 set_bit(RXRPC_CALL_EV_ACK_LOST, &call->events);
352 t = READ_ONCE(call->keepalive_at);
354 trace_rxrpc_timer(call, rxrpc_timer_exp_keepalive, now);
355 cmpxchg(&call->keepalive_at, t, now + MAX_JIFFY_OFFSET);
356 rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, true,
358 set_bit(RXRPC_CALL_EV_PING, &call->events);
361 t = READ_ONCE(call->ping_at);
363 trace_rxrpc_timer(call, rxrpc_timer_exp_ping, now);
364 cmpxchg(&call->ping_at, t, now + MAX_JIFFY_OFFSET);
365 set_bit(RXRPC_CALL_EV_PING, &call->events);
368 t = READ_ONCE(call->resend_at);
370 trace_rxrpc_timer(call, rxrpc_timer_exp_resend, now);
371 cmpxchg(&call->resend_at, t, now + MAX_JIFFY_OFFSET);
372 set_bit(RXRPC_CALL_EV_RESEND, &call->events);
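
File lines 319-372 are the timer sweep at the top of the call event processor: each deadline is read once, and if it has expired the matching event bit is set; for the re-armable timers (ack_at, ack_lost_at, keepalive_at, ping_at, resend_at) the deadline is first parked far in the future with cmpxchg(&..., t, now + MAX_JIFFY_OFFSET), so the disarm only lands if nothing re-armed the timer between the read and the update. A C11-atomics sketch of that expire-and-disarm step, with invented names and a stand-in for MAX_JIFFY_OFFSET:

    #include <stdatomic.h>
    #include <stdio.h>
    #include <limits.h>

    #define MAX_OFFSET ((unsigned long)LONG_MAX >> 1)  /* stand-in for MAX_JIFFY_OFFSET */
    #define time_after_eq(a, b)  ((long)((a) - (b)) >= 0)

    enum { EV_ACK = 1u << 0 };              /* illustrative event bit */

    struct call_timers {
        _Atomic unsigned long ack_at;       /* absolute deadline */
        _Atomic unsigned int  events;       /* pending-event bitmask */
    };

    /* If the deadline has passed, push it far into the future and raise the
     * event.  The compare-exchange only succeeds if nobody re-armed the
     * timer in between, mirroring cmpxchg(&call->ack_at, t, ...). */
    static void check_ack_deadline(struct call_timers *c, unsigned long now)
    {
        unsigned long t = atomic_load(&c->ack_at);

        if (time_after_eq(now, t)) {
            atomic_compare_exchange_strong(&c->ack_at, &t, now + MAX_OFFSET);
            atomic_fetch_or(&c->events, EV_ACK);
        }
    }

    int main(void)
    {
        struct call_timers c;
        atomic_init(&c.ack_at, 100);
        atomic_init(&c.events, 0);
        check_ack_deadline(&c, 150);        /* expired: disarm + raise EV_ACK */
        printf("events=%u ack_at=%lu\n",
               atomic_load(&c.events), atomic_load(&c.ack_at));
        return 0;
    }

The three hard deadlines at file lines 319-335 (expect_rx_by, expect_req_by, expect_term_by) skip the disarm step and only set RXRPC_CALL_EV_EXPIRED, since their expiry ends in an abort rather than a re-arm.
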
376 if (test_and_clear_bit(RXRPC_CALL_EV_EXPIRED, &call->events)) {
377 if (test_bit(RXRPC_CALL_RX_HEARD, &call->flags) &&
378 (int)call->conn->hi_serial - (int)call->rx_serial > 0) {
379 trace_rxrpc_call_reset(call);
380 rxrpc_abort_call("EXP", call, 0, RX_CALL_DEAD, -ECONNRESET);
382 rxrpc_abort_call("EXP", call, 0, RX_CALL_TIMEOUT, -ETIME);
384 set_bit(RXRPC_CALL_EV_ABORT, &call->events);
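
When RXRPC_CALL_EV_EXPIRED fires (file lines 376-384), the code distinguishes a dead call from a plain timeout: if traffic was heard on the call (RXRPC_CALL_RX_HEARD) and the connection's highest seen serial has moved past the call's last received serial, the peer is evidently alive but has abandoned this call, so it is aborted as RX_CALL_DEAD/-ECONNRESET instead of RX_CALL_TIMEOUT/-ETIME. The serial comparison at line 378 relies on signed subtraction so it stays correct across serial wraparound; a tiny sketch of that test (function name invented):

    #include <stdbool.h>
    #include <stdio.h>

    /* Wrap-safe "has the connection seen newer packets than this call?"
     * check, in the spirit of (int)hi_serial - (int)rx_serial > 0. */
    static bool conn_ahead_of_call(unsigned int conn_hi_serial,
                                   unsigned int call_rx_serial)
    {
        return (int)(conn_hi_serial - call_rx_serial) > 0;
    }

    int main(void)
    {
        printf("%d\n", conn_ahead_of_call(10, 5));          /* 1: treat as reset */
        printf("%d\n", conn_ahead_of_call(5, 5));           /* 0: plain timeout  */
        printf("%d\n", conn_ahead_of_call(3, 0xfffffffdu)); /* 1: across wrap    */
        return 0;
    }
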
389 if (test_and_clear_bit(RXRPC_CALL_EV_ACK_LOST, &call->events)) {
390 call->acks_lost_top = call->tx_top;
391 rxrpc_propose_ACK(call, RXRPC_ACK_PING, 0, true, false,
393 send_ack = &call->acks_lost_ping;
396 if (test_and_clear_bit(RXRPC_CALL_EV_ACK, &call->events) ||
398 if (call->ackr_reason) {
399 rxrpc_send_ack_packet(call, false, send_ack);
404 if (test_and_clear_bit(RXRPC_CALL_EV_PING, &call->events)) {
405 rxrpc_send_ack_packet(call, true, NULL);
409 if (test_and_clear_bit(RXRPC_CALL_EV_RESEND, &call->events) &&
410 call->state != RXRPC_CALL_CLIENT_RECV_REPLY) {
411 rxrpc_resend(call, now);
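
File lines 389-411 dispatch the remaining events: each RXRPC_CALL_EV_* bit is consumed with test_and_clear_bit(), so exactly one pass acts on it, and the matching packet work is done (ping after a lost ACK, a queued ACK, a ping, a resend); resends are suppressed once a client call is only waiting for the final reply (409-410). A small C11 sketch of that consume-one-bit dispatch style, with invented event names:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    enum { EV_ACK = 1u << 0, EV_PING = 1u << 1, EV_RESEND = 1u << 2 };

    /* Consume one event bit atomically; only the caller that actually
     * cleared the bit gets true, like test_and_clear_bit(). */
    static bool test_and_clear(_Atomic unsigned int *events, unsigned int bit)
    {
        return atomic_fetch_and(events, ~bit) & bit;
    }

    static void process_events(_Atomic unsigned int *events)
    {
        if (test_and_clear(events, EV_ACK))
            puts("send ACK");
        if (test_and_clear(events, EV_PING))
            puts("send PING");
        if (test_and_clear(events, EV_RESEND))
            puts("resend unacked data");

        /* The real processor loops (goto recheck_state) or requeues itself
         * if new events were raised while these were being handled. */
        if (atomic_load(events))
            puts("more events arrived: go around again");
    }

    int main(void)
    {
        _Atomic unsigned int events = EV_ACK | EV_RESEND;
        process_events(&events);
        return 0;
    }
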
416 next = call->expect_rx_by;
420 set(call->expect_req_by);
421 set(call->expect_term_by);
422 set(call->ack_at);
423 set(call->ack_lost_at);
424 set(call->resend_at);
425 set(call->keepalive_at);
426 set(call->ping_at);
432 rxrpc_reduce_call_timer(call, next, now, rxrpc_timer_restart);
435 if (call->events && call->state < RXRPC_CALL_COMPLETE)
439 rxrpc_put_call(call, rxrpc_call_put);
445 __rxrpc_queue_call(call);
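
File lines 416-432 close the processor by recomputing when the call timer should next fire: next starts at expect_rx_by and the set() macro pulls it down to the earliest of the other deadlines (420-426) using time_before(), after which the timer is re-armed, or the loop runs again if that moment has already passed; lines 435-445 drop the call reference or requeue the work item if events remain outstanding on a call that has not completed. A sketch of the earliest-deadline computation, wraparound-safe in the same way:

    #include <stdio.h>

    #define time_before(a, b)    ((long)((a) - (b)) < 0)
    #define time_after_eq(a, b)  ((long)((a) - (b)) >= 0)

    /* Earliest of several absolute deadlines, in the spirit of the set()
     * macro used at file lines 420-426. */
    static unsigned long next_deadline(const unsigned long *t, int n)
    {
        unsigned long next = t[0];

        for (int i = 1; i < n; i++)
            if (time_before(t[i], next))
                next = t[i];
        return next;
    }

    int main(void)
    {
        unsigned long now = 100;
        unsigned long deadlines[] = { 180, 150, 400 };  /* e.g. ack_at, ping_at, resend_at */
        unsigned long next = next_deadline(deadlines, 3);

        if (time_after_eq(now, next))
            puts("already due: handle events again before sleeping");
        else
            printf("re-arm the call timer for %lu\n", next);
        return 0;
    }
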