Lines matching refs:umem

38  *            Set the 'len' field of tx descriptors to an invalid value (umem frame
62 * - Each thread creates one AF_XDP socket connected to a unique umem for each
166 return !!ifobj->umem->umem;
174 static u64 umem_size(struct xsk_umem_info *umem)
176 return umem->num_frames * umem->frame_size;
179 static int xsk_configure_umem(struct ifobject *ifobj, struct xsk_umem_info *umem, void *buffer,
185 .frame_size = umem->frame_size,
186 .frame_headroom = umem->frame_headroom,
191 if (umem->unaligned_mode)
194 ret = xsk_umem__create(&umem->umem, buffer, size,
195 &umem->fq, &umem->cq, &cfg);
199 umem->buffer = buffer;
201 umem->base_addr = umem_size(umem);
202 umem->next_buffer = umem_size(umem);
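
The fragments at source lines 174-202 show how the harness sizes a umem and registers it with xsk_umem__create(). A minimal standalone sketch of the same pattern, assuming the upstream libxdp <xdp/xsk.h> header (the selftest ships its own xsk.h copy) and a hypothetical setup_umem() helper; ring sizes are the library defaults:

    #include <stddef.h>
    #include <sys/mman.h>
    #include <xdp/xsk.h>

    #define NUM_FRAMES 4096
    #define FRAME_SIZE XSK_UMEM__DEFAULT_FRAME_SIZE

    /* Hypothetical helper: map an anonymous area and register it as a umem. */
    static struct xsk_umem *setup_umem(struct xsk_ring_prod *fq,
                                       struct xsk_ring_cons *cq, void **buffer)
    {
        const struct xsk_umem_config cfg = {
            .fill_size = XSK_RING_PROD__DEFAULT_NUM_DESCS,
            .comp_size = XSK_RING_CONS__DEFAULT_NUM_DESCS,
            .frame_size = FRAME_SIZE,
            .frame_headroom = XSK_UMEM__DEFAULT_FRAME_HEADROOM,
            .flags = 0,    /* XDP_UMEM_UNALIGNED_CHUNK_FLAG for unaligned mode */
        };
        size_t size = (size_t)NUM_FRAMES * FRAME_SIZE;
        struct xsk_umem *umem = NULL;
        void *area;

        area = mmap(NULL, size, PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (area == MAP_FAILED)
            return NULL;

        if (xsk_umem__create(&umem, area, size, fq, cq, &cfg)) {
            munmap(area, size);
            return NULL;
        }

        *buffer = area;
        return umem;
    }

In the harness, base_addr and next_buffer (source lines 201-202) additionally bias where buffer allocation starts inside that area.
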
208 static u64 umem_alloc_buffer(struct xsk_umem_info *umem)
212 addr = umem->next_buffer;
213 umem->next_buffer += umem->frame_size;
214 if (umem->next_buffer >= umem->base_addr + umem_size(umem))
215 umem->next_buffer = umem->base_addr;
220 static void umem_reset_alloc(struct xsk_umem_info *umem)
222 umem->next_buffer = 0;
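
umem_alloc_buffer() and umem_reset_alloc() (source lines 208-222) hand out frame-sized buffers round-robin from the umem area. A self-contained sketch of that allocator with illustrative names; the harness resets next_buffer to 0 (source line 222), which is equivalent to resetting to base_addr once the Rx side's base_addr has been zeroed (source line 1382):

    #include <stdint.h>

    /* Illustrative stand-in for the xsk_umem_info allocation bookkeeping. */
    struct frame_alloc {
        uint64_t base_addr;      /* start of this side's region of the umem */
        uint64_t next;           /* next frame to hand out */
        uint64_t num_frames;
        uint64_t frame_size;
    };

    /* Return the umem-relative address of the next frame, wrapping around. */
    static uint64_t frame_alloc_next(struct frame_alloc *fa)
    {
        uint64_t addr = fa->next;

        fa->next += fa->frame_size;
        if (fa->next >= fa->base_addr + fa->num_frames * fa->frame_size)
            fa->next = fa->base_addr;

        return addr;
    }

    static void frame_alloc_reset(struct frame_alloc *fa)
    {
        fa->next = fa->base_addr;
    }
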
245 static int __xsk_configure_socket(struct xsk_socket_info *xsk, struct xsk_umem_info *umem,
252 xsk->umem = umem;
263 return xsk_socket__create(&xsk->xsk, ifobject->ifindex, 0, umem->umem, rxr, txr, &cfg);
271 struct xsk_umem_info *umem;
280 umem = calloc(1, sizeof(struct xsk_umem_info));
281 if (!umem) {
285 umem->frame_size = XSK_UMEM__DEFAULT_FRAME_SIZE;
286 ret = xsk_configure_umem(ifobject, umem, bufs, umem_sz);
296 ret = __xsk_configure_socket(xsk, umem, ifobject, false);
303 munmap(umem->buffer, umem_sz);
304 xsk_umem__delete(umem->umem);
305 free(umem);
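
__xsk_configure_socket() and its caller (source lines 245-305) wrap xsk_socket__create() and unwind the umem mapping when socket creation fails. A minimal sketch of that create/cleanup order, assuming the upstream libxdp signature that takes an interface name (the selftest's private xsk.h variant takes an ifindex, as source line 263 shows):

    #include <stdint.h>
    #include <stdio.h>
    #include <xdp/xsk.h>

    static struct xsk_socket *setup_socket(const char *ifname, uint32_t queue_id,
                                           struct xsk_umem *umem,
                                           struct xsk_ring_cons *rx,
                                           struct xsk_ring_prod *tx)
    {
        const struct xsk_socket_config cfg = {
            .rx_size = XSK_RING_CONS__DEFAULT_NUM_DESCS,
            .tx_size = XSK_RING_PROD__DEFAULT_NUM_DESCS,
            .bind_flags = XDP_USE_NEED_WAKEUP,
        };
        struct xsk_socket *xsk = NULL;
        int ret;

        ret = xsk_socket__create(&xsk, ifname, queue_id, umem, rx, tx, &cfg);
        if (ret) {
            fprintf(stderr, "xsk_socket__create failed: %d\n", ret);
            return NULL;    /* caller deletes the umem and munmaps, as above */
        }

        return xsk;
    }
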
406 memset(ifobj->umem, 0, sizeof(*ifobj->umem));
407 ifobj->umem->num_frames = DEFAULT_UMEM_BUFFERS;
408 ifobj->umem->frame_size = XSK_UMEM__DEFAULT_FRAME_SIZE;
599 static void pkt_set(struct xsk_umem_info *umem, struct pkt *pkt, int offset, u32 len)
609 static u32 pkt_get_buffer_len(struct xsk_umem_info *umem, u32 len)
611 return ceil_u32(len, umem->frame_size) * umem->frame_size;
614 static struct pkt_stream *pkt_stream_generate(struct xsk_umem_info *umem, u32 nb_pkts, u32 pkt_len)
628 pkt_set(umem, pkt, 0, pkt_len);
635 static struct pkt_stream *pkt_stream_clone(struct xsk_umem_info *umem,
638 return pkt_stream_generate(umem, pkt_stream->nb_pkts, pkt_stream->pkts[0].len);
645 pkt_stream = pkt_stream_generate(test->ifobj_tx->umem, nb_pkts, pkt_len);
647 pkt_stream = pkt_stream_generate(test->ifobj_rx->umem, nb_pkts, pkt_len);
654 struct xsk_umem_info *umem = ifobj->umem;
658 pkt_stream = pkt_stream_clone(umem, ifobj->pkt_stream);
660 pkt_set(umem, &pkt_stream->pkts[i], offset, pkt_len);
673 struct xsk_umem_info *umem = test->ifobj_rx->umem;
677 test->ifobj_rx->pkt_stream = pkt_stream_generate(umem, pkt_stream->nb_pkts,
684 static u64 pkt_get_addr(struct pkt *pkt, struct xsk_umem_info *umem)
688 return pkt->offset + umem_alloc_buffer(umem);
699 void *data = xsk_umem__get_data(ifobject->umem->buffer, addr);
817 static bool is_offset_correct(struct xsk_umem_info *umem, struct pkt *pkt, u64 addr)
819 u32 headroom = umem->unaligned_mode ? 0 : umem->frame_headroom;
820 u32 offset = addr % umem->frame_size, expected_offset;
823 if (!umem->unaligned_mode)
826 expected_offset = (pkt_offset + headroom + XDP_PACKET_HEADROOM) % umem->frame_size;
849 static bool is_frag_valid(struct xsk_umem_info *umem, u64 addr, u32 len, u32 expected_pkt_nb,
853 void *data = xsk_umem__get_data(umem->buffer, addr);
855 addr -= umem->base_addr;
857 if (addr >= umem->num_frames * umem->frame_size ||
858 addr + len > umem->num_frames * umem->frame_size) {
862 if (!umem->unaligned_mode && addr % umem->frame_size + len > umem->frame_size) {
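
is_offset_correct() and is_frag_valid() (source lines 817-862) sanity-check every received address against the umem geometry. The core bounds test, pulled out into a standalone helper with illustrative parameter names:

    #include <stdbool.h>
    #include <stdint.h>

    /*
     * A received fragment is only plausible if it lies entirely inside the
     * umem and, in aligned mode, does not straddle a frame boundary.
     */
    static bool frag_within_umem(uint64_t addr, uint32_t len, uint64_t num_frames,
                                 uint32_t frame_size, bool unaligned_mode)
    {
        uint64_t umem_size = num_frames * (uint64_t)frame_size;

        if (addr >= umem_size || addr + len > umem_size)
            return false;    /* outside the umem area */

        if (!unaligned_mode && addr % frame_size + len > frame_size)
            return false;    /* crosses a frame boundary in aligned mode */

        return true;
    }
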
950 rcvd = xsk_ring_cons__peek(&xsk->umem->cq, batch_size, &idx);
953 u64 addr = *xsk_ring_cons__comp_addr(&xsk->umem->cq, idx + rcvd - 1);
960 xsk_ring_cons__release(&xsk->umem->cq, rcvd);
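
The completion-ring fragments (source lines 950-960) are the Tx half of buffer recycling: once the kernel has transmitted a frame, its address appears in the cq and the buffer may be reused. A minimal drain loop, assuming the libxdp xsk.h API:

    #include <stdint.h>
    #include <xdp/xsk.h>

    /* Reap up to batch_size completed Tx buffers; returns how many were reaped. */
    static uint32_t drain_completions(struct xsk_ring_cons *cq, uint32_t batch_size)
    {
        uint32_t idx;
        uint32_t rcvd = xsk_ring_cons__peek(cq, batch_size, &idx);

        for (uint32_t i = 0; i < rcvd; i++) {
            uint64_t addr = *xsk_ring_cons__comp_addr(cq, idx + i);

            (void)addr;    /* return the buffer to the caller's free list here */
        }

        xsk_ring_cons__release(cq, rcvd);
        return rcvd;
    }
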
974 struct xsk_umem_info *umem = xsk->umem;
1019 ret = xsk_ring_prod__reserve(&umem->fq, rcvd, &idx_fq);
1023 if (xsk_ring_prod__needs_wakeup(&umem->fq)) {
1028 ret = xsk_ring_prod__reserve(&umem->fq, rcvd, &idx_fq);
1045 if (!is_frag_valid(umem, addr, desc->len, pkt->pkt_nb, pkt_len) ||
1046 !is_offset_correct(umem, pkt, addr) ||
1047 (ifobj->use_metadata && !is_metadata_correct(pkt, umem->buffer, addr)))
1055 *xsk_ring_prod__fill_addr(&umem->fq, idx_fq++) = orig;
1061 if (!is_pkt_valid(pkt, umem->buffer, first_addr, pkt_len) ||
1062 !is_offset_correct(umem, pkt, addr))
1076 xsk_ring_prod__cancel(&umem->fq, nb_frags);
1082 xsk_ring_prod__submit(&umem->fq, frags_processed);
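
receive_pkts() (source lines 974-1082 in the fragments above) consumes the Rx ring and immediately recycles each address into the fill ring, kicking the kernel when the fill ring asks for a wakeup. A condensed sketch of that receive-and-refill batch, aligned umem assumed; unaligned addresses would first need xsk_umem__extract_addr()/xsk_umem__add_offset_to_addr():

    #include <stdint.h>
    #include <sys/socket.h>
    #include <xdp/xsk.h>

    /* Receive one batch and hand every buffer straight back to the fill ring. */
    static void recv_and_refill(struct xsk_socket *xsk, struct xsk_ring_cons *rx,
                                struct xsk_ring_prod *fq, void *umem_area,
                                uint32_t batch_size)
    {
        uint32_t idx_rx, idx_fq;
        uint32_t rcvd = xsk_ring_cons__peek(rx, batch_size, &idx_rx);

        if (!rcvd)
            return;

        /* Retry the reserve until the fill ring has room, waking the kernel. */
        while (xsk_ring_prod__reserve(fq, rcvd, &idx_fq) != rcvd) {
            if (xsk_ring_prod__needs_wakeup(fq))
                recvfrom(xsk_socket__fd(xsk), NULL, 0, MSG_DONTWAIT, NULL, NULL);
        }

        for (uint32_t i = 0; i < rcvd; i++) {
            const struct xdp_desc *desc = xsk_ring_cons__rx_desc(rx, idx_rx + i);
            void *data = xsk_umem__get_data(umem_area, desc->addr);

            (void)data;    /* the harness validates payload, offset and metadata here */
            *xsk_ring_prod__fill_addr(fq, idx_fq++) = desc->addr;
        }

        xsk_ring_prod__submit(fq, rcvd);
        xsk_ring_cons__release(rx, rcvd);
    }

Submitting only what was actually processed is what source lines 1076-1082 do, cancelling the unused fill-ring slots with xsk_ring_prod__cancel() when a batch is cut short.
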
1100 struct xsk_umem_info *umem = ifobject->umem;
1104 buffer_len = pkt_get_buffer_len(umem, pkt_stream->max_pkt_len);
1106 if (pkts_in_flight >= (int)((umem_size(umem) - BATCH_SIZE * buffer_len) / buffer_len)) {
1141 nb_frags = pkt_nb_frags(umem->frame_size, pkt_stream, pkt);
1152 tx_desc->addr = pkt_get_addr(pkt, ifobject->umem);
1157 tx_desc->len = umem->frame_size;
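
__send_pkts() (source lines 1100-1157 above) caps the number of buffers in flight, reserves Tx descriptors, and points each one at a freshly allocated umem frame. The basic produce-and-kick sequence as a sketch; buffer allocation and completion reaping are left to the caller:

    #include <stdint.h>
    #include <sys/socket.h>
    #include <xdp/xsk.h>

    /* Queue nb_pkts frames of pkt_len bytes from addrs[] and kick the kernel. */
    static int send_batch(struct xsk_socket *xsk, struct xsk_ring_prod *tx,
                          const uint64_t *addrs, uint32_t nb_pkts, uint32_t pkt_len)
    {
        uint32_t idx;

        if (xsk_ring_prod__reserve(tx, nb_pkts, &idx) != nb_pkts)
            return -1;    /* Tx ring full: reap completions and try again */

        for (uint32_t i = 0; i < nb_pkts; i++) {
            struct xdp_desc *desc = xsk_ring_prod__tx_desc(tx, idx + i);

            desc->addr = addrs[i];    /* frame handed out by the umem allocator */
            desc->len = pkt_len;
            desc->options = 0;        /* XDP_PKT_CONTD only on non-final frags */
        }

        xsk_ring_prod__submit(tx, nb_pkts);

        /* With XDP_USE_NEED_WAKEUP the kernel must be told there is work. */
        if (xsk_ring_prod__needs_wakeup(tx))
            sendto(xsk_socket__fd(xsk), NULL, 0, MSG_DONTWAIT, NULL, 0);

        return 0;
    }
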
1352 struct xsk_umem_info *umem, bool tx)
1361 ret = __xsk_configure_socket(&ifobject->xsk_arr[i], umem,
1378 xsk_configure_socket(test, ifobject, test->ifobj_rx->umem, true);
1381 memcpy(ifobject->umem, test->ifobj_rx->umem, sizeof(struct xsk_umem_info));
1382 ifobject->umem->base_addr = 0;
1385 static void xsk_populate_fill_ring(struct xsk_umem_info *umem, struct pkt_stream *pkt_stream,
1388 u32 rx_frame_size = umem->frame_size - XDP_PACKET_HEADROOM;
1392 if (umem->num_frames < XSK_RING_PROD__DEFAULT_NUM_DESCS)
1393 buffers_to_fill = umem->num_frames;
1397 ret = xsk_ring_prod__reserve(&umem->fq, buffers_to_fill, &idx);
1410 addr = filled * umem->frame_size + umem->base_addr;
1412 addr = pkt->offset % umem->frame_size + umem_alloc_buffer(umem);
1414 addr = pkt->offset + umem_alloc_buffer(umem);
1417 *xsk_ring_prod__fill_addr(&umem->fq, idx++) = addr;
1422 xsk_ring_prod__submit(&umem->fq, filled);
1423 xsk_ring_prod__cancel(&umem->fq, buffers_to_fill - filled);
1426 umem_reset_alloc(umem);
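
xsk_populate_fill_ring() (source lines 1385-1426) pre-loads the fill ring before traffic starts and caps the count at the number of umem frames. A stripped-down version of that start-up step; the per-packet offset handling shown in the fragments is omitted:

    #include <stdint.h>
    #include <xdp/xsk.h>

    /* Give the kernel its initial stock of receive buffers, one per frame. */
    static int populate_fill_ring(struct xsk_ring_prod *fq, uint64_t base_addr,
                                  uint32_t num_frames, uint32_t frame_size)
    {
        uint32_t to_fill = XSK_RING_PROD__DEFAULT_NUM_DESCS;
        uint32_t idx;

        if (num_frames < to_fill)
            to_fill = num_frames;    /* never hand out more frames than exist */

        if (xsk_ring_prod__reserve(fq, to_fill, &idx) != to_fill)
            return -1;

        for (uint32_t i = 0; i < to_fill; i++)
            *xsk_ring_prod__fill_addr(fq, idx++) = base_addr + (uint64_t)i * frame_size;

        xsk_ring_prod__submit(fq, to_fill);
        return 0;
    }
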
1431 u64 umem_sz = ifobject->umem->num_frames * ifobject->umem->frame_size;
1437 if (ifobject->umem->unaligned_mode)
1447 ret = xsk_configure_umem(ifobject, ifobject->umem, bufs, umem_sz);
1451 xsk_configure_socket(test, ifobject, ifobject->umem, false);
1458 xsk_populate_fill_ring(ifobject->umem, ifobject->pkt_stream, ifobject->use_fill_ring);
1530 u64 umem_sz = ifobj->umem->num_frames * ifobj->umem->frame_size;
1536 xsk_umem__delete(ifobj->umem->umem);
1537 munmap(ifobj->umem->buffer, umem_sz);
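
The setup and teardown fragments (source lines 1431-1537) map the umem area, register it, and later release everything. The order matters on the way down: sockets hold a reference on the umem, so xsk_umem__delete() returns -EBUSY while one is still open, and the mapping can only be unmapped last. A short sketch of that shutdown path:

    #include <stddef.h>
    #include <sys/mman.h>
    #include <xdp/xsk.h>

    static void teardown(struct xsk_socket *xsk, struct xsk_umem *umem,
                         void *buffer, size_t umem_sz)
    {
        xsk_socket__delete(xsk);    /* drop the socket's reference first */
        xsk_umem__delete(umem);     /* then unregister the umem */
        munmap(buffer, umem_sz);    /* finally release the backing memory */
    }
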
1666 if ((ifobj_rx->umem->unaligned_mode && !ifobj_rx->unaligned_supp) ||
1667 (ifobj_tx->umem->unaligned_mode && !ifobj_tx->unaligned_supp)) {
1755 test->ifobj_rx->umem->frame_headroom = UMEM_HEADROOM_TEST_SIZE;
1768 test->ifobj_rx->umem->frame_headroom = test->ifobj_rx->umem->frame_size -
1787 test->ifobj_rx->pkt_stream = pkt_stream_generate(test->ifobj_rx->umem,
1800 test->ifobj_rx->pkt_stream = pkt_stream_generate(test->ifobj_rx->umem,
1811 test->ifobj_tx->umem->unaligned_mode = true;
1812 test->ifobj_rx->umem->unaligned_mode = true;
1823 test->ifobj_tx->umem->unaligned_mode = true;
1824 test->ifobj_rx->umem->unaligned_mode = true;
1848 struct xsk_umem_info *umem = test->ifobj_tx->umem;
1849 u64 umem_size = umem->num_frames * umem->frame_size;
1875 if (umem->unaligned_mode) {
1888 struct xsk_umem_info *umem = test->ifobj_tx->umem;
1889 u64 umem_size = umem->num_frames * umem->frame_size;
1895 /* Straddling the start of umem */
1899 /* Up to end of umem allowed */
1900 {umem_size - MIN_PKT_SIZE - 2 * umem->frame_size, MIN_PKT_SIZE, 0, true},
1901 /* After umem ends */
1903 /* Straddle the end of umem */
1912 if (umem->unaligned_mode) {
1916 if (umem->frame_size == XSK_UMEM__DEFAULT_FRAME_SIZE / 2) {
1972 /* create an invalid frame by setting umem frame_size and pkt length equal to 2048 */
1973 test->ifobj_tx->umem->frame_size = 2048;
2133 test->ifobj_tx->umem->frame_size = 2048;
2134 test->ifobj_rx->umem->frame_size = 2048;
2160 test->ifobj_tx->umem->frame_size = 2048;
2161 test->ifobj_rx->umem->frame_size = 2048;
2166 test->ifobj_tx->umem->unaligned_mode = true;
2167 test->ifobj_rx->umem->unaligned_mode = true;
2175 test->ifobj_tx->umem->frame_size = 4001;
2176 test->ifobj_rx->umem->frame_size = 4001;
2177 test->ifobj_tx->umem->unaligned_mode = true;
2178 test->ifobj_rx->umem->unaligned_mode = true;
2183 umem_size = test->ifobj_tx->umem->num_frames * test->ifobj_tx->umem->frame_size;
2195 test->ifobj_tx->umem->unaligned_mode = true;
2196 test->ifobj_rx->umem->unaligned_mode = true;
2245 ifobj->umem = calloc(1, sizeof(*ifobj->umem));
2246 if (!ifobj->umem)
2260 free(ifobj->umem);
2336 tx_pkt_stream_default = pkt_stream_generate(ifobj_tx->umem, DEFAULT_PKT_CNT, MIN_PKT_SIZE);
2337 rx_pkt_stream_default = pkt_stream_generate(ifobj_rx->umem, DEFAULT_PKT_CNT, MIN_PKT_SIZE);