Lines matching "buffer" (refs:buffer) in the HMM dmirror selftest (hmm-tests.c)

122 			   struct hmm_buffer *buffer,
129 cmd.addr = (__u64)buffer->ptr;
130 cmd.ptr = (__u64)buffer->mirror;
141 buffer->cpages = cmd.cpages;
142 buffer->faults = cmd.faults;
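
Only the lines containing "buffer" survive in this listing, so hmm_dmirror_cmd() appears truncated. For context, a minimal sketch of what such a helper typically looks like, assuming the struct hmm_buffer fields implied by the references here (ptr, mirror, size, fd, cpages, faults), the struct hmm_dmirror_cmd / HMM_DMIRROR_* definitions from the dmirror UAPI header (include path assumed), and an EINTR retry loop that is this sketch's own choice rather than a copy of the original:

#include <errno.h>
#include <stdint.h>
#include <sys/ioctl.h>
#include <linux/types.h>
#include "test_hmm_uapi.h"	/* struct hmm_dmirror_cmd, HMM_DMIRROR_*; include path assumed */

/* Field set inferred from the references in this listing. */
struct hmm_buffer {
	void		*ptr;		/* CPU mapping the test exercises */
	void		*mirror;	/* device-side shadow copy (or per-page flags for SNAPSHOT) */
	unsigned long	size;
	int		fd;		/* backing file descriptor, or -1 for anonymous memory */
	uint64_t	cpages;		/* pages the device handled, reported by the ioctl */
	uint64_t	faults;		/* device faults taken, reported by the ioctl */
};

static int hmm_dmirror_cmd(int fd, unsigned long request,
			   struct hmm_buffer *buffer, unsigned long npages)
{
	struct hmm_dmirror_cmd cmd;
	int ret;

	/* Point the mirror device at the user range and at the shadow buffer. */
	cmd.addr = (__u64)buffer->ptr;
	cmd.ptr = (__u64)buffer->mirror;
	cmd.npages = npages;

	for (;;) {
		ret = ioctl(fd, request, &cmd);
		if (ret == 0)
			break;
		if (errno == EINTR)
			continue;
		return -errno;
	}

	/* Report back what the device did. */
	buffer->cpages = cmd.cpages;
	buffer->faults = cmd.faults;
	return 0;
}
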
147 static void hmm_buffer_free(struct hmm_buffer *buffer)
149 if (buffer == NULL)
152 if (buffer->ptr)
153 munmap(buffer->ptr, buffer->size);
154 free(buffer->mirror);
155 free(buffer);
222 struct hmm_buffer *buffer;
234 buffer = malloc(sizeof(*buffer));
235 ASSERT_NE(buffer, NULL);
237 buffer->fd = -1;
238 buffer->size = size;
239 buffer->mirror = malloc(size);
240 ASSERT_NE(buffer->mirror, NULL);
242 buffer->ptr = mmap(NULL, size,
245 buffer->fd, 0);
246 ASSERT_NE(buffer->ptr, MAP_FAILED);
249 * Initialize buffer in system memory but leave the first two pages
253 for (ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
256 /* Set buffer permission to read-only. */
257 ret = mprotect(buffer->ptr, size, PROT_READ);
261 val = *(int *)(buffer->ptr + self->page_size);
265 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_READ, buffer, npages);
267 ASSERT_EQ(buffer->cpages, npages);
268 ASSERT_EQ(buffer->faults, 1);
271 ptr = buffer->mirror;
277 hmm_buffer_free(buffer);
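
Every test body in this listing is likewise reduced to its "buffer" lines, but they all share one skeleton: allocate a struct hmm_buffer and its mirror, mmap() the region under test, populate it, issue an HMM_DMIRROR_* command, then check cpages/faults and the data. A self-contained sketch of that read path, written as a standalone program instead of a kselftest harness fixture, reusing the hmm_dmirror_cmd() and hmm_buffer_free() helpers above and assuming the dmirror device node is named /dev/hmm_dmirror0:

#include <assert.h>
#include <fcntl.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <unistd.h>

int main(void)
{
	unsigned long page_size = sysconf(_SC_PAGE_SIZE);
	unsigned long npages = 4;
	unsigned long size = npages * page_size;
	struct hmm_buffer *buffer;
	unsigned long i;
	int fd, *ptr;

	fd = open("/dev/hmm_dmirror0", O_RDWR);	/* device node name assumed */
	assert(fd >= 0);

	buffer = malloc(sizeof(*buffer));
	assert(buffer != NULL);
	buffer->fd = -1;			/* anonymous memory, no backing file */
	buffer->size = size;
	buffer->mirror = malloc(size);
	assert(buffer->mirror != NULL);

	buffer->ptr = mmap(NULL, size, PROT_READ | PROT_WRITE,
			   MAP_PRIVATE | MAP_ANONYMOUS, buffer->fd, 0);
	assert(buffer->ptr != MAP_FAILED);

	/* Populate the CPU copy, then have the mirror device read it. */
	for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
		ptr[i] = i;
	assert(hmm_dmirror_cmd(fd, HMM_DMIRROR_READ, buffer, npages) == 0);
	assert(buffer->cpages == npages);

	/* The device-side copy should now match what the CPU wrote. */
	for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
		assert(ptr[i] == (int)i);

	hmm_buffer_free(buffer);
	close(fd);
	return 0;
}

The test that ends above additionally makes its mapping read-only with mprotect() and leaves the first two pages untouched, so the device read also has to cope with not-yet-populated pages.
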
286 struct hmm_buffer *buffer;
297 buffer = malloc(sizeof(*buffer));
298 ASSERT_NE(buffer, NULL);
300 buffer->fd = -1;
301 buffer->size = size;
302 buffer->mirror = malloc(size);
303 ASSERT_NE(buffer->mirror, NULL);
305 buffer->ptr = mmap(NULL, size,
308 buffer->fd, 0);
309 ASSERT_NE(buffer->ptr, MAP_FAILED);
311 /* Initialize buffer in system memory. */
312 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
315 /* Initialize mirror buffer so we can verify it isn't written. */
316 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
319 /* Protect buffer from reading. */
320 ret = mprotect(buffer->ptr, size, PROT_NONE);
324 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_READ, buffer, npages);
327 /* Allow CPU to read the buffer so we can check it. */
328 ret = mprotect(buffer->ptr, size, PROT_READ);
330 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
334 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
337 hmm_buffer_free(buffer);
345 struct hmm_buffer *buffer;
356 buffer = malloc(sizeof(*buffer));
357 ASSERT_NE(buffer, NULL);
359 buffer->fd = -1;
360 buffer->size = size;
361 buffer->mirror = malloc(size);
362 ASSERT_NE(buffer->mirror, NULL);
364 buffer->ptr = mmap(NULL, size,
367 buffer->fd, 0);
368 ASSERT_NE(buffer->ptr, MAP_FAILED);
370 /* Initialize data that the device will write to buffer->ptr. */
371 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
375 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_WRITE, buffer, npages);
377 ASSERT_EQ(buffer->cpages, npages);
378 ASSERT_EQ(buffer->faults, 1);
381 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
384 hmm_buffer_free(buffer);
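
The write test that ends here runs the same skeleton in the other direction: the mirror is filled first, HMM_DMIRROR_WRITE asks the device to write it into the process' pages, and the CPU-visible buffer is checked afterwards. The core of that flow, as a fragment that would replace the read sequence in the sketch above:

	/* Fill the device-side copy with a recognizable pattern. */
	for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
		ptr[i] = i;

	/* Simulate the device writing those pages into system memory. */
	assert(hmm_dmirror_cmd(fd, HMM_DMIRROR_WRITE, buffer, npages) == 0);
	assert(buffer->cpages == npages);
	assert(buffer->faults == 1);

	/* The CPU mapping now contains what the device wrote. */
	for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
		assert(ptr[i] == (int)i);
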
393 struct hmm_buffer *buffer;
404 buffer = malloc(sizeof(*buffer));
405 ASSERT_NE(buffer, NULL);
407 buffer->fd = -1;
408 buffer->size = size;
409 buffer->mirror = malloc(size);
410 ASSERT_NE(buffer->mirror, NULL);
412 buffer->ptr = mmap(NULL, size,
415 buffer->fd, 0);
416 ASSERT_NE(buffer->ptr, MAP_FAILED);
419 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_READ, buffer, 1);
421 ASSERT_EQ(buffer->cpages, 1);
422 ASSERT_EQ(buffer->faults, 1);
424 /* Initialize data that the device will write to buffer->ptr. */
425 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
429 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_WRITE, buffer, npages);
433 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
437 ret = mprotect(buffer->ptr, size, PROT_WRITE | PROT_READ);
441 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_WRITE, buffer, npages);
443 ASSERT_EQ(buffer->cpages, npages);
444 ASSERT_EQ(buffer->faults, 1);
447 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
450 hmm_buffer_free(buffer);
459 struct hmm_buffer *buffer;
472 buffer = malloc(sizeof(*buffer));
473 ASSERT_NE(buffer, NULL);
475 buffer->fd = -1;
476 buffer->size = size;
477 buffer->mirror = malloc(size);
478 ASSERT_NE(buffer->mirror, NULL);
480 buffer->ptr = mmap(NULL, size,
483 buffer->fd, 0);
484 ASSERT_NE(buffer->ptr, MAP_FAILED);
486 /* Initialize buffer->ptr so we can tell if it is written. */
487 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
490 /* Initialize data that the device will write to buffer->ptr. */
491 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
501 /* Check that the parent's buffer did not change. */
502 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
508 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
510 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
518 ret = hmm_dmirror_cmd(child_fd, HMM_DMIRROR_WRITE, buffer, npages);
520 ASSERT_EQ(buffer->cpages, npages);
521 ASSERT_EQ(buffer->faults, 1);
524 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
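
The two fork tests (this one and the MAP_SHARED variant that follows) check how device writes interact with copy-on-write: with a private mapping the child's device write must not be visible to the parent, while with a shared mapping it must be. Only the "buffer" lines of the fork plumbing appear above; a sketch of the skeleton for the private case, assuming a hypothetical hmm_open() helper that opens a per-process dmirror fd (the child needs its own, since a dmirror instance mirrors the mm of the process that opened it), dropped into the main() of the earlier sketch:

	pid_t pid;
	int child_fd, status;

	/* Prime CPU and device copies with distinguishable patterns (the sketch's own choice). */
	for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
		ptr[i] = i;
	for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
		ptr[i] = -(int)i;

	pid = fork();
	assert(pid >= 0);
	if (pid != 0) {
		/* Parent: wait for the child, then check the private pages were not modified. */
		waitpid(pid, &status, 0);
		assert(WIFEXITED(status));
		for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
			assert(ptr[i] == (int)i);
		return 0;
	}

	/* Child: open a separate dmirror fd bound to the child's mm. */
	child_fd = hmm_open(0);		/* hypothetical helper wrapping open("/dev/hmm_dmirror0", O_RDWR) */
	assert(child_fd >= 0);

	/* The device write lands only in the child's COW copy of the buffer. */
	assert(hmm_dmirror_cmd(child_fd, HMM_DMIRROR_WRITE, buffer, npages) == 0);
	for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
		assert(ptr[i] == -(int)i);
	close(child_fd);
	exit(0);

This fragment additionally needs <sys/types.h> and <sys/wait.h> for fork()/waitpid().
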
537 struct hmm_buffer *buffer;
550 buffer = malloc(sizeof(*buffer));
551 ASSERT_NE(buffer, NULL);
553 buffer->fd = -1;
554 buffer->size = size;
555 buffer->mirror = malloc(size);
556 ASSERT_NE(buffer->mirror, NULL);
558 buffer->ptr = mmap(NULL, size,
561 buffer->fd, 0);
562 ASSERT_NE(buffer->ptr, MAP_FAILED);
564 /* Initialize buffer->ptr so we can tell if it is written. */
565 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
568 /* Initialize data that the device will write to buffer->ptr. */
569 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
579 /* Check that the parent's buffer did change. */
580 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
586 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
588 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
596 ret = hmm_dmirror_cmd(child_fd, HMM_DMIRROR_WRITE, buffer, npages);
598 ASSERT_EQ(buffer->cpages, npages);
599 ASSERT_EQ(buffer->faults, 1);
602 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
614 struct hmm_buffer *buffer;
625 buffer = malloc(sizeof(*buffer));
626 ASSERT_NE(buffer, NULL);
628 buffer->fd = -1;
629 buffer->size = size;
630 buffer->mirror = malloc(size);
631 ASSERT_NE(buffer->mirror, NULL);
633 buffer->ptr = mmap(NULL, size,
636 buffer->fd, 0);
637 ASSERT_NE(buffer->ptr, MAP_FAILED);
641 map = (void *)ALIGN((uintptr_t)buffer->ptr, size);
644 old_ptr = buffer->ptr;
645 buffer->ptr = map;
647 /* Initialize data that the device will write to buffer->ptr. */
648 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
652 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_WRITE, buffer, npages);
654 ASSERT_EQ(buffer->cpages, npages);
655 ASSERT_EQ(buffer->faults, 1);
658 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
661 buffer->ptr = old_ptr;
662 hmm_buffer_free(buffer);
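
The test ending here works on a window aligned to its own size: more address space than needed is mmap()ed, ALIGN() rounds the start up to a size-aligned boundary inside it, the device command runs on that window, and old_ptr is restored before freeing so munmap() in hmm_buffer_free() releases the real mapping. This is the usual trick for getting a naturally aligned block (for example, one that can back a huge page) out of an ordinary mmap(). A sketch of just that alignment step, assuming ALIGN() is the common round-up macro (its exact definition is not among the matching lines):

#include <stdint.h>

/* Round x up to the next multiple of the power-of-two a; assumed to match the test's ALIGN(). */
#define ALIGN(x, a)	(((x) + ((uintptr_t)(a) - 1)) & ~((uintptr_t)(a) - 1))

	void *old_ptr, *map;

	/* Over-allocate so a size-aligned window is guaranteed to fit inside the mapping;
	 * remember the full span so hmm_buffer_free() can release all of it later. */
	buffer->size = 2 * size;
	buffer->ptr = mmap(NULL, buffer->size, PROT_READ | PROT_WRITE,
			   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	assert(buffer->ptr != MAP_FAILED);

	map = (void *)ALIGN((uintptr_t)buffer->ptr, size);
	old_ptr = buffer->ptr;
	buffer->ptr = map;		/* run HMM_DMIRROR_* commands on the aligned window */

	/* ... device command and checks on npages = size / page_size, as in the earlier sketches ... */

	buffer->ptr = old_ptr;		/* restore before hmm_buffer_free() so munmap() sees the original start */
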
670 struct hmm_buffer *buffer;
691 buffer = malloc(sizeof(*buffer));
692 ASSERT_NE(buffer, NULL);
694 buffer->ptr = get_hugepage_region(size, GHR_STRICT);
695 if (buffer->ptr == NULL) {
696 free(buffer);
700 buffer->fd = -1;
701 buffer->size = size;
702 buffer->mirror = malloc(size);
703 ASSERT_NE(buffer->mirror, NULL);
705 /* Initialize data that the device will write to buffer->ptr. */
706 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
710 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_WRITE, buffer, npages);
712 ASSERT_EQ(buffer->cpages, npages);
713 ASSERT_EQ(buffer->faults, 1);
716 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
719 free_hugepage_region(buffer->ptr);
720 buffer->ptr = NULL;
721 hmm_buffer_free(buffer);
729 struct hmm_buffer *buffer;
745 buffer = malloc(sizeof(*buffer));
746 ASSERT_NE(buffer, NULL);
748 buffer->fd = fd;
749 buffer->size = size;
750 buffer->mirror = malloc(size);
751 ASSERT_NE(buffer->mirror, NULL);
754 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
756 len = pwrite(fd, buffer->mirror, size, 0);
758 memset(buffer->mirror, 0, size);
760 buffer->ptr = mmap(NULL, size,
763 buffer->fd, 0);
764 ASSERT_NE(buffer->ptr, MAP_FAILED);
767 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_READ, buffer, npages);
769 ASSERT_EQ(buffer->cpages, npages);
770 ASSERT_EQ(buffer->faults, 1);
773 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
776 hmm_buffer_free(buffer);
784 struct hmm_buffer *buffer;
800 buffer = malloc(sizeof(*buffer));
801 ASSERT_NE(buffer, NULL);
803 buffer->fd = fd;
804 buffer->size = size;
805 buffer->mirror = malloc(size);
806 ASSERT_NE(buffer->mirror, NULL);
808 buffer->ptr = mmap(NULL, size,
811 buffer->fd, 0);
812 ASSERT_NE(buffer->ptr, MAP_FAILED);
814 /* Initialize data that the device will write to buffer->ptr. */
815 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
819 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_WRITE, buffer, npages);
821 ASSERT_EQ(buffer->cpages, npages);
822 ASSERT_EQ(buffer->faults, 1);
825 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
829 len = pread(fd, buffer->mirror, size, 0);
831 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
834 hmm_buffer_free(buffer);
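
The two preceding tests run READ and WRITE against a file-backed mapping instead of anonymous memory: the read test seeds the file with pwrite() before mapping it, and the write test pread()s the file afterwards to confirm the device write reached the shared mapping. A compact sketch of the read-side setup, using mkstemp() for a throwaway backing file (the original's file-creation helper is not among the matching lines) and the dmirror device fd from the earlier sketch:

#include <string.h>

	char path[] = "/tmp/hmm-file-XXXXXX";	/* illustrative temporary file */
	int file_fd;
	ssize_t len;

	file_fd = mkstemp(path);
	assert(file_fd >= 0);
	unlink(path);				/* keep the fd, drop the name */
	buffer->fd = file_fd;

	/* Seed the file through the mirror, then forget the mirror contents. */
	for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
		ptr[i] = i;
	len = pwrite(file_fd, buffer->mirror, size, 0);
	assert(len == (ssize_t)size);
	memset(buffer->mirror, 0, size);

	/* Map the file and have the device read it back into the mirror. */
	buffer->ptr = mmap(NULL, size, PROT_READ, MAP_SHARED, buffer->fd, 0);
	assert(buffer->ptr != MAP_FAILED);

	assert(hmm_dmirror_cmd(fd, HMM_DMIRROR_READ, buffer, npages) == 0);
	assert(buffer->cpages == npages);
	for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
		assert(ptr[i] == (int)i);

Whether the original maps the file MAP_SHARED or MAP_PRIVATE is not visible in the matching lines; the write test's pread()-back check only makes sense with a shared mapping, which is what the sketch uses.
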
842 struct hmm_buffer *buffer;
853 buffer = malloc(sizeof(*buffer));
854 ASSERT_NE(buffer, NULL);
856 buffer->fd = -1;
857 buffer->size = size;
858 buffer->mirror = malloc(size);
859 ASSERT_NE(buffer->mirror, NULL);
861 buffer->ptr = mmap(NULL, size,
864 buffer->fd, 0);
865 ASSERT_NE(buffer->ptr, MAP_FAILED);
867 /* Initialize buffer in system memory. */
868 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
872 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_MIGRATE, buffer, npages);
874 ASSERT_EQ(buffer->cpages, npages);
877 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
880 hmm_buffer_free(buffer);
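
The migrate tests switch from mirroring to migration: HMM_DMIRROR_MIGRATE moves the populated anonymous pages into the dmirror device's private memory, copying their contents into buffer->mirror on the way (that is what the final loop above checks), and a later CPU access faults the pages back to system memory, which the follow-up test exercises by touching the first half of the buffer and migrating again. A fragment of that round trip; the explicit CPU read-back at the end is this sketch's addition:

	/* Populate in system memory, then migrate the range to device-private memory. */
	for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
		ptr[i] = i;
	assert(hmm_dmirror_cmd(fd, HMM_DMIRROR_MIGRATE, buffer, npages) == 0);
	assert(buffer->cpages == npages);

	/* The migration also filled in the device-side copy. */
	for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
		assert(ptr[i] == (int)i);

	/* Touching the pages from the CPU faults them back out of device memory. */
	for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
		assert(ptr[i] == (int)i);
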
890 struct hmm_buffer *buffer;
901 buffer = malloc(sizeof(*buffer));
902 ASSERT_NE(buffer, NULL);
904 buffer->fd = -1;
905 buffer->size = size;
906 buffer->mirror = malloc(size);
907 ASSERT_NE(buffer->mirror, NULL);
909 buffer->ptr = mmap(NULL, size,
912 buffer->fd, 0);
913 ASSERT_NE(buffer->ptr, MAP_FAILED);
915 /* Initialize buffer in system memory. */
916 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
920 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_MIGRATE, buffer, npages);
922 ASSERT_EQ(buffer->cpages, npages);
925 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
929 for (i = 0, ptr = buffer->ptr; i < size / (2 * sizeof(*ptr)); ++i)
933 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_MIGRATE, buffer, npages);
935 ASSERT_EQ(buffer->cpages, npages);
938 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
941 hmm_buffer_free(buffer);
949 struct hmm_buffer *buffer;
958 buffer = malloc(sizeof(*buffer));
959 ASSERT_NE(buffer, NULL);
961 buffer->fd = -1;
962 buffer->size = size;
963 buffer->mirror = malloc(size);
964 ASSERT_NE(buffer->mirror, NULL);
966 buffer->ptr = mmap(NULL, size,
969 buffer->fd, 0);
970 ASSERT_NE(buffer->ptr, MAP_FAILED);
973 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_MIGRATE, buffer, npages);
976 hmm_buffer_free(buffer);
984 struct hmm_buffer *buffer;
995 buffer = malloc(sizeof(*buffer));
996 ASSERT_NE(buffer, NULL);
998 buffer->fd = -1;
999 buffer->size = size;
1000 buffer->mirror = malloc(size);
1001 ASSERT_NE(buffer->mirror, NULL);
1004 buffer->ptr = mmap(NULL, size,
1007 buffer->fd, 0);
1008 ASSERT_NE(buffer->ptr, MAP_FAILED);
1009 p = buffer->ptr;
1012 ret = hmm_dmirror_cmd(self->fd1, HMM_DMIRROR_MIGRATE, buffer, npages);
1016 ret = munmap(buffer->ptr + self->page_size, self->page_size);
1020 ret = hmm_dmirror_cmd(self->fd1, HMM_DMIRROR_MIGRATE, buffer, 3);
1024 ret = mprotect(buffer->ptr + 2 * self->page_size, self->page_size,
1027 ptr = (int *)(buffer->ptr + 2 * self->page_size);
1032 ret = mprotect(buffer->ptr + 3 * self->page_size, self->page_size,
1035 ptr = (int *)(buffer->ptr + 3 * self->page_size);
1037 ret = mprotect(buffer->ptr + 3 * self->page_size, self->page_size,
1042 ret = mprotect(buffer->ptr + 4 * self->page_size, 2 * self->page_size,
1045 ptr = (int *)(buffer->ptr + 4 * self->page_size);
1047 ptr = (int *)(buffer->ptr + 5 * self->page_size);
1051 buffer->ptr = p + 2 * self->page_size;
1052 ret = hmm_dmirror_cmd(self->fd1, HMM_DMIRROR_MIGRATE, buffer, 4);
1054 ASSERT_EQ(buffer->cpages, 4);
1057 buffer->ptr = p + 5 * self->page_size;
1058 ret = hmm_dmirror_cmd(self->fd0, HMM_DMIRROR_MIGRATE, buffer, 1);
1060 buffer->ptr = p;
1062 buffer->ptr = p;
1063 hmm_buffer_free(buffer);
1072 struct hmm_buffer *buffer;
1085 buffer = malloc(sizeof(*buffer));
1086 ASSERT_NE(buffer, NULL);
1088 buffer->fd = -1;
1089 buffer->size = size;
1090 buffer->mirror = malloc(size);
1091 ASSERT_NE(buffer->mirror, NULL);
1093 buffer->ptr = mmap(NULL, size,
1096 buffer->fd, 0);
1097 ASSERT_NE(buffer->ptr, MAP_FAILED);
1099 /* Initialize buffer in system memory. */
1100 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
1104 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_MIGRATE, buffer,
1107 ASSERT_EQ(buffer->cpages, npages);
1110 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
1114 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
1117 hmm_buffer_free(buffer);
1126 struct hmm_buffer *buffer;
1139 buffer = malloc(sizeof(*buffer));
1140 ASSERT_NE(buffer, NULL);
1142 buffer->fd = -1;
1143 buffer->size = size;
1144 buffer->mirror = malloc(size);
1145 ASSERT_NE(buffer->mirror, NULL);
1147 buffer->ptr = mmap(NULL, size,
1150 buffer->fd, 0);
1151 ASSERT_NE(buffer->ptr, MAP_FAILED);
1153 /* Initialize buffer in system memory. */
1154 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
1158 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_READ, buffer,
1161 ASSERT_EQ(buffer->cpages, npages);
1162 ASSERT_EQ(buffer->faults, 1);
1165 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
1168 hmm_buffer_free(buffer);
1174 struct hmm_buffer *buffer = p;
1176 /* Delay for a bit and then unmap buffer while it is being read. */
1178 munmap(buffer->ptr + buffer->size / 2, buffer->size / 2);
1179 buffer->ptr = NULL;
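
unmap_buffer() above is the helper for the racy-unmap test that follows: it runs on a second thread, sleeps briefly, then unmaps the back half of the buffer while the main thread may be in the middle of HMM_DMIRROR_READ, to exercise the kernel's handling of a concurrent munmap(). A sketch of the helper and its use, with a fixed usleep() standing in for whatever delay the original uses:

#include <pthread.h>

static void *unmap_buffer(void *p)
{
	struct hmm_buffer *buffer = p;

	/* Delay a little, then pull the second half of the mapping out from
	 * under the device read started by the main thread. */
	usleep(10 * 1000);			/* fixed delay; the original presumably randomizes it */
	munmap(buffer->ptr + buffer->size / 2, buffer->size / 2);
	buffer->ptr = NULL;

	return NULL;
}

	/* In the test body: */
	pthread_t thread;
	int rc;

	assert(pthread_create(&thread, NULL, unmap_buffer, buffer) == 0);
	rc = hmm_dmirror_cmd(fd, HMM_DMIRROR_READ, buffer, npages);
	/* The read may complete before the unmap (then the mirror can be checked)
	 * or lose the race and fail; either way the kernel must cope cleanly. */
	pthread_join(thread, NULL);
	(void)rc;
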
1200 struct hmm_buffer *buffer;
1205 buffer = malloc(sizeof(*buffer));
1206 ASSERT_NE(buffer, NULL);
1208 buffer->fd = -1;
1209 buffer->size = size;
1210 buffer->mirror = malloc(size);
1211 ASSERT_NE(buffer->mirror, NULL);
1213 buffer->ptr = mmap(NULL, size,
1216 buffer->fd, 0);
1217 ASSERT_NE(buffer->ptr, MAP_FAILED);
1219 /* Initialize buffer in system memory. */
1220 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
1223 rc = pthread_create(&thread, NULL, unmap_buffer, buffer);
1227 rc = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_READ, buffer,
1230 ASSERT_EQ(buffer->cpages, npages);
1231 ASSERT_EQ(buffer->faults, 1);
1234 for (i = 0, ptr = buffer->mirror;
1241 hmm_buffer_free(buffer);
1250 struct hmm_buffer *buffer;
1259 buffer = malloc(sizeof(*buffer));
1260 ASSERT_NE(buffer, NULL);
1262 buffer->fd = -1;
1263 buffer->size = size;
1264 buffer->mirror = malloc(npages);
1265 ASSERT_NE(buffer->mirror, NULL);
1269 buffer->ptr = mmap(NULL, size,
1273 ASSERT_NE(buffer->ptr, MAP_FAILED);
1276 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_SNAPSHOT, buffer, npages);
1278 ASSERT_EQ(buffer->cpages, npages);
1281 m = buffer->mirror;
1284 hmm_buffer_free(buffer);
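
The snapshot tests use the mirror differently: buffer->mirror is only npages bytes (note the malloc(npages) above), and HMM_DMIRROR_SNAPSHOT fills it with one flag byte per page describing what the device would see there (none/read/write, zero page, device-private, and so on). A hedged fragment, assuming per-page flag names of the HMM_DMIRROR_PROT_* form from the dmirror UAPI header:

	unsigned char *m;

	/* One byte of page-state flags per page, not a full data mirror. */
	buffer->mirror = malloc(npages);
	assert(buffer->mirror != NULL);

	assert(hmm_dmirror_cmd(fd, HMM_DMIRROR_SNAPSHOT, buffer, npages) == 0);
	assert(buffer->cpages == npages);

	/* For pages the CPU has already written, the snapshot should report write
	 * permission; the exact flag values come from the UAPI header. */
	for (i = 0, m = buffer->mirror; i < npages; ++i)
		assert(m[i] & HMM_DMIRROR_PROT_WRITE);

The later snapshot tests in this listing poke holes in the range (munmap, PROT_NONE, PROT_READ, migration to one of two devices) and then check that each page's flag byte reflects its individual state.
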
1292 struct hmm_buffer *buffer;
1304 buffer = malloc(sizeof(*buffer));
1305 ASSERT_NE(buffer, NULL);
1307 buffer->fd = -1;
1308 buffer->size = size;
1309 buffer->mirror = malloc(npages);
1310 ASSERT_NE(buffer->mirror, NULL);
1313 buffer->ptr = mmap(NULL, size,
1316 buffer->fd, 0);
1317 ASSERT_NE(buffer->ptr, MAP_FAILED);
1318 p = buffer->ptr;
1321 ret = munmap(buffer->ptr + self->page_size, self->page_size);
1325 ret = mprotect(buffer->ptr + 2 * self->page_size, self->page_size,
1328 ptr = (int *)(buffer->ptr + 2 * self->page_size);
1333 ret = mprotect(buffer->ptr + 3 * self->page_size, self->page_size,
1336 ptr = (int *)(buffer->ptr + 3 * self->page_size);
1338 ret = mprotect(buffer->ptr + 3 * self->page_size, self->page_size,
1343 ret = mprotect(buffer->ptr + 4 * self->page_size, 3 * self->page_size,
1346 ptr = (int *)(buffer->ptr + 4 * self->page_size);
1350 buffer->ptr = p + 5 * self->page_size;
1351 ret = hmm_dmirror_cmd(self->fd0, HMM_DMIRROR_MIGRATE, buffer, 1);
1353 ASSERT_EQ(buffer->cpages, 1);
1356 buffer->ptr = p + 6 * self->page_size;
1357 ret = hmm_dmirror_cmd(self->fd1, HMM_DMIRROR_MIGRATE, buffer, 1);
1359 ASSERT_EQ(buffer->cpages, 1);
1362 buffer->ptr = p;
1363 ret = hmm_dmirror_cmd(self->fd0, HMM_DMIRROR_SNAPSHOT, buffer, npages);
1365 ASSERT_EQ(buffer->cpages, npages);
1368 m = buffer->mirror;
1378 hmm_buffer_free(buffer);
1387 struct hmm_buffer *buffer;
1409 buffer = malloc(sizeof(*buffer));
1410 ASSERT_NE(buffer, NULL);
1412 buffer->ptr = get_hugepage_region(size, GHR_STRICT);
1413 if (buffer->ptr == NULL) {
1414 free(buffer);
1418 buffer->size = size;
1419 buffer->mirror = malloc(npages);
1420 ASSERT_NE(buffer->mirror, NULL);
1422 /* Initialize the pages the device will snapshot in buffer->ptr. */
1423 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
1427 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_SNAPSHOT, buffer, npages);
1429 ASSERT_EQ(buffer->cpages, npages);
1432 m = buffer->mirror;
1438 ret = mprotect(buffer->ptr, size, PROT_READ);
1442 ret = hmm_dmirror_cmd(self->fd, HMM_DMIRROR_SNAPSHOT, buffer, npages);
1444 ASSERT_EQ(buffer->cpages, npages);
1447 m = buffer->mirror;
1452 free_hugepage_region(buffer->ptr);
1453 buffer->ptr = NULL;
1454 hmm_buffer_free(buffer);
1462 struct hmm_buffer *buffer;
1472 buffer = malloc(sizeof(*buffer));
1473 ASSERT_NE(buffer, NULL);
1475 buffer->fd = -1;
1476 buffer->size = size;
1477 buffer->mirror = malloc(npages);
1478 ASSERT_NE(buffer->mirror, NULL);
1481 buffer->ptr = mmap(NULL, size,
1484 buffer->fd, 0);
1485 ASSERT_NE(buffer->ptr, MAP_FAILED);
1487 /* Initialize buffer in system memory. */
1488 for (i = 0, ptr = buffer->ptr; i < size / sizeof(*ptr); ++i)
1492 ret = mprotect(buffer->ptr, size, PROT_READ);
1496 ret = hmm_dmirror_cmd(self->fd0, HMM_DMIRROR_READ, buffer, npages);
1498 ASSERT_EQ(buffer->cpages, npages);
1499 ASSERT_EQ(buffer->faults, 1);
1502 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
1506 ret = hmm_dmirror_cmd(self->fd1, HMM_DMIRROR_READ, buffer, npages);
1508 ASSERT_EQ(buffer->cpages, npages);
1509 ASSERT_EQ(buffer->faults, 1);
1512 for (i = 0, ptr = buffer->mirror; i < size / sizeof(*ptr); ++i)
1516 ret = munmap(buffer->ptr + self->page_size, self->page_size);
1519 hmm_buffer_free(buffer);
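
The final test reads the same read-only buffer through two dmirror instances (self->fd0 and self->fd1): each instance mirrors the process independently, so each READ takes its own fault and fills its own copy of the data. A closing fragment, assuming the second instance is exposed as /dev/hmm_dmirror1:

	int fd0, fd1;

	fd0 = open("/dev/hmm_dmirror0", O_RDWR);	/* node names assumed */
	fd1 = open("/dev/hmm_dmirror1", O_RDWR);
	assert(fd0 >= 0 && fd1 >= 0);

	/* Each device faults the (read-only) range into its own mirror. */
	assert(hmm_dmirror_cmd(fd0, HMM_DMIRROR_READ, buffer, npages) == 0);
	assert(buffer->cpages == npages && buffer->faults == 1);

	assert(hmm_dmirror_cmd(fd1, HMM_DMIRROR_READ, buffer, npages) == 0);
	assert(buffer->cpages == npages && buffer->faults == 1);
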