Lines matching defs: par (uses of struct uvesafb_par in uvesafb.c)
275 struct uvesafb_par *par = info->par;
283 var->yres_virtual = (par->ypan) ?
315 static int uvesafb_vbe_find_mode(struct uvesafb_par *par,
320 for (i = 0; i < par->vbe_modes_cnt; i++) {
321 h = abs(par->vbe_modes[i].x_res - xres) +
322 abs(par->vbe_modes[i].y_res - yres) +
323 abs(depth - par->vbe_modes[i].depth);
332 if (h < d || (h == d && par->vbe_modes[i].depth > depth)) {
340 par->vbe_modes[match].depth != depth)
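Lines 320-340 above are the core of uvesafb's mode-matching heuristic: every known VBE mode is scored by the sum of absolute differences in horizontal resolution, vertical resolution and depth, the smallest score wins, and a tie goes to the deeper mode. A minimal, self-contained sketch of that scoring (struct simple_mode and find_mode() are illustrative stand-ins, and the exact-resolution/exact-depth checks around line 340 are left out):

    #include <limits.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct simple_mode {                 /* stand-in for the driver's mode record */
        int x_res, y_res, depth;
    };

    /* Score every mode by |dx| + |dy| + |ddepth|; smallest wins, ties go deeper. */
    static int find_mode(const struct simple_mode *modes, int cnt,
                         int xres, int yres, int depth)
    {
        int i, h, d = INT_MAX, match = -1;

        for (i = 0; i < cnt; i++) {
            h = abs(modes[i].x_res - xres) +
                abs(modes[i].y_res - yres) +
                abs(depth - modes[i].depth);

            /* Prefer a smaller distance; on a tie, prefer the deeper mode. */
            if (h < d || (h == d && modes[i].depth > depth)) {
                d = h;
                match = i;
            }
        }
        return match;
    }

    int main(void)
    {
        struct simple_mode modes[] = {
            { 640, 480, 8 }, { 800, 600, 16 }, { 1024, 768, 16 }, { 1024, 768, 32 },
        };

        /* Asking for 1024x768@24 ties two entries and lands on 1024x768@32 (index 3). */
        printf("match = %d\n", find_mode(modes, 4, 1024, 768, 24));
        return 0;
    }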
352 static u8 *uvesafb_vbe_state_save(struct uvesafb_par *par)
358 if (!par->vbe_state_size)
361 state = kmalloc(par->vbe_state_size, GFP_KERNEL);
375 task->t.buf_len = par->vbe_state_size;
390 static void uvesafb_vbe_state_restore(struct uvesafb_par *par, u8 *state_buf)
405 task->t.buf_len = par->vbe_state_size;
418 struct uvesafb_par *par)
425 task->buf = &par->vbe_ib;
426 memcpy(par->vbe_ib.vbe_signature, "VBE2", 4);
435 if (par->vbe_ib.vbe_version < 0x0200) {
440 if (!par->vbe_ib.mode_list_ptr) {
452 if (par->vbe_ib.oem_vendor_name_ptr)
454 ((char *)task->buf) + par->vbe_ib.oem_vendor_name_ptr);
456 if (par->vbe_ib.oem_product_name_ptr)
458 ((char *)task->buf) + par->vbe_ib.oem_product_name_ptr);
460 if (par->vbe_ib.oem_product_rev_ptr)
462 ((char *)task->buf) + par->vbe_ib.oem_product_rev_ptr);
464 if (par->vbe_ib.oem_string_ptr)
466 ((char *)task->buf) + par->vbe_ib.oem_string_ptr);
469 (par->vbe_ib.vbe_version & 0xff00) >> 8,
470 par->vbe_ib.vbe_version & 0xff);
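Lines 435-470 read the VBE controller information block: the version word splits into major/minor halves for the log message, and the OEM string fields are used as byte offsets from the start of the buffer returned by the helper. A small userspace illustration of both conversions, with made-up values standing in for what the BIOS would return:

    #include <stdio.h>

    int main(void)
    {
        /* Illustrative values: a VBE 3.0 BIOS and a tiny fake info buffer. */
        unsigned short vbe_version = 0x0300;
        char buf[32] = "............OEM Name";
        unsigned short oem_vendor_name_ptr = 12;   /* byte offset into buf */

        /* Major version in the high byte, minor in the low byte (line 469). */
        printf("VBE %d.%d\n",
               (vbe_version & 0xff00) >> 8, vbe_version & 0xff);

        /* String fields are treated as offsets relative to the info block. */
        if (oem_vendor_name_ptr)
            printf("vendor: %s\n", buf + oem_vendor_name_ptr);

        return 0;
    }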
476 struct uvesafb_par *par)
481 par->vbe_modes_cnt = 0;
484 mode = (u16 *) (((u8 *)&par->vbe_ib) + par->vbe_ib.mode_list_ptr);
486 par->vbe_modes_cnt++;
490 par->vbe_modes = kcalloc(par->vbe_modes_cnt,
493 if (!par->vbe_modes)
497 mode = (u16 *) (((u8 *)&par->vbe_ib) + par->vbe_ib.mode_list_ptr);
506 task->buf = par->vbe_modes + off;
513 par->vbe_modes_cnt--;
529 par->vbe_modes_cnt--;
543 if (par->vbe_modes_cnt > 0)
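Lines 481-543 walk the BIOS mode list twice: a first pass counts the u16 mode numbers up to the 0xffff terminator reached through mode_list_ptr, then a kcalloc'd array is filled and the count is decremented whenever a per-mode query fails. A userspace sketch of the same count-then-collect pattern; query_mode_ok() is a fake stand-in for the helper call:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Fake per-mode query; pretend odd-numbered modes are unusable. */
    static int query_mode_ok(uint16_t id)
    {
        return (id & 1) == 0;
    }

    int main(void)
    {
        /* A fake VBE mode list, terminated by 0xffff as in the BIOS block. */
        uint16_t list[] = { 0x0100, 0x0101, 0x0115, 0x0118, 0xffff };
        uint16_t *mode, *modes;
        int cnt = 0, off = 0;

        /* First pass: count entries up to the terminator. */
        for (mode = list; *mode != 0xffff; mode++)
            cnt++;

        modes = calloc(cnt, sizeof(*modes));
        if (!modes)
            return 1;

        /* Second pass: keep only modes the (fake) query accepts. */
        for (mode = list; *mode != 0xffff; mode++) {
            if (query_mode_ok(*mode))
                modes[off++] = *mode;
            else
                cnt--;               /* mirrors the driver's cnt-- on failure */
        }

        printf("%d usable modes\n", cnt);
        free(modes);
        return 0;
    }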
555 struct uvesafb_par *par)
567 par->pmi_setpal = par->ypan = 0;
569 par->pmi_base = (u16 *)phys_to_virt(((u32)task->t.regs.es << 4)
571 par->pmi_start = (u8 *)par->pmi_base + par->pmi_base[1];
572 par->pmi_pal = (u8 *)par->pmi_base + par->pmi_base[2];
576 par->pmi_start, par->pmi_pal);
578 if (par->pmi_base[3]) {
580 for (i = par->pmi_base[3]/2;
581 par->pmi_base[i] != 0xffff; i++)
582 pr_cont(" %x", par->pmi_base[i]);
585 if (par->pmi_base[i] != 0xffff) {
587 par->ypan = par->pmi_setpal = 0;
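Lines 567-587 locate the VBE protected-mode interface table: its base comes from the ES:EDI pair returned by the helper, the set-display-start and set-palette entry points sit at the byte offsets stored in words [1] and [2], and word [3], when non-zero, gives the byte offset of a 0xffff-terminated port list (memory requests in that table make the driver give up and clear ypan/pmi_setpal, lines 585-587). A userspace sketch of the same offset arithmetic over a fake local table, with no phys_to_virt involved:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /*
         * Fake PMI table: words [1] and [2] hold byte offsets of the two
         * entry points, word [3] the byte offset of a port list ending in
         * 0xffff.  A real table comes straight from the video BIOS.
         */
        uint16_t table[32] = { 0 };
        uint16_t *pmi_base = table;
        uint8_t *pmi_start, *pmi_pal;
        int i;

        table[1] = 0x0010;      /* set-display-start entry point at +0x10 */
        table[2] = 0x0020;      /* set-palette entry point at +0x20       */
        table[3] = 0x0008;      /* port list starts at byte offset 8      */
        table[4] = 0x03c0;      /* two fake VGA ports ...                 */
        table[5] = 0x03da;
        table[6] = 0xffff;      /* ... then the terminator                */

        pmi_start = (uint8_t *)pmi_base + pmi_base[1];
        pmi_pal   = (uint8_t *)pmi_base + pmi_base[2];

        printf("pmi_start at +0x%x, pmi_pal at +0x%x\n",
               (unsigned)(pmi_start - (uint8_t *)pmi_base),
               (unsigned)(pmi_pal - (uint8_t *)pmi_base));

        if (pmi_base[3]) {
            printf("pmi: ports =");
            /* The list is indexed in words, hence the /2 on the byte offset. */
            for (i = pmi_base[3] / 2; pmi_base[i] != 0xffff; i++)
                printf(" %x", pmi_base[i]);
            printf("\n");
        }
        return 0;
    }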
608 if (uvesafb_vbe_find_mode(info->par, mode->xres, mode->yres, 8,
617 struct uvesafb_par *par = info->par;
620 if (noedid || par->vbe_ib.vbe_version < 0x0300)
679 struct uvesafb_par *par = info->par;
691 par->nocrtc = 1;
711 par->nocrtc = 0;
723 for (i = 0; i < par->vbe_modes_cnt; i++) {
728 mode = &par->vbe_modes[i];
756 struct uvesafb_par *par)
776 par->vbe_state_size = 0;
780 par->vbe_state_size = 64 * (task->t.regs.ebx & 0xffff);
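Lines 756-780 query how much room a saved hardware state needs: the helper reports the size in 64-byte blocks in the low word of EBX, so the driver keeps 64 * (ebx & 0xffff) and later uses it to size the kmalloc at lines 358-375 and the buf_len fields at lines 375 and 405. A tiny sketch of the sizing and the zero-size guard, with an illustrative register value:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    int main(void)
    {
        unsigned int ebx = 0x0004;                 /* illustrative helper result   */
        size_t state_size = 64 * (ebx & 0xffff);   /* size comes in 64-byte blocks */
        unsigned char *state = NULL;

        /* Mirror the guard at line 358: no size, no buffer, no save. */
        if (state_size) {
            state = malloc(state_size);
            if (!state)
                return 1;
            memset(state, 0, state_size);
        }

        printf("state buffer: %zu bytes\n", state_size);
        free(state);
        return 0;
    }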
786 struct uvesafb_par *par = info->par;
793 err = uvesafb_vbe_getinfo(task, par);
797 err = uvesafb_vbe_getmodes(task, par);
801 par->nocrtc = nocrtc;
803 par->pmi_setpal = pmi_setpal;
804 par->ypan = ypan;
806 if (par->pmi_setpal || par->ypan) {
808 par->pmi_setpal = par->ypan = 0;
811 uvesafb_vbe_getpmi(task, par);
816 par->pmi_setpal = par->ypan = 0;
821 uvesafb_vbe_getstatesize(task, par);
832 struct uvesafb_par *par = info->par;
837 for (i = 0; i < par->vbe_modes_cnt; i++) {
838 if (par->vbe_modes[i].mode_id == vbemode) {
841 &par->vbe_modes[modeid]);
893 modeid = par->vbe_modes[0].mode_id;
895 &par->vbe_modes[modeid]);
904 modeid = uvesafb_vbe_find_mode(par, info->var.xres, info->var.yres,
910 uvesafb_setup_var(&info->var, info, &par->vbe_modes[modeid]);
917 if (par->vbe_ib.vbe_version < 0x0300 || par->nocrtc)
929 struct uvesafb_par *par = info->par;
930 int i = par->mode_idx;
943 if (i >= 0 && i < par->vbe_modes_cnt &&
944 par->vbe_modes[i].mode_attr & VBE_MODE_VGACOMPAT) {
953 else if (par->pmi_setpal) {
962 "S" (&par->pmi_pal)); /* ESI */
1084 struct uvesafb_par *par = info->par;
1092 if (par->pmi_start) {
1100 "D" (&par->pmi_start)); /* EDI */
1111 struct uvesafb_par *par = info->par;
1113 if (par->vbe_ib.capabilities & VBE_CAP_VGACOMPAT) {
1167 struct uvesafb_par *par = info->par;
1168 int cnt = atomic_read(&par->ref_count);
1171 if (!cnt && par->vbe_state_size) {
1172 buf = uvesafb_vbe_state_save(par);
1177 par->vbe_state_orig = buf;
1181 atomic_inc(&par->ref_count);
1188 struct uvesafb_par *par = info->par;
1189 int cnt = atomic_read(&par->ref_count);
1209 uvesafb_vbe_state_restore(par, par->vbe_state_orig);
1211 atomic_dec(&par->ref_count);
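Lines 1167-1211 bracket users of the device: the first opener (ref_count still zero) snapshots the VBE state into vbe_state_orig, and the last closer restores that snapshot before dropping the count. A userspace sketch of the same first-open/last-close pattern with a plain counter; save_state() and restore_state() are hypothetical stand-ins for uvesafb_vbe_state_save/restore:

    #include <stdio.h>
    #include <stdlib.h>

    static int ref_count;
    static char *state_orig;

    static char *save_state(void)             /* hypothetical VBE snapshot */
    {
        return malloc(64);
    }

    static void restore_state(char *state)    /* hypothetical VBE restore */
    {
        (void)state;
    }

    static void dev_open(void)
    {
        if (!ref_count)                 /* first opener saves the original state */
            state_orig = save_state();
        ref_count++;
    }

    static void dev_release(void)
    {
        if (ref_count == 1 && state_orig)    /* last closer restores it */
            restore_state(state_orig);
        ref_count--;
    }

    int main(void)
    {
        dev_open();
        dev_open();
        dev_release();
        dev_release();
        printf("ref_count = %d\n", ref_count);   /* back to 0 */
        free(state_orig);
        return 0;
    }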
1218 struct uvesafb_par *par = info->par;
1228 i = uvesafb_vbe_find_mode(par, info->var.xres, info->var.yres, depth,
1231 mode = &par->vbe_modes[i];
1242 if (par->vbe_ib.vbe_version >= 0x0300 && !par->nocrtc &&
1271 memcpy(&par->crtc, crtc, sizeof(*crtc));
1273 memset(&par->crtc, 0, sizeof(*crtc));
1277 task->buf = &par->crtc;
1300 par->mode_idx = i;
1303 if (par->vbe_ib.capabilities & VBE_CAP_CAN_SWITCH_DAC &&
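Lines 1242-1273 decide whose timings drive the mode set: only a VBE 3.0+ BIOS with the nocrtc option unset gets a CRTC block built from the requested var (line 1242 is truncated here and carries at least one more condition); otherwise the block is zeroed so the BIOS falls back to its default timings. A minimal sketch of that branch, with an illustrative demo_crtc structure:

    #include <stdio.h>
    #include <string.h>

    struct demo_crtc {            /* illustrative stand-in for the CRTC info block */
        unsigned int pixel_clock;
    };

    int main(void)
    {
        unsigned short vbe_version = 0x0300;  /* pretend the BIOS is VBE 3.0 */
        int nocrtc = 0;                       /* "nocrtc" option not given   */
        struct demo_crtc crtc;

        if (vbe_version >= 0x0300 && !nocrtc) {
            /* Caller-supplied timings go into the CRTC block. */
            crtc.pixel_clock = 65000000;
        } else {
            /* Zeroed block: the BIOS falls back to its own default timings. */
            memset(&crtc, 0, sizeof(crtc));
        }

        printf("pixel_clock = %u\n", crtc.pixel_clock);
        return 0;
    }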
1333 struct uvesafb_par *par = info->par;
1342 if (par->vbe_ib.vbe_version < 0x0300) {
1368 struct uvesafb_par *par = info->par;
1381 match = uvesafb_vbe_find_mode(par, var->xres, var->yres, depth,
1386 mode = &par->vbe_modes[match];
1399 !(par->vbe_modes[match].mode_attr & 0x100))
1403 !(par->vbe_modes[match].mode_attr & 0x200))
1409 var->yres_virtual = (par->ypan) ?
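Line 1409 (and line 283 earlier) chooses the virtual vertical resolution. The listing truncates the expression after the '?', so the panning branch shown below is an assumption: with ypan enabled the driver exposes as many scanlines as the mapped memory holds, i.e. smem_len divided by the scanline pitch, while without panning yres_virtual simply equals yres. A worked example with illustrative numbers:

    #include <stdio.h>

    int main(void)
    {
        unsigned int smem_len = 8 * 1024 * 1024;  /* mapped video memory (illustrative) */
        unsigned int bytes_per_scan_line = 4096;  /* e.g. 1024 pixels at 4 bytes each   */
        unsigned int yres = 768;
        int ypan = 1;

        /* Assumed completion of the truncated ternary at lines 283/1409. */
        unsigned int yres_virtual = ypan ?
            smem_len / bytes_per_scan_line : yres;

        printf("yres_virtual = %u (visible yres = %u)\n", yres_virtual, yres);
        return 0;
    }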
1433 struct uvesafb_par *par = info->par;
1436 info->pseudo_palette = ((u8 *)info->par + sizeof(struct uvesafb_par));
1438 info->fix.ypanstep = par->ypan ? 1 : 0;
1439 info->fix.ywrapstep = (par->ypan > 1) ? 1 : 0;
1450 for (i = 0; i < par->vbe_modes_cnt; i++) {
1451 h = par->vbe_modes[i].bytes_per_scan_line *
1452 par->vbe_modes[i].y_res;
1470 size_total = par->vbe_ib.total_memory * 65536;
1499 if (par->ypan && info->var.yres_virtual > info->var.yres) {
1501 (par->ypan > 1) ? "ywrap" : "ypan",
1506 par->ypan = 0;
1509 info->flags = (par->ypan ? FBINFO_HWACCEL_YPAN : 0);
1511 if (!par->ypan)
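Lines 1436 and 1683 belong together: framebuffer_alloc() is asked for sizeof(*par) plus room for 256 u32 palette entries, and pseudo_palette is then pointed just past the par structure inside that single allocation. A minimal userspace sketch of the same carve-out using plain malloc; struct demo_par stands in for struct uvesafb_par:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct demo_par {                  /* stand-in for struct uvesafb_par */
        int mode_idx;
        int ypan;
    };

    int main(void)
    {
        /* One allocation holds the private data followed by the palette,
         * mirroring framebuffer_alloc(sizeof(*par) + sizeof(u32) * 256, ...). */
        void *par = malloc(sizeof(struct demo_par) + sizeof(uint32_t) * 256);
        uint32_t *pseudo_palette;

        if (!par)
            return 1;

        /* The pseudo palette lives immediately after the par structure. */
        pseudo_palette = (uint32_t *)((uint8_t *)par + sizeof(struct demo_par));
        pseudo_palette[0] = 0;

        printf("par at %p, palette at %p\n", par, (void *)pseudo_palette);

        free(par);
        return 0;
    }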
1517 struct uvesafb_par *par = info->par;
1534 par->mtrr_handle = rc;
1547 struct uvesafb_par *par = info->par;
1549 return snprintf(buf, PAGE_SIZE, "%.4x\n", par->vbe_ib.vbe_version);
1558 struct uvesafb_par *par = info->par;
1561 for (i = 0; i < par->vbe_modes_cnt && ret < PAGE_SIZE; i++) {
1564 par->vbe_modes[i].x_res, par->vbe_modes[i].y_res,
1565 par->vbe_modes[i].depth, par->vbe_modes[i].mode_id);
1577 struct uvesafb_par *par = info->par;
1579 if (par->vbe_ib.oem_vendor_name_ptr)
1581 (&par->vbe_ib) + par->vbe_ib.oem_vendor_name_ptr);
1592 struct uvesafb_par *par = info->par;
1594 if (par->vbe_ib.oem_product_name_ptr)
1596 (&par->vbe_ib) + par->vbe_ib.oem_product_name_ptr);
1607 struct uvesafb_par *par = info->par;
1609 if (par->vbe_ib.oem_product_rev_ptr)
1611 (&par->vbe_ib) + par->vbe_ib.oem_product_rev_ptr);
1622 struct uvesafb_par *par = info->par;
1624 if (par->vbe_ib.oem_string_ptr)
1626 (char *)(&par->vbe_ib) + par->vbe_ib.oem_string_ptr);
1637 struct uvesafb_par *par = info->par;
1639 return sysfs_emit(buf, "%d\n", par->nocrtc);
1646 struct uvesafb_par *par = info->par;
1650 par->nocrtc = 0;
1652 par->nocrtc = 1;
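Lines 1637-1652 back a sysfs attribute that reports and toggles par->nocrtc. The sketch below is a hedged reconstruction of such a show/store pair as it might sit inside the driver (where the needed headers and struct uvesafb_par are already visible); it assumes the fb_info is reachable through dev_get_drvdata() and uses the generic DEVICE_ATTR_RW()/sysfs_emit() helpers rather than whatever attribute glue the real file uses:

    /* Hedged sketch, not the driver's actual attribute definitions. */
    static ssize_t nocrtc_show(struct device *dev,
                               struct device_attribute *attr, char *buf)
    {
        struct fb_info *info = dev_get_drvdata(dev);   /* assumed drvdata */
        struct uvesafb_par *par = info->par;

        return sysfs_emit(buf, "%d\n", par->nocrtc);
    }

    static ssize_t nocrtc_store(struct device *dev,
                                struct device_attribute *attr,
                                const char *buf, size_t count)
    {
        struct fb_info *info = dev_get_drvdata(dev);
        struct uvesafb_par *par = info->par;

        /* A leading '0' clears the flag, anything else sets it (lines 1650-1652). */
        par->nocrtc = (count && buf[0] != '0') ? 1 : 0;
        return count;
    }

    static DEVICE_ATTR_RW(nocrtc);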
1680 struct uvesafb_par *par;
1683 info = framebuffer_alloc(sizeof(*par) + sizeof(u32) * 256, &dev->dev);
1687 par = info->par;
1702 mode = &par->vbe_modes[i];
1746 info->fix.smem_len / 1024, par->vbe_ib.total_memory * 64);
1758 arch_phys_wc_del(par->mtrr_handle);
1768 kfree(par->vbe_modes);
1777 struct uvesafb_par *par = info->par;
1783 arch_phys_wc_del(par->mtrr_handle);
1788 kfree(par->vbe_modes);
1789 kfree(par->vbe_state_orig);
1790 kfree(par->vbe_state_saved);