Lines Matching defs:ufs (drivers/ufs/host/ufs-exynos.c, the Samsung Exynos UFS host controller glue driver)
21 #include <ufs/ufshcd.h>
23 #include <ufs/ufshci.h>
24 #include <ufs/unipro.h>
26 #include "ufs-exynos.h"
155 static void exynos_ufs_auto_ctrl_hcc(struct exynos_ufs *ufs, bool en);
156 static void exynos_ufs_ctrl_clkstop(struct exynos_ufs *ufs, bool en);
158 static inline void exynos_ufs_enable_auto_ctrl_hcc(struct exynos_ufs *ufs)
160 exynos_ufs_auto_ctrl_hcc(ufs, true);
163 static inline void exynos_ufs_disable_auto_ctrl_hcc(struct exynos_ufs *ufs)
165 exynos_ufs_auto_ctrl_hcc(ufs, false);
169 struct exynos_ufs *ufs, u32 *val)
171 *val = hci_readl(ufs, HCI_MISC);
172 exynos_ufs_auto_ctrl_hcc(ufs, false);
176 struct exynos_ufs *ufs, u32 *val)
178 hci_writel(ufs, *val, HCI_MISC);
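Note: the save/disable and restore helpers above bracket register sequences that need the core clock under explicit host control; exynos_ufs_config_smu() and the software-reset path further down use exactly this pattern. A minimal sketch of a caller, using only the helpers shown in this listing:

	u32 val;

	/* Stash HCI_MISC and force automatic core-clock control off. */
	exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &val);

	/* ... program registers that rely on a free-running core clock ... */

	/* Write the saved HCI_MISC policy back. */
	exynos_ufs_auto_ctrl_hcc_restore(ufs, &val);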
181 static inline void exynos_ufs_gate_clks(struct exynos_ufs *ufs)
183 exynos_ufs_ctrl_clkstop(ufs, true);
186 static inline void exynos_ufs_ungate_clks(struct exynos_ufs *ufs)
188 exynos_ufs_ctrl_clkstop(ufs, false);
191 static int exynos7_ufs_drv_init(struct device *dev, struct exynos_ufs *ufs)
196 static int exynosauto_ufs_drv_init(struct device *dev, struct exynos_ufs *ufs)
198 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
201 if (ufs->sysreg) {
202 return regmap_update_bits(ufs->sysreg,
203 ufs->shareability_reg_offset,
212 static int exynosauto_ufs_post_hce_enable(struct exynos_ufs *ufs)
214 struct ufs_hba *hba = ufs->hba;
219 hci_writel(ufs, ALLOW_TRANS_VH_DEFAULT, HCI_MH_ALLOWABLE_TRAN_OF_VH);
221 hci_writel(ufs, 0x1, HCI_MH_IID_IN_TASK_TAG);
226 static int exynosauto_ufs_pre_link(struct exynos_ufs *ufs)
228 struct ufs_hba *hba = ufs->hba;
232 rx_line_reset_period = (RX_LINE_RESET_TIME * ufs->mclk_rate) / NSEC_PER_MSEC;
233 tx_line_reset_period = (TX_LINE_RESET_TIME * ufs->mclk_rate) / NSEC_PER_MSEC;
236 for_each_ufs_rx_lane(ufs, i) {
238 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
253 for_each_ufs_tx_lane(ufs, i) {
255 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
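Note: the pre-link hook programs every lane with the UniPro main-clock period in nanoseconds, DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate). A worked example with a hypothetical 166 MHz mclk (the real rate is read from the clock tree at probe time):

	/* DIV_ROUND_UP(1000000000, 166000000) = 7, i.e. 7 ns per mclk cycle      */
	/* The line-reset periods scale with the same rate, e.g. on the RX side: */
	/*   rx_line_reset_period = (RX_LINE_RESET_TIME * 166000000) / NSEC_PER_MSEC */
	/* (the value and unit of RX_LINE_RESET_TIME are not shown in this listing) */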
280 static int exynosauto_ufs_pre_pwr_change(struct exynos_ufs *ufs,
283 struct ufs_hba *hba = ufs->hba;
293 static int exynosauto_ufs_post_pwr_change(struct exynos_ufs *ufs,
296 struct ufs_hba *hba = ufs->hba;
307 static int exynos7_ufs_pre_link(struct exynos_ufs *ufs)
309 struct ufs_hba *hba = ufs->hba;
310 u32 val = ufs->drv_data->uic_attr->pa_dbg_option_suite;
314 for_each_ufs_tx_lane(ufs, i)
316 for_each_ufs_rx_lane(ufs, i) {
322 for_each_ufs_tx_lane(ufs, i)
337 static int exynos7_ufs_post_link(struct exynos_ufs *ufs)
339 struct ufs_hba *hba = ufs->hba;
343 for_each_ufs_tx_lane(ufs, i) {
347 TX_LINERESET_N(exynos_ufs_calc_time_cntr(ufs, 200000)));
358 static int exynos7_ufs_pre_pwr_change(struct exynos_ufs *ufs,
361 unipro_writel(ufs, 0x22, UNIPRO_DBG_FORCE_DME_CTRL_STATE);
366 static int exynos7_ufs_post_pwr_change(struct exynos_ufs *ufs,
369 struct ufs_hba *hba = ufs->hba;
389 static void exynos_ufs_auto_ctrl_hcc(struct exynos_ufs *ufs, bool en)
391 u32 misc = hci_readl(ufs, HCI_MISC);
394 hci_writel(ufs, misc | HCI_CORECLK_CTRL_EN, HCI_MISC);
396 hci_writel(ufs, misc & ~HCI_CORECLK_CTRL_EN, HCI_MISC);
399 static void exynos_ufs_ctrl_clkstop(struct exynos_ufs *ufs, bool en)
401 u32 ctrl = hci_readl(ufs, HCI_CLKSTOP_CTRL);
402 u32 misc = hci_readl(ufs, HCI_MISC);
405 hci_writel(ufs, misc | CLK_CTRL_EN_MASK, HCI_MISC);
406 hci_writel(ufs, ctrl | CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
408 hci_writel(ufs, ctrl & ~CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
409 hci_writel(ufs, misc & ~CLK_CTRL_EN_MASK, HCI_MISC);
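Note: the four writes above execute in opposite order for enable and disable, so clock-stop is only ever requested while CLK_CTRL_EN is set. Reconstructed from the lines shown:

	if (en) {
		/* 1) allow hardware clock control, 2) then request clock stop */
		hci_writel(ufs, misc | CLK_CTRL_EN_MASK, HCI_MISC);
		hci_writel(ufs, ctrl | CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
	} else {
		/* 1) cancel clock stop first, 2) then revoke hardware control */
		hci_writel(ufs, ctrl & ~CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
		hci_writel(ufs, misc & ~CLK_CTRL_EN_MASK, HCI_MISC);
	}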
413 static int exynos_ufs_get_clk_info(struct exynos_ufs *ufs)
415 struct ufs_hba *hba = ufs->hba;
429 ufs->clk_hci_core = clki->clk;
431 ufs->clk_unipro_main = clki->clk;
435 if (!ufs->clk_hci_core || !ufs->clk_unipro_main) {
441 ufs->mclk_rate = clk_get_rate(ufs->clk_unipro_main);
442 pclk_rate = clk_get_rate(ufs->clk_hci_core);
443 f_min = ufs->pclk_avail_min;
444 f_max = ufs->pclk_avail_max;
446 if (ufs->opts & EXYNOS_UFS_OPT_HAS_APB_CLK_CTRL) {
462 ufs->pclk_rate = pclk_rate;
463 ufs->pclk_div = div;
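Note: on controllers with EXYNOS_UFS_OPT_HAS_APB_CLK_CTRL the pclk fed to UniPro is divided down until it falls inside [pclk_avail_min, pclk_avail_max]. The loop below is a sketch of one plausible divider search, not the driver's exact code; only pclk_rate, pclk_div, f_min and f_max appear in this listing:

	u32 div = 0;
	unsigned long rate = pclk_rate;

	/* Smallest divider that brings the APB clock under f_max. */
	while (rate > f_max) {
		div++;
		rate = pclk_rate / (div + 1);
	}
	/* A full implementation would also have to reject rate < f_min. */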
469 static void exynos_ufs_set_unipro_pclk_div(struct exynos_ufs *ufs)
471 if (ufs->opts & EXYNOS_UFS_OPT_HAS_APB_CLK_CTRL) {
474 val = hci_readl(ufs, HCI_UNIPRO_APB_CLK_CTRL);
475 hci_writel(ufs, UNIPRO_APB_CLK(val, ufs->pclk_div),
480 static void exynos_ufs_set_pwm_clk_div(struct exynos_ufs *ufs)
482 struct ufs_hba *hba = ufs->hba;
483 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
489 static void exynos_ufs_calc_pwm_clk_div(struct exynos_ufs *ufs)
491 struct ufs_hba *hba = ufs->hba;
492 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
500 clk_period = UNIPRO_PCLK_PERIOD(ufs);
520 long exynos_ufs_calc_time_cntr(struct exynos_ufs *ufs, long period)
523 long pclk_rate = ufs->pclk_rate;
526 clk_period = UNIPRO_PCLK_PERIOD(ufs);
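Note: exynos_ufs_calc_time_cntr() converts a nanosecond period into PCLK cycles using the pclk_rate captured by exynos_ufs_get_clk_info(). The relation, with a worked example at a hypothetical 100 MHz PCLK:

	/* clk_period = UNIPRO_PCLK_PERIOD(ufs), roughly NSEC_PER_SEC / pclk_rate */
	/* count ~ period / clk_period                                            */
	/* e.g. pclk_rate = 100 MHz -> clk_period = 10 ns, so the 200000 ns       */
	/* line-reset time passed to TX_LINERESET_N above becomes ~20000 cycles.  */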
532 static void exynos_ufs_specify_phy_time_attr(struct exynos_ufs *ufs)
534 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
535 struct ufs_phy_time_cfg *t_cfg = &ufs->t_cfg;
538 exynos_ufs_calc_time_cntr(ufs, attr->tx_dif_p_nsec);
540 exynos_ufs_calc_time_cntr(ufs, attr->tx_dif_n_nsec);
542 exynos_ufs_calc_time_cntr(ufs, attr->tx_high_z_cnt_nsec);
544 exynos_ufs_calc_time_cntr(ufs, attr->tx_base_unit_nsec);
546 exynos_ufs_calc_time_cntr(ufs, attr->tx_gran_unit_nsec);
548 exynos_ufs_calc_time_cntr(ufs, attr->tx_sleep_cnt);
551 exynos_ufs_calc_time_cntr(ufs, attr->rx_dif_p_nsec);
553 exynos_ufs_calc_time_cntr(ufs, attr->rx_hibern8_wait_nsec);
555 exynos_ufs_calc_time_cntr(ufs, attr->rx_base_unit_nsec);
557 exynos_ufs_calc_time_cntr(ufs, attr->rx_gran_unit_nsec);
559 exynos_ufs_calc_time_cntr(ufs, attr->rx_sleep_cnt);
561 exynos_ufs_calc_time_cntr(ufs, attr->rx_stall_cnt);
564 static void exynos_ufs_config_phy_time_attr(struct exynos_ufs *ufs)
566 struct ufs_hba *hba = ufs->hba;
567 struct ufs_phy_time_cfg *t_cfg = &ufs->t_cfg;
570 exynos_ufs_set_pwm_clk_div(ufs);
574 for_each_ufs_rx_lane(ufs, i) {
576 ufs->drv_data->uic_attr->rx_filler_enable);
593 for_each_ufs_tx_lane(ufs, i) {
612 ufs->drv_data->uic_attr->tx_min_activatetime);
618 static void exynos_ufs_config_phy_cap_attr(struct exynos_ufs *ufs)
620 struct ufs_hba *hba = ufs->hba;
621 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
626 for_each_ufs_rx_lane(ufs, i) {
648 for_each_ufs_rx_lane(ufs, i) {
664 for_each_ufs_rx_lane(ufs, i) {
688 static void exynos_ufs_establish_connt(struct exynos_ufs *ufs)
690 struct ufs_hba *hba = ufs->hba;
711 static void exynos_ufs_config_smu(struct exynos_ufs *ufs)
715 exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &val);
718 reg = ufsp_readl(ufs, UFSPRSECURITY);
719 ufsp_writel(ufs, reg | NSSMU, UFSPRSECURITY);
720 ufsp_writel(ufs, 0x0, UFSPSBEGIN0);
721 ufsp_writel(ufs, 0xffffffff, UFSPSEND0);
722 ufsp_writel(ufs, 0xff, UFSPSLUN0);
723 ufsp_writel(ufs, 0xf1, UFSPSCTRL0);
725 exynos_ufs_auto_ctrl_hcc_restore(ufs, &val);
728 static void exynos_ufs_config_sync_pattern_mask(struct exynos_ufs *ufs,
731 struct ufs_hba *hba = ufs->hba;
750 mask = exynos_ufs_calc_time_cntr(ufs, sync_len);
755 for_each_ufs_rx_lane(ufs, i)
766 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
767 struct phy *generic_phy = ufs->phy;
786 if (ufs->drv_data->pre_pwr_change)
787 ufs->drv_data->pre_pwr_change(ufs, dev_req_params);
790 exynos_ufs_config_sync_pattern_mask(ufs, dev_req_params);
814 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
815 struct phy *generic_phy = ufs->phy;
827 if (ufs->drv_data->post_pwr_change)
828 ufs->drv_data->post_pwr_change(ufs, pwr_req);
854 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
857 type = hci_readl(ufs, HCI_UTRL_NEXUS_TYPE);
860 hci_writel(ufs, type | (1 << tag), HCI_UTRL_NEXUS_TYPE);
862 hci_writel(ufs, type & ~(1 << tag), HCI_UTRL_NEXUS_TYPE);
868 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
871 type = hci_readl(ufs, HCI_UTMRL_NEXUS_TYPE);
876 hci_writel(ufs, type | (1 << tag), HCI_UTMRL_NEXUS_TYPE);
882 hci_writel(ufs, type & ~(1 << tag), HCI_UTMRL_NEXUS_TYPE);
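Note: both the transfer-request (UTRL) and task-management (UTMRL) paths mark a slot's nexus type by toggling that tag's bit in a shared register. The helper below just names the read-modify-write idiom; exynos_ufs_set_nexus_bit() is hypothetical and not part of the driver:

	static void exynos_ufs_set_nexus_bit(struct exynos_ufs *ufs, u32 reg,
					     int tag, bool set)
	{
		u32 type = hci_readl(ufs, reg);	/* HCI_UTRL_/HCI_UTMRL_NEXUS_TYPE */

		if (set)
			hci_writel(ufs, type | (1 << tag), reg);
		else
			hci_writel(ufs, type & ~(1 << tag), reg);
	}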
887 static int exynos_ufs_phy_init(struct exynos_ufs *ufs)
889 struct ufs_hba *hba = ufs->hba;
890 struct phy *generic_phy = ufs->phy;
893 if (ufs->avail_ln_rx == 0 || ufs->avail_ln_tx == 0) {
895 &ufs->avail_ln_rx);
897 &ufs->avail_ln_tx);
898 WARN(ufs->avail_ln_rx != ufs->avail_ln_tx,
900 ufs->avail_ln_rx, ufs->avail_ln_tx);
903 phy_set_bus_width(generic_phy, ufs->avail_ln_rx);
923 static void exynos_ufs_config_unipro(struct exynos_ufs *ufs)
925 struct ufs_hba *hba = ufs->hba;
928 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
930 ufs->drv_data->uic_attr->tx_trailingclks);
932 ufs->drv_data->uic_attr->pa_dbg_option_suite);
935 static void exynos_ufs_config_intr(struct exynos_ufs *ufs, u32 errs, u8 index)
939 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_PA_LAYER);
942 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DL_LAYER);
945 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_N_LAYER);
948 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_T_LAYER);
951 hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DME_LAYER);
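Note: exynos_ufs_config_intr() fans a DFES_ERR_EN | errs value out to one of five per-layer error-enable registers. A sketch of the dispatch, assuming index selects the UniPro layer (the selector itself is not visible in this listing):

	switch (index) {
	case UNIPRO_L2:
		hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DL_LAYER);
		break;
	case UNIPRO_L3:
		hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_N_LAYER);
		break;
	case UNIPRO_L4:
		hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_T_LAYER);
		break;
	/* the PA (L1.5) and DME registers follow the same pattern */
	}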
959 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
961 if (!ufs)
965 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
966 exynos_ufs_disable_auto_ctrl_hcc(ufs);
967 exynos_ufs_ungate_clks(ufs);
969 exynos_ufs_gate_clks(ufs);
970 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
971 exynos_ufs_enable_auto_ctrl_hcc(ufs);
979 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
982 exynos_ufs_config_intr(ufs, DFES_DEF_L2_ERRS, UNIPRO_L2);
983 exynos_ufs_config_intr(ufs, DFES_DEF_L3_ERRS, UNIPRO_L3);
984 exynos_ufs_config_intr(ufs, DFES_DEF_L4_ERRS, UNIPRO_L4);
985 exynos_ufs_set_unipro_pclk_div(ufs);
988 exynos_ufs_config_unipro(ufs);
991 exynos_ufs_phy_init(ufs);
992 if (!(ufs->opts & EXYNOS_UFS_OPT_SKIP_CONFIG_PHY_ATTR)) {
993 exynos_ufs_config_phy_time_attr(ufs);
994 exynos_ufs_config_phy_cap_attr(ufs);
999 if (ufs->drv_data->pre_link)
1000 ufs->drv_data->pre_link(ufs);
1005 static void exynos_ufs_fit_aggr_timeout(struct exynos_ufs *ufs)
1009 val = exynos_ufs_calc_time_cntr(ufs, IATOVAL_NSEC / CNTR_DIV_VAL);
1010 hci_writel(ufs, val & CNT_VAL_1US_MASK, HCI_1US_TO_CNT_VAL);
1015 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1016 struct phy *generic_phy = ufs->phy;
1017 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
1019 exynos_ufs_establish_connt(ufs);
1020 exynos_ufs_fit_aggr_timeout(ufs);
1022 hci_writel(ufs, 0xa, HCI_DATA_REORDER);
1023 hci_writel(ufs, PRDT_SET_SIZE(12), HCI_TXPRDT_ENTRY_SIZE);
1024 hci_writel(ufs, PRDT_SET_SIZE(12), HCI_RXPRDT_ENTRY_SIZE);
1025 hci_writel(ufs, (1 << hba->nutrs) - 1, HCI_UTRL_NEXUS_TYPE);
1026 hci_writel(ufs, (1 << hba->nutmrs) - 1, HCI_UTMRL_NEXUS_TYPE);
1027 hci_writel(ufs, 0xf, HCI_AXIDMA_RWDATA_BURST_LEN);
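Note: (1 << n) - 1 produces a mask with the n low bits set, so every implemented slot starts with its nexus-type bit raised. Worked with hypothetical queue depths (the real values come from the controller's capability registers):

	/* hba->nutrs  = 16 -> (1 << 16) - 1 = 0x0000ffff */
	/* hba->nutmrs =  8 -> (1 <<  8) - 1 = 0x000000ff */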
1029 if (ufs->opts & EXYNOS_UFS_OPT_SKIP_CONNECTION_ESTAB)
1043 !(ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER))
1048 if (ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER) {
1072 if (ufs->drv_data->post_link)
1073 ufs->drv_data->post_link(ufs);
1078 static int exynos_ufs_parse_dt(struct device *dev, struct exynos_ufs *ufs)
1084 ufs->drv_data = device_get_match_data(dev);
1086 if (ufs->drv_data && ufs->drv_data->uic_attr) {
1087 attr = ufs->drv_data->uic_attr;
1094 ufs->sysreg = syscon_regmap_lookup_by_phandle(np, "samsung,sysreg");
1095 if (IS_ERR(ufs->sysreg))
1096 ufs->sysreg = NULL;
1099 &ufs->shareability_reg_offset)) {
1101 ufs->shareability_reg_offset = UFS_SHAREABILITY_OFFSET;
1105 ufs->pclk_avail_min = PCLK_AVAIL_MIN;
1106 ufs->pclk_avail_max = PCLK_AVAIL_MAX;
1120 struct exynos_ufs *ufs)
1122 ufs->hba = hba;
1123 ufs->opts = ufs->drv_data->opts;
1124 ufs->rx_sel_idx = PA_MAXDATALANES;
1125 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_RX_SEL_IDX)
1126 ufs->rx_sel_idx = 0;
1127 hba->priv = (void *)ufs;
1128 hba->quirks = ufs->drv_data->quirks;
1135 struct exynos_ufs *ufs;
1138 ufs = devm_kzalloc(dev, sizeof(*ufs), GFP_KERNEL);
1139 if (!ufs)
1143 ufs->reg_hci = devm_platform_ioremap_resource_byname(pdev, "vs_hci");
1144 if (IS_ERR(ufs->reg_hci)) {
1146 return PTR_ERR(ufs->reg_hci);
1150 ufs->reg_unipro = devm_platform_ioremap_resource_byname(pdev, "unipro");
1151 if (IS_ERR(ufs->reg_unipro)) {
1153 return PTR_ERR(ufs->reg_unipro);
1156 /* ufs protector */
1157 ufs->reg_ufsp = devm_platform_ioremap_resource_byname(pdev, "ufsp");
1158 if (IS_ERR(ufs->reg_ufsp)) {
1159 dev_err(dev, "cannot ioremap for ufs protector register\n");
1160 return PTR_ERR(ufs->reg_ufsp);
1163 ret = exynos_ufs_parse_dt(dev, ufs);
1169 ufs->phy = devm_phy_get(dev, "ufs-phy");
1170 if (IS_ERR(ufs->phy)) {
1171 ret = PTR_ERR(ufs->phy);
1172 dev_err(dev, "failed to get ufs-phy\n");
1176 exynos_ufs_priv_init(hba, ufs);
1178 if (ufs->drv_data->drv_init) {
1179 ret = ufs->drv_data->drv_init(dev, ufs);
1186 ret = exynos_ufs_get_clk_info(ufs);
1189 exynos_ufs_specify_phy_time_attr(ufs);
1190 exynos_ufs_config_smu(ufs);
1200 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1205 exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &val);
1207 hci_writel(ufs, UFS_SW_RST_MASK, HCI_SW_RST);
1210 if (!(hci_readl(ufs, HCI_SW_RST) & UFS_SW_RST_MASK))
1218 exynos_ufs_auto_ctrl_hcc_restore(ufs, &val);
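Note: the software reset sets UFS_SW_RST_MASK in HCI_SW_RST, then polls the same register until hardware clears it, with the saved HCI_MISC policy restored afterwards. A sketch of the poll, assuming a jiffies-based timeout (the actual bound is not in this listing):

	unsigned long timeout = jiffies + msecs_to_jiffies(1);	/* assumed bound */

	hci_writel(ufs, UFS_SW_RST_MASK, HCI_SW_RST);

	do {
		if (!(hci_readl(ufs, HCI_SW_RST) & UFS_SW_RST_MASK))
			break;	/* controller cleared the reset bit */
	} while (time_before(jiffies, timeout));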
1224 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1226 hci_writel(ufs, 0 << 0, HCI_GPIO_OUT);
1228 hci_writel(ufs, 1 << 0, HCI_GPIO_OUT);
1233 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1234 struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
1237 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
1238 exynos_ufs_disable_auto_ctrl_hcc(ufs);
1239 exynos_ufs_ungate_clks(ufs);
1241 if (ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER) {
1252 ufs->entry_hibern8_t);
1266 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1272 if (ufshcd_is_hs_mode(&ufs->dev_req_params))
1285 if (!(ufs->opts & EXYNOS_UFS_OPT_SKIP_CONNECTION_ESTAB))
1286 exynos_ufs_establish_connt(ufs);
1288 ufs->entry_hibern8_t = ktime_get();
1289 exynos_ufs_gate_clks(ufs);
1290 if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
1291 exynos_ufs_enable_auto_ctrl_hcc(ufs);
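Note: the hibern8 paths mirror the setup_clocks sequence above: clocks are ungated (and broken hardware auto-control disabled) before the link leaves hibern8, and gated again only after it has re-entered, so the link never transitions with a stopped clock. Assembled from the lines shown:

	/* exit path: run the clocks before touching the link */
	if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
		exynos_ufs_disable_auto_ctrl_hcc(ufs);
	exynos_ufs_ungate_clks(ufs);

	/* enter path: stop the clocks only once hibern8 is reached */
	exynos_ufs_gate_clks(ufs);
	if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
		exynos_ufs_enable_auto_ctrl_hcc(ufs);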
1298 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1311 if (ufs->drv_data->pre_hce_enable) {
1312 ret = ufs->drv_data->pre_hce_enable(ufs);
1323 exynos_ufs_calc_pwm_clk_div(ufs);
1324 if (!(ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL))
1325 exynos_ufs_enable_auto_ctrl_hcc(ufs);
1327 if (ufs->drv_data->post_hce_enable)
1328 ret = ufs->drv_data->post_hce_enable(ufs);
1390 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1396 phy_power_off(ufs->phy);
1403 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1406 phy_power_on(ufs->phy);
1408 exynos_ufs_config_smu(ufs);
1450 struct exynos_ufs *ufs;
1453 ufs = devm_kzalloc(dev, sizeof(*ufs), GFP_KERNEL);
1454 if (!ufs)
1458 ufs->reg_hci = devm_platform_ioremap_resource_byname(pdev, "vs_hci");
1459 if (IS_ERR(ufs->reg_hci)) {
1461 return PTR_ERR(ufs->reg_hci);
1468 ufs->drv_data = device_get_match_data(dev);
1469 if (!ufs->drv_data)
1472 exynos_ufs_priv_init(hba, ufs);
1477 static int fsd_ufs_pre_link(struct exynos_ufs *ufs)
1480 struct ufs_hba *hba = ufs->hba;
1483 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
1487 for_each_ufs_tx_lane(ufs, i) {
1489 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
1493 for_each_ufs_rx_lane(ufs, i) {
1495 DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));
1509 exynos_ufs_establish_connt(ufs);
1514 static int fsd_ufs_post_link(struct exynos_ufs *ufs)
1517 struct ufs_hba *hba = ufs->hba;
1540 for_each_ufs_rx_lane(ufs, i) {
1552 static int fsd_ufs_pre_pwr_change(struct exynos_ufs *ufs,
1555 struct ufs_hba *hba = ufs->hba;
1563 unipro_writel(ufs, 12000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER0);
1564 unipro_writel(ufs, 32000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER1);
1565 unipro_writel(ufs, 16000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER2);
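Note: the three remote-L2 timer writes carry the usual UniPro power-mode timeout triplet (FC0 protection, TC0 replay, AFC0 request). Pairing them with the standard DME user-data attributes, as sketched below, is an assumption; the listing only shows the unipro_writel side:

	/* Assumed companion writes; PA_PWRMODEUSERDATA* are standard UniPro attributes */
	ufshcd_dme_set(hba, UIC_ARG_MIB(PA_PWRMODEUSERDATA0), 12000);	/* FC0 protection */
	ufshcd_dme_set(hba, UIC_ARG_MIB(PA_PWRMODEUSERDATA1), 32000);	/* TC0 replay     */
	ufshcd_dme_set(hba, UIC_ARG_MIB(PA_PWRMODEUSERDATA2), 16000);	/* AFC0 request   */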
1611 struct exynos_ufs *ufs = ufshcd_get_variant(hba);
1616 phy_power_off(ufs->phy);
1617 phy_exit(ufs->phy);
1739 { .compatible = "samsung,exynos7-ufs",
1741 { .compatible = "samsung,exynosautov9-ufs",
1743 { .compatible = "samsung,exynosautov9-ufs-vh",
1745 { .compatible = "tesla,fsd-ufs",