diff --git a/drivers/st/ddr/stm32mp1_ddr.c b/drivers/st/ddr/stm32mp1_ddr.c index 7d89d02..f5cffd8 100644 --- a/drivers/st/ddr/stm32mp1_ddr.c +++ b/drivers/st/ddr/stm32mp1_ddr.c @@ -29,6 +29,7 @@ #define INVALID_OFFSET 0xFFU +#define TIMESLOT_US_1US 1U #define TIMEOUT_US_1S 1000000U #define DDRCTL_REG(x, y) \ @@ -698,6 +699,83 @@ stm32mp1_wait_sw_done_ack(ctl); } +static void stm32mp1_refresh_cmd(struct stm32mp1_ddrctl *ctl) +{ + uint32_t dbgstat; + + do { + dbgstat = mmio_read_32((uintptr_t)&ctl->dbgstat); + } while ((dbgstat & DDRCTRL_DBGSTAT_RANK0_REFRESH_BUSY) != 0U); + + mmio_setbits_32((uintptr_t)&ctl->dbgcmd, DDRCTRL_DBGCMD_RANK0_REFRESH); +} + +/* Refresh compensation by forcing refresh command + * Rule1: Tref should be always < tREFW ? R x tREBW/8 + * Rule2: refcomp = RU(Tref/tREFI) = RU(RxTref/tREFW) + */ +static +void stm32mp1_refresh_compensation(const struct stm32mp1_ddr_config *config, + struct stm32mp1_ddrctl *ctl, + uint64_t start) +{ + uint32_t tck_ps; + uint64_t time_us, tref, trefi, refcomp, i; + + time_us = timeout_init_us(0) - start; + tck_ps = 1000000000U / config->info.speed; + if (tck_ps == 0U) { + return; + } + /* ref = refresh time in tck */ + tref = time_us * 1000000U / tck_ps; + trefi = ((mmio_read_32((uintptr_t)&ctl->rfshtmg) & + DDRCTRL_RFSHTMG_T_RFC_NOM_X1_X32_MASK) + >> DDRCTRL_RFSHTMG_T_RFC_NOM_X1_X32_SHIFT) * 32U; + if (trefi == 0U) { + return; + } + + /* div round up : number of refresh to compensate */ + refcomp = (tref + trefi - 1U) / trefi; + + for (i = 0; i < refcomp; i++) { + stm32mp1_refresh_cmd(ctl); + } +} + +static void stm32mp1_self_refresh_zcal(struct ddr_info *priv, uint32_t zdata) +{ + /* sequence for PUBL I/O Data Retention during Power-Down */ + + /* 10. Override ZQ calibration with previously (pre-retention) + * calibrated values. This is done by writing 1 to ZQ0CRN.ZDEN + * and the override data to ZQ0CRN.ZDATA. 
+ */ + mmio_setbits_32((uintptr_t)&priv->phy->zq0cr0, DDRPHYC_ZQ0CRN_ZDEN); + + mmio_clrsetbits_32((uintptr_t)&priv->phy->zq0cr0, + DDRPHYC_ZQ0CRN_ZDATA_MASK, + zdata << DDRPHYC_ZQ0CRN_ZDATA_SHIFT); + + /* 11. De-assert the PHY_top data retention enable signals + * (ret_en or ret_en_i/ret_en_n_i). + */ + mmio_setbits_32((uintptr_t)(priv->pwr) + PWR_CR3, PWR_CR3_DDRSRDIS); + mmio_clrbits_32((uintptr_t)(priv->pwr) + PWR_CR3, PWR_CR3_DDRRETEN); + + /* 12. Remove ZQ calibration override by writing 0 to ZQ0CRN.ZDEN. */ + mmio_clrbits_32((uintptr_t)&priv->phy->zq0cr0, DDRPHYC_ZQ0CRN_ZDEN); + + /* 13. Trigger ZQ calibration by writing 1 to PIR.INIT + * and '1' to PIR.ZCAL + */ + /* 14. Wait for ZQ calibration to finish by polling a 1 status + * on PGSR.IDONE. + */ + stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_ZCAL); +} + static int board_ddr_power_init(enum ddr_type ddr_type) { if (dt_pmic_status() > 0) { @@ -710,8 +788,9 @@ void stm32mp1_ddr_init(struct ddr_info *priv, struct stm32mp1_ddr_config *config) { - uint32_t pir; + uint32_t pir, ddr_reten; int ret = -EINVAL; + uint64_t time; if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) { ret = board_ddr_power_init(STM32MP_DDR3); @@ -730,6 +809,27 @@ VERBOSE("name = %s\n", config->info.name); VERBOSE("speed = %d kHz\n", config->info.speed); VERBOSE("size = 0x%x\n", config->info.size); + if (config->self_refresh) { + VERBOSE("sel-refresh exit (zdata = 0x%x)\n", config->zdata); + } + + /* Check DDR PHY pads retention */ + ddr_reten = mmio_read_32((uint32_t)(priv->pwr) + PWR_CR3) & + PWR_CR3_DDRRETEN; + if (config->self_refresh) { + if (ddr_reten == 0U) { + VERBOSE("self-refresh aborted: no retention\n"); + config->self_refresh = false; + } + } else { + if (ddr_reten != 0U) { + VERBOSE("disable DDR PHY retention\n"); + mmio_setbits_32((uint32_t)(priv->pwr) + PWR_CR3, + PWR_CR3_DDRSRDIS); + mmio_clrbits_32((uint32_t)(priv->pwr) + PWR_CR3, + PWR_CR3_DDRRETEN); + } + } /* DDR INIT SEQUENCE */ @@ -790,6 +890,12 @@ set_reg(priv, 
REG_TIMING, &config->c_timing); set_reg(priv, REG_MAP, &config->c_map); + /* Keep the controller in self-refresh mode */ + if (config->self_refresh) { + mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl, + DDRCTRL_PWRCTL_SELFREF_SW); + } + /* Skip CTRL init, SDRAM init is done by PHY PUBL */ mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0, DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK, @@ -843,8 +949,20 @@ pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */ } + /* Treat self-refresh exit : hot boot */ + if (config->self_refresh) { + /* DDR in self refresh mode, remove zcal & reset & init */ + pir &= ~(DDRPHYC_PIR_ZCAL & DDRPHYC_PIR_DRAMRST + & DDRPHYC_PIR_DRAMINIT); + pir |= DDRPHYC_PIR_ZCALBYP; + } + stm32mp1_ddrphy_init(priv->phy, pir); + if (config->self_refresh) { + stm32mp1_self_refresh_zcal(priv, config->zdata); + } + /* * 6. SET DFIMISC.dfi_init_complete_en to 1 * Enable quasi-dynamic register programming. @@ -865,6 +983,13 @@ */ /* Wait uMCTL2 ready */ + + /* Trigger self-refresh exit */ + if (config->self_refresh) { + mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl, + DDRCTRL_PWRCTL_SELFREF_SW); + } + stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL); /* Switch to DLL OFF mode */ @@ -874,6 +999,8 @@ VERBOSE("DDR DQS training : "); + time = timeout_init_us(0); + /* * 8. Disable Auto refresh and power down by setting * - RFSHCTL3.dis_au_refresh = 1 @@ -898,6 +1025,11 @@ /* 11. monitor PUB PGSR.IDONE to poll cpmpletion of training sequence */ stm32mp1_ddrphy_idone_wait(priv->phy); + /* Refresh compensation: forcing refresh command */ + if (config->self_refresh) { + stm32mp1_refresh_compensation(config, priv->ctl, time); + } + /* * 12. 
set back registers in step 8 to the orginal values if desidered */ } diff --git a/drivers/st/ddr/stm32mp1_ddr_helpers.c b/drivers/st/ddr/stm32mp1_ddr_helpers.c index fcb4cfc..2071bb2 100644 --- a/drivers/st/ddr/stm32mp1_ddr_helpers.c +++ b/drivers/st/ddr/stm32mp1_ddr_helpers.c @@ -6,8 +6,14 @@ #include +#include +#include +#include #include #include +#include + +#define TIMEOUT_500US 500U void ddr_enable_clock(void) { @@ -22,3 +28,469 @@ stm32mp1_clk_rcc_regs_unlock(); } + +static void do_sw_handshake(void) +{ + uintptr_t ddrctrl_base = stm32mp_ddrctrl_base(); + + mmio_clrbits_32(ddrctrl_base + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE); +} + +static void do_sw_ack(void) +{ + uint64_t timeout; + uintptr_t ddrctrl_base = stm32mp_ddrctrl_base(); + + mmio_setbits_32(ddrctrl_base + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE); + + timeout = timeout_init_us(TIMEOUT_500US); + while ((mmio_read_32(ddrctrl_base + DDRCTRL_SWSTAT) & + DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U) { + if (timeout_elapsed(timeout)) { + panic(); + } + } +} + +static int ddr_sw_self_refresh_in(void) +{ + uint64_t timeout; + uint32_t stat; + uint32_t operating_mode; + uint32_t selref_type; + uint8_t op_mode_changed = 0; + uintptr_t rcc_base = stm32mp_rcc_base(); + uintptr_t pwr_base = stm32mp_pwr_base(); + uintptr_t ddrctrl_base = stm32mp_ddrctrl_base(); + uintptr_t ddrphyc_base = stm32mp_ddrphyc_base(); + + stm32mp1_clk_rcc_regs_lock(); + + mmio_clrbits_32(rcc_base + RCC_DDRITFCR, RCC_DDRITFCR_AXIDCGEN); + + stm32mp1_clk_rcc_regs_unlock(); + + /* Blocks AXI ports from taking any more transactions */ + mmio_clrbits_32(ddrctrl_base + DDRCTRL_PCTRL_0, + DDRCTRL_PCTRL_N_PORT_EN); + mmio_clrbits_32(ddrctrl_base + DDRCTRL_PCTRL_1, + DDRCTRL_PCTRL_N_PORT_EN); + + /* Waits until all AXI ports are idle + * Poll PSTAT.rd_port_busy_n = 0 + * Poll PSTAT.wr_port_busy_n = 0 + */ + timeout = timeout_init_us(TIMEOUT_500US); + while (mmio_read_32(ddrctrl_base + DDRCTRL_PSTAT)) { + if (timeout_elapsed(timeout)) { + goto pstat_failed; + } + 
} + /* SW Self-Refresh entry */ + mmio_setbits_32(ddrctrl_base + DDRCTRL_PWRCTL, + DDRCTRL_PWRCTL_SELFREF_SW); + + /* Wait operating mode change in self-refresh mode + * with STAT.operating_mode[1:0]==11. + * Ensure transition to self-refresh was due to software + * by checking also that STAT.selfref_type[1:0]=2. + */ + timeout = timeout_init_us(TIMEOUT_500US); + while (!timeout_elapsed(timeout)) { + stat = mmio_read_32(ddrctrl_base + DDRCTRL_STAT); + operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK; + selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK; + + if ((operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) && + (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) { + op_mode_changed = 1; + break; + } + } + + if (op_mode_changed == 0U) + goto selfref_sw_failed; + + /* IOs powering down (PUBL registers) */ + mmio_setbits_32(ddrphyc_base + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDD); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDR); + + mmio_clrsetbits_32(ddrphyc_base + DDRPHYC_ACIOCR, + DDRPHYC_ACIOCR_CKPDD_MASK, + DDRPHYC_ACIOCR_CKPDD_0); + + mmio_clrsetbits_32(ddrphyc_base + DDRPHYC_ACIOCR, + DDRPHYC_ACIOCR_CKPDR_MASK, + DDRPHYC_ACIOCR_CKPDR_0); + + mmio_clrsetbits_32(ddrphyc_base + DDRPHYC_ACIOCR, + DDRPHYC_ACIOCR_CSPDD_MASK, + DDRPHYC_ACIOCR_CSPDD_0); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDD); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDR); + + mmio_clrsetbits_32(ddrphyc_base + DDRPHYC_DSGCR, + DDRPHYC_DSGCR_ODTPDD_MASK, + DDRPHYC_DSGCR_ODTPDD_0); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_DSGCR, DDRPHYC_DSGCR_NL2PD); + + mmio_clrsetbits_32(ddrphyc_base + DDRPHYC_DSGCR, + DDRPHYC_DSGCR_CKEPDD_MASK, + DDRPHYC_DSGCR_CKEPDD_0); + + /* Disable PZQ cell (PUBL register) */ + mmio_setbits_32(ddrphyc_base + DDRPHYC_ZQ0CR0, DDRPHYC_ZQ0CRN_ZQPD); + + /* Activate sw retention in PWRCTRL */ + stm32mp_pwr_regs_lock(); + mmio_setbits_32(pwr_base + PWR_CR3, PWR_CR3_DDRRETEN); + 
stm32mp_pwr_regs_unlock(); + + /* Switch controller clocks (uMCTL2/PUBL) to DLL ref clock */ + stm32mp1_clk_rcc_regs_lock(); + mmio_setbits_32(rcc_base + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL); + stm32mp1_clk_rcc_regs_unlock(); + + /* Disable all DLLs: GLITCH window */ + mmio_setbits_32(ddrphyc_base + DDRPHYC_ACDLLCR, + DDRPHYC_ACDLLCR_DLLDIS); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_DX0DLLCR, + DDRPHYC_DXNDLLCR_DLLDIS); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_DX1DLLCR, + DDRPHYC_DXNDLLCR_DLLDIS); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_DX2DLLCR, + DDRPHYC_DXNDLLCR_DLLDIS); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_DX3DLLCR, + DDRPHYC_DXNDLLCR_DLLDIS); + + stm32mp1_clk_rcc_regs_lock(); + + /* Switch controller clocks (uMCTL2/PUBL) to DLL output clock */ + mmio_clrbits_32(rcc_base + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL); + + /* Deactivate all DDR clocks */ + mmio_clrbits_32(rcc_base + RCC_DDRITFCR, + RCC_DDRITFCR_DDRC1EN | + RCC_DDRITFCR_DDRC2EN | + RCC_DDRITFCR_DDRCAPBEN | + RCC_DDRITFCR_DDRPHYCAPBEN); + + stm32mp1_clk_rcc_regs_unlock(); + + return 0; + +selfref_sw_failed: + /* This bit should be cleared to restore DDR in its previous state */ + mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, + DDRCTRL_PWRCTL_SELFREF_SW); + +pstat_failed: + mmio_setbits_32(ddrctrl_base + DDRCTRL_PCTRL_0, + DDRCTRL_PCTRL_N_PORT_EN); + mmio_setbits_32(ddrctrl_base + DDRCTRL_PCTRL_1, + DDRCTRL_PCTRL_N_PORT_EN); + + return -1; +} + +int ddr_sw_self_refresh_exit(void) +{ + uint64_t timeout; + uintptr_t rcc_base = stm32mp_rcc_base(); + uintptr_t pwr_base = stm32mp_pwr_base(); + uintptr_t ddrctrl_base = stm32mp_ddrctrl_base(); + uintptr_t ddrphyc_base = stm32mp_ddrphyc_base(); + + /* Enable all clocks */ + ddr_enable_clock(); + + do_sw_handshake(); + + /* Mask dfi_init_complete_en */ + mmio_clrbits_32(ddrctrl_base + DDRCTRL_DFIMISC, + DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN); + + do_sw_ack(); + + /* Switch controller clocks (uMCTL2/PUBL) to DLL ref clock */ + 
stm32mp1_clk_rcc_regs_lock(); + mmio_setbits_32(rcc_base + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL); + stm32mp1_clk_rcc_regs_unlock(); + + /* Enable all DLLs: GLITCH window */ + mmio_clrbits_32(ddrphyc_base + DDRPHYC_ACDLLCR, + DDRPHYC_ACDLLCR_DLLDIS); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DX0DLLCR, + DDRPHYC_DXNDLLCR_DLLDIS); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DX1DLLCR, + DDRPHYC_DXNDLLCR_DLLDIS); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DX2DLLCR, + DDRPHYC_DXNDLLCR_DLLDIS); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DX3DLLCR, + DDRPHYC_DXNDLLCR_DLLDIS); + + /* Additional delay to avoid early DLL clock switch */ + udelay(10); + + /* Switch controller clocks (uMCTL2/PUBL) to DLL output clock */ + stm32mp1_clk_rcc_regs_lock(); + mmio_clrbits_32(rcc_base + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL); + stm32mp1_clk_rcc_regs_unlock(); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_ACDLLCR, + DDRPHYC_ACDLLCR_DLLSRST); + + udelay(10); + + mmio_setbits_32(ddrphyc_base + DDRPHYC_ACDLLCR, + DDRPHYC_ACDLLCR_DLLSRST); + + /* PHY partial init: (DLL lock and ITM reset) */ + mmio_write_32(ddrphyc_base + DDRPHYC_PIR, + DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | + DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_INIT); + + /* Need to wait at least 10 clock cycles before accessing PGSR */ + udelay(1); + + /* Poll end of init */ + timeout = timeout_init_us(TIMEOUT_500US); + + while ((mmio_read_32(ddrphyc_base + DDRPHYC_PGSR) & + DDRPHYC_PGSR_IDONE) == 0U) { + if (timeout_elapsed(timeout)) { + return -1; + } + } + + do_sw_handshake(); + + /* Unmask dfi_init_complete_en to uMCTL2 */ + mmio_setbits_32(ddrctrl_base + DDRCTRL_DFIMISC, + DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN); + + do_sw_ack(); + + /* Deactivate sw retention in PWR */ + stm32mp_pwr_regs_lock(); + mmio_clrbits_32(pwr_base + PWR_CR3, PWR_CR3_DDRRETEN); + stm32mp_pwr_regs_unlock(); + + /* Enable PZQ cell (PUBL register) */ + mmio_clrbits_32(ddrphyc_base + DDRPHYC_ZQ0CR0, DDRPHYC_ZQ0CRN_ZQPD); + + /* Enable pad drivers */ + 
mmio_clrbits_32(ddrphyc_base + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDD); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_ACIOCR, + DDRPHYC_ACIOCR_CKPDD_MASK); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_ACIOCR, + DDRPHYC_ACIOCR_CSPDD_MASK); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDD); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDR); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DSGCR, + DDRPHYC_DSGCR_ODTPDD_MASK); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DSGCR, DDRPHYC_DSGCR_NL2PD); + + mmio_clrbits_32(ddrphyc_base + DDRPHYC_DSGCR, + DDRPHYC_DSGCR_CKEPDD_MASK); + + /* Remove selfrefresh */ + mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, + DDRCTRL_PWRCTL_SELFREF_SW); + + /* Wait operating_mode == normal */ + timeout = timeout_init_us(TIMEOUT_500US); + while ((mmio_read_32(ddrctrl_base + DDRCTRL_STAT) & + DDRCTRL_STAT_OPERATING_MODE_MASK) != + DDRCTRL_STAT_OPERATING_MODE_NORMAL) { + if (timeout_elapsed(timeout)) { + return -1; + } + } + + /* AXI ports are no longer blocked from taking transactions */ + mmio_setbits_32(ddrctrl_base + DDRCTRL_PCTRL_0, + DDRCTRL_PCTRL_N_PORT_EN); + mmio_setbits_32(ddrctrl_base + DDRCTRL_PCTRL_1, + DDRCTRL_PCTRL_N_PORT_EN); + + stm32mp1_clk_rcc_regs_lock(); + + mmio_setbits_32(rcc_base + RCC_DDRITFCR, RCC_DDRITFCR_AXIDCGEN); + + stm32mp1_clk_rcc_regs_unlock(); + + return 0; +} + +int ddr_standby_sr_entry(uint32_t *zq0cr0_zdata) +{ + uintptr_t pwr_base = stm32mp_pwr_base(); + uintptr_t ddrphyc_base = stm32mp_ddrphyc_base(); + + /* Save IOs calibration values */ + if (zq0cr0_zdata != NULL) { + *zq0cr0_zdata = mmio_read_32(ddrphyc_base + DDRPHYC_ZQ0CR0) & + DDRPHYC_ZQ0CRN_ZDATA_MASK; + } + + /* Put DDR in Self-Refresh */ + if (ddr_sw_self_refresh_in() != 0) { + return -1; + } + + /* Enable I/O retention mode in standby */ + stm32mp_pwr_regs_lock(); + mmio_setbits_32(pwr_base + PWR_CR3, PWR_CR3_DDRSREN); + stm32mp_pwr_regs_unlock(); + + return 0; +} + +void ddr_sr_mode_ssr(void) +{ + uintptr_t 
rcc_ddritfcr = stm32mp_rcc_base() + RCC_DDRITFCR; + uintptr_t ddrctrl_base = stm32mp_ddrctrl_base(); + + stm32mp1_clk_rcc_regs_lock(); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRC1LPEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRC2LPEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRC1EN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRC2EN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRCAPBLPEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRPHYCAPBLPEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRCAPBEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRPHYCAPBEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRPHYCEN); + + mmio_clrbits_32(rcc_ddritfcr, RCC_DDRITFCR_AXIDCGEN); + + mmio_clrbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRCKMOD_MASK); + + stm32mp1_clk_rcc_regs_unlock(); + + /* Disable HW LP interface of uMCTL2 */ + mmio_clrbits_32(ddrctrl_base + DDRCTRL_HWLPCTL, + DDRCTRL_HWLPCTL_HW_LP_EN); + + /* Configure Automatic LP modes of uMCTL2 */ + mmio_clrsetbits_32(ddrctrl_base + DDRCTRL_PWRTMG, + DDRCTRL_PWRTMG_SELFREF_TO_X32_MASK, + DDRCTRL_PWRTMG_SELFREF_TO_X32_0); + + /* + * Disable Clock disable with LP modes + * (used in RUN mode for LPDDR2 with specific timing). 
+ */ + mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, + DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE); + + /* Disable automatic Self-Refresh mode */ + mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, + DDRCTRL_PWRCTL_SELFREF_EN); +} + +void ddr_sr_mode_asr(void) +{ + uintptr_t rcc_ddritfcr = stm32mp_rcc_base() + RCC_DDRITFCR; + uintptr_t ddrctrl_base = stm32mp_ddrctrl_base(); + + stm32mp1_clk_rcc_regs_lock(); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_AXIDCGEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRC1LPEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRC2LPEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRPHYCLPEN); + + mmio_clrsetbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRCKMOD_MASK, + RCC_DDRITFCR_DDRCKMOD_ASR1); + + stm32mp1_clk_rcc_regs_unlock(); + + /* Enable HW LP interface of uMCTL2 */ + mmio_setbits_32(ddrctrl_base + DDRCTRL_HWLPCTL, + DDRCTRL_HWLPCTL_HW_LP_EN); + + /* Configure Automatic LP modes of uMCTL2 */ + mmio_clrsetbits_32(ddrctrl_base + DDRCTRL_PWRTMG, + DDRCTRL_PWRTMG_SELFREF_TO_X32_MASK, + DDRCTRL_PWRTMG_SELFREF_TO_X32_0); + + /* + * Enable Clock disable with LP modes + * (used in RUN mode for LPDDR2 with specific timing). 
+ */ + mmio_setbits_32(ddrctrl_base + DDRCTRL_PWRCTL, + DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE); + + /* Enable automatic Self-Refresh for ASR mode */ + mmio_setbits_32(ddrctrl_base + DDRCTRL_PWRCTL, + DDRCTRL_PWRCTL_SELFREF_EN); +} + +void ddr_sr_mode_hsr(void) +{ + uintptr_t rcc_ddritfcr = stm32mp_rcc_base() + RCC_DDRITFCR; + uintptr_t ddrctrl_base = stm32mp_ddrctrl_base(); + + stm32mp1_clk_rcc_regs_lock(); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_AXIDCGEN); + + mmio_clrbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRC1LPEN); + + mmio_clrbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRC2LPEN); + + mmio_setbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRPHYCLPEN); + + mmio_clrsetbits_32(rcc_ddritfcr, RCC_DDRITFCR_DDRCKMOD_MASK, + RCC_DDRITFCR_DDRCKMOD_HSR1); + + stm32mp1_clk_rcc_regs_unlock(); + + /* Enable HW LP interface of uMCTL2 */ + mmio_setbits_32(ddrctrl_base + DDRCTRL_HWLPCTL, + DDRCTRL_HWLPCTL_HW_LP_EN); + + /* Configure Automatic LP modes of uMCTL2 */ + mmio_clrsetbits_32(ddrctrl_base + DDRCTRL_PWRTMG, + DDRCTRL_PWRTMG_SELFREF_TO_X32_MASK, + DDRCTRL_PWRTMG_SELFREF_TO_X32_0); + + /* + * Enable Clock disable with LP modes + * (used in RUN mode for LPDDR2 with specific timing). 
+ */ + mmio_setbits_32(ddrctrl_base + DDRCTRL_PWRCTL, + DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE); +} diff --git a/include/drivers/st/stm32mp1_ddr.h b/include/drivers/st/stm32mp1_ddr.h index 4ab37d6..f52609f 100644 --- a/include/drivers/st/stm32mp1_ddr.h +++ b/include/drivers/st/stm32mp1_ddr.h @@ -8,9 +8,6 @@ #define STM32MP1_DDR_H #include -#include - -#define DT_DDR_COMPAT "st,stm32mp1-ddr" struct stm32mp1_ddr_size { uint64_t base; @@ -166,9 +163,12 @@ struct stm32mp1_ddrphy_reg p_reg; struct stm32mp1_ddrphy_timing p_timing; struct stm32mp1_ddrphy_cal p_cal; + bool self_refresh; + uint32_t zdata; }; int stm32mp1_ddr_clk_enable(struct ddr_info *priv, uint32_t mem_speed); void stm32mp1_ddr_init(struct ddr_info *priv, struct stm32mp1_ddr_config *config); + #endif /* STM32MP1_DDR_H */ diff --git a/include/drivers/st/stm32mp1_ddr_helpers.h b/include/drivers/st/stm32mp1_ddr_helpers.h index 38f2415..80bf9de 100644 --- a/include/drivers/st/stm32mp1_ddr_helpers.h +++ b/include/drivers/st/stm32mp1_ddr_helpers.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2017-2018, STMicroelectronics - All Rights Reserved + * Copyright (c) 2017-2019, STMicroelectronics - All Rights Reserved * * SPDX-License-Identifier: BSD-3-Clause */ @@ -7,6 +7,13 @@ #ifndef STM32MP1_DDR_HELPERS_H #define STM32MP1_DDR_HELPERS_H +#include + void ddr_enable_clock(void); +int ddr_sw_self_refresh_exit(void); +int ddr_standby_sr_entry(uint32_t *zq0cr0_zdata); +void ddr_sr_mode_ssr(void); +void ddr_sr_mode_asr(void); +void ddr_sr_mode_hsr(void); #endif /* STM32MP1_DDR_HELPERS_H */ diff --git a/plat/st/common/stm32mp_dt.c b/plat/st/common/stm32mp_dt.c index 17da490..863c2cc 100644 --- a/plat/st/common/stm32mp_dt.c +++ b/plat/st/common/stm32mp_dt.c @@ -13,8 +13,6 @@ #include #include -#include -#include #include diff --git a/plat/st/stm32mp1/stm32mp1_def.h b/plat/st/stm32mp1/stm32mp1_def.h index a40852b..dbe62f0 100644 --- a/plat/st/stm32mp1/stm32mp1_def.h +++ b/plat/st/stm32mp1/stm32mp1_def.h @@ -17,6 +17,8 @@ #ifndef 
__ASSEMBLER__ #include #include +#include +#include #include #include @@ -336,6 +338,7 @@ * Device Tree defines ******************************************************************************/ #define DT_BSEC_COMPAT "st,stm32mp15-bsec" +#define DT_DDR_COMPAT "st,stm32mp1-ddr" #define DT_IWDG_COMPAT "st,stm32mp1-iwdg" #define DT_PWR_COMPAT "st,stm32mp1-pwr" #define DT_RCC_CLK_COMPAT "st,stm32mp1-rcc"