Commit f3d7c2fe authored by Rajeshwari Birje, committed by Minkyu Kang

Exynos5420: Add DDR3 initialization for 5420

This patch intends to add DDR3 initialization code for Exynos5420.
Signed-off-by: Akshay Saraswat <akshay.s@samsung.com>
Signed-off-by: Rajeshwari S Shinde <rajeshwari.s@samsung.com>
Acked-by: Simon Glass <sjg@chromium.org>
Signed-off-by: Minkyu Kang <mk7.kang@samsung.com>
Parent 060c227a
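As context for the diff below: the common DMC helpers (dmc_config_zq, dmc_config_mrs, dmc_config_prech, update_reset_dll) are reworked to take the raw register addresses they touch instead of Exynos5250-specific struct pointers, so the same routines can serve both the Exynos5250 DMC and the Exynos5420 DREX/TZASC layouts. A minimal sketch of the call-site change, illustrative only, using the variable names that appear in this patch:

	/* Before: helpers were bound to the Exynos5250 register structs */
	if (dmc_config_zq(mem, phy0_ctrl, phy1_ctrl))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
	update_reset_dll(dmc, DDR_MODE_DDR3);

	/* After: the caller passes the individual register addresses */
	if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
			  &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
		return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
	update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);	/* Exynos5250 DMC  */
	update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);	/* Exynos5420 DREX */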
/*
* Mem setup common file for different types of DDR present on SMDK5250 boards.
* Mem setup common file for different types of DDR present on Exynos boards.
*
* Copyright (C) 2012 Samsung Electronics
*
@@ -15,9 +15,9 @@
#define ZQ_INIT_TIMEOUT 10000
int dmc_config_zq(struct mem_timings *mem,
struct exynos5_phy_control *phy0_ctrl,
struct exynos5_phy_control *phy1_ctrl)
int dmc_config_zq(struct mem_timings *mem, uint32_t *phy0_con16,
uint32_t *phy1_con16, uint32_t *phy0_con17,
uint32_t *phy1_con17)
{
unsigned long val = 0;
int i;
@@ -31,19 +31,19 @@ int dmc_config_zq(struct mem_timings *mem,
val |= mem->zq_mode_dds << PHY_CON16_ZQ_MODE_DDS_SHIFT;
val |= mem->zq_mode_term << PHY_CON16_ZQ_MODE_TERM_SHIFT;
val |= ZQ_CLK_DIV_EN;
writel(val, &phy0_ctrl->phy_con16);
writel(val, &phy1_ctrl->phy_con16);
writel(val, phy0_con16);
writel(val, phy1_con16);
/* Disable termination */
if (mem->zq_mode_noterm)
val |= PHY_CON16_ZQ_MODE_NOTERM_MASK;
writel(val, &phy0_ctrl->phy_con16);
writel(val, &phy1_ctrl->phy_con16);
writel(val, phy0_con16);
writel(val, phy1_con16);
/* ZQ_MANUAL_START: Enable */
val |= ZQ_MANUAL_STR;
writel(val, &phy0_ctrl->phy_con16);
writel(val, &phy1_ctrl->phy_con16);
writel(val, phy0_con16);
writel(val, phy1_con16);
/* ZQ_MANUAL_START: Disable */
val &= ~ZQ_MANUAL_STR;
@@ -53,47 +53,47 @@ int dmc_config_zq(struct mem_timings *mem,
* we are looping for the ZQ_init to complete.
*/
i = ZQ_INIT_TIMEOUT;
while ((readl(&phy0_ctrl->phy_con17) & ZQ_DONE) != ZQ_DONE && i > 0) {
while ((readl(phy0_con17) & ZQ_DONE) != ZQ_DONE && i > 0) {
sdelay(100);
i--;
}
if (!i)
return -1;
writel(val, &phy0_ctrl->phy_con16);
writel(val, phy0_con16);
i = ZQ_INIT_TIMEOUT;
while ((readl(&phy1_ctrl->phy_con17) & ZQ_DONE) != ZQ_DONE && i > 0) {
while ((readl(phy1_con17) & ZQ_DONE) != ZQ_DONE && i > 0) {
sdelay(100);
i--;
}
if (!i)
return -1;
writel(val, &phy1_ctrl->phy_con16);
writel(val, phy1_con16);
return 0;
}
void update_reset_dll(struct exynos5_dmc *dmc, enum ddr_mode mode)
void update_reset_dll(uint32_t *phycontrol0, enum ddr_mode mode)
{
unsigned long val;
if (mode == DDR_MODE_DDR3) {
val = MEM_TERM_EN | PHY_TERM_EN | DMC_CTRL_SHGATE;
writel(val, &dmc->phycontrol0);
writel(val, phycontrol0);
}
/* Update DLL Information: Force DLL Resynchronization */
val = readl(&dmc->phycontrol0);
val = readl(phycontrol0);
val |= FP_RSYNC;
writel(val, &dmc->phycontrol0);
writel(val, phycontrol0);
/* Reset Force DLL Resynchronization */
val = readl(&dmc->phycontrol0);
val = readl(phycontrol0);
val &= ~FP_RSYNC;
writel(val, &dmc->phycontrol0);
writel(val, phycontrol0);
}
void dmc_config_mrs(struct mem_timings *mem, struct exynos5_dmc *dmc)
void dmc_config_mrs(struct mem_timings *mem, uint32_t *directcmd)
{
int channel, chip;
@@ -107,7 +107,7 @@ void dmc_config_mrs(struct mem_timings *mem, struct exynos5_dmc *dmc)
mask |= chip << DIRECT_CMD_CHIP_SHIFT;
/* Sending NOP command */
writel(DIRECT_CMD_NOP | mask, &dmc->directcmd);
writel(DIRECT_CMD_NOP | mask, directcmd);
/*
* TODO(alim.akhtar@samsung.com): Do we need these
@@ -119,14 +119,14 @@ void dmc_config_mrs(struct mem_timings *mem, struct exynos5_dmc *dmc)
/* Sending EMRS/MRS commands */
for (i = 0; i < MEM_TIMINGS_MSR_COUNT; i++) {
writel(mem->direct_cmd_msr[i] | mask,
&dmc->directcmd);
directcmd);
sdelay(0x10000);
}
if (mem->send_zq_init) {
/* Sending ZQINIT command */
writel(DIRECT_CMD_ZQINIT | mask,
&dmc->directcmd);
directcmd);
sdelay(10000);
}
@@ -134,7 +134,7 @@ void dmc_config_mrs(struct mem_timings *mem, struct exynos5_dmc *dmc)
}
}
void dmc_config_prech(struct mem_timings *mem, struct exynos5_dmc *dmc)
void dmc_config_prech(struct mem_timings *mem, uint32_t *directcmd)
{
int channel, chip;
@@ -146,20 +146,12 @@ void dmc_config_prech(struct mem_timings *mem, struct exynos5_dmc *dmc)
mask |= chip << DIRECT_CMD_CHIP_SHIFT;
/* PALL (all banks precharge) CMD */
writel(DIRECT_CMD_PALL | mask, &dmc->directcmd);
writel(DIRECT_CMD_PALL | mask, directcmd);
sdelay(0x10000);
}
}
}
void dmc_config_memory(struct mem_timings *mem, struct exynos5_dmc *dmc)
{
writel(mem->memconfig, &dmc->memconfig0);
writel(mem->memconfig, &dmc->memconfig1);
writel(DMC_MEMBASECONFIG0_VAL, &dmc->membaseconfig0);
writel(DMC_MEMBASECONFIG1_VAL, &dmc->membaseconfig1);
}
void mem_ctrl_init(int reset)
{
struct spl_machine_param *param = spl_get_machine_params();
......
/*
* DDR3 mem setup file for SMDK5250 board based on EXYNOS5
* DDR3 mem setup file for board based on EXYNOS5
*
* Copyright (C) 2012 Samsung Electronics
*
@@ -11,12 +11,14 @@
#include <asm/arch/clock.h>
#include <asm/arch/cpu.h>
#include <asm/arch/dmc.h>
#include <asm/arch/power.h>
#include "common_setup.h"
#include "exynos5_setup.h"
#include "clock_init.h"
#define RDLVL_COMPLETE_TIMEOUT 10000
#define TIMEOUT 10000
#ifdef CONFIG_EXYNOS5250
static void reset_phy_ctrl(void)
{
struct exynos5_clock *clk =
@@ -57,7 +59,8 @@ int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
writel(val, &phy1_ctrl->phy_con42);
/* ZQ Calibration */
if (dmc_config_zq(mem, phy0_ctrl, phy1_ctrl))
if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
&phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
/* DQ Signal */
@@ -68,7 +71,7 @@ int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
| (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT),
&dmc->concontrol);
update_reset_dll(dmc, DDR_MODE_DDR3);
update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
/* DQS Signal */
writel(mem->phy0_dqs, &phy0_ctrl->phy_con4);
@@ -93,7 +96,7 @@ int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
&phy1_ctrl->phy_con12);
update_reset_dll(dmc, DDR_MODE_DDR3);
update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
&dmc->concontrol);
@@ -124,10 +127,10 @@ int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
writel(mem->timing_power, &dmc->timingpower);
/* Send PALL command */
dmc_config_prech(mem, dmc);
dmc_config_prech(mem, &dmc->directcmd);
/* Send NOP, MRS and ZQINIT commands */
dmc_config_mrs(mem, dmc);
dmc_config_mrs(mem, &dmc->directcmd);
if (mem->gate_leveling_enable) {
val = PHY_CON0_RESET_VAL;
@@ -174,7 +177,7 @@ int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
writel(val, &phy1_ctrl->phy_con1);
writel(CTRL_RDLVL_GATE_ENABLE, &dmc->rdlvl_config);
i = RDLVL_COMPLETE_TIMEOUT;
i = TIMEOUT;
while ((readl(&dmc->phystatus) &
(RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1)) !=
(RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1) && i > 0) {
@@ -202,11 +205,11 @@ int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
writel(val, &phy0_ctrl->phy_con12);
writel(val, &phy1_ctrl->phy_con12);
update_reset_dll(dmc, DDR_MODE_DDR3);
update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
}
/* Send PALL command */
dmc_config_prech(mem, dmc);
dmc_config_prech(mem, &dmc->directcmd);
writel(mem->memcontrol, &dmc->memcontrol);
@@ -215,3 +218,419 @@ int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
| (mem->aref_en << CONCONTROL_AREF_EN_SHIFT), &dmc->concontrol);
return 0;
}
#endif
#ifdef CONFIG_EXYNOS5420
int ddr3_mem_ctrl_init(struct mem_timings *mem, unsigned long mem_iv_size,
int reset)
{
struct exynos5420_clock *clk =
(struct exynos5420_clock *)samsung_get_base_clock();
struct exynos5420_power *power =
(struct exynos5420_power *)samsung_get_base_power();
struct exynos5420_phy_control *phy0_ctrl, *phy1_ctrl;
struct exynos5420_dmc *drex0, *drex1;
struct exynos5420_tzasc *tzasc0, *tzasc1;
uint32_t val, n_lock_r, n_lock_w_phy0, n_lock_w_phy1;
int chip;
int i;
phy0_ctrl = (struct exynos5420_phy_control *)samsung_get_base_dmc_phy();
phy1_ctrl = (struct exynos5420_phy_control *)(samsung_get_base_dmc_phy()
+ DMC_OFFSET);
drex0 = (struct exynos5420_dmc *)samsung_get_base_dmc_ctrl();
drex1 = (struct exynos5420_dmc *)(samsung_get_base_dmc_ctrl()
+ DMC_OFFSET);
tzasc0 = (struct exynos5420_tzasc *)samsung_get_base_dmc_tzasc();
tzasc1 = (struct exynos5420_tzasc *)(samsung_get_base_dmc_tzasc()
+ DMC_OFFSET);
/* Enable PAUSE for DREX */
setbits_le32(&clk->pause, ENABLE_BIT);
/* Enable BYPASS mode */
setbits_le32(&clk->bpll_con1, BYPASS_EN);
writel(MUX_BPLL_SEL_FOUTBPLL, &clk->src_cdrex);
do {
val = readl(&clk->mux_stat_cdrex);
val &= BPLL_SEL_MASK;
} while (val != FOUTBPLL);
clrbits_le32(&clk->bpll_con1, BYPASS_EN);
/* Specify the DDR memory type as DDR3 */
val = readl(&phy0_ctrl->phy_con0);
val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
writel(val, &phy0_ctrl->phy_con0);
val = readl(&phy1_ctrl->phy_con0);
val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
writel(val, &phy1_ctrl->phy_con0);
/* Set Read Latency and Burst Length for PHY0 and PHY1 */
val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
(mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
writel(val, &phy0_ctrl->phy_con42);
writel(val, &phy1_ctrl->phy_con42);
val = readl(&phy0_ctrl->phy_con26);
val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
writel(val, &phy0_ctrl->phy_con26);
val = readl(&phy1_ctrl->phy_con26);
val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
writel(val, &phy1_ctrl->phy_con26);
/*
* Set Driver strength for CK, CKE, CS & CA to 0x7
* Set Driver strength for Data Slice 0~3 to 0x7
*/
val = (0x7 << CA_CK_DRVR_DS_OFFSET) | (0x7 << CA_CKE_DRVR_DS_OFFSET) |
(0x7 << CA_CS_DRVR_DS_OFFSET) | (0x7 << CA_ADR_DRVR_DS_OFFSET);
val |= (0x7 << DA_3_DS_OFFSET) | (0x7 << DA_2_DS_OFFSET) |
(0x7 << DA_1_DS_OFFSET) | (0x7 << DA_0_DS_OFFSET);
writel(val, &phy0_ctrl->phy_con39);
writel(val, &phy1_ctrl->phy_con39);
/* ZQ Calibration */
if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
&phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
return SETUP_ERR_ZQ_CALIBRATION_FAILURE;
clrbits_le32(&phy0_ctrl->phy_con16, ZQ_CLK_DIV_EN);
clrbits_le32(&phy1_ctrl->phy_con16, ZQ_CLK_DIV_EN);
/* DQ Signal */
val = readl(&phy0_ctrl->phy_con14);
val |= mem->phy0_pulld_dqs;
writel(val, &phy0_ctrl->phy_con14);
val = readl(&phy1_ctrl->phy_con14);
val |= mem->phy1_pulld_dqs;
writel(val, &phy1_ctrl->phy_con14);
val = MEM_TERM_EN | PHY_TERM_EN;
writel(val, &drex0->phycontrol0);
writel(val, &drex1->phycontrol0);
writel(mem->concontrol |
(mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
&drex0->concontrol);
writel(mem->concontrol |
(mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
&drex1->concontrol);
do {
val = readl(&drex0->phystatus);
} while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
do {
val = readl(&drex1->phystatus);
} while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
clrbits_le32(&drex0->concontrol, DFI_INIT_START);
clrbits_le32(&drex1->concontrol, DFI_INIT_START);
update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);
/*
* Set Base Address:
* 0x2000_0000 ~ 0x5FFF_FFFF
* 0x6000_0000 ~ 0x9FFF_FFFF
*/
/* MEMBASECONFIG0 */
val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0) |
DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
writel(val, &tzasc0->membaseconfig0);
writel(val, &tzasc1->membaseconfig0);
/* MEMBASECONFIG1 */
val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1) |
DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
writel(val, &tzasc0->membaseconfig1);
writel(val, &tzasc1->membaseconfig1);
/*
* Memory Channel Interleaving Size
* Ares Channel interleaving = 128 bytes
*/
/* MEMCONFIG0/1 */
writel(mem->memconfig, &tzasc0->memconfig0);
writel(mem->memconfig, &tzasc1->memconfig0);
writel(mem->memconfig, &tzasc0->memconfig1);
writel(mem->memconfig, &tzasc1->memconfig1);
/* Precharge Configuration */
writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
&drex0->prechconfig0);
writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
&drex1->prechconfig0);
/*
* TimingRow, TimingData, TimingPower and Timingaref
* values as per Memory AC parameters
*/
writel(mem->timing_ref, &drex0->timingref);
writel(mem->timing_ref, &drex1->timingref);
writel(mem->timing_row, &drex0->timingrow0);
writel(mem->timing_row, &drex1->timingrow0);
writel(mem->timing_data, &drex0->timingdata0);
writel(mem->timing_data, &drex1->timingdata0);
writel(mem->timing_power, &drex0->timingpower0);
writel(mem->timing_power, &drex1->timingpower0);
if (reset) {
/*
* Send NOP, MRS and ZQINIT commands
* Sending the MRS command resets the DRAM. We should not be
* resetting the DRAM after resume; doing so would corrupt memory,
* as DRAM content is lost on a DRAM reset
*/
dmc_config_mrs(mem, &drex0->directcmd);
dmc_config_mrs(mem, &drex1->directcmd);
} else {
/*
* During Suspend-Resume & S/W-Reset, as soon as PMU releases
* pad retention, CKE goes high. This causes memory contents
* not to be retained during DRAM initialization. Therefore,
* there is a new control register (0x100431e8[28]) which lets us
* release pad retention and retain the memory content until the
* initialization is complete.
*/
writel(PAD_RETENTION_DRAM_COREBLK_VAL,
&power->pad_retention_dram_coreblk_option);
do {
val = readl(&power->pad_retention_dram_status);
} while (val != 0x1);
/*
* CKE PAD retention disables DRAM self-refresh mode.
* Send auto refresh command for DRAM refresh.
*/
for (i = 0; i < 128; i++) {
for (chip = 0; chip < mem->chips_to_configure; chip++) {
writel(DIRECT_CMD_REFA |
(chip << DIRECT_CMD_CHIP_SHIFT),
&drex0->directcmd);
writel(DIRECT_CMD_REFA |
(chip << DIRECT_CMD_CHIP_SHIFT),
&drex1->directcmd);
}
}
}
if (mem->gate_leveling_enable) {
writel(PHY_CON0_RESET_VAL, &phy0_ctrl->phy_con0);
writel(PHY_CON0_RESET_VAL, &phy1_ctrl->phy_con0);
setbits_le32(&phy0_ctrl->phy_con0, P0_CMD_EN);
setbits_le32(&phy1_ctrl->phy_con0, P0_CMD_EN);
val = PHY_CON2_RESET_VAL;
val |= INIT_DESKEW_EN;
writel(val, &phy0_ctrl->phy_con2);
writel(val, &phy1_ctrl->phy_con2);
val = readl(&phy0_ctrl->phy_con1);
val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
writel(val, &phy0_ctrl->phy_con1);
val = readl(&phy1_ctrl->phy_con1);
val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
writel(val, &phy1_ctrl->phy_con1);
n_lock_r = readl(&phy0_ctrl->phy_con13);
n_lock_w_phy0 = (n_lock_r & CTRL_LOCK_COARSE_MASK) >> 2;
n_lock_r = readl(&phy0_ctrl->phy_con12);
n_lock_r &= ~CTRL_DLL_ON;
n_lock_r |= n_lock_w_phy0;
writel(n_lock_r, &phy0_ctrl->phy_con12);
n_lock_r = readl(&phy1_ctrl->phy_con13);
n_lock_w_phy1 = (n_lock_r & CTRL_LOCK_COARSE_MASK) >> 2;
n_lock_r = readl(&phy1_ctrl->phy_con12);
n_lock_r &= ~CTRL_DLL_ON;
n_lock_r |= n_lock_w_phy1;
writel(n_lock_r, &phy1_ctrl->phy_con12);
val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
for (chip = 0; chip < mem->chips_to_configure; chip++) {
writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
&drex0->directcmd);
writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
&drex1->directcmd);
}
setbits_le32(&phy0_ctrl->phy_con2, RDLVL_GATE_EN);
setbits_le32(&phy1_ctrl->phy_con2, RDLVL_GATE_EN);
setbits_le32(&phy0_ctrl->phy_con0, CTRL_SHGATE);
setbits_le32(&phy1_ctrl->phy_con0, CTRL_SHGATE);
val = readl(&phy0_ctrl->phy_con1);
val &= ~(CTRL_GATEDURADJ_MASK);
writel(val, &phy0_ctrl->phy_con1);
val = readl(&phy1_ctrl->phy_con1);
val &= ~(CTRL_GATEDURADJ_MASK);
writel(val, &phy1_ctrl->phy_con1);
writel(CTRL_RDLVL_GATE_ENABLE, &drex0->rdlvl_config);
i = TIMEOUT;
while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO) !=
RDLVL_COMPLETE_CHO) && (i > 0)) {
/*
* TODO(waihong): Comment on how long this takes to
* time out
*/
sdelay(100);
i--;
}
if (!i)
return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
writel(CTRL_RDLVL_GATE_DISABLE, &drex0->rdlvl_config);
writel(CTRL_RDLVL_GATE_ENABLE, &drex1->rdlvl_config);
i = TIMEOUT;
while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO) !=
RDLVL_COMPLETE_CHO) && (i > 0)) {
/*
* TODO(waihong): Comment on how long this takes to
* time out
*/
sdelay(100);
i--;
}
if (!i)
return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
writel(CTRL_RDLVL_GATE_DISABLE, &drex1->rdlvl_config);
writel(0, &phy0_ctrl->phy_con14);
writel(0, &phy1_ctrl->phy_con14);
val = (0x3 << DIRECT_CMD_BANK_SHIFT);
for (chip = 0; chip < mem->chips_to_configure; chip++) {
writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
&drex0->directcmd);
writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
&drex1->directcmd);
}
if (mem->read_leveling_enable) {
/* Set Read DQ Calibration */
val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
for (chip = 0; chip < mem->chips_to_configure; chip++) {
writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
&drex0->directcmd);
writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
&drex1->directcmd);
}
val = readl(&phy0_ctrl->phy_con1);
val |= READ_LEVELLING_DDR3;
writel(val, &phy0_ctrl->phy_con1);
val = readl(&phy1_ctrl->phy_con1);
val |= READ_LEVELLING_DDR3;
writel(val, &phy1_ctrl->phy_con1);
val = readl(&phy0_ctrl->phy_con2);
val |= (RDLVL_EN | RDLVL_INCR_ADJ);
writel(val, &phy0_ctrl->phy_con2);
val = readl(&phy1_ctrl->phy_con2);
val |= (RDLVL_EN | RDLVL_INCR_ADJ);
writel(val, &phy1_ctrl->phy_con2);
setbits_le32(&drex0->rdlvl_config,
CTRL_RDLVL_DATA_ENABLE);
i = TIMEOUT;
while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO)
!= RDLVL_COMPLETE_CHO) && (i > 0)) {
/*
* TODO(waihong): Comment on how long this takes
* to time out
*/
sdelay(100);
i--;
}
if (!i)
return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
clrbits_le32(&drex0->rdlvl_config,
CTRL_RDLVL_DATA_ENABLE);
setbits_le32(&drex1->rdlvl_config,
CTRL_RDLVL_DATA_ENABLE);
i = TIMEOUT;
while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO)
!= RDLVL_COMPLETE_CHO) && (i > 0)) {
/*
* TODO(waihong): Comment on how long this takes
* to time out
*/
sdelay(100);
i--;
}
if (!i)
return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
clrbits_le32(&drex1->rdlvl_config,
CTRL_RDLVL_DATA_ENABLE);
val = (0x3 << DIRECT_CMD_BANK_SHIFT);
for (chip = 0; chip < mem->chips_to_configure; chip++) {
writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
&drex0->directcmd);
writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
&drex1->directcmd);
}
update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);
}
/* Common Settings for Leveling */
val = PHY_CON12_RESET_VAL;
writel((val + n_lock_w_phy0), &phy0_ctrl->phy_con12);
writel((val + n_lock_w_phy1), &phy1_ctrl->phy_con12);
setbits_le32(&phy0_ctrl->phy_con2, DLL_DESKEW_EN);
setbits_le32(&phy1_ctrl->phy_con2, DLL_DESKEW_EN);
}
/* Send PALL command */
dmc_config_prech(mem, &drex0->directcmd);
dmc_config_prech(mem, &drex1->directcmd);
writel(mem->memcontrol, &drex0->memcontrol);
writel(mem->memcontrol, &drex1->memcontrol);
/*
* Set DMC Concontrol: enable the auto-refresh counter, set the
* read data fetch cycles and enable DREX automatic power-down of
* the I/O input buffers while the memory is not being read.
*/
writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
DMC_CONCONTROL_IO_PD_CON(0x2),
&drex0->concontrol);
writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
(mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)|
DMC_CONCONTROL_IO_PD_CON(0x2),
&drex1->concontrol);
/*
* Enable Clock Gating Control for DMC
* this saves around 25 mW of DMC power compared to the
* consumption without these bits enabled
*/
setbits_le32(&drex0->cgcontrol, DMC_INTERNAL_CG);
setbits_le32(&drex1->cgcontrol, DMC_INTERNAL_CG);
return 0;
}
#endif
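An editorial aside, not part of the patch: the Exynos5420 path above repeats the same poll-until-set idiom several times, with a bounded retry count for the ZQ_DONE and RDLVL_COMPLETE_CHO waits. A hedged sketch of a helper that captures the pattern, reusing the readl()/sdelay() accessors already used in this file (the helper name and its adoption are hypothetical):

static int wait_for_bits(unsigned int *reg, unsigned int bits, int tries)
{
	/* Poll until all requested bits are set, or give up after 'tries' loops */
	while (tries-- > 0) {
		if ((readl(reg) & bits) == bits)
			return 0;
		sdelay(100);
	}
	return -1;	/* caller maps this onto a SETUP_ERR_* code */
}

/* For example, the channel-0 gate-leveling wait could then be written as: */
if (wait_for_bits(&drex0->phystatus, RDLVL_COMPLETE_CHO, TIMEOUT))
	return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;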
@@ -696,6 +696,7 @@
#define CLK_DIV_CPERI1_VAL NOT_AVAILABLE
#else
#define PAD_RETENTION_DRAM_COREBLK_VAL 0x10000000
/* APLL_CON1 */
#define APLL_CON1_VAL (0x0020F300)
@@ -907,37 +908,38 @@ void lpddr3_mem_ctrl_init(void);
* Configure ZQ I/O interface
*
* @param mem Memory timings for this memory type.
* @param phy0_ctrl Pointer to struct containing PHY0 control reg
* @param phy1_ctrl Pointer to struct containing PHY1 control reg
* @param phy0_con16 Register address for dmc_phy0->phy_con16
* @param phy1_con16 Register address for dmc_phy1->phy_con16
* @param phy0_con17 Register address for dmc_phy0->phy_con17
* @param phy1_con17 Register address for dmc_phy1->phy_con17
* @return 0 if ok, -1 on error
*/
int dmc_config_zq(struct mem_timings *mem,
struct exynos5_phy_control *phy0_ctrl,
struct exynos5_phy_control *phy1_ctrl);
int dmc_config_zq(struct mem_timings *mem, uint32_t *phy0_con16,
uint32_t *phy1_con16, uint32_t *phy0_con17,
uint32_t *phy1_con17);
/*
* Send NOP and MRS/EMRS Direct commands
*
* @param mem Memory timings for this memory type.
* @param dmc Pointer to struct of DMC registers
* @param directcmd Register address for dmc->directcmd
*/
void dmc_config_mrs(struct mem_timings *mem, struct exynos5_dmc *dmc);
void dmc_config_mrs(struct mem_timings *mem, uint32_t *directcmd);
/*
* Send PALL Direct commands
*
* @param mem Memory timings for this memory type.
* @param dmc Pointer to struct of DMC registers
* @param directcmd Register address for dmc->directcmd
*/
void dmc_config_prech(struct mem_timings *mem, struct exynos5_dmc *dmc);
void dmc_config_prech(struct mem_timings *mem, uint32_t *directcmd);
/*
* Reset the DLL. This function is common between DDR3 and LPDDR2.
* However, the reset value is different. So we are passing a flag
* ddr_mode to distinguish between LPDDR2 and DDR3.
*
* @param exynos5_dmc Pointer to struct of DMC registers
* @param phycontrol0 Register address for dmc->phycontrol0
* @param ddr_mode Type of DDR memory
*/
void update_reset_dll(struct exynos5_dmc *, enum ddr_mode);
void update_reset_dll(uint32_t *phycontrol0, enum ddr_mode);
#endif
@@ -53,6 +53,7 @@
#define EXYNOS4_AUDIOSS_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS4_USB_HOST_XHCI_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS4_USB3PHY_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS4_DMC_TZASC_BASE DEVICE_NOT_AVAILABLE
/* EXYNOS4X12 */
#define EXYNOS4X12_GPIO_PART3_BASE 0x03860000
@@ -91,6 +92,7 @@
#define EXYNOS4X12_AUDIOSS_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS4X12_USB_HOST_XHCI_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS4X12_USB3PHY_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS4X12_DMC_TZASC_BASE DEVICE_NOT_AVAILABLE
/* EXYNOS5 */
#define EXYNOS5_I2C_SPACING 0x10000
@@ -129,6 +131,7 @@
#define EXYNOS5_ADC_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS5_MODEM_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS5_DMC_TZASC_BASE DEVICE_NOT_AVAILABLE
/* EXYNOS5420 */
#define EXYNOS5420_AUDIOSS_BASE 0x03810000
@@ -143,8 +146,7 @@
#define EXYNOS5420_ACE_SFR_BASE 0x10830000
#define EXYNOS5420_DMC_PHY_BASE 0x10C00000
#define EXYNOS5420_DMC_CTRL_BASE 0x10C20000
#define EXYNOS5420_DMC_TZASC0_BASE 0x10D40000
#define EXYNOS5420_DMC_TZASC1_BASE 0x10D50000
#define EXYNOS5420_DMC_TZASC_BASE 0x10D40000
#define EXYNOS5420_USB_HOST_EHCI_BASE 0x12110000
#define EXYNOS5420_MMC_BASE 0x12200000
#define EXYNOS5420_SROMC_BASE 0x12250000
@@ -284,6 +286,7 @@ SAMSUNG_BASE(spi_isp, SPI_ISP_BASE)
SAMSUNG_BASE(tzpc, TZPC_BASE)
SAMSUNG_BASE(dmc_ctrl, DMC_CTRL_BASE)
SAMSUNG_BASE(dmc_phy, DMC_PHY_BASE)
SAMSUNG_BASE(dmc_tzasc, DMC_TZASC_BASE)
SAMSUNG_BASE(audio_ass, AUDIOSS_BASE)
#endif
......
@@ -419,6 +419,15 @@ struct exynos5420_phy_control {
unsigned int phy_con42;
};
struct exynos5420_tzasc {
unsigned char res1[0xf00];
unsigned int membaseconfig0;
unsigned int membaseconfig1;
unsigned char res2[0x8];
unsigned int memconfig0;
unsigned int memconfig1;
};
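A note from the editor, not part of the patch: the res1/res2 padding above fixes the TZASC window registers at specific offsets from the block base. A compile-time sanity check, shown purely for illustration (C11 _Static_assert; the patch itself carries no such check), would read:

#include <stddef.h>

/* Offsets implied by the padding in struct exynos5420_tzasc */
_Static_assert(offsetof(struct exynos5420_tzasc, membaseconfig0) == 0xf00,
	       "membaseconfig0 expected at offset 0xF00");
_Static_assert(offsetof(struct exynos5420_tzasc, membaseconfig1) == 0xf04,
	       "membaseconfig1 expected at offset 0xF04");
_Static_assert(offsetof(struct exynos5420_tzasc, memconfig0) == 0xf10,
	       "memconfig0 expected at offset 0xF10");
_Static_assert(offsetof(struct exynos5420_tzasc, memconfig1) == 0xf14,
	       "memconfig1 expected at offset 0xF14");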
enum ddr_mode {
DDR_MODE_DDR2,
DDR_MODE_DDR3,
@@ -453,6 +462,7 @@ enum mem_manuf {
#define PHY_CON0_T_WRRDCMD_SHIFT 17
#define PHY_CON0_T_WRRDCMD_MASK (0x7 << PHY_CON0_T_WRRDCMD_SHIFT)
#define PHY_CON0_CTRL_DDR_MODE_SHIFT 11
#define PHY_CON0_CTRL_DDR_MODE_MASK 0x3
/* PHY_CON1 register fields */
#define PHY_CON1_RDLVL_RDDATA_ADJ_SHIFT 0
......