Diffstat (limited to 'plat/rockchip/rk3399/drivers/dram')
-rw-r--r--  plat/rockchip/rk3399/drivers/dram/dfs.c                2112
-rw-r--r--  plat/rockchip/rk3399/drivers/dram/dfs.h                  48
-rw-r--r--  plat/rockchip/rk3399/drivers/dram/dram.c                 53
-rw-r--r--  plat/rockchip/rk3399/drivers/dram/dram.h                155
-rw-r--r--  plat/rockchip/rk3399/drivers/dram/dram_spec_timing.c   1318
-rw-r--r--  plat/rockchip/rk3399/drivers/dram/dram_spec_timing.h    506
-rw-r--r--  plat/rockchip/rk3399/drivers/dram/suspend.c             772
-rw-r--r--  plat/rockchip/rk3399/drivers/dram/suspend.h              25
8 files changed, 4989 insertions, 0 deletions
diff --git a/plat/rockchip/rk3399/drivers/dram/dfs.c b/plat/rockchip/rk3399/drivers/dram/dfs.c
new file mode 100644
index 00000000..d629e4bf
--- /dev/null
+++ b/plat/rockchip/rk3399/drivers/dram/dfs.c
@@ -0,0 +1,2112 @@
+/*
+ * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch_helpers.h>
+#include <debug.h>
+#include <delay_timer.h>
+#include <m0_ctl.h>
+#include <mmio.h>
+#include <plat_private.h>
+#include "dfs.h"
+#include "dram.h"
+#include "dram_spec_timing.h"
+#include "pmu.h"
+#include "soc.h"
+#include "string.h"
+
+#define ENPER_CS_TRAINING_FREQ (666)
+#define TDFI_LAT_THRESHOLD_FREQ (928)
+#define PHY_DLL_BYPASS_FREQ (260)
+
+static const struct pll_div dpll_rates_table[] = {
+
+ /* _mhz, _refdiv, _fbdiv, _postdiv1, _postdiv2 */
+ {.mhz = 928, .refdiv = 1, .fbdiv = 116, .postdiv1 = 3, .postdiv2 = 1},
+ {.mhz = 800, .refdiv = 1, .fbdiv = 100, .postdiv1 = 3, .postdiv2 = 1},
+ {.mhz = 732, .refdiv = 1, .fbdiv = 61, .postdiv1 = 2, .postdiv2 = 1},
+ {.mhz = 666, .refdiv = 1, .fbdiv = 111, .postdiv1 = 4, .postdiv2 = 1},
+ {.mhz = 600, .refdiv = 1, .fbdiv = 50, .postdiv1 = 2, .postdiv2 = 1},
+ {.mhz = 528, .refdiv = 1, .fbdiv = 66, .postdiv1 = 3, .postdiv2 = 1},
+ {.mhz = 400, .refdiv = 1, .fbdiv = 50, .postdiv1 = 3, .postdiv2 = 1},
+ {.mhz = 300, .refdiv = 1, .fbdiv = 50, .postdiv1 = 4, .postdiv2 = 1},
+ {.mhz = 200, .refdiv = 1, .fbdiv = 50, .postdiv1 = 3, .postdiv2 = 2},
+};
+
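+/*
+ * The DPLL output is derived from the 24 MHz parent as
+ * rate = 24 / refdiv * fbdiv / postdiv1 / postdiv2 (see ddr_get_rate()
+ * below). Worked example for the first entry above:
+ * 24 / 1 * 116 / 3 / 1 = 928 MHz.
+ */
+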
+struct rk3399_dram_status {
+ uint32_t current_index;
+ uint32_t index_freq[2];
+ uint32_t boot_freq;
+ uint32_t low_power_stat;
+ struct timing_related_config timing_config;
+ struct drv_odt_lp_config drv_odt_lp_cfg;
+};
+
+struct rk3399_saved_status {
+ uint32_t freq;
+ uint32_t low_power_stat;
+ uint32_t odt;
+};
+
+static struct rk3399_dram_status rk3399_dram_status;
+static struct rk3399_saved_status rk3399_suspend_status;
+static uint32_t wrdqs_delay_val[2][2][4];
+static uint32_t rddqs_delay_ps;
+
+static struct rk3399_sdram_default_config ddr3_default_config = {
+ .bl = 8,
+ .ap = 0,
+ .burst_ref_cnt = 1,
+ .zqcsi = 0
+};
+
+static struct rk3399_sdram_default_config lpddr3_default_config = {
+ .bl = 8,
+ .ap = 0,
+ .burst_ref_cnt = 1,
+ .zqcsi = 0
+};
+
+static struct rk3399_sdram_default_config lpddr4_default_config = {
+ .bl = 16,
+ .ap = 0,
+ .caodt = 240,
+ .burst_ref_cnt = 1,
+ .zqcsi = 0
+};
+
+static uint32_t get_cs_die_capability(struct rk3399_sdram_params *sdram_config,
+ uint8_t channel, uint8_t cs)
+{
+ struct rk3399_sdram_channel *ch = &sdram_config->ch[channel];
+ uint32_t bandwidth;
+ uint32_t die_bandwidth;
+ uint32_t die;
+ uint32_t cs_cap;
+ uint32_t row;
+
+ row = cs == 0 ? ch->cs0_row : ch->cs1_row;
+ bandwidth = 8 * (1 << ch->bw);
+ die_bandwidth = 8 * (1 << ch->dbw);
+ die = bandwidth / die_bandwidth;
+ cs_cap = (1 << (row + ((1 << ch->bk) / 4 + 1) + ch->col +
+ (bandwidth / 16)));
+ if (ch->row_3_4)
+ cs_cap = cs_cap * 3 / 4;
+
+ return (cs_cap / die);
+}
+
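+/*
+ * Illustrative sizing with the expression above (the numbers are an
+ * example, not a board configuration): row = 15, col = 10, bk = 3
+ * (8 banks -> 3 bank bits), bw = 2 (32-bit channel), dbw = 1 (16-bit
+ * dies) gives cs_cap = 1 << (15 + 3 + 10 + 2) = 1 GiB per rank, split
+ * across bandwidth / die_bandwidth = 2 dies, i.e. 512 MiB per die
+ * (scaled by 3/4 when row_3_4 is set).
+ */
+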
+static void get_dram_drv_odt_val(uint32_t dram_type,
+ struct drv_odt_lp_config *drv_config)
+{
+ uint32_t tmp;
+ uint32_t mr1_val, mr3_val, mr11_val;
+
+ switch (dram_type) {
+ case DDR3:
+ mr1_val = (mmio_read_32(CTL_REG(0, 133)) >> 16) & 0xffff;
+ tmp = ((mr1_val >> 1) & 1) | ((mr1_val >> 4) & 1);
+ if (tmp)
+ drv_config->dram_side_drv = 34;
+ else
+ drv_config->dram_side_drv = 40;
+ tmp = ((mr1_val >> 2) & 1) | ((mr1_val >> 5) & 1) |
+ ((mr1_val >> 7) & 1);
+ if (tmp == 0)
+ drv_config->dram_side_dq_odt = 0;
+ else if (tmp == 1)
+ drv_config->dram_side_dq_odt = 60;
+ else if (tmp == 3)
+ drv_config->dram_side_dq_odt = 40;
+ else
+ drv_config->dram_side_dq_odt = 120;
+ break;
+ case LPDDR3:
+ mr3_val = mmio_read_32(CTL_REG(0, 138)) & 0xf;
+ mr11_val = (mmio_read_32(CTL_REG(0, 139)) >> 24) & 0x3;
+ if (mr3_val == 0xb)
+ drv_config->dram_side_drv = 3448;
+ else if (mr3_val == 0xa)
+ drv_config->dram_side_drv = 4048;
+ else if (mr3_val == 0x9)
+ drv_config->dram_side_drv = 3440;
+ else if (mr3_val == 0x4)
+ drv_config->dram_side_drv = 60;
+ else if (mr3_val == 0x3)
+ drv_config->dram_side_drv = 48;
+ else if (mr3_val == 0x2)
+ drv_config->dram_side_drv = 40;
+ else
+ drv_config->dram_side_drv = 34;
+
+ if (mr11_val == 1)
+ drv_config->dram_side_dq_odt = 60;
+ else if (mr11_val == 2)
+ drv_config->dram_side_dq_odt = 120;
+ else if (mr11_val == 0)
+ drv_config->dram_side_dq_odt = 0;
+ else
+ drv_config->dram_side_dq_odt = 240;
+ break;
+ case LPDDR4:
+ default:
+ mr3_val = (mmio_read_32(CTL_REG(0, 138)) >> 3) & 0x7;
+ mr11_val = (mmio_read_32(CTL_REG(0, 139)) >> 24) & 0xff;
+
+ if ((mr3_val == 0) || (mr3_val == 7))
+ drv_config->dram_side_drv = 40;
+ else
+ drv_config->dram_side_drv = 240 / mr3_val;
+
+ tmp = mr11_val & 0x7;
+ if ((tmp == 7) || (tmp == 0))
+ drv_config->dram_side_dq_odt = 0;
+ else
+ drv_config->dram_side_dq_odt = 240 / tmp;
+
+ tmp = (mr11_val >> 4) & 0x7;
+ if ((tmp == 7) || (tmp == 0))
+ drv_config->dram_side_ca_odt = 0;
+ else
+ drv_config->dram_side_ca_odt = 240 / tmp;
+ break;
+ }
+}
+
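+/*
+ * Decode example for the LPDDR4 branch above: the drive strength and
+ * ODT fields are divided into 240, so MR3 PDDS = 6 -> 240 / 6 = 40 ohm
+ * and MR11 DQ ODT = 2 -> 240 / 2 = 120 ohm. The four-digit LPDDR3
+ * values such as 3448 appear to encode a pull-down/pull-up pair
+ * (34 ohm / 48 ohm); that reading is an assumption, the code only
+ * stores the raw value.
+ */
+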
+static void sdram_timing_cfg_init(struct timing_related_config *ptiming_config,
+ struct rk3399_sdram_params *sdram_params,
+ struct drv_odt_lp_config *drv_config)
+{
+ uint32_t i, j;
+
+ for (i = 0; i < sdram_params->num_channels; i++) {
+ ptiming_config->dram_info[i].speed_rate = DDR3_DEFAULT;
+ ptiming_config->dram_info[i].cs_cnt = sdram_params->ch[i].rank;
+ for (j = 0; j < sdram_params->ch[i].rank; j++) {
+ ptiming_config->dram_info[i].per_die_capability[j] =
+ get_cs_die_capability(sdram_params, i, j);
+ }
+ }
+ ptiming_config->dram_type = sdram_params->dramtype;
+ ptiming_config->ch_cnt = sdram_params->num_channels;
+ switch (sdram_params->dramtype) {
+ case DDR3:
+ ptiming_config->bl = ddr3_default_config.bl;
+ ptiming_config->ap = ddr3_default_config.ap;
+ break;
+ case LPDDR3:
+ ptiming_config->bl = lpddr3_default_config.bl;
+ ptiming_config->ap = lpddr3_default_config.ap;
+ break;
+ case LPDDR4:
+ ptiming_config->bl = lpddr4_default_config.bl;
+ ptiming_config->ap = lpddr4_default_config.ap;
+ ptiming_config->rdbi = 0;
+ ptiming_config->wdbi = 0;
+ break;
+ }
+ ptiming_config->dramds = drv_config->dram_side_drv;
+ ptiming_config->dramodt = drv_config->dram_side_dq_odt;
+ ptiming_config->caodt = drv_config->dram_side_ca_odt;
+ ptiming_config->odt = (mmio_read_32(PHY_REG(0, 5)) >> 16) & 0x1;
+}
+
+struct lat_adj_pair {
+ uint32_t cl;
+ uint32_t rdlat_adj;
+ uint32_t cwl;
+ uint32_t wrlat_adj;
+};
+
+const struct lat_adj_pair ddr3_lat_adj[] = {
+ {6, 5, 5, 4},
+ {8, 7, 6, 5},
+ {10, 9, 7, 6},
+ {11, 9, 8, 7},
+ {13, 0xb, 9, 8},
+ {14, 0xb, 0xa, 9}
+};
+
+const struct lat_adj_pair lpddr3_lat_adj[] = {
+ {3, 2, 1, 0},
+ {6, 5, 3, 2},
+ {8, 7, 4, 3},
+ {9, 8, 5, 4},
+ {10, 9, 6, 5},
+ {11, 9, 6, 5},
+ {12, 0xa, 6, 5},
+ {14, 0xc, 8, 7},
+ {16, 0xd, 8, 7}
+};
+
+const struct lat_adj_pair lpddr4_lat_adj[] = {
+ {6, 5, 4, 2},
+ {10, 9, 6, 4},
+ {14, 0xc, 8, 6},
+ {20, 0x11, 0xa, 8},
+ {24, 0x15, 0xc, 0xa},
+ {28, 0x18, 0xe, 0xc},
+ {32, 0x1b, 0x10, 0xe},
+ {36, 0x1e, 0x12, 0x10}
+};
+
+static uint32_t get_rdlat_adj(uint32_t dram_type, uint32_t cl)
+{
+ const struct lat_adj_pair *p;
+ uint32_t cnt;
+ uint32_t i;
+
+ if (dram_type == DDR3) {
+ p = ddr3_lat_adj;
+ cnt = ARRAY_SIZE(ddr3_lat_adj);
+ } else if (dram_type == LPDDR3) {
+ p = lpddr3_lat_adj;
+ cnt = ARRAY_SIZE(lpddr3_lat_adj);
+ } else {
+ p = lpddr4_lat_adj;
+ cnt = ARRAY_SIZE(lpddr4_lat_adj);
+ }
+
+ for (i = 0; i < cnt; i++) {
+ if (cl == p[i].cl)
+ return p[i].rdlat_adj;
+ }
+ /* fail */
+ return 0xff;
+}
+
+static uint32_t get_wrlat_adj(uint32_t dram_type, uint32_t cwl)
+{
+ const struct lat_adj_pair *p;
+ uint32_t cnt;
+ uint32_t i;
+
+ if (dram_type == DDR3) {
+ p = ddr3_lat_adj;
+ cnt = ARRAY_SIZE(ddr3_lat_adj);
+ } else if (dram_type == LPDDR3) {
+ p = lpddr3_lat_adj;
+ cnt = ARRAY_SIZE(lpddr3_lat_adj);
+ } else {
+ p = lpddr4_lat_adj;
+ cnt = ARRAY_SIZE(lpddr4_lat_adj);
+ }
+
+ for (i = 0; i < cnt; i++) {
+ if (cwl == p[i].cwl)
+ return p[i].wrlat_adj;
+ }
+ /* fail */
+ return 0xff;
+}
+
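+/*
+ * Example lookups against the tables above: get_rdlat_adj(DDR3, 11)
+ * returns 9 and get_wrlat_adj(DDR3, 8) returns 7; an unlisted CL/CWL
+ * falls through to the 0xff error value.
+ */
+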
+#define PI_REGS_DIMM_SUPPORT (0)
+#define PI_ADD_LATENCY (0)
+#define PI_DOUBLEFREEK (1)
+
+#define PI_PAD_DELAY_PS_VALUE (1000)
+#define PI_IE_ENABLE_VALUE (3000)
+#define PI_TSEL_ENABLE_VALUE (700)
+
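+/*
+ * The latency helpers below repeatedly convert a delay in picoseconds
+ * into clock cycles by dividing by tCK (1000000 / mhz, in ps) and
+ * rounding up. A hypothetical helper expressing that open-coded
+ * pattern (not used by this driver):
+ *
+ *   static uint32_t ps_to_cycles(uint32_t ps, uint32_t mhz)
+ *   {
+ *           uint32_t tck_ps = 1000000 / mhz;
+ *
+ *           return (ps + tck_ps - 1) / tck_ps;
+ *   }
+ *
+ * e.g. ps_to_cycles(3000, 800) = ceil(3000 / 1250) = 3 cycles, which
+ * matches the ie_enable conversion in get_pi_rdlat_adj().
+ */
+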
+static uint32_t get_pi_rdlat_adj(struct dram_timing_t *pdram_timing)
+{
+ /*[DLLSUBTYPE2] == "STD_DENALI_HS" */
+ uint32_t rdlat, delay_adder, ie_enable, hs_offset, tsel_adder,
+ extra_adder, tsel_enable;
+
+ ie_enable = PI_IE_ENABLE_VALUE;
+ tsel_enable = PI_TSEL_ENABLE_VALUE;
+
+ rdlat = pdram_timing->cl + PI_ADD_LATENCY;
+ delay_adder = ie_enable / (1000000 / pdram_timing->mhz);
+ if ((ie_enable % (1000000 / pdram_timing->mhz)) != 0)
+ delay_adder++;
+ hs_offset = 0;
+ tsel_adder = 0;
+ extra_adder = 0;
+ /* rdlat = rdlat - (PREAMBLE_SUPPORT & 0x1); */
+ tsel_adder = tsel_enable / (1000000 / pdram_timing->mhz);
+ if ((tsel_enable % (1000000 / pdram_timing->mhz)) != 0)
+ tsel_adder++;
+ delay_adder = delay_adder - 1;
+ if (tsel_adder > delay_adder)
+ extra_adder = tsel_adder - delay_adder;
+ else
+ extra_adder = 0;
+ if (PI_REGS_DIMM_SUPPORT && PI_DOUBLEFREEK)
+ hs_offset = 2;
+ else
+ hs_offset = 1;
+
+ if (delay_adder > (rdlat - 1 - hs_offset)) {
+ rdlat = rdlat - tsel_adder;
+ } else {
+ if ((rdlat - delay_adder) < 2)
+ rdlat = 2;
+ else
+ rdlat = rdlat - delay_adder - extra_adder;
+ }
+
+ return rdlat;
+}
+
+static uint32_t get_pi_wrlat(struct dram_timing_t *pdram_timing,
+ struct timing_related_config *timing_config)
+{
+ uint32_t tmp;
+
+ if (timing_config->dram_type == LPDDR3) {
+ tmp = pdram_timing->cl;
+ if (tmp >= 14)
+ tmp = 8;
+ else if (tmp >= 10)
+ tmp = 6;
+ else if (tmp == 9)
+ tmp = 5;
+ else if (tmp == 8)
+ tmp = 4;
+ else if (tmp == 6)
+ tmp = 3;
+ else
+ tmp = 1;
+ } else {
+ tmp = 1;
+ }
+
+ return tmp;
+}
+
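+/*
+ * Example: for an LPDDR3 part running with cl = 12, the mapping above
+ * yields a write latency of 6, which is what later gets programmed
+ * into PI_WRLAT_Fx; DDR3 and LPDDR4 simply return 1 here.
+ */
+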
+static uint32_t get_pi_wrlat_adj(struct dram_timing_t *pdram_timing,
+ struct timing_related_config *timing_config)
+{
+ return get_pi_wrlat(pdram_timing, timing_config) + PI_ADD_LATENCY - 1;
+}
+
+static uint32_t get_pi_tdfi_phy_rdlat(struct dram_timing_t *pdram_timing,
+ struct timing_related_config *timing_config)
+{
+ /* [DLLSUBTYPE2] == "STD_DENALI_HS" */
+ uint32_t cas_lat, delay_adder, ie_enable, hs_offset, ie_delay_adder;
+ uint32_t mem_delay_ps, round_trip_ps;
+ uint32_t phy_internal_delay, lpddr_adder, dfi_adder, rdlat_delay;
+
+ ie_enable = PI_IE_ENABLE_VALUE;
+
+ delay_adder = ie_enable / (1000000 / pdram_timing->mhz);
+ if ((ie_enable % (1000000 / pdram_timing->mhz)) != 0)
+ delay_adder++;
+ delay_adder = delay_adder - 1;
+ if (PI_REGS_DIMM_SUPPORT && PI_DOUBLEFREEK)
+ hs_offset = 2;
+ else
+ hs_offset = 1;
+
+ cas_lat = pdram_timing->cl + PI_ADD_LATENCY;
+
+ if (delay_adder > (cas_lat - 1 - hs_offset)) {
+ ie_delay_adder = 0;
+ } else {
+ ie_delay_adder = ie_enable / (1000000 / pdram_timing->mhz);
+ if ((ie_enable % (1000000 / pdram_timing->mhz)) != 0)
+ ie_delay_adder++;
+ }
+
+ if (timing_config->dram_type == DDR3) {
+ mem_delay_ps = 0;
+ } else if (timing_config->dram_type == LPDDR4) {
+ mem_delay_ps = 3600;
+ } else if (timing_config->dram_type == LPDDR3) {
+ mem_delay_ps = 5500;
+ } else {
+ NOTICE("get_pi_tdfi_phy_rdlat:dramtype unsupport\n");
+ return 0;
+ }
+ round_trip_ps = 1100 + 500 + mem_delay_ps + 500 + 600;
+ delay_adder = round_trip_ps / (1000000 / pdram_timing->mhz);
+ if ((round_trip_ps % (1000000 / pdram_timing->mhz)) != 0)
+ delay_adder++;
+
+ phy_internal_delay = 5 + 2 + 4;
+ lpddr_adder = mem_delay_ps / (1000000 / pdram_timing->mhz);
+ if ((mem_delay_ps % (1000000 / pdram_timing->mhz)) != 0)
+ lpddr_adder++;
+ dfi_adder = 0;
+ phy_internal_delay = phy_internal_delay + 2;
+ rdlat_delay = delay_adder + phy_internal_delay +
+ ie_delay_adder + lpddr_adder + dfi_adder;
+
+ rdlat_delay = rdlat_delay + 2;
+ return rdlat_delay;
+}
+
+static uint32_t get_pi_todtoff_min(struct dram_timing_t *pdram_timing,
+ struct timing_related_config *timing_config)
+{
+ uint32_t tmp, todtoff_min_ps;
+
+ if (timing_config->dram_type == LPDDR3)
+ todtoff_min_ps = 2500;
+ else if (timing_config->dram_type == LPDDR4)
+ todtoff_min_ps = 1500;
+ else
+ todtoff_min_ps = 0;
+ /* todtoff_min */
+ tmp = todtoff_min_ps / (1000000 / pdram_timing->mhz);
+ if ((todtoff_min_ps % (1000000 / pdram_timing->mhz)) != 0)
+ tmp++;
+ return tmp;
+}
+
+static uint32_t get_pi_todtoff_max(struct dram_timing_t *pdram_timing,
+ struct timing_related_config *timing_config)
+{
+ uint32_t tmp, todtoff_max_ps;
+
+ if ((timing_config->dram_type == LPDDR4)
+ || (timing_config->dram_type == LPDDR3))
+ todtoff_max_ps = 3500;
+ else
+ todtoff_max_ps = 0;
+
+ /* todtoff_max */
+ tmp = todtoff_max_ps / (1000000 / pdram_timing->mhz);
+ if ((todtoff_max_ps % (1000000 / pdram_timing->mhz)) != 0)
+ tmp++;
+ return tmp;
+}
+
+static void gen_rk3399_ctl_params_f0(struct timing_related_config
+ *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ uint32_t i;
+ uint32_t tmp, tmp1;
+
+ for (i = 0; i < timing_config->ch_cnt; i++) {
+ if (timing_config->dram_type == DDR3) {
+ tmp = ((700000 + 10) * timing_config->freq +
+ 999) / 1000;
+ tmp += pdram_timing->txsnr + (pdram_timing->tmrd * 3) +
+ pdram_timing->tmod + pdram_timing->tzqinit;
+ mmio_write_32(CTL_REG(i, 5), tmp);
+
+ mmio_clrsetbits_32(CTL_REG(i, 22), 0xffff,
+ pdram_timing->tdllk);
+
+ mmio_write_32(CTL_REG(i, 32),
+ (pdram_timing->tmod << 8) |
+ pdram_timing->tmrd);
+
+ mmio_clrsetbits_32(CTL_REG(i, 59), 0xffff << 16,
+ (pdram_timing->txsr -
+ pdram_timing->trcd) << 16);
+ } else if (timing_config->dram_type == LPDDR4) {
+ mmio_write_32(CTL_REG(i, 5), pdram_timing->tinit1 +
+ pdram_timing->tinit3);
+ mmio_write_32(CTL_REG(i, 32),
+ (pdram_timing->tmrd << 8) |
+ pdram_timing->tmrd);
+ mmio_clrsetbits_32(CTL_REG(i, 59), 0xffff << 16,
+ pdram_timing->txsr << 16);
+ } else {
+ mmio_write_32(CTL_REG(i, 5), pdram_timing->tinit1);
+ mmio_write_32(CTL_REG(i, 7), pdram_timing->tinit4);
+ mmio_write_32(CTL_REG(i, 32),
+ (pdram_timing->tmrd << 8) |
+ pdram_timing->tmrd);
+ mmio_clrsetbits_32(CTL_REG(i, 59), 0xffff << 16,
+ pdram_timing->txsr << 16);
+ }
+ mmio_write_32(CTL_REG(i, 6), pdram_timing->tinit3);
+ mmio_write_32(CTL_REG(i, 8), pdram_timing->tinit5);
+ mmio_clrsetbits_32(CTL_REG(i, 23), (0x7f << 16),
+ ((pdram_timing->cl * 2) << 16));
+ mmio_clrsetbits_32(CTL_REG(i, 23), (0x1f << 24),
+ (pdram_timing->cwl << 24));
+ mmio_clrsetbits_32(CTL_REG(i, 24), 0x3f, pdram_timing->al);
+ mmio_clrsetbits_32(CTL_REG(i, 26), 0xffff << 16,
+ (pdram_timing->trc << 24) |
+ (pdram_timing->trrd << 16));
+ mmio_write_32(CTL_REG(i, 27),
+ (pdram_timing->tfaw << 24) |
+ (pdram_timing->trppb << 16) |
+ (pdram_timing->twtr << 8) |
+ pdram_timing->tras_min);
+
+ mmio_clrsetbits_32(CTL_REG(i, 31), 0xff << 24,
+ max(4, pdram_timing->trtp) << 24);
+ mmio_write_32(CTL_REG(i, 33), (pdram_timing->tcke << 24) |
+ pdram_timing->tras_max);
+ mmio_clrsetbits_32(CTL_REG(i, 34), 0xff,
+ max(1, pdram_timing->tckesr));
+ mmio_clrsetbits_32(CTL_REG(i, 39),
+ (0x3f << 16) | (0xff << 8),
+ (pdram_timing->twr << 16) |
+ (pdram_timing->trcd << 8));
+ mmio_clrsetbits_32(CTL_REG(i, 42), 0x1f << 16,
+ pdram_timing->tmrz << 16);
+ tmp = pdram_timing->tdal ? pdram_timing->tdal :
+ (pdram_timing->twr + pdram_timing->trp);
+ mmio_clrsetbits_32(CTL_REG(i, 44), 0xff, tmp);
+ mmio_clrsetbits_32(CTL_REG(i, 45), 0xff, pdram_timing->trp);
+ mmio_write_32(CTL_REG(i, 48),
+ ((pdram_timing->trefi - 8) << 16) |
+ pdram_timing->trfc);
+ mmio_clrsetbits_32(CTL_REG(i, 52), 0xffff, pdram_timing->txp);
+ mmio_clrsetbits_32(CTL_REG(i, 53), 0xffff << 16,
+ pdram_timing->txpdll << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 55), 0xf << 24,
+ pdram_timing->tcscke << 24);
+ mmio_clrsetbits_32(CTL_REG(i, 55), 0xff, pdram_timing->tmrri);
+ mmio_write_32(CTL_REG(i, 56),
+ (pdram_timing->tzqcke << 24) |
+ (pdram_timing->tmrwckel << 16) |
+ (pdram_timing->tckehcs << 8) |
+ pdram_timing->tckelcs);
+ mmio_clrsetbits_32(CTL_REG(i, 60), 0xffff, pdram_timing->txsnr);
+ mmio_clrsetbits_32(CTL_REG(i, 62), 0xffff << 16,
+ (pdram_timing->tckehcmd << 24) |
+ (pdram_timing->tckelcmd << 16));
+ mmio_write_32(CTL_REG(i, 63),
+ (pdram_timing->tckelpd << 24) |
+ (pdram_timing->tescke << 16) |
+ (pdram_timing->tsr << 8) |
+ pdram_timing->tckckel);
+ mmio_clrsetbits_32(CTL_REG(i, 64), 0xfff,
+ (pdram_timing->tcmdcke << 8) |
+ pdram_timing->tcsckeh);
+ mmio_clrsetbits_32(CTL_REG(i, 92), 0xffff << 8,
+ (pdram_timing->tcksrx << 16) |
+ (pdram_timing->tcksre << 8));
+ mmio_clrsetbits_32(CTL_REG(i, 108), 0x1 << 24,
+ (timing_config->dllbp << 24));
+ mmio_clrsetbits_32(CTL_REG(i, 122), 0x3ff << 16,
+ (pdram_timing->tvrcg_enable << 16));
+ mmio_write_32(CTL_REG(i, 123), (pdram_timing->tfc_long << 16) |
+ pdram_timing->tvrcg_disable);
+ mmio_write_32(CTL_REG(i, 124),
+ (pdram_timing->tvref_long << 16) |
+ (pdram_timing->tckfspx << 8) |
+ pdram_timing->tckfspe);
+ mmio_write_32(CTL_REG(i, 133), (pdram_timing->mr[1] << 16) |
+ pdram_timing->mr[0]);
+ mmio_clrsetbits_32(CTL_REG(i, 134), 0xffff,
+ pdram_timing->mr[2]);
+ mmio_clrsetbits_32(CTL_REG(i, 138), 0xffff,
+ pdram_timing->mr[3]);
+ mmio_clrsetbits_32(CTL_REG(i, 139), 0xff << 24,
+ pdram_timing->mr11 << 24);
+ mmio_write_32(CTL_REG(i, 147),
+ (pdram_timing->mr[1] << 16) |
+ pdram_timing->mr[0]);
+ mmio_clrsetbits_32(CTL_REG(i, 148), 0xffff,
+ pdram_timing->mr[2]);
+ mmio_clrsetbits_32(CTL_REG(i, 152), 0xffff,
+ pdram_timing->mr[3]);
+ mmio_clrsetbits_32(CTL_REG(i, 153), 0xff << 24,
+ pdram_timing->mr11 << 24);
+ if (timing_config->dram_type == LPDDR4) {
+ mmio_clrsetbits_32(CTL_REG(i, 140), 0xffff << 16,
+ pdram_timing->mr12 << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 142), 0xffff << 16,
+ pdram_timing->mr14 << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 145), 0xffff << 16,
+ pdram_timing->mr22 << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 154), 0xffff << 16,
+ pdram_timing->mr12 << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 156), 0xffff << 16,
+ pdram_timing->mr14 << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 159), 0xffff << 16,
+ pdram_timing->mr22 << 16);
+ }
+ mmio_clrsetbits_32(CTL_REG(i, 179), 0xfff << 8,
+ pdram_timing->tzqinit << 8);
+ mmio_write_32(CTL_REG(i, 180), (pdram_timing->tzqcs << 16) |
+ (pdram_timing->tzqinit / 2));
+ mmio_write_32(CTL_REG(i, 181), (pdram_timing->tzqlat << 16) |
+ pdram_timing->tzqcal);
+ mmio_clrsetbits_32(CTL_REG(i, 212), 0xff << 8,
+ pdram_timing->todton << 8);
+
+ if (timing_config->odt) {
+ mmio_setbits_32(CTL_REG(i, 213), 1 << 16);
+ if (timing_config->freq < 400)
+ tmp = 4 << 24;
+ else
+ tmp = 8 << 24;
+ } else {
+ mmio_clrbits_32(CTL_REG(i, 213), 1 << 16);
+ tmp = 2 << 24;
+ }
+
+ mmio_clrsetbits_32(CTL_REG(i, 216), 0x1f << 24, tmp);
+ mmio_clrsetbits_32(CTL_REG(i, 221), (0x3 << 16) | (0xf << 8),
+ (pdram_timing->tdqsck << 16) |
+ (pdram_timing->tdqsck_max << 8));
+ tmp =
+ (get_wrlat_adj(timing_config->dram_type, pdram_timing->cwl)
+ << 8) | get_rdlat_adj(timing_config->dram_type,
+ pdram_timing->cl);
+ mmio_clrsetbits_32(CTL_REG(i, 284), 0xffff, tmp);
+ mmio_clrsetbits_32(CTL_REG(i, 82), 0xffff << 16,
+ (4 * pdram_timing->trefi) << 16);
+
+ mmio_clrsetbits_32(CTL_REG(i, 83), 0xffff,
+ (2 * pdram_timing->trefi) & 0xffff);
+
+ if ((timing_config->dram_type == LPDDR3) ||
+ (timing_config->dram_type == LPDDR4)) {
+ tmp = get_pi_wrlat(pdram_timing, timing_config);
+ tmp1 = get_pi_todtoff_max(pdram_timing, timing_config);
+ tmp = (tmp > tmp1) ? (tmp - tmp1) : 0;
+ } else {
+ tmp = 0;
+ }
+ mmio_clrsetbits_32(CTL_REG(i, 214), 0x3f << 16,
+ (tmp & 0x3f) << 16);
+
+ if ((timing_config->dram_type == LPDDR3) ||
+ (timing_config->dram_type == LPDDR4)) {
+ /* min_rl_preamble = cl+TDQSCK_MIN -1 */
+ tmp = pdram_timing->cl +
+ get_pi_todtoff_min(pdram_timing, timing_config) - 1;
+ /* todtoff_max */
+ tmp1 = get_pi_todtoff_max(pdram_timing, timing_config);
+ tmp = (tmp > tmp1) ? (tmp - tmp1) : 0;
+ } else {
+ tmp = pdram_timing->cl - pdram_timing->cwl;
+ }
+ mmio_clrsetbits_32(CTL_REG(i, 215), 0x3f << 8,
+ (tmp & 0x3f) << 8);
+
+ mmio_clrsetbits_32(CTL_REG(i, 275), 0xff << 16,
+ (get_pi_tdfi_phy_rdlat(pdram_timing,
+ timing_config) &
+ 0xff) << 16);
+
+ mmio_clrsetbits_32(CTL_REG(i, 277), 0xffff,
+ (2 * pdram_timing->trefi) & 0xffff);
+
+ mmio_clrsetbits_32(CTL_REG(i, 282), 0xffff,
+ (2 * pdram_timing->trefi) & 0xffff);
+
+ mmio_write_32(CTL_REG(i, 283), 20 * pdram_timing->trefi);
+
+ /* CTL_308 TDFI_CALVL_CAPTURE_F0:RW:16:10 */
+ tmp1 = 20000 / (1000000 / pdram_timing->mhz) + 1;
+ if ((20000 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp1++;
+ tmp = (tmp1 >> 1) + (tmp1 % 2) + 5;
+ mmio_clrsetbits_32(CTL_REG(i, 308), 0x3ff << 16, tmp << 16);
+
+ /* CTL_308 TDFI_CALVL_CC_F0:RW:0:10 */
+ tmp = tmp + 18;
+ mmio_clrsetbits_32(CTL_REG(i, 308), 0x3ff, tmp);
+
+ /* CTL_314 TDFI_WRCSLAT_F0:RW:8:8 */
+ tmp1 = get_pi_wrlat_adj(pdram_timing, timing_config);
+ if (timing_config->freq <= TDFI_LAT_THRESHOLD_FREQ) {
+ if (tmp1 == 0)
+ tmp = 0;
+ else if (tmp1 < 5)
+ tmp = tmp1 - 1;
+ else
+ tmp = tmp1 - 5;
+ } else {
+ tmp = tmp1 - 2;
+ }
+ mmio_clrsetbits_32(CTL_REG(i, 314), 0xff << 8, tmp << 8);
+
+ /* CTL_314 TDFI_RDCSLAT_F0:RW:0:8 */
+ if ((timing_config->freq <= TDFI_LAT_THRESHOLD_FREQ) &&
+ (pdram_timing->cl >= 5))
+ tmp = pdram_timing->cl - 5;
+ else
+ tmp = pdram_timing->cl - 2;
+ mmio_clrsetbits_32(CTL_REG(i, 314), 0xff, tmp);
+ }
+}
+
+static void gen_rk3399_ctl_params_f1(struct timing_related_config
+ *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ uint32_t i;
+ uint32_t tmp, tmp1;
+
+ for (i = 0; i < timing_config->ch_cnt; i++) {
+ if (timing_config->dram_type == DDR3) {
+ tmp =
+ ((700000 + 10) * timing_config->freq + 999) / 1000;
+ tmp += pdram_timing->txsnr + (pdram_timing->tmrd * 3) +
+ pdram_timing->tmod + pdram_timing->tzqinit;
+ mmio_write_32(CTL_REG(i, 9), tmp);
+ mmio_clrsetbits_32(CTL_REG(i, 22), 0xffff << 16,
+ pdram_timing->tdllk << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 34), 0xffffff00,
+ (pdram_timing->tmod << 24) |
+ (pdram_timing->tmrd << 16) |
+ (pdram_timing->trtp << 8));
+ mmio_clrsetbits_32(CTL_REG(i, 60), 0xffff << 16,
+ (pdram_timing->txsr -
+ pdram_timing->trcd) << 16);
+ } else if (timing_config->dram_type == LPDDR4) {
+ mmio_write_32(CTL_REG(i, 9), pdram_timing->tinit1 +
+ pdram_timing->tinit3);
+ mmio_clrsetbits_32(CTL_REG(i, 34), 0xffffff00,
+ (pdram_timing->tmrd << 24) |
+ (pdram_timing->tmrd << 16) |
+ (pdram_timing->trtp << 8));
+ mmio_clrsetbits_32(CTL_REG(i, 60), 0xffff << 16,
+ pdram_timing->txsr << 16);
+ } else {
+ mmio_write_32(CTL_REG(i, 9), pdram_timing->tinit1);
+ mmio_write_32(CTL_REG(i, 11), pdram_timing->tinit4);
+ mmio_clrsetbits_32(CTL_REG(i, 34), 0xffffff00,
+ (pdram_timing->tmrd << 24) |
+ (pdram_timing->tmrd << 16) |
+ (pdram_timing->trtp << 8));
+ mmio_clrsetbits_32(CTL_REG(i, 60), 0xffff << 16,
+ pdram_timing->txsr << 16);
+ }
+ mmio_write_32(CTL_REG(i, 10), pdram_timing->tinit3);
+ mmio_write_32(CTL_REG(i, 12), pdram_timing->tinit5);
+ mmio_clrsetbits_32(CTL_REG(i, 24), (0x7f << 8),
+ ((pdram_timing->cl * 2) << 8));
+ mmio_clrsetbits_32(CTL_REG(i, 24), (0x1f << 16),
+ (pdram_timing->cwl << 16));
+ mmio_clrsetbits_32(CTL_REG(i, 24), 0x3f << 24,
+ pdram_timing->al << 24);
+ mmio_clrsetbits_32(CTL_REG(i, 28), 0xffffff00,
+ (pdram_timing->tras_min << 24) |
+ (pdram_timing->trc << 16) |
+ (pdram_timing->trrd << 8));
+ mmio_clrsetbits_32(CTL_REG(i, 29), 0xffffff,
+ (pdram_timing->tfaw << 16) |
+ (pdram_timing->trppb << 8) |
+ pdram_timing->twtr);
+ mmio_write_32(CTL_REG(i, 35), (pdram_timing->tcke << 24) |
+ pdram_timing->tras_max);
+ mmio_clrsetbits_32(CTL_REG(i, 36), 0xff,
+ max(1, pdram_timing->tckesr));
+ mmio_clrsetbits_32(CTL_REG(i, 39), (0xff << 24),
+ (pdram_timing->trcd << 24));
+ mmio_clrsetbits_32(CTL_REG(i, 40), 0x3f, pdram_timing->twr);
+ mmio_clrsetbits_32(CTL_REG(i, 42), 0x1f << 24,
+ pdram_timing->tmrz << 24);
+ tmp = pdram_timing->tdal ? pdram_timing->tdal :
+ (pdram_timing->twr + pdram_timing->trp);
+ mmio_clrsetbits_32(CTL_REG(i, 44), 0xff << 8, tmp << 8);
+ mmio_clrsetbits_32(CTL_REG(i, 45), 0xff << 8,
+ pdram_timing->trp << 8);
+ mmio_write_32(CTL_REG(i, 49),
+ ((pdram_timing->trefi - 8) << 16) |
+ pdram_timing->trfc);
+ mmio_clrsetbits_32(CTL_REG(i, 52), 0xffff << 16,
+ pdram_timing->txp << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 54), 0xffff,
+ pdram_timing->txpdll);
+ mmio_clrsetbits_32(CTL_REG(i, 55), 0xff << 8,
+ pdram_timing->tmrri << 8);
+ mmio_write_32(CTL_REG(i, 57), (pdram_timing->tmrwckel << 24) |
+ (pdram_timing->tckehcs << 16) |
+ (pdram_timing->tckelcs << 8) |
+ pdram_timing->tcscke);
+ mmio_clrsetbits_32(CTL_REG(i, 58), 0xf, pdram_timing->tzqcke);
+ mmio_clrsetbits_32(CTL_REG(i, 61), 0xffff, pdram_timing->txsnr);
+ mmio_clrsetbits_32(CTL_REG(i, 64), 0xffff << 16,
+ (pdram_timing->tckehcmd << 24) |
+ (pdram_timing->tckelcmd << 16));
+ mmio_write_32(CTL_REG(i, 65), (pdram_timing->tckelpd << 24) |
+ (pdram_timing->tescke << 16) |
+ (pdram_timing->tsr << 8) |
+ pdram_timing->tckckel);
+ mmio_clrsetbits_32(CTL_REG(i, 66), 0xfff,
+ (pdram_timing->tcmdcke << 8) |
+ pdram_timing->tcsckeh);
+ mmio_clrsetbits_32(CTL_REG(i, 92), (0xff << 24),
+ (pdram_timing->tcksre << 24));
+ mmio_clrsetbits_32(CTL_REG(i, 93), 0xff,
+ pdram_timing->tcksrx);
+ mmio_clrsetbits_32(CTL_REG(i, 108), (0x1 << 25),
+ (timing_config->dllbp << 25));
+ mmio_write_32(CTL_REG(i, 125),
+ (pdram_timing->tvrcg_disable << 16) |
+ pdram_timing->tvrcg_enable);
+ mmio_write_32(CTL_REG(i, 126), (pdram_timing->tckfspx << 24) |
+ (pdram_timing->tckfspe << 16) |
+ pdram_timing->tfc_long);
+ mmio_clrsetbits_32(CTL_REG(i, 127), 0xffff,
+ pdram_timing->tvref_long);
+ mmio_clrsetbits_32(CTL_REG(i, 134), 0xffff << 16,
+ pdram_timing->mr[0] << 16);
+ mmio_write_32(CTL_REG(i, 135), (pdram_timing->mr[2] << 16) |
+ pdram_timing->mr[1]);
+ mmio_clrsetbits_32(CTL_REG(i, 138), 0xffff << 16,
+ pdram_timing->mr[3] << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 140), 0xff, pdram_timing->mr11);
+ mmio_clrsetbits_32(CTL_REG(i, 148), 0xffff << 16,
+ pdram_timing->mr[0] << 16);
+ mmio_write_32(CTL_REG(i, 149), (pdram_timing->mr[2] << 16) |
+ pdram_timing->mr[1]);
+ mmio_clrsetbits_32(CTL_REG(i, 152), 0xffff << 16,
+ pdram_timing->mr[3] << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 154), 0xff, pdram_timing->mr11);
+ if (timing_config->dram_type == LPDDR4) {
+ mmio_clrsetbits_32(CTL_REG(i, 141), 0xffff,
+ pdram_timing->mr12);
+ mmio_clrsetbits_32(CTL_REG(i, 143), 0xffff,
+ pdram_timing->mr14);
+ mmio_clrsetbits_32(CTL_REG(i, 146), 0xffff,
+ pdram_timing->mr22);
+ mmio_clrsetbits_32(CTL_REG(i, 155), 0xffff,
+ pdram_timing->mr12);
+ mmio_clrsetbits_32(CTL_REG(i, 157), 0xffff,
+ pdram_timing->mr14);
+ mmio_clrsetbits_32(CTL_REG(i, 160), 0xffff,
+ pdram_timing->mr22);
+ }
+ mmio_write_32(CTL_REG(i, 182),
+ ((pdram_timing->tzqinit / 2) << 16) |
+ pdram_timing->tzqinit);
+ mmio_write_32(CTL_REG(i, 183), (pdram_timing->tzqcal << 16) |
+ pdram_timing->tzqcs);
+ mmio_clrsetbits_32(CTL_REG(i, 184), 0x3f, pdram_timing->tzqlat);
+ mmio_clrsetbits_32(CTL_REG(i, 188), 0xfff,
+ pdram_timing->tzqreset);
+ mmio_clrsetbits_32(CTL_REG(i, 212), 0xff << 16,
+ pdram_timing->todton << 16);
+
+ if (timing_config->odt) {
+ mmio_setbits_32(CTL_REG(i, 213), (1 << 24));
+ if (timing_config->freq < 400)
+ tmp = 4 << 24;
+ else
+ tmp = 8 << 24;
+ } else {
+ mmio_clrbits_32(CTL_REG(i, 213), (1 << 24));
+ tmp = 2 << 24;
+ }
+ mmio_clrsetbits_32(CTL_REG(i, 217), 0x1f << 24, tmp);
+ mmio_clrsetbits_32(CTL_REG(i, 221), 0xf << 24,
+ (pdram_timing->tdqsck_max << 24));
+ mmio_clrsetbits_32(CTL_REG(i, 222), 0x3, pdram_timing->tdqsck);
+ mmio_clrsetbits_32(CTL_REG(i, 291), 0xffff,
+ (get_wrlat_adj(timing_config->dram_type,
+ pdram_timing->cwl) << 8) |
+ get_rdlat_adj(timing_config->dram_type,
+ pdram_timing->cl));
+
+ mmio_clrsetbits_32(CTL_REG(i, 84), 0xffff,
+ (4 * pdram_timing->trefi) & 0xffff);
+
+ mmio_clrsetbits_32(CTL_REG(i, 84), 0xffff << 16,
+ ((2 * pdram_timing->trefi) & 0xffff) << 16);
+
+ if ((timing_config->dram_type == LPDDR3) ||
+ (timing_config->dram_type == LPDDR4)) {
+ tmp = get_pi_wrlat(pdram_timing, timing_config);
+ tmp1 = get_pi_todtoff_max(pdram_timing, timing_config);
+ tmp = (tmp > tmp1) ? (tmp - tmp1) : 0;
+ } else {
+ tmp = 0;
+ }
+ mmio_clrsetbits_32(CTL_REG(i, 214), 0x3f << 24,
+ (tmp & 0x3f) << 24);
+
+ if ((timing_config->dram_type == LPDDR3) ||
+ (timing_config->dram_type == LPDDR4)) {
+ /* min_rl_preamble = cl + TDQSCK_MIN - 1 */
+ tmp = pdram_timing->cl +
+ get_pi_todtoff_min(pdram_timing, timing_config);
+ tmp--;
+ /* todtoff_max */
+ tmp1 = get_pi_todtoff_max(pdram_timing, timing_config);
+ tmp = (tmp > tmp1) ? (tmp - tmp1) : 0;
+ } else {
+ tmp = pdram_timing->cl - pdram_timing->cwl;
+ }
+ mmio_clrsetbits_32(CTL_REG(i, 215), 0x3f << 16,
+ (tmp & 0x3f) << 16);
+
+ mmio_clrsetbits_32(CTL_REG(i, 275), 0xff << 24,
+ (get_pi_tdfi_phy_rdlat(pdram_timing,
+ timing_config) &
+ 0xff) << 24);
+
+ mmio_clrsetbits_32(CTL_REG(i, 284), 0xffff << 16,
+ ((2 * pdram_timing->trefi) & 0xffff) << 16);
+
+ mmio_clrsetbits_32(CTL_REG(i, 289), 0xffff,
+ (2 * pdram_timing->trefi) & 0xffff);
+
+ mmio_write_32(CTL_REG(i, 290), 20 * pdram_timing->trefi);
+
+ /* CTL_309 TDFI_CALVL_CAPTURE_F1:RW:16:10 */
+ tmp1 = 20000 / (1000000 / pdram_timing->mhz) + 1;
+ if ((20000 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp1++;
+ tmp = (tmp1 >> 1) + (tmp1 % 2) + 5;
+ mmio_clrsetbits_32(CTL_REG(i, 309), 0x3ff << 16, tmp << 16);
+
+ /* CTL_309 TDFI_CALVL_CC_F1:RW:0:10 */
+ tmp = tmp + 18;
+ mmio_clrsetbits_32(CTL_REG(i, 309), 0x3ff, tmp);
+
+ /* CTL_314 TDFI_WRCSLAT_F1:RW:24:8 */
+ tmp1 = get_pi_wrlat_adj(pdram_timing, timing_config);
+ if (timing_config->freq <= TDFI_LAT_THRESHOLD_FREQ) {
+ if (tmp1 == 0)
+ tmp = 0;
+ else if (tmp1 < 5)
+ tmp = tmp1 - 1;
+ else
+ tmp = tmp1 - 5;
+ } else {
+ tmp = tmp1 - 2;
+ }
+
+ mmio_clrsetbits_32(CTL_REG(i, 314), 0xff << 24, tmp << 24);
+
+ /* CTL_314 TDFI_RDCSLAT_F1:RW:16:8 */
+ if ((timing_config->freq <= TDFI_LAT_THRESHOLD_FREQ) &&
+ (pdram_timing->cl >= 5))
+ tmp = pdram_timing->cl - 5;
+ else
+ tmp = pdram_timing->cl - 2;
+ mmio_clrsetbits_32(CTL_REG(i, 314), 0xff << 16, tmp << 16);
+ }
+}
+
+static void gen_rk3399_enable_training(uint32_t ch_cnt, uint32_t nmhz)
+{
+ uint32_t i, tmp;
+
+ if (nmhz <= PHY_DLL_BYPASS_FREQ)
+ tmp = 0;
+ else
+ tmp = 1;
+
+ for (i = 0; i < ch_cnt; i++) {
+ mmio_clrsetbits_32(CTL_REG(i, 305), 1 << 16, tmp << 16);
+ mmio_clrsetbits_32(CTL_REG(i, 71), 1, tmp);
+ mmio_clrsetbits_32(CTL_REG(i, 70), 1 << 8, 1 << 8);
+ }
+}
+
+static void gen_rk3399_disable_training(uint32_t ch_cnt)
+{
+ uint32_t i;
+
+ for (i = 0; i < ch_cnt; i++) {
+ mmio_clrbits_32(CTL_REG(i, 305), 1 << 16);
+ mmio_clrbits_32(CTL_REG(i, 71), 1);
+ mmio_clrbits_32(CTL_REG(i, 70), 1 << 8);
+ }
+}
+
+static void gen_rk3399_ctl_params(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing,
+ uint32_t fn)
+{
+ if (fn == 0)
+ gen_rk3399_ctl_params_f0(timing_config, pdram_timing);
+ else
+ gen_rk3399_ctl_params_f1(timing_config, pdram_timing);
+}
+
+static void gen_rk3399_pi_params_f0(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ uint32_t tmp, tmp1, tmp2;
+ uint32_t i;
+
+ for (i = 0; i < timing_config->ch_cnt; i++) {
+ /* PI_02 PI_TDFI_PHYMSTR_MAX_F0:RW:0:32 */
+ tmp = 4 * pdram_timing->trefi;
+ mmio_write_32(PI_REG(i, 2), tmp);
+ /* PI_03 PI_TDFI_PHYMSTR_RESP_F0:RW:0:16 */
+ tmp = 2 * pdram_timing->trefi;
+ mmio_clrsetbits_32(PI_REG(i, 3), 0xffff, tmp);
+ /* PI_07 PI_TDFI_PHYUPD_RESP_F0:RW:16:16 */
+ mmio_clrsetbits_32(PI_REG(i, 7), 0xffff << 16, tmp << 16);
+
+ /* PI_42 PI_TDELAY_RDWR_2_BUS_IDLE_F0:RW:0:8 */
+ if (timing_config->dram_type == LPDDR4)
+ tmp = 2;
+ else
+ tmp = 0;
+ tmp = (pdram_timing->bl / 2) + 4 +
+ (get_pi_rdlat_adj(pdram_timing) - 2) + tmp +
+ get_pi_tdfi_phy_rdlat(pdram_timing, timing_config);
+ mmio_clrsetbits_32(PI_REG(i, 42), 0xff, tmp);
+ /* PI_43 PI_WRLAT_F0:RW:0:5 */
+ if (timing_config->dram_type == LPDDR3) {
+ tmp = get_pi_wrlat(pdram_timing, timing_config);
+ mmio_clrsetbits_32(PI_REG(i, 43), 0x1f, tmp);
+ }
+ /* PI_43 PI_ADDITIVE_LAT_F0:RW:8:6 */
+ mmio_clrsetbits_32(PI_REG(i, 43), 0x3f << 8,
+ PI_ADD_LATENCY << 8);
+
+ /* PI_43 PI_CASLAT_LIN_F0:RW:16:7 */
+ mmio_clrsetbits_32(PI_REG(i, 43), 0x7f << 16,
+ (pdram_timing->cl * 2) << 16);
+ /* PI_46 PI_TREF_F0:RW:16:16 */
+ mmio_clrsetbits_32(PI_REG(i, 46), 0xffff << 16,
+ pdram_timing->trefi << 16);
+ /* PI_46 PI_TRFC_F0:RW:0:10 */
+ mmio_clrsetbits_32(PI_REG(i, 46), 0x3ff, pdram_timing->trfc);
+ /* PI_66 PI_TODTL_2CMD_F0:RW:24:8 */
+ if (timing_config->dram_type == LPDDR3) {
+ tmp = get_pi_todtoff_max(pdram_timing, timing_config);
+ mmio_clrsetbits_32(PI_REG(i, 66), 0xff << 24,
+ tmp << 24);
+ }
+ /* PI_72 PI_WR_TO_ODTH_F0:RW:16:6 */
+ if ((timing_config->dram_type == LPDDR3) ||
+ (timing_config->dram_type == LPDDR4)) {
+ tmp1 = get_pi_wrlat(pdram_timing, timing_config);
+ tmp2 = get_pi_todtoff_max(pdram_timing, timing_config);
+ if (tmp1 > tmp2)
+ tmp = tmp1 - tmp2;
+ else
+ tmp = 0;
+ } else if (timing_config->dram_type == DDR3) {
+ tmp = 0;
+ }
+ mmio_clrsetbits_32(PI_REG(i, 72), 0x3f << 16, tmp << 16);
+ /* PI_73 PI_RD_TO_ODTH_F0:RW:8:6 */
+ if ((timing_config->dram_type == LPDDR3) ||
+ (timing_config->dram_type == LPDDR4)) {
+ /* min_rl_preamble = cl + TDQSCK_MIN - 1 */
+ tmp1 = pdram_timing->cl;
+ tmp1 += get_pi_todtoff_min(pdram_timing, timing_config);
+ tmp1--;
+ /* todtoff_max */
+ tmp2 = get_pi_todtoff_max(pdram_timing, timing_config);
+ if (tmp1 > tmp2)
+ tmp = tmp1 - tmp2;
+ else
+ tmp = 0;
+ } else if (timing_config->dram_type == DDR3) {
+ tmp = pdram_timing->cl - pdram_timing->cwl;
+ }
+ mmio_clrsetbits_32(PI_REG(i, 73), 0x3f << 8, tmp << 8);
+ /* PI_89 PI_RDLAT_ADJ_F0:RW:16:8 */
+ tmp = get_pi_rdlat_adj(pdram_timing);
+ mmio_clrsetbits_32(PI_REG(i, 89), 0xff << 16, tmp << 16);
+ /* PI_90 PI_WRLAT_ADJ_F0:RW:16:8 */
+ tmp = get_pi_wrlat_adj(pdram_timing, timing_config);
+ mmio_clrsetbits_32(PI_REG(i, 90), 0xff << 16, tmp << 16);
+ /* PI_91 PI_TDFI_WRCSLAT_F0:RW:16:8 */
+ tmp1 = tmp;
+ if (tmp1 == 0)
+ tmp = 0;
+ else if (tmp1 < 5)
+ tmp = tmp1 - 1;
+ else
+ tmp = tmp1 - 5;
+ mmio_clrsetbits_32(PI_REG(i, 91), 0xff << 16, tmp << 16);
+ /* PI_95 PI_TDFI_CALVL_CAPTURE_F0:RW:16:10 */
+ tmp1 = 20000 / (1000000 / pdram_timing->mhz) + 1;
+ if ((20000 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp1++;
+ tmp = (tmp1 >> 1) + (tmp1 % 2) + 5;
+ mmio_clrsetbits_32(PI_REG(i, 95), 0x3ff << 16, tmp << 16);
+ /* PI_95 PI_TDFI_CALVL_CC_F0:RW:0:10 */
+ mmio_clrsetbits_32(PI_REG(i, 95), 0x3ff, tmp + 18);
+ /* PI_102 PI_TMRZ_F0:RW:8:5 */
+ mmio_clrsetbits_32(PI_REG(i, 102), 0x1f << 8,
+ pdram_timing->tmrz << 8);
+ /* PI_111 PI_TDFI_CALVL_STROBE_F0:RW:8:4 */
+ tmp1 = 2 * 1000 / (1000000 / pdram_timing->mhz);
+ if ((2 * 1000 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp1++;
+ /* pi_tdfi_calvl_strobe=tds_train+5 */
+ tmp = tmp1 + 5;
+ mmio_clrsetbits_32(PI_REG(i, 111), 0xf << 8, tmp << 8);
+ /* PI_116 PI_TCKEHDQS_F0:RW:16:6 */
+ tmp = 10000 / (1000000 / pdram_timing->mhz);
+ if ((10000 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp++;
+ if (pdram_timing->mhz <= 100)
+ tmp = tmp + 1;
+ else
+ tmp = tmp + 8;
+ mmio_clrsetbits_32(PI_REG(i, 116), 0x3f << 16, tmp << 16);
+ /* PI_125 PI_MR1_DATA_F0_0:RW+:8:16 */
+ mmio_clrsetbits_32(PI_REG(i, 125), 0xffff << 8,
+ pdram_timing->mr[1] << 8);
+ /* PI_133 PI_MR1_DATA_F0_1:RW+:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 133), 0xffff, pdram_timing->mr[1]);
+ /* PI_140 PI_MR1_DATA_F0_2:RW+:16:16 */
+ mmio_clrsetbits_32(PI_REG(i, 140), 0xffff << 16,
+ pdram_timing->mr[1] << 16);
+ /* PI_148 PI_MR1_DATA_F0_3:RW+:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 148), 0xffff, pdram_timing->mr[1]);
+ /* PI_126 PI_MR2_DATA_F0_0:RW+:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 126), 0xffff, pdram_timing->mr[2]);
+ /* PI_133 PI_MR2_DATA_F0_1:RW+:16:16 */
+ mmio_clrsetbits_32(PI_REG(i, 133), 0xffff << 16,
+ pdram_timing->mr[2] << 16);
+ /* PI_141 PI_MR2_DATA_F0_2:RW+:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 141), 0xffff, pdram_timing->mr[2]);
+ /* PI_148 PI_MR2_DATA_F0_3:RW+:16:16 */
+ mmio_clrsetbits_32(PI_REG(i, 148), 0xffff << 16,
+ pdram_timing->mr[2] << 16);
+ /* PI_156 PI_TFC_F0:RW:0:10 */
+ mmio_clrsetbits_32(PI_REG(i, 156), 0x3ff,
+ pdram_timing->tfc_long);
+ /* PI_158 PI_TWR_F0:RW:24:6 */
+ mmio_clrsetbits_32(PI_REG(i, 158), 0x3f << 24,
+ pdram_timing->twr << 24);
+ /* PI_158 PI_TWTR_F0:RW:16:6 */
+ mmio_clrsetbits_32(PI_REG(i, 158), 0x3f << 16,
+ pdram_timing->twtr << 16);
+ /* PI_158 PI_TRCD_F0:RW:8:8 */
+ mmio_clrsetbits_32(PI_REG(i, 158), 0xff << 8,
+ pdram_timing->trcd << 8);
+ /* PI_158 PI_TRP_F0:RW:0:8 */
+ mmio_clrsetbits_32(PI_REG(i, 158), 0xff, pdram_timing->trp);
+ /* PI_157 PI_TRTP_F0:RW:24:8 */
+ mmio_clrsetbits_32(PI_REG(i, 157), 0xff << 24,
+ pdram_timing->trtp << 24);
+ /* PI_159 PI_TRAS_MIN_F0:RW:24:8 */
+ mmio_clrsetbits_32(PI_REG(i, 159), 0xff << 24,
+ pdram_timing->tras_min << 24);
+ /* PI_159 PI_TRAS_MAX_F0:RW:0:17 */
+ tmp = pdram_timing->tras_max * 99 / 100;
+ mmio_clrsetbits_32(PI_REG(i, 159), 0x1ffff, tmp);
+ /* PI_160 PI_TMRD_F0:RW:16:6 */
+ mmio_clrsetbits_32(PI_REG(i, 160), 0x3f << 16,
+ pdram_timing->tmrd << 16);
+ /*PI_160 PI_TDQSCK_MAX_F0:RW:0:4 */
+ mmio_clrsetbits_32(PI_REG(i, 160), 0xf,
+ pdram_timing->tdqsck_max);
+ /* PI_187 PI_TDFI_CTRLUPD_MAX_F0:RW:8:16 */
+ mmio_clrsetbits_32(PI_REG(i, 187), 0xffff << 8,
+ (2 * pdram_timing->trefi) << 8);
+ /* PI_188 PI_TDFI_CTRLUPD_INTERVAL_F0:RW:0:32 */
+ mmio_clrsetbits_32(PI_REG(i, 188), 0xffffffff,
+ 20 * pdram_timing->trefi);
+ }
+}
+
+static void gen_rk3399_pi_params_f1(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ uint32_t tmp, tmp1, tmp2;
+ uint32_t i;
+
+ for (i = 0; i < timing_config->ch_cnt; i++) {
+ /* PI_04 PI_TDFI_PHYMSTR_MAX_F1:RW:0:32 */
+ tmp = 4 * pdram_timing->trefi;
+ mmio_write_32(PI_REG(i, 4), tmp);
+ /* PI_05 PI_TDFI_PHYMSTR_RESP_F1:RW:0:16 */
+ tmp = 2 * pdram_timing->trefi;
+ mmio_clrsetbits_32(PI_REG(i, 5), 0xffff, tmp);
+ /* PI_12 PI_TDFI_PHYUPD_RESP_F1:RW:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 12), 0xffff, tmp);
+
+ /* PI_42 PI_TDELAY_RDWR_2_BUS_IDLE_F1:RW:8:8 */
+ if (timing_config->dram_type == LPDDR4)
+ tmp = 2;
+ else
+ tmp = 0;
+ tmp = (pdram_timing->bl / 2) + 4 +
+ (get_pi_rdlat_adj(pdram_timing) - 2) + tmp +
+ get_pi_tdfi_phy_rdlat(pdram_timing, timing_config);
+ mmio_clrsetbits_32(PI_REG(i, 42), 0xff << 8, tmp << 8);
+ /* PI_43 PI_WRLAT_F1:RW:24:5 */
+ if (timing_config->dram_type == LPDDR3) {
+ tmp = get_pi_wrlat(pdram_timing, timing_config);
+ mmio_clrsetbits_32(PI_REG(i, 43), 0x1f << 24,
+ tmp << 24);
+ }
+ /* PI_44 PI_ADDITIVE_LAT_F1:RW:0:6 */
+ mmio_clrsetbits_32(PI_REG(i, 44), 0x3f, PI_ADD_LATENCY);
+ /* PI_44 PI_CASLAT_LIN_F1:RW:8:7:=0x18 */
+ mmio_clrsetbits_32(PI_REG(i, 44), 0x7f << 8,
+ (pdram_timing->cl * 2) << 8);
+ /* PI_47 PI_TREF_F1:RW:16:16 */
+ mmio_clrsetbits_32(PI_REG(i, 47), 0xffff << 16,
+ pdram_timing->trefi << 16);
+ /* PI_47 PI_TRFC_F1:RW:0:10 */
+ mmio_clrsetbits_32(PI_REG(i, 47), 0x3ff, pdram_timing->trfc);
+ /* PI_67 PI_TODTL_2CMD_F1:RW:8:8 */
+ if (timing_config->dram_type == LPDDR3) {
+ tmp = get_pi_todtoff_max(pdram_timing, timing_config);
+ mmio_clrsetbits_32(PI_REG(i, 67), 0xff << 8, tmp << 8);
+ }
+ /* PI_72 PI_WR_TO_ODTH_F1:RW:24:6 */
+ if ((timing_config->dram_type == LPDDR3) ||
+ (timing_config->dram_type == LPDDR4)) {
+ tmp1 = get_pi_wrlat(pdram_timing, timing_config);
+ tmp2 = get_pi_todtoff_max(pdram_timing, timing_config);
+ if (tmp1 > tmp2)
+ tmp = tmp1 - tmp2;
+ else
+ tmp = 0;
+ } else if (timing_config->dram_type == DDR3) {
+ tmp = 0;
+ }
+ mmio_clrsetbits_32(PI_REG(i, 72), 0x3f << 24, tmp << 24);
+ /* PI_73 PI_RD_TO_ODTH_F1:RW:16:6 */
+ if ((timing_config->dram_type == LPDDR3) ||
+ (timing_config->dram_type == LPDDR4)) {
+ /* min_rl_preamble = cl + TDQSCK_MIN - 1 */
+ tmp1 = pdram_timing->cl +
+ get_pi_todtoff_min(pdram_timing, timing_config);
+ tmp1--;
+ /* todtoff_max */
+ tmp2 = get_pi_todtoff_max(pdram_timing, timing_config);
+ if (tmp1 > tmp2)
+ tmp = tmp1 - tmp2;
+ else
+ tmp = 0;
+ } else if (timing_config->dram_type == DDR3)
+ tmp = pdram_timing->cl - pdram_timing->cwl;
+
+ mmio_clrsetbits_32(PI_REG(i, 73), 0x3f << 16, tmp << 16);
+ /* PI_89 PI_RDLAT_ADJ_F1:RW:24:8 */
+ tmp = get_pi_rdlat_adj(pdram_timing);
+ mmio_clrsetbits_32(PI_REG(i, 89), 0xff << 24, tmp << 24);
+ /* PI_90 PI_WRLAT_ADJ_F1:RW:24:8 */
+ tmp = get_pi_wrlat_adj(pdram_timing, timing_config);
+ mmio_clrsetbits_32(PI_REG(i, 90), 0xff << 24, tmp << 24);
+ /* PI_91 PI_TDFI_WRCSLAT_F1:RW:24:8 */
+ tmp1 = tmp;
+ if (tmp1 == 0)
+ tmp = 0;
+ else if (tmp1 < 5)
+ tmp = tmp1 - 1;
+ else
+ tmp = tmp1 - 5;
+ mmio_clrsetbits_32(PI_REG(i, 91), 0xff << 24, tmp << 24);
+ /*PI_96 PI_TDFI_CALVL_CAPTURE_F1:RW:16:10 */
+ /* tadr=20ns */
+ tmp1 = 20000 / (1000000 / pdram_timing->mhz) + 1;
+ if ((20000 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp1++;
+ tmp = (tmp1 >> 1) + (tmp1 % 2) + 5;
+ mmio_clrsetbits_32(PI_REG(i, 96), 0x3ff << 16, tmp << 16);
+ /* PI_96 PI_TDFI_CALVL_CC_F1:RW:0:10 */
+ tmp = tmp + 18;
+ mmio_clrsetbits_32(PI_REG(i, 96), 0x3ff, tmp);
+ /*PI_103 PI_TMRZ_F1:RW:0:5 */
+ mmio_clrsetbits_32(PI_REG(i, 103), 0x1f, pdram_timing->tmrz);
+ /*PI_111 PI_TDFI_CALVL_STROBE_F1:RW:16:4 */
+ /* tds_train = ceil(2ns / tCK) */
+ tmp1 = 2 * 1000 / (1000000 / pdram_timing->mhz);
+ if ((2 * 1000 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp1++;
+ /* pi_tdfi_calvl_strobe=tds_train+5 */
+ tmp = tmp1 + 5;
+ mmio_clrsetbits_32(PI_REG(i, 111), 0xf << 16,
+ tmp << 16);
+ /* PI_116 PI_TCKEHDQS_F1:RW:24:6 */
+ tmp = 10000 / (1000000 / pdram_timing->mhz);
+ if ((10000 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp++;
+ if (pdram_timing->mhz <= 100)
+ tmp = tmp + 1;
+ else
+ tmp = tmp + 8;
+ mmio_clrsetbits_32(PI_REG(i, 116), 0x3f << 24,
+ tmp << 24);
+ /* PI_128 PI_MR1_DATA_F1_0:RW+:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 128), 0xffff, pdram_timing->mr[1]);
+ /* PI_135 PI_MR1_DATA_F1_1:RW+:8:16 */
+ mmio_clrsetbits_32(PI_REG(i, 135), 0xffff << 8,
+ pdram_timing->mr[1] << 8);
+ /* PI_143 PI_MR1_DATA_F1_2:RW+:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 143), 0xffff, pdram_timing->mr[1]);
+ /* PI_150 PI_MR1_DATA_F1_3:RW+:8:16 */
+ mmio_clrsetbits_32(PI_REG(i, 150), 0xffff << 8,
+ pdram_timing->mr[1] << 8);
+ /* PI_128 PI_MR2_DATA_F1_0:RW+:16:16 */
+ mmio_clrsetbits_32(PI_REG(i, 128), 0xffff << 16,
+ pdram_timing->mr[2] << 16);
+ /* PI_136 PI_MR2_DATA_F1_1:RW+:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 136), 0xffff, pdram_timing->mr[2]);
+ /* PI_143 PI_MR2_DATA_F1_2:RW+:16:16 */
+ mmio_clrsetbits_32(PI_REG(i, 143), 0xffff << 16,
+ pdram_timing->mr[2] << 16);
+ /* PI_151 PI_MR2_DATA_F1_3:RW+:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 151), 0xffff, pdram_timing->mr[2]);
+ /* PI_156 PI_TFC_F1:RW:16:10 */
+ mmio_clrsetbits_32(PI_REG(i, 156), 0x3ff << 16,
+ pdram_timing->tfc_long << 16);
+ /* PI_162 PI_TWR_F1:RW:8:6 */
+ mmio_clrsetbits_32(PI_REG(i, 162), 0x3f << 8,
+ pdram_timing->twr << 8);
+ /* PI_162 PI_TWTR_F1:RW:0:6 */
+ mmio_clrsetbits_32(PI_REG(i, 162), 0x3f, pdram_timing->twtr);
+ /* PI_161 PI_TRCD_F1:RW:24:8 */
+ mmio_clrsetbits_32(PI_REG(i, 161), 0xff << 24,
+ pdram_timing->trcd << 24);
+ /* PI_161 PI_TRP_F1:RW:16:8 */
+ mmio_clrsetbits_32(PI_REG(i, 161), 0xff << 16,
+ pdram_timing->trp << 16);
+ /* PI_161 PI_TRTP_F1:RW:8:8 */
+ mmio_clrsetbits_32(PI_REG(i, 161), 0xff << 8,
+ pdram_timing->trtp << 8);
+ /* PI_163 PI_TRAS_MIN_F1:RW:24:8 */
+ mmio_clrsetbits_32(PI_REG(i, 163), 0xff << 24,
+ pdram_timing->tras_min << 24);
+ /* PI_163 PI_TRAS_MAX_F1:RW:0:17 */
+ mmio_clrsetbits_32(PI_REG(i, 163), 0x1ffff,
+ pdram_timing->tras_max * 99 / 100);
+ /* PI_164 PI_TMRD_F1:RW:16:6 */
+ mmio_clrsetbits_32(PI_REG(i, 164), 0x3f << 16,
+ pdram_timing->tmrd << 16);
+ /* PI_164 PI_TDQSCK_MAX_F1:RW:0:4 */
+ mmio_clrsetbits_32(PI_REG(i, 164), 0xf,
+ pdram_timing->tdqsck_max);
+ /* PI_189 PI_TDFI_CTRLUPD_MAX_F1:RW:0:16 */
+ mmio_clrsetbits_32(PI_REG(i, 189), 0xffff,
+ 2 * pdram_timing->trefi);
+ /* PI_190 PI_TDFI_CTRLUPD_INTERVAL_F1:RW:0:32 */
+ mmio_clrsetbits_32(PI_REG(i, 190), 0xffffffff,
+ 20 * pdram_timing->trefi);
+ }
+}
+
+static void gen_rk3399_pi_params(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing,
+ uint32_t fn)
+{
+ if (fn == 0)
+ gen_rk3399_pi_params_f0(timing_config, pdram_timing);
+ else
+ gen_rk3399_pi_params_f1(timing_config, pdram_timing);
+}
+
+static void gen_rk3399_set_odt(uint32_t odt_en)
+{
+ uint32_t drv_odt_val;
+ uint32_t i;
+
+ for (i = 0; i < rk3399_dram_status.timing_config.ch_cnt; i++) {
+ drv_odt_val = (odt_en | (0 << 1) | (0 << 2)) << 16;
+ mmio_clrsetbits_32(PHY_REG(i, 5), 0x7 << 16, drv_odt_val);
+ mmio_clrsetbits_32(PHY_REG(i, 133), 0x7 << 16, drv_odt_val);
+ mmio_clrsetbits_32(PHY_REG(i, 261), 0x7 << 16, drv_odt_val);
+ mmio_clrsetbits_32(PHY_REG(i, 389), 0x7 << 16, drv_odt_val);
+ drv_odt_val = (odt_en | (0 << 1) | (0 << 2)) << 24;
+ mmio_clrsetbits_32(PHY_REG(i, 6), 0x7 << 24, drv_odt_val);
+ mmio_clrsetbits_32(PHY_REG(i, 134), 0x7 << 24, drv_odt_val);
+ mmio_clrsetbits_32(PHY_REG(i, 262), 0x7 << 24, drv_odt_val);
+ mmio_clrsetbits_32(PHY_REG(i, 390), 0x7 << 24, drv_odt_val);
+ }
+}
+
+static void gen_rk3399_phy_dll_bypass(uint32_t mhz, uint32_t ch,
+ uint32_t index, uint32_t dram_type)
+{
+ uint32_t sw_master_mode = 0;
+ uint32_t rddqs_gate_delay, rddqs_latency, total_delay;
+ uint32_t i;
+
+ if (dram_type == DDR3)
+ total_delay = PI_PAD_DELAY_PS_VALUE;
+ else if (dram_type == LPDDR3)
+ total_delay = PI_PAD_DELAY_PS_VALUE + 2500;
+ else
+ total_delay = PI_PAD_DELAY_PS_VALUE + 1500;
+ /* total_delay + 0.55tck */
+ total_delay += (55 * 10000)/mhz;
+ rddqs_latency = total_delay * mhz / 1000000;
+ total_delay -= rddqs_latency * 1000000 / mhz;
+ rddqs_gate_delay = total_delay * 0x200 * mhz / 1000000;
+ if (mhz <= PHY_DLL_BYPASS_FREQ) {
+ sw_master_mode = 0xc;
+ mmio_setbits_32(PHY_REG(ch, 514), 1);
+ mmio_setbits_32(PHY_REG(ch, 642), 1);
+ mmio_setbits_32(PHY_REG(ch, 770), 1);
+
+ /* setting bypass mode slave delay */
+ for (i = 0; i < 4; i++) {
+ /* wr dq delay = -180deg + (0x60 / 4) * 20ps */
+ mmio_clrsetbits_32(PHY_REG(ch, 1 + 128 * i), 0x7ff << 8,
+ 0x4a0 << 8);
+ /* rd dqs/dq delay = (0x60 / 4) * 20ps */
+ mmio_clrsetbits_32(PHY_REG(ch, 11 + 128 * i), 0x3ff,
+ 0xa0);
+ /* rd rddqs_gate delay */
+ mmio_clrsetbits_32(PHY_REG(ch, 2 + 128 * i), 0x3ff,
+ rddqs_gate_delay);
+ mmio_clrsetbits_32(PHY_REG(ch, 78 + 128 * i), 0xf,
+ rddqs_latency);
+ }
+ for (i = 0; i < 3; i++)
+ /* adr delay */
+ mmio_clrsetbits_32(PHY_REG(ch, 513 + 128 * i),
+ 0x7ff << 16, 0x80 << 16);
+
+ if ((mmio_read_32(PHY_REG(ch, 86)) & 0xc00) == 0) {
+ /*
+ * old status is normal mode,
+ * and saving the wrdqs slave delay
+ */
+ for (i = 0; i < 4; i++) {
+ /* save and clear wr dqs slave delay */
+ wrdqs_delay_val[ch][index][i] = 0x3ff &
+ (mmio_read_32(PHY_REG(ch, 63 + i * 128))
+ >> 16);
+ mmio_clrsetbits_32(PHY_REG(ch, 63 + i * 128),
+ 0x03ff << 16, 0 << 16);
+ /*
+ * in normal mode the cmd may delay 1cycle by
+ * wrlvl and in bypass mode making dqs also
+ * delay 1cycle.
+ */
+ mmio_clrsetbits_32(PHY_REG(ch, 78 + i * 128),
+ 0x07 << 8, 0x1 << 8);
+ }
+ }
+ } else if (mmio_read_32(PHY_REG(ch, 86)) & 0xc00) {
+ /* old status is bypass mode and restore wrlvl resume */
+ for (i = 0; i < 4; i++) {
+ mmio_clrsetbits_32(PHY_REG(ch, 63 + i * 128),
+ 0x03ff << 16,
+ (wrdqs_delay_val[ch][index][i] &
+ 0x3ff) << 16);
+ /* resume phy_write_path_lat_add */
+ mmio_clrbits_32(PHY_REG(ch, 78 + i * 128), 0x07 << 8);
+ }
+ }
+
+ /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
+ mmio_clrsetbits_32(PHY_REG(ch, 86), 0xf << 8, sw_master_mode << 8);
+ mmio_clrsetbits_32(PHY_REG(ch, 214), 0xf << 8, sw_master_mode << 8);
+ mmio_clrsetbits_32(PHY_REG(ch, 342), 0xf << 8, sw_master_mode << 8);
+ mmio_clrsetbits_32(PHY_REG(ch, 470), 0xf << 8, sw_master_mode << 8);
+
+ /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
+ mmio_clrsetbits_32(PHY_REG(ch, 547), 0xf << 16, sw_master_mode << 16);
+ mmio_clrsetbits_32(PHY_REG(ch, 675), 0xf << 16, sw_master_mode << 16);
+ mmio_clrsetbits_32(PHY_REG(ch, 803), 0xf << 16, sw_master_mode << 16);
+}
+
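+/*
+ * Worked example of the delay split above, assuming DDR3 at 200 MHz
+ * (tCK = 5000 ps): total_delay = 1000 + 0.55 * tCK = 3750 ps, which is
+ * less than one cycle, so rddqs_latency = 0 and the whole delay is
+ * expressed as rddqs_gate_delay = 3750 * 0x200 * 200 / 1000000 = 384,
+ * i.e. 0.75 tCK in 1/512-cycle steps.
+ */
+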
+static void gen_rk3399_phy_params(struct timing_related_config *timing_config,
+ struct drv_odt_lp_config *drv_config,
+ struct dram_timing_t *pdram_timing,
+ uint32_t fn)
+{
+ uint32_t tmp, i, div, j;
+ uint32_t mem_delay_ps, pad_delay_ps, total_delay_ps, delay_frac_ps;
+ uint32_t trpre_min_ps, gate_delay_ps, gate_delay_frac_ps;
+ uint32_t ie_enable, tsel_enable, cas_lat, rddata_en_ie_dly, tsel_adder;
+ uint32_t extra_adder, delta, hs_offset;
+
+ for (i = 0; i < timing_config->ch_cnt; i++) {
+
+ pad_delay_ps = PI_PAD_DELAY_PS_VALUE;
+ ie_enable = PI_IE_ENABLE_VALUE;
+ tsel_enable = PI_TSEL_ENABLE_VALUE;
+
+ mmio_clrsetbits_32(PHY_REG(i, 896), (0x3 << 8) | 1, fn << 8);
+
+ /* PHY_LOW_FREQ_SEL */
+ /* DENALI_PHY_913 1bit offset_0 */
+ if (timing_config->freq > 400)
+ mmio_clrbits_32(PHY_REG(i, 913), 1);
+ else
+ mmio_setbits_32(PHY_REG(i, 913), 1);
+
+ /* PHY_RPTR_UPDATE_x */
+ /* DENALI_PHY_87/215/343/471 4bit offset_16 */
+ tmp = 2500 / (1000000 / pdram_timing->mhz) + 3;
+ if ((2500 % (1000000 / pdram_timing->mhz)) != 0)
+ tmp++;
+ mmio_clrsetbits_32(PHY_REG(i, 87), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 215), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 343), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 471), 0xf << 16, tmp << 16);
+
+ /* PHY_PLL_CTRL */
+ /* DENALI_PHY_911 13bits offset_0 */
+ /* PHY_LP4_BOOT_PLL_CTRL */
+ /* DENALI_PHY_919 13bits offset_0 */
+ tmp = (1 << 12) | (2 << 7) | (1 << 1);
+ mmio_clrsetbits_32(PHY_REG(i, 911), 0x1fff, tmp);
+ mmio_clrsetbits_32(PHY_REG(i, 919), 0x1fff, tmp);
+
+ /* PHY_PLL_CTRL_CA */
+ /* DENALI_PHY_911 13bits offset_16 */
+ /* PHY_LP4_BOOT_PLL_CTRL_CA */
+ /* DENALI_PHY_919 13bits offset_16 */
+ tmp = (2 << 7) | (1 << 5) | (1 << 1);
+ mmio_clrsetbits_32(PHY_REG(i, 911), 0x1fff << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 919), 0x1fff << 16, tmp << 16);
+
+ /* PHY_TCKSRE_WAIT */
+ /* DENALI_PHY_922 4bits offset_24 */
+ if (pdram_timing->mhz <= 400)
+ tmp = 1;
+ else if (pdram_timing->mhz <= 800)
+ tmp = 3;
+ else if (pdram_timing->mhz <= 1000)
+ tmp = 4;
+ else
+ tmp = 5;
+ mmio_clrsetbits_32(PHY_REG(i, 922), 0xf << 24, tmp << 24);
+ /* PHY_CAL_CLK_SELECT_0:RW8:3 */
+ div = pdram_timing->mhz / (2 * 20);
+ for (j = 2, tmp = 1; j <= 128; j <<= 1, tmp++) {
+ if (div < j)
+ break;
+ }
+ mmio_clrsetbits_32(PHY_REG(i, 947), 0x7 << 8, tmp << 8);
+
+ if (timing_config->dram_type == DDR3) {
+ mem_delay_ps = 0;
+ trpre_min_ps = 1000;
+ } else if (timing_config->dram_type == LPDDR4) {
+ mem_delay_ps = 1500;
+ trpre_min_ps = 900;
+ } else if (timing_config->dram_type == LPDDR3) {
+ mem_delay_ps = 2500;
+ trpre_min_ps = 900;
+ } else {
+ ERROR("gen_rk3399_phy_params:dramtype unsupport\n");
+ return;
+ }
+ total_delay_ps = mem_delay_ps + pad_delay_ps;
+ delay_frac_ps = 1000 * total_delay_ps /
+ (1000000 / pdram_timing->mhz);
+ gate_delay_ps = delay_frac_ps + 1000 - (trpre_min_ps / 2);
+ gate_delay_frac_ps = gate_delay_ps % 1000;
+ tmp = gate_delay_frac_ps * 0x200 / 1000;
+ /* PHY_RDDQS_GATE_SLAVE_DELAY */
+ /* DENALI_PHY_77/205/333/461 10bits offset_16 */
+ mmio_clrsetbits_32(PHY_REG(i, 77), 0x2ff << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 205), 0x2ff << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 333), 0x2ff << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 461), 0x2ff << 16, tmp << 16);
+
+ tmp = gate_delay_ps / 1000;
+ /* PHY_LP4_BOOT_RDDQS_LATENCY_ADJUST */
+ /* DENALI_PHY_10/138/266/394 4bit offset_0 */
+ mmio_clrsetbits_32(PHY_REG(i, 10), 0xf, tmp);
+ mmio_clrsetbits_32(PHY_REG(i, 138), 0xf, tmp);
+ mmio_clrsetbits_32(PHY_REG(i, 266), 0xf, tmp);
+ mmio_clrsetbits_32(PHY_REG(i, 394), 0xf, tmp);
+ /* PHY_GTLVL_LAT_ADJ_START */
+ /* DENALI_PHY_80/208/336/464 4bits offset_16 */
+ tmp = rddqs_delay_ps / (1000000 / pdram_timing->mhz) + 2;
+ mmio_clrsetbits_32(PHY_REG(i, 80), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 208), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 336), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 464), 0xf << 16, tmp << 16);
+
+ cas_lat = pdram_timing->cl + PI_ADD_LATENCY;
+ rddata_en_ie_dly = ie_enable / (1000000 / pdram_timing->mhz);
+ if ((ie_enable % (1000000 / pdram_timing->mhz)) != 0)
+ rddata_en_ie_dly++;
+ rddata_en_ie_dly = rddata_en_ie_dly - 1;
+ tsel_adder = tsel_enable / (1000000 / pdram_timing->mhz);
+ if ((tsel_enable % (1000000 / pdram_timing->mhz)) != 0)
+ tsel_adder++;
+ if (rddata_en_ie_dly > tsel_adder)
+ extra_adder = rddata_en_ie_dly - tsel_adder;
+ else
+ extra_adder = 0;
+ delta = cas_lat - rddata_en_ie_dly;
+ if (PI_REGS_DIMM_SUPPORT && PI_DOUBLEFREEK)
+ hs_offset = 2;
+ else
+ hs_offset = 1;
+ if (rddata_en_ie_dly > (cas_lat - 1 - hs_offset))
+ tmp = 0;
+ else if ((delta == 2) || (delta == 1))
+ tmp = rddata_en_ie_dly - 0 - extra_adder;
+ else
+ tmp = extra_adder;
+ /* PHY_LP4_BOOT_RDDATA_EN_TSEL_DLY */
+ /* DENALI_PHY_9/137/265/393 4bit offset_16 */
+ mmio_clrsetbits_32(PHY_REG(i, 9), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 137), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 265), 0xf << 16, tmp << 16);
+ mmio_clrsetbits_32(PHY_REG(i, 393), 0xf << 16, tmp << 16);
+ /* PHY_RDDATA_EN_TSEL_DLY */
+ /* DENALI_PHY_86/214/342/470 4bit offset_0 */
+ mmio_clrsetbits_32(PHY_REG(i, 86), 0xf, tmp);
+ mmio_clrsetbits_32(PHY_REG(i, 214), 0xf, tmp);
+ mmio_clrsetbits_32(PHY_REG(i, 342), 0xf, tmp);
+ mmio_clrsetbits_32(PHY_REG(i, 470), 0xf, tmp);
+
+ if (tsel_adder > rddata_en_ie_dly)
+ extra_adder = tsel_adder - rddata_en_ie_dly;
+ else
+ extra_adder = 0;
+ if (rddata_en_ie_dly > (cas_lat - 1 - hs_offset))
+ tmp = tsel_adder;
+ else
+ tmp = rddata_en_ie_dly - 0 + extra_adder;
+ /* PHY_LP4_BOOT_RDDATA_EN_DLY */
+ /* DENALI_PHY_9/137/265/393 4bit offset_8 */
+ mmio_clrsetbits_32(PHY_REG(i, 9), 0xf << 8, tmp << 8);
+ mmio_clrsetbits_32(PHY_REG(i, 137), 0xf << 8, tmp << 8);
+ mmio_clrsetbits_32(PHY_REG(i, 265), 0xf << 8, tmp << 8);
+ mmio_clrsetbits_32(PHY_REG(i, 393), 0xf << 8, tmp << 8);
+ /* PHY_RDDATA_EN_DLY */
+ /* DENALI_PHY_85/213/341/469 4bit offset_24 */
+ mmio_clrsetbits_32(PHY_REG(i, 85), 0xf << 24, tmp << 24);
+ mmio_clrsetbits_32(PHY_REG(i, 213), 0xf << 24, tmp << 24);
+ mmio_clrsetbits_32(PHY_REG(i, 341), 0xf << 24, tmp << 24);
+ mmio_clrsetbits_32(PHY_REG(i, 469), 0xf << 24, tmp << 24);
+
+ if (pdram_timing->mhz <= ENPER_CS_TRAINING_FREQ) {
+ /*
+ * Note: Per-CS Training is not compatible at speeds
+ * under 533 MHz. If the PHY is running at a speed
+ * less than 533 MHz, all phy_per_cs_training_en_X
+ * parameters must be cleared to 0.
+ */
+
+ /*DENALI_PHY_84/212/340/468 1bit offset_16 */
+ mmio_clrbits_32(PHY_REG(i, 84), 0x1 << 16);
+ mmio_clrbits_32(PHY_REG(i, 212), 0x1 << 16);
+ mmio_clrbits_32(PHY_REG(i, 340), 0x1 << 16);
+ mmio_clrbits_32(PHY_REG(i, 468), 0x1 << 16);
+ } else {
+ mmio_setbits_32(PHY_REG(i, 84), 0x1 << 16);
+ mmio_setbits_32(PHY_REG(i, 212), 0x1 << 16);
+ mmio_setbits_32(PHY_REG(i, 340), 0x1 << 16);
+ mmio_setbits_32(PHY_REG(i, 468), 0x1 << 16);
+ }
+ gen_rk3399_phy_dll_bypass(pdram_timing->mhz, i, fn,
+ timing_config->dram_type);
+ }
+}
+
+static int to_get_clk_index(unsigned int mhz)
+{
+ int pll_cnt, i;
+
+ pll_cnt = ARRAY_SIZE(dpll_rates_table);
+
+ /* Assuming rate_table is in descending order */
+ for (i = 0; i < pll_cnt; i++) {
+ if (mhz >= dpll_rates_table[i].mhz)
+ break;
+ }
+
+ /* if mhz lower than lowest frequency in table, use lowest frequency */
+ if (i == pll_cnt)
+ i = pll_cnt - 1;
+
+ return i;
+}
+
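+/*
+ * Example: a request for 700 MHz walks the descending table until
+ * 700 >= 666, so index 3 (the 666 MHz entry) is returned; anything
+ * below 200 MHz clamps to the last entry (200 MHz).
+ */
+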
+uint32_t ddr_get_rate(void)
+{
+ uint32_t refdiv, postdiv1, fbdiv, postdiv2;
+
+ refdiv = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, 1)) & 0x3f;
+ fbdiv = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, 0)) & 0xfff;
+ postdiv1 =
+ (mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, 1)) >> 8) & 0x7;
+ postdiv2 =
+ (mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, 1)) >> 12) & 0x7;
+
+ return (24 / refdiv * fbdiv / postdiv1 / postdiv2) * 1000 * 1000;
+}
+
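+/*
+ * Worked example: with refdiv = 1, fbdiv = 50, postdiv1 = 3 and
+ * postdiv2 = 2 (the 200 MHz entry of dpll_rates_table), this returns
+ * 24 / 1 * 50 / 3 / 2 * 1000 * 1000 = 200000000 Hz.
+ */
+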
+/*
+ * return: bit12: channel 1, external self-refresh
+ * bit11: channel 1, stdby_mode
+ * bit10: channel 1, self-refresh with controller and memory clock gate
+ * bit9: channel 1, self-refresh
+ * bit8: channel 1, power-down
+ *
+ * bit4: channel 0, external self-refresh
+ * bit3: channel 0, stdby_mode
+ * bit2: channel 0, self-refresh with controller and memory clock gate
+ * bit1: channel 0, self-refresh
+ * bit0: channel 0, power-down
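+ *
+ * e.g. a returned value of 0x113 means channel 0 was in power-down,
+ * self-refresh and external self-refresh, and channel 1 was in
+ * power-down.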
+ */
+uint32_t exit_low_power(void)
+{
+ uint32_t low_power = 0;
+ uint32_t channel_mask;
+ uint32_t tmp, i;
+
+ channel_mask = (mmio_read_32(PMUGRF_BASE + PMUGRF_OSREG(2)) >> 28) &
+ 0x3;
+ for (i = 0; i < 2; i++) {
+ if (!(channel_mask & (1 << i)))
+ continue;
+
+ /* exit stdby mode */
+ mmio_write_32(CIC_BASE + CIC_CTRL1,
+ (1 << (i + 16)) | (0 << i));
+ /* exit external self-refresh */
+ tmp = i ? 12 : 8;
+ low_power |= ((mmio_read_32(PMU_BASE + PMU_SFT_CON) >> tmp) &
+ 0x1) << (4 + 8 * i);
+ mmio_clrbits_32(PMU_BASE + PMU_SFT_CON, 1 << tmp);
+ while (!(mmio_read_32(PMU_BASE + PMU_DDR_SREF_ST) & (1 << i)))
+ ;
+ /* exit auto low-power */
+ mmio_clrbits_32(CTL_REG(i, 101), 0x7);
+ /* lp_cmd to exit */
+ if (((mmio_read_32(CTL_REG(i, 100)) >> 24) & 0x7f) !=
+ 0x40) {
+ while (mmio_read_32(CTL_REG(i, 200)) & 0x1)
+ ;
+ mmio_clrsetbits_32(CTL_REG(i, 93), 0xff << 24,
+ 0x69 << 24);
+ while (((mmio_read_32(CTL_REG(i, 100)) >> 24) & 0x7f) !=
+ 0x40)
+ ;
+ }
+ }
+ return low_power;
+}
+
+void resume_low_power(uint32_t low_power)
+{
+ uint32_t channel_mask;
+ uint32_t tmp, i, val;
+
+ channel_mask = (mmio_read_32(PMUGRF_BASE + PMUGRF_OSREG(2)) >> 28) &
+ 0x3;
+ for (i = 0; i < 2; i++) {
+ if (!(channel_mask & (1 << i)))
+ continue;
+
+ /* resume external self-refresh */
+ tmp = i ? 12 : 8;
+ val = (low_power >> (4 + 8 * i)) & 0x1;
+ mmio_setbits_32(PMU_BASE + PMU_SFT_CON, val << tmp);
+ /* resume auto low-power */
+ val = (low_power >> (8 * i)) & 0x7;
+ mmio_setbits_32(CTL_REG(i, 101), val);
+ /* resume stdby mode */
+ val = (low_power >> (3 + 8 * i)) & 0x1;
+ mmio_write_32(CIC_BASE + CIC_CTRL1,
+ (1 << (i + 16)) | (val << i));
+ }
+}
+
+static void dram_low_power_config(void)
+{
+ uint32_t tmp, i;
+ uint32_t ch_cnt = rk3399_dram_status.timing_config.ch_cnt;
+ uint32_t dram_type = rk3399_dram_status.timing_config.dram_type;
+
+ if (dram_type == DDR3)
+ tmp = (2 << 16) | (0x7 << 8);
+ else
+ tmp = (3 << 16) | (0x7 << 8);
+
+ for (i = 0; i < ch_cnt; i++)
+ mmio_clrsetbits_32(CTL_REG(i, 101), 0x70f0f, tmp);
+
+ /* standby idle */
+ mmio_write_32(CIC_BASE + CIC_CG_WAIT_TH, 0x640008);
+
+ if (ch_cnt == 2) {
+ mmio_write_32(GRF_BASE + GRF_DDRC1_CON1,
+ (((0x1<<4) | (0x1<<5) | (0x1<<6) |
+ (0x1<<7)) << 16) |
+ ((0x1<<4) | (0x0<<5) | (0x1<<6) | (0x1<<7)));
+ mmio_write_32(CIC_BASE + CIC_CTRL1, 0x002a0028);
+ }
+
+ mmio_write_32(GRF_BASE + GRF_DDRC0_CON1,
+ (((0x1<<4) | (0x1<<5) | (0x1<<6) | (0x1<<7)) << 16) |
+ ((0x1<<4) | (0x0<<5) | (0x1<<6) | (0x1<<7)));
+ mmio_write_32(CIC_BASE + CIC_CTRL1, 0x00150014);
+}
+
+void dram_dfs_init(void)
+{
+ uint32_t trefi0, trefi1, boot_freq;
+ uint32_t rddqs_adjust, rddqs_slave;
+
+ /* get sdram config for os reg */
+ get_dram_drv_odt_val(sdram_config.dramtype,
+ &rk3399_dram_status.drv_odt_lp_cfg);
+ sdram_timing_cfg_init(&rk3399_dram_status.timing_config,
+ &sdram_config,
+ &rk3399_dram_status.drv_odt_lp_cfg);
+
+ trefi0 = ((mmio_read_32(CTL_REG(0, 48)) >> 16) & 0xffff) + 8;
+ trefi1 = ((mmio_read_32(CTL_REG(0, 49)) >> 16) & 0xffff) + 8;
+
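+	/*
+	 * Estimate the boot frequency from the programmed tREFI, assuming
+	 * it counts controller clock cycles over a 3.9us refresh interval
+	 * (7.8us for DDR3, hence the divide-by-two below):
+	 * freq_mhz ~= trefi / 3.9 = trefi * 10 / 39
+	 */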
+ rk3399_dram_status.index_freq[0] = trefi0 * 10 / 39;
+ rk3399_dram_status.index_freq[1] = trefi1 * 10 / 39;
+ rk3399_dram_status.current_index =
+ (mmio_read_32(CTL_REG(0, 111)) >> 16) & 0x3;
+ if (rk3399_dram_status.timing_config.dram_type == DDR3) {
+ rk3399_dram_status.index_freq[0] /= 2;
+ rk3399_dram_status.index_freq[1] /= 2;
+ }
+ boot_freq =
+ rk3399_dram_status.index_freq[rk3399_dram_status.current_index];
+ boot_freq = dpll_rates_table[to_get_clk_index(boot_freq)].mhz;
+ rk3399_dram_status.boot_freq = boot_freq;
+ rk3399_dram_status.index_freq[rk3399_dram_status.current_index] =
+ boot_freq;
+ rk3399_dram_status.index_freq[(rk3399_dram_status.current_index + 1) &
+ 0x1] = 0;
+ rk3399_dram_status.low_power_stat = 0;
+	/*
+	 * The following registers decide whether the NOC stalls access
+	 * requests or returns an error while the NOC is being idled. When
+	 * doing DDR frequency scaling in M0 or DCF we need the NOC to
+	 * stall the access requests; if it returned an error instead, the
+	 * CPU could take a data abort while the DDR frequency is changing.
+	 * These registers do not need to be set every time, so they are
+	 * initialized once here in dram_dfs_init().
+	 */
+ mmio_write_32(GRF_BASE + GRF_SOC_CON(0), 0xffffffff);
+ mmio_write_32(GRF_BASE + GRF_SOC_CON(1), 0xffffffff);
+ mmio_write_32(GRF_BASE + GRF_SOC_CON(2), 0xffffffff);
+ mmio_write_32(GRF_BASE + GRF_SOC_CON(3), 0xffffffff);
+ mmio_write_32(GRF_BASE + GRF_SOC_CON(4), 0x70007000);
+
+ /* Disable multicast */
+ mmio_clrbits_32(PHY_REG(0, 896), 1);
+ mmio_clrbits_32(PHY_REG(1, 896), 1);
+ dram_low_power_config();
+
+	/*
+	 * If the boot frequency is not running with the PHY DLL bypassed,
+	 * rddqs_delay_ps can be derived from the gate training results.
+	 */
+ if (((mmio_read_32(PHY_REG(0, 86)) >> 8) & 0xf) != 0xc) {
+
+		/*
+		 * Select the PHY frequency set matching current_index so
+		 * the gate training results can be read back from the
+		 * registers.
+		 */
+ mmio_clrsetbits_32(PHY_REG(0, 896), 0x3 << 8,
+ rk3399_dram_status.current_index << 8);
+ rddqs_slave = (mmio_read_32(PHY_REG(0, 77)) >> 16) & 0x3ff;
+ rddqs_slave = rddqs_slave * 1000000 / boot_freq / 512;
+
+ rddqs_adjust = mmio_read_32(PHY_REG(0, 78)) & 0xf;
+ rddqs_adjust = rddqs_adjust * 1000000 / boot_freq;
+ rddqs_delay_ps = rddqs_slave + rddqs_adjust -
+ (1000000 / boot_freq / 2);
+ } else {
+ rddqs_delay_ps = 3500;
+ }
+}
+
+/*
+ * arg0: bit0-7: sr_idle; bit8-15: sr_mc_gate_idle; bit16-31: standby_idle
+ * arg1: bit0-11: pd_idle; bit16-27: srpd_lite_idle
+ * arg2: bit0: ODT enable
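+ *
+ * e.g. (illustrative values)
+ * dram_set_odt_pd((10 << 16) | (20 << 8) | 30, (40 << 16) | 50, 1)
+ * requests standby_idle = 10, sr_mc_gate_idle = 20, sr_idle = 30,
+ * srpd_lite_idle = 40, pd_idle = 50, with ODT enabled.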
+ */
+uint32_t dram_set_odt_pd(uint32_t arg0, uint32_t arg1, uint32_t arg2)
+{
+ struct drv_odt_lp_config *lp_cfg = &rk3399_dram_status.drv_odt_lp_cfg;
+ uint32_t *low_power = &rk3399_dram_status.low_power_stat;
+ uint32_t dram_type, ch_count, pd_tmp, sr_tmp, i;
+
+ dram_type = rk3399_dram_status.timing_config.dram_type;
+ ch_count = rk3399_dram_status.timing_config.ch_cnt;
+
+ lp_cfg->sr_idle = arg0 & 0xff;
+ lp_cfg->sr_mc_gate_idle = (arg0 >> 8) & 0xff;
+ lp_cfg->standby_idle = (arg0 >> 16) & 0xffff;
+ lp_cfg->pd_idle = arg1 & 0xfff;
+ lp_cfg->srpd_lite_idle = (arg1 >> 16) & 0xfff;
+
+ rk3399_dram_status.timing_config.odt = arg2 & 0x1;
+
+ exit_low_power();
+
+ *low_power = 0;
+
+ /* pd_idle en */
+ if (lp_cfg->pd_idle)
+ *low_power |= ((1 << 0) | (1 << 8));
+	/* sr_idle or srpd_lite_idle enabled */
+ if (lp_cfg->sr_idle | lp_cfg->srpd_lite_idle)
+ *low_power |= ((1 << 1) | (1 << 9));
+ /* sr_mc_gate_idle */
+ if (lp_cfg->sr_mc_gate_idle)
+ *low_power |= ((1 << 2) | (1 << 10));
+	/* standby_idle */
+ if (lp_cfg->standby_idle) {
+ if (rk3399_dram_status.timing_config.ch_cnt == 2)
+ *low_power |= ((1 << 3) | (1 << 11));
+ else
+ *low_power |= (1 << 3);
+ }
+
+ pd_tmp = arg1;
+ if (dram_type != LPDDR4)
+ pd_tmp = arg1 & 0xfff;
+ sr_tmp = arg0 & 0xffff;
+ for (i = 0; i < ch_count; i++) {
+ mmio_write_32(CTL_REG(i, 102), pd_tmp);
+ mmio_clrsetbits_32(CTL_REG(i, 103), 0xffff, sr_tmp);
+ }
+ mmio_write_32(CIC_BASE + CIC_IDLE_TH, (arg0 >> 16) & 0xffff);
+
+ return 0;
+}
+
+static void m0_configure_ddr(struct pll_div pll_div, uint32_t ddr_index)
+{
+ /* set PARAM to M0_FUNC_DRAM */
+ mmio_write_32(M0_PARAM_ADDR + PARAM_M0_FUNC, M0_FUNC_DRAM);
+
+ mmio_write_32(M0_PARAM_ADDR + PARAM_DPLL_CON0, FBDIV(pll_div.fbdiv));
+ mmio_write_32(M0_PARAM_ADDR + PARAM_DPLL_CON1,
+ POSTDIV2(pll_div.postdiv2) | POSTDIV1(pll_div.postdiv1) |
+ REFDIV(pll_div.refdiv));
+
+ mmio_write_32(M0_PARAM_ADDR + PARAM_DRAM_FREQ, pll_div.mhz);
+
+ mmio_write_32(M0_PARAM_ADDR + PARAM_FREQ_SELECT, ddr_index << 4);
+ dmbst();
+}
+
+static uint32_t prepare_ddr_timing(uint32_t mhz)
+{
+ uint32_t index;
+ struct dram_timing_t dram_timing;
+
+ rk3399_dram_status.timing_config.freq = mhz;
+
+ if (mhz < 300)
+ rk3399_dram_status.timing_config.dllbp = 1;
+ else
+ rk3399_dram_status.timing_config.dllbp = 0;
+
+ if (rk3399_dram_status.timing_config.odt == 1)
+ gen_rk3399_set_odt(1);
+
+ index = (rk3399_dram_status.current_index + 1) & 0x1;
+
+	/*
+	 * Check whether gate training timings are available for the
+	 * target frequency.
+	 */
+ dram_get_parameter(&rk3399_dram_status.timing_config, &dram_timing);
+ gen_rk3399_ctl_params(&rk3399_dram_status.timing_config,
+ &dram_timing, index);
+ gen_rk3399_pi_params(&rk3399_dram_status.timing_config,
+ &dram_timing, index);
+ gen_rk3399_phy_params(&rk3399_dram_status.timing_config,
+ &rk3399_dram_status.drv_odt_lp_cfg,
+ &dram_timing, index);
+ rk3399_dram_status.index_freq[index] = mhz;
+
+ return index;
+}
+
+uint32_t ddr_set_rate(uint32_t hz)
+{
+ uint32_t low_power, index, ddr_index;
+ uint32_t mhz = hz / (1000 * 1000);
+
+ if (mhz ==
+ rk3399_dram_status.index_freq[rk3399_dram_status.current_index])
+ return mhz;
+
+ index = to_get_clk_index(mhz);
+ mhz = dpll_rates_table[index].mhz;
+
+ ddr_index = prepare_ddr_timing(mhz);
+ gen_rk3399_enable_training(rk3399_dram_status.timing_config.ch_cnt,
+ mhz);
+ if (ddr_index > 1)
+ goto out;
+
+ /*
+ * Make sure the clock is enabled. The M0 clocks should be on all of the
+ * time during S0.
+ */
+ m0_configure_ddr(dpll_rates_table[index], ddr_index);
+ m0_start();
+ m0_wait_done();
+ m0_stop();
+
+ if (rk3399_dram_status.timing_config.odt == 0)
+ gen_rk3399_set_odt(0);
+
+ rk3399_dram_status.current_index = ddr_index;
+ low_power = rk3399_dram_status.low_power_stat;
+ resume_low_power(low_power);
+out:
+ gen_rk3399_disable_training(rk3399_dram_status.timing_config.ch_cnt);
+ return mhz;
+}
+
+uint32_t ddr_round_rate(uint32_t hz)
+{
+ int index;
+ uint32_t mhz = hz / (1000 * 1000);
+
+ index = to_get_clk_index(mhz);
+
+ return dpll_rates_table[index].mhz * 1000 * 1000;
+}
+
+void ddr_prepare_for_sys_suspend(void)
+{
+ uint32_t mhz =
+ rk3399_dram_status.index_freq[rk3399_dram_status.current_index];
+
+ /*
+ * If we're not currently at the boot (assumed highest) frequency, we
+	 * need to change frequencies to configure our current index.
+ */
+ rk3399_suspend_status.freq = mhz;
+ exit_low_power();
+ rk3399_suspend_status.low_power_stat =
+ rk3399_dram_status.low_power_stat;
+ rk3399_suspend_status.odt = rk3399_dram_status.timing_config.odt;
+ rk3399_dram_status.low_power_stat = 0;
+ rk3399_dram_status.timing_config.odt = 1;
+ if (mhz != rk3399_dram_status.boot_freq)
+ ddr_set_rate(rk3399_dram_status.boot_freq * 1000 * 1000);
+
+ /*
+ * This will configure the other index to be the same frequency as the
+ * current one. We retrain both indices on resume, so both have to be
+	 * set up for the same frequency.
+ */
+ prepare_ddr_timing(rk3399_dram_status.boot_freq);
+}
+
+void ddr_prepare_for_sys_resume(void)
+{
+ /* Disable multicast */
+ mmio_clrbits_32(PHY_REG(0, 896), 1);
+ mmio_clrbits_32(PHY_REG(1, 896), 1);
+
+ /* The suspend code changes the current index, so reset it now. */
+ rk3399_dram_status.current_index =
+ (mmio_read_32(CTL_REG(0, 111)) >> 16) & 0x3;
+ rk3399_dram_status.low_power_stat =
+ rk3399_suspend_status.low_power_stat;
+ rk3399_dram_status.timing_config.odt = rk3399_suspend_status.odt;
+
+ /*
+ * Set the saved frequency from suspend if it's different than the
+ * current frequency.
+ */
+ if (rk3399_suspend_status.freq !=
+ rk3399_dram_status.index_freq[rk3399_dram_status.current_index]) {
+ ddr_set_rate(rk3399_suspend_status.freq * 1000 * 1000);
+ return;
+ }
+
+ gen_rk3399_set_odt(rk3399_dram_status.timing_config.odt);
+ resume_low_power(rk3399_dram_status.low_power_stat);
+}
diff --git a/plat/rockchip/rk3399/drivers/dram/dfs.h b/plat/rockchip/rk3399/drivers/dram/dfs.h
new file mode 100644
index 00000000..679216c8
--- /dev/null
+++ b/plat/rockchip/rk3399/drivers/dram/dfs.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef __SOC_ROCKCHIP_RK3399_DFS_H__
+#define __SOC_ROCKCHIP_RK3399_DFS_H__
+
+struct rk3399_sdram_default_config {
+ unsigned char bl;
+ /* 1:auto precharge, 0:never auto precharge */
+ unsigned char ap;
+ /* dram driver strength */
+ unsigned char dramds;
+	/* dram ODT; ignored when odt=0 */
+ unsigned char dramodt;
+	/* CA ODT; ignored when odt=0.
+	 * Only used by LPDDR4.
+	 */
+ unsigned char caodt;
+ unsigned char burst_ref_cnt;
+	/* ZQCS interval, in seconds */
+ unsigned char zqcsi;
+};
+
+struct drv_odt_lp_config {
+ uint32_t pd_idle;
+ uint32_t sr_idle;
+ uint32_t sr_mc_gate_idle;
+ uint32_t srpd_lite_idle;
+ uint32_t standby_idle;
+ uint32_t odt_en;
+
+ uint32_t dram_side_drv;
+ uint32_t dram_side_dq_odt;
+ uint32_t dram_side_ca_odt;
+};
+
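+/*
+ * Illustrative usage (hypothetical caller):
+ *	rate_hz = ddr_round_rate(target_hz);
+ *	ddr_set_rate(rate_hz);
+ * ddr_set_rate() returns the frequency actually programmed, in MHz.
+ */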
+uint32_t ddr_set_rate(uint32_t hz);
+uint32_t ddr_round_rate(uint32_t hz);
+uint32_t ddr_get_rate(void);
+uint32_t dram_set_odt_pd(uint32_t arg0, uint32_t arg1, uint32_t arg2);
+void dram_dfs_init(void);
+void ddr_prepare_for_sys_suspend(void);
+void ddr_prepare_for_sys_resume(void);
+
+#endif
diff --git a/plat/rockchip/rk3399/drivers/dram/dram.c b/plat/rockchip/rk3399/drivers/dram/dram.c
new file mode 100644
index 00000000..42b62945
--- /dev/null
+++ b/plat/rockchip/rk3399/drivers/dram/dram.c
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <dram.h>
+#include <plat_private.h>
+#include <rk3399_def.h>
+#include <secure.h>
+#include <soc.h>
+
+__pmusramdata struct rk3399_sdram_params sdram_config;
+
+void dram_init(void)
+{
+ uint32_t os_reg2_val, i;
+
+ os_reg2_val = mmio_read_32(PMUGRF_BASE + PMUGRF_OSREG(2));
+ sdram_config.dramtype = SYS_REG_DEC_DDRTYPE(os_reg2_val);
+ sdram_config.num_channels = SYS_REG_DEC_NUM_CH(os_reg2_val);
+ sdram_config.stride = (mmio_read_32(SGRF_BASE + SGRF_SOC_CON3_7(4)) >>
+ 10) & 0x1f;
+
+ for (i = 0; i < 2; i++) {
+ struct rk3399_sdram_channel *ch = &sdram_config.ch[i];
+ struct rk3399_msch_timings *noc = &ch->noc_timings;
+
+ if (!(SYS_REG_DEC_CHINFO(os_reg2_val, i)))
+ continue;
+
+ ch->rank = SYS_REG_DEC_RANK(os_reg2_val, i);
+ ch->col = SYS_REG_DEC_COL(os_reg2_val, i);
+ ch->bk = SYS_REG_DEC_BK(os_reg2_val, i);
+ ch->bw = SYS_REG_DEC_BW(os_reg2_val, i);
+ ch->dbw = SYS_REG_DEC_DBW(os_reg2_val, i);
+ ch->row_3_4 = SYS_REG_DEC_ROW_3_4(os_reg2_val, i);
+ ch->cs0_row = SYS_REG_DEC_CS0_ROW(os_reg2_val, i);
+ ch->cs1_row = SYS_REG_DEC_CS1_ROW(os_reg2_val, i);
+ ch->ddrconfig = mmio_read_32(MSCH_BASE(i) + MSCH_DEVICECONF);
+
+ noc->ddrtiminga0.d32 = mmio_read_32(MSCH_BASE(i) +
+ MSCH_DDRTIMINGA0);
+ noc->ddrtimingb0.d32 = mmio_read_32(MSCH_BASE(i) +
+ MSCH_DDRTIMINGB0);
+ noc->ddrtimingc0.d32 = mmio_read_32(MSCH_BASE(i) +
+ MSCH_DDRTIMINGC0);
+ noc->devtodev0.d32 = mmio_read_32(MSCH_BASE(i) +
+ MSCH_DEVTODEV0);
+ noc->ddrmode.d32 = mmio_read_32(MSCH_BASE(i) + MSCH_DDRMODE);
+ noc->agingx0 = mmio_read_32(MSCH_BASE(i) + MSCH_AGINGX0);
+ }
+}
diff --git a/plat/rockchip/rk3399/drivers/dram/dram.h b/plat/rockchip/rk3399/drivers/dram/dram.h
new file mode 100644
index 00000000..0780fc3a
--- /dev/null
+++ b/plat/rockchip/rk3399/drivers/dram/dram.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef __SOC_ROCKCHIP_RK3399_DRAM_H__
+#define __SOC_ROCKCHIP_RK3399_DRAM_H__
+
+#include <dram_regs.h>
+#include <plat_private.h>
+#include <stdint.h>
+
+enum {
+ DDR3 = 3,
+ LPDDR2 = 5,
+ LPDDR3 = 6,
+ LPDDR4 = 7,
+ UNUSED = 0xff
+};
+
+struct rk3399_ddr_pctl_regs {
+ uint32_t denali_ctl[CTL_REG_NUM];
+};
+
+struct rk3399_ddr_publ_regs {
+ /*
+ * PHY registers from 0 to 90 for slice1.
+ * These are used to restore slice1-4 on resume.
+ */
+ uint32_t phy0[91];
+ /*
+ * PHY registers from 512 to 895.
+ * Only registers 0-37 of each 128 register range are used.
+ */
+ uint32_t phy512[3][38];
+ uint32_t phy896[63];
+};
+
+struct rk3399_ddr_pi_regs {
+ uint32_t denali_pi[PI_REG_NUM];
+};
+union noc_ddrtiminga0 {
+ uint32_t d32;
+ struct {
+ unsigned acttoact : 6;
+ unsigned reserved0 : 2;
+ unsigned rdtomiss : 6;
+ unsigned reserved1 : 2;
+ unsigned wrtomiss : 6;
+ unsigned reserved2 : 2;
+ unsigned readlatency : 8;
+ } b;
+};
+
+union noc_ddrtimingb0 {
+ uint32_t d32;
+ struct {
+ unsigned rdtowr : 5;
+ unsigned reserved0 : 3;
+ unsigned wrtord : 5;
+ unsigned reserved1 : 3;
+ unsigned rrd : 4;
+ unsigned reserved2 : 4;
+ unsigned faw : 6;
+ unsigned reserved3 : 2;
+ } b;
+};
+
+union noc_ddrtimingc0 {
+ uint32_t d32;
+ struct {
+ unsigned burstpenalty : 4;
+ unsigned reserved0 : 4;
+ unsigned wrtomwr : 6;
+ unsigned reserved1 : 18;
+ } b;
+};
+
+union noc_devtodev0 {
+ uint32_t d32;
+ struct {
+ unsigned busrdtord : 3;
+ unsigned reserved0 : 1;
+ unsigned busrdtowr : 3;
+ unsigned reserved1 : 1;
+ unsigned buswrtord : 3;
+ unsigned reserved2 : 1;
+ unsigned buswrtowr : 3;
+ unsigned reserved3 : 17;
+ } b;
+};
+
+union noc_ddrmode {
+ uint32_t d32;
+ struct {
+ unsigned autoprecharge : 1;
+ unsigned bypassfiltering : 1;
+ unsigned fawbank : 1;
+ unsigned burstsize : 2;
+ unsigned mwrsize : 2;
+ unsigned reserved2 : 1;
+ unsigned forceorder : 8;
+ unsigned forceorderstate : 8;
+ unsigned reserved3 : 8;
+ } b;
+};
+
+struct rk3399_msch_timings {
+ union noc_ddrtiminga0 ddrtiminga0;
+ union noc_ddrtimingb0 ddrtimingb0;
+ union noc_ddrtimingc0 ddrtimingc0;
+ union noc_devtodev0 devtodev0;
+ union noc_ddrmode ddrmode;
+ uint32_t agingx0;
+};
+
+struct rk3399_sdram_channel {
+ unsigned char rank;
+ /* col = 0, means this channel is invalid */
+ unsigned char col;
+ /* 3:8bank, 2:4bank */
+ unsigned char bk;
+ /* channel buswidth, 2:32bit, 1:16bit, 0:8bit */
+ unsigned char bw;
+ /* die buswidth, 2:32bit, 1:16bit, 0:8bit */
+ unsigned char dbw;
+ /* row_3_4 = 1: 6Gb or 12Gb die
+ * row_3_4 = 0: normal die, power of 2
+ */
+ unsigned char row_3_4;
+ unsigned char cs0_row;
+ unsigned char cs1_row;
+ uint32_t ddrconfig;
+ struct rk3399_msch_timings noc_timings;
+};
+
+struct rk3399_sdram_params {
+ struct rk3399_sdram_channel ch[2];
+ uint32_t ddr_freq;
+ unsigned char dramtype;
+ unsigned char num_channels;
+ unsigned char stride;
+ unsigned char odt;
+ struct rk3399_ddr_pctl_regs pctl_regs;
+ struct rk3399_ddr_pi_regs pi_regs;
+ struct rk3399_ddr_publ_regs phy_regs;
+ uint32_t rx_cal_dqs[2][4];
+};
+
+extern __sramdata struct rk3399_sdram_params sdram_config;
+
+void dram_init(void);
+
+#endif
diff --git a/plat/rockchip/rk3399/drivers/dram/dram_spec_timing.c b/plat/rockchip/rk3399/drivers/dram/dram_spec_timing.c
new file mode 100644
index 00000000..2e196b54
--- /dev/null
+++ b/plat/rockchip/rk3399/drivers/dram/dram_spec_timing.c
@@ -0,0 +1,1318 @@
+/*
+ * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <dram.h>
+#include <stdint.h>
+#include <string.h>
+#include <utils.h>
+#include "dram_spec_timing.h"
+
+static const uint8_t ddr3_cl_cwl[][7] = {
+ /*
+ * speed 0~330 331 ~ 400 401 ~ 533 534~666 667~800 801~933 934~1066
+ * tCK>3 2.5~3 1.875~2.5 1.5~1.875 1.25~1.5 1.07~1.25 0.938~1.07
+ * cl<<4, cwl cl<<4, cwl cl<<4, cwl
+ */
+ /* DDR3_800D (5-5-5) */
+ {((5 << 4) | 5), ((5 << 4) | 5), 0, 0, 0, 0, 0},
+ /* DDR3_800E (6-6-6) */
+ {((5 << 4) | 5), ((6 << 4) | 5), 0, 0, 0, 0, 0},
+ /* DDR3_1066E (6-6-6) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((6 << 4) | 6), 0, 0, 0, 0},
+ /* DDR3_1066F (7-7-7) */
+ {((5 << 4) | 5), ((6 << 4) | 5), ((7 << 4) | 6), 0, 0, 0, 0},
+ /* DDR3_1066G (8-8-8) */
+ {((5 << 4) | 5), ((6 << 4) | 5), ((8 << 4) | 6), 0, 0, 0, 0},
+ /* DDR3_1333F (7-7-7) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((6 << 4) | 6), ((7 << 4) | 7),
+ 0, 0, 0},
+ /* DDR3_1333G (8-8-8) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((7 << 4) | 6), ((8 << 4) | 7),
+ 0, 0, 0},
+ /* DDR3_1333H (9-9-9) */
+ {((5 << 4) | 5), ((6 << 4) | 5), ((8 << 4) | 6), ((9 << 4) | 7),
+ 0, 0, 0},
+ /* DDR3_1333J (10-10-10) */
+ {((5 << 4) | 5), ((6 << 4) | 5), ((8 << 4) | 6), ((10 << 4) | 7),
+ 0, 0, 0},
+ /* DDR3_1600G (8-8-8) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((6 << 4) | 6), ((7 << 4) | 7),
+ ((8 << 4) | 8), 0, 0},
+ /* DDR3_1600H (9-9-9) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((6 << 4) | 6), ((8 << 4) | 7),
+ ((9 << 4) | 8), 0, 0},
+ /* DDR3_1600J (10-10-10) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((7 << 4) | 6), ((9 << 4) | 7),
+ ((10 << 4) | 8), 0, 0},
+ /* DDR3_1600K (11-11-11) */
+ {((5 << 4) | 5), ((6 << 4) | 5), ((8 << 4) | 6), ((10 << 4) | 7),
+ ((11 << 4) | 8), 0, 0},
+ /* DDR3_1866J (10-10-10) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((6 << 4) | 6), ((8 << 4) | 7),
+ ((9 << 4) | 8), ((11 << 4) | 9), 0},
+ /* DDR3_1866K (11-11-11) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((7 << 4) | 6), ((8 << 4) | 7),
+ ((10 << 4) | 8), ((11 << 4) | 9), 0},
+ /* DDR3_1866L (12-12-12) */
+ {((6 << 4) | 5), ((6 << 4) | 5), ((7 << 4) | 6), ((9 << 4) | 7),
+ ((11 << 4) | 8), ((12 << 4) | 9), 0},
+ /* DDR3_1866M (13-13-13) */
+ {((6 << 4) | 5), ((6 << 4) | 5), ((8 << 4) | 6), ((10 << 4) | 7),
+ ((11 << 4) | 8), ((13 << 4) | 9), 0},
+ /* DDR3_2133K (11-11-11) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((6 << 4) | 6), ((7 << 4) | 7),
+ ((9 << 4) | 8), ((10 << 4) | 9), ((11 << 4) | 10)},
+ /* DDR3_2133L (12-12-12) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((6 << 4) | 6), ((8 << 4) | 7),
+ ((9 << 4) | 8), ((11 << 4) | 9), ((12 << 4) | 10)},
+ /* DDR3_2133M (13-13-13) */
+ {((5 << 4) | 5), ((5 << 4) | 5), ((7 << 4) | 6), ((9 << 4) | 7),
+ ((10 << 4) | 8), ((12 << 4) | 9), ((13 << 4) | 10)},
+ /* DDR3_2133N (14-14-14) */
+ {((6 << 4) | 5), ((6 << 4) | 5), ((7 << 4) | 6), ((9 << 4) | 7),
+ ((11 << 4) | 8), ((13 << 4) | 9), ((14 << 4) | 10)},
+ /* DDR3_DEFAULT */
+ {((6 << 4) | 5), ((6 << 4) | 5), ((8 << 4) | 6), ((10 << 4) | 7),
+ ((11 << 4) | 8), ((13 << 4) | 9), ((14 << 4) | 10)}
+};
+
+static const uint16_t ddr3_trc_tfaw[] = {
+ /* tRC tFAW */
+ ((50 << 8) | 50), /* DDR3_800D (5-5-5) */
+ ((53 << 8) | 50), /* DDR3_800E (6-6-6) */
+
+ ((49 << 8) | 50), /* DDR3_1066E (6-6-6) */
+ ((51 << 8) | 50), /* DDR3_1066F (7-7-7) */
+ ((53 << 8) | 50), /* DDR3_1066G (8-8-8) */
+
+ ((47 << 8) | 45), /* DDR3_1333F (7-7-7) */
+ ((48 << 8) | 45), /* DDR3_1333G (8-8-8) */
+ ((50 << 8) | 45), /* DDR3_1333H (9-9-9) */
+ ((51 << 8) | 45), /* DDR3_1333J (10-10-10) */
+
+ ((45 << 8) | 40), /* DDR3_1600G (8-8-8) */
+ ((47 << 8) | 40), /* DDR3_1600H (9-9-9)*/
+ ((48 << 8) | 40), /* DDR3_1600J (10-10-10) */
+ ((49 << 8) | 40), /* DDR3_1600K (11-11-11) */
+
+ ((45 << 8) | 35), /* DDR3_1866J (10-10-10) */
+ ((46 << 8) | 35), /* DDR3_1866K (11-11-11) */
+ ((47 << 8) | 35), /* DDR3_1866L (12-12-12) */
+ ((48 << 8) | 35), /* DDR3_1866M (13-13-13) */
+
+ ((44 << 8) | 35), /* DDR3_2133K (11-11-11) */
+ ((45 << 8) | 35), /* DDR3_2133L (12-12-12) */
+ ((46 << 8) | 35), /* DDR3_2133M (13-13-13) */
+ ((47 << 8) | 35), /* DDR3_2133N (14-14-14) */
+
+ ((53 << 8) | 50) /* DDR3_DEFAULT */
+};
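+
+/*
+ * Each ddr3_trc_tfaw entry packs (tRC_ns << 8) | tFAW_ns, e.g.
+ * ((45 << 8) | 40) for DDR3_1600G means tRC = 45ns and tFAW = 40ns;
+ * consumers extract tRC with ">> 8" and tFAW with "& 0x0ff".
+ */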
+
+static uint32_t get_max_speed_rate(struct timing_related_config *timing_config)
+{
+ if (timing_config->ch_cnt > 1)
+ return max(timing_config->dram_info[0].speed_rate,
+ timing_config->dram_info[1].speed_rate);
+ else
+ return timing_config->dram_info[0].speed_rate;
+}
+
+static uint32_t
+get_max_die_capability(struct timing_related_config *timing_config)
+{
+ uint32_t die_cap = 0;
+ uint32_t cs, ch;
+
+ for (ch = 0; ch < timing_config->ch_cnt; ch++) {
+ for (cs = 0; cs < timing_config->dram_info[ch].cs_cnt; cs++) {
+ die_cap = max(die_cap,
+ timing_config->
+ dram_info[ch].per_die_capability[cs]);
+ }
+ }
+ return die_cap;
+}
+
+/* tRSTL, 100ns */
+#define DDR3_TRSTL (100)
+/* trsth, 500us */
+#define DDR3_TRSTH (500000)
+/* trefi, 7.8us */
+#define DDR3_TREFI_7_8_US (7800)
+/* tWR, 15ns */
+#define DDR3_TWR (15)
+/* tRTP, max(4 tCK,7.5ns) */
+#define DDR3_TRTP (7)
+/* tRRD = max(4nCK, 10ns) */
+#define DDR3_TRRD (10)
+/* tCK */
+#define DDR3_TCCD (4)
+/*tWTR, max(4 tCK,7.5ns)*/
+#define DDR3_TWTR (7)
+/* tCK */
+#define DDR3_TRTW (0)
+/* tRAS, 37.5ns(400MHz) 37.5ns(533MHz) */
+#define DDR3_TRAS (37)
+/* ns */
+#define DDR3_TRFC_512MBIT (90)
+/* ns */
+#define DDR3_TRFC_1GBIT (110)
+/* ns */
+#define DDR3_TRFC_2GBIT (160)
+/* ns */
+#define DDR3_TRFC_4GBIT (300)
+/* ns */
+#define DDR3_TRFC_8GBIT (350)
+
+/*pd and sr*/
+#define DDR3_TXP (7) /* tXP, max(3 tCK, 7.5ns)( < 933MHz) */
+#define DDR3_TXPDLL (24) /* tXPDLL, max(10 tCK, 24ns) */
+#define DDR3_TDLLK (512) /* tXSR, tDLLK=512 tCK */
+#define DDR3_TCKE_400MHZ (7) /* tCKE, max(3 tCK,7.5ns)(400MHz) */
+#define DDR3_TCKE_533MHZ (6) /* tCKE, max(3 tCK,5.625ns)(533MHz) */
+#define DDR3_TCKSRE (10) /* tCKSRE/tCKSRX, max(5 tCK, 10ns) */
+
+/*mode register timing*/
+#define DDR3_TMOD (15) /* tMOD, max(12 tCK,15ns) */
+#define DDR3_TMRD (4) /* tMRD, 4 tCK */
+
+/* ZQ */
+#define DDR3_TZQINIT (640) /* tZQinit, max(512 tCK, 640ns) */
+#define DDR3_TZQCS (80) /* tZQCS, max(64 tCK, 80ns) */
+#define DDR3_TZQOPER (320) /* tZQoper, max(256 tCK, 320ns) */
+
+/* Write leveling */
+#define DDR3_TWLMRD (40) /* tCK */
+#define DDR3_TWLO (9) /* max 7.5ns */
+#define DDR3_TWLDQSEN (25) /* tCK */
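+
+/*
+ * The ns -> tCK conversions in this file use ceiling division:
+ *	cycles = (t_ns * nmhz + 999) / 1000
+ * e.g. tWR = 15ns at 800MHz gives (15 * 800 + 999) / 1000 = 12 cycles.
+ * Timings with a 0.5ns fraction add (nmhz >> 1) before rounding, e.g.
+ * tRTP = 7.5ns at 800MHz gives (7 * 800 + 400 + 999) / 1000 = 6 cycles.
+ */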
+
+/*
+ * Description: calculate all DDR3 spec timings into "pdram_timing",
+ * based on the input parameter "timing_config".
+ * parameters:
+ * input: timing_config
+ * output: pdram_timing
+ */
+static void ddr3_get_parameter(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ uint32_t nmhz = timing_config->freq;
+ uint32_t ddr_speed_bin = get_max_speed_rate(timing_config);
+ uint32_t ddr_capability_per_die = get_max_die_capability(timing_config);
+ uint32_t tmp;
+
+ zeromem((void *)pdram_timing, sizeof(struct dram_timing_t));
+ pdram_timing->mhz = nmhz;
+ pdram_timing->al = 0;
+ pdram_timing->bl = timing_config->bl;
+ if (nmhz <= 330)
+ tmp = 0;
+ else if (nmhz <= 400)
+ tmp = 1;
+ else if (nmhz <= 533)
+ tmp = 2;
+ else if (nmhz <= 666)
+ tmp = 3;
+ else if (nmhz <= 800)
+ tmp = 4;
+ else if (nmhz <= 933)
+ tmp = 5;
+ else
+ tmp = 6;
+
+	/* when the DLL is bypassed, cl = cwl = 6 */
+ if (nmhz < 300) {
+ pdram_timing->cl = 6;
+ pdram_timing->cwl = 6;
+ } else {
+ pdram_timing->cl = (ddr3_cl_cwl[ddr_speed_bin][tmp] >> 4) & 0xf;
+ pdram_timing->cwl = ddr3_cl_cwl[ddr_speed_bin][tmp] & 0xf;
+ }
+
+ switch (timing_config->dramds) {
+ case 40:
+ tmp = DDR3_DS_40;
+ break;
+ case 34:
+ default:
+ tmp = DDR3_DS_34;
+ break;
+ }
+
+ if (timing_config->odt)
+ switch (timing_config->dramodt) {
+ case 60:
+ pdram_timing->mr[1] = tmp | DDR3_RTT_NOM_60;
+ break;
+ case 40:
+ pdram_timing->mr[1] = tmp | DDR3_RTT_NOM_40;
+ break;
+ case 120:
+ pdram_timing->mr[1] = tmp | DDR3_RTT_NOM_120;
+ break;
+ case 0:
+ default:
+ pdram_timing->mr[1] = tmp | DDR3_RTT_NOM_DIS;
+ break;
+ }
+ else
+ pdram_timing->mr[1] = tmp | DDR3_RTT_NOM_DIS;
+
+ pdram_timing->mr[2] = DDR3_MR2_CWL(pdram_timing->cwl);
+ pdram_timing->mr[3] = 0;
+
+ pdram_timing->trstl = ((DDR3_TRSTL * nmhz + 999) / 1000);
+ pdram_timing->trsth = ((DDR3_TRSTH * nmhz + 999) / 1000);
+ /* tREFI, average periodic refresh interval, 7.8us */
+ pdram_timing->trefi = ((DDR3_TREFI_7_8_US * nmhz + 999) / 1000);
+ /* base timing */
+ pdram_timing->trcd = pdram_timing->cl;
+ pdram_timing->trp = pdram_timing->cl;
+ pdram_timing->trppb = pdram_timing->cl;
+ tmp = ((DDR3_TWR * nmhz + 999) / 1000);
+ pdram_timing->twr = tmp;
+ pdram_timing->tdal = tmp + pdram_timing->trp;
+ if (tmp < 9) {
+ tmp = tmp - 4;
+ } else {
+ tmp += (tmp & 0x1) ? 1 : 0;
+ tmp = tmp >> 1;
+ }
+ if (pdram_timing->bl == 4)
+ pdram_timing->mr[0] = DDR3_BC4
+ | DDR3_CL(pdram_timing->cl)
+ | DDR3_WR(tmp);
+ else
+ pdram_timing->mr[0] = DDR3_BL8
+ | DDR3_CL(pdram_timing->cl)
+ | DDR3_WR(tmp);
+ tmp = ((DDR3_TRTP * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->trtp = max(4, tmp);
+ pdram_timing->trc =
+ (((ddr3_trc_tfaw[ddr_speed_bin] >> 8) * nmhz + 999) / 1000);
+ tmp = ((DDR3_TRRD * nmhz + 999) / 1000);
+ pdram_timing->trrd = max(4, tmp);
+ pdram_timing->tccd = DDR3_TCCD;
+ tmp = ((DDR3_TWTR * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->twtr = max(4, tmp);
+ pdram_timing->trtw = DDR3_TRTW;
+ pdram_timing->tras_max = 9 * pdram_timing->trefi;
+ pdram_timing->tras_min = ((DDR3_TRAS * nmhz + (nmhz >> 1) + 999)
+ / 1000);
+ pdram_timing->tfaw =
+ (((ddr3_trc_tfaw[ddr_speed_bin] & 0x0ff) * nmhz + 999)
+ / 1000);
+ /* tRFC, 90ns(512Mb),110ns(1Gb),160ns(2Gb),300ns(4Gb),350ns(8Gb) */
+ if (ddr_capability_per_die <= 0x4000000)
+ tmp = DDR3_TRFC_512MBIT;
+ else if (ddr_capability_per_die <= 0x8000000)
+ tmp = DDR3_TRFC_1GBIT;
+ else if (ddr_capability_per_die <= 0x10000000)
+ tmp = DDR3_TRFC_2GBIT;
+ else if (ddr_capability_per_die <= 0x20000000)
+ tmp = DDR3_TRFC_4GBIT;
+ else
+ tmp = DDR3_TRFC_8GBIT;
+ pdram_timing->trfc = (tmp * nmhz + 999) / 1000;
+ pdram_timing->txsnr = max(5, (((tmp + 10) * nmhz + 999) / 1000));
+ pdram_timing->tdqsck_max = 0;
+ /*pd and sr*/
+ pdram_timing->txsr = DDR3_TDLLK;
+ tmp = ((DDR3_TXP * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->txp = max(3, tmp);
+ tmp = ((DDR3_TXPDLL * nmhz + 999) / 1000);
+ pdram_timing->txpdll = max(10, tmp);
+ pdram_timing->tdllk = DDR3_TDLLK;
+ if (nmhz >= 533)
+ tmp = ((DDR3_TCKE_533MHZ * nmhz + 999) / 1000);
+ else
+ tmp = ((DDR3_TCKE_400MHZ * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->tcke = max(3, tmp);
+ pdram_timing->tckesr = (pdram_timing->tcke + 1);
+ tmp = ((DDR3_TCKSRE * nmhz + 999) / 1000);
+ pdram_timing->tcksre = max(5, tmp);
+ pdram_timing->tcksrx = max(5, tmp);
+ /*mode register timing*/
+ tmp = ((DDR3_TMOD * nmhz + 999) / 1000);
+ pdram_timing->tmod = max(12, tmp);
+ pdram_timing->tmrd = DDR3_TMRD;
+ pdram_timing->tmrr = 0;
+ /*ODT*/
+ pdram_timing->todton = pdram_timing->cwl - 2;
+ /*ZQ*/
+ tmp = ((DDR3_TZQINIT * nmhz + 999) / 1000);
+ pdram_timing->tzqinit = max(512, tmp);
+ tmp = ((DDR3_TZQCS * nmhz + 999) / 1000);
+ pdram_timing->tzqcs = max(64, tmp);
+ tmp = ((DDR3_TZQOPER * nmhz + 999) / 1000);
+ pdram_timing->tzqoper = max(256, tmp);
+ /* write leveling */
+ pdram_timing->twlmrd = DDR3_TWLMRD;
+ pdram_timing->twldqsen = DDR3_TWLDQSEN;
+ pdram_timing->twlo = ((DDR3_TWLO * nmhz + (nmhz >> 1) + 999) / 1000);
+}
+
+#define LPDDR2_TINIT1 (100) /* ns */
+#define LPDDR2_TINIT2 (5) /* tCK */
+#define LPDDR2_TINIT3 (200000) /* 200us */
+#define LPDDR2_TINIT4 (1000) /* 1us */
+#define LPDDR2_TINIT5 (10000) /* 10us */
+#define LPDDR2_TRSTL (0) /* tCK */
+#define LPDDR2_TRSTH (500000) /* 500us */
+#define LPDDR2_TREFI_3_9_US (3900) /* 3.9us */
+#define LPDDR2_TREFI_7_8_US (7800) /* 7.8us */
+
+/* base timing */
+#define LPDDR2_TRCD (24) /* tRCD,15ns(Fast)18ns(Typ)24ns(Slow) */
+#define LPDDR2_TRP_PB (18) /* tRPpb,15ns(Fast)18ns(Typ)24ns(Slow) */
+#define LPDDR2_TRP_AB_8_BANK (21) /* tRPab,18ns(Fast)21ns(Typ)27ns(Slow) */
+#define LPDDR2_TWR (15) /* tWR, max(3tCK,15ns) */
+#define LPDDR2_TRTP (7) /* tRTP, max(2tCK, 7.5ns) */
+#define LPDDR2_TRRD (10) /* tRRD, max(2tCK,10ns) */
+#define LPDDR2_TCCD (2) /* tCK */
+#define LPDDR2_TWTR_GREAT_200MHZ (7) /* ns */
+#define LPDDR2_TWTR_LITTLE_200MHZ (10) /* ns */
+#define LPDDR2_TRTW (0) /* tCK */
+#define LPDDR2_TRAS_MAX (70000) /* 70us */
+#define LPDDR2_TRAS (42) /* tRAS, max(3tCK,42ns) */
+#define LPDDR2_TFAW_GREAT_200MHZ (50) /* max(8tCK,50ns) */
+#define LPDDR2_TFAW_LITTLE_200MHZ (60) /* max(8tCK,60ns) */
+#define LPDDR2_TRFC_8GBIT (210) /* ns */
+#define LPDDR2_TRFC_4GBIT (130) /* ns */
+#define LPDDR2_TDQSCK_MIN (2) /* tDQSCKmin, 2.5ns */
+#define LPDDR2_TDQSCK_MAX (5) /* tDQSCKmax, 5.5ns */
+
+/*pd and sr*/
+#define LPDDR2_TXP (7) /* tXP, max(2tCK,7.5ns) */
+#define LPDDR2_TXPDLL (0)
+#define LPDDR2_TDLLK (0) /* tCK */
+#define LPDDR2_TCKE (3) /* tCK */
+#define LPDDR2_TCKESR (15) /* tCKESR, max(3tCK,15ns) */
+#define LPDDR2_TCKSRE (1) /* tCK */
+#define LPDDR2_TCKSRX (2) /* tCK */
+
+/*mode register timing*/
+#define LPDDR2_TMOD (0)
+#define LPDDR2_TMRD (5) /* tMRD, (=tMRW), 5 tCK */
+#define LPDDR2_TMRR (2) /* tCK */
+
+/*ZQ*/
+#define LPDDR2_TZQINIT (1000) /* ns */
+#define LPDDR2_TZQCS (90) /* tZQCS, max(6tCK,90ns) */
+#define LPDDR2_TZQCL (360) /* tZQCL, max(6tCK,360ns) */
+#define LPDDR2_TZQRESET (50) /* ZQreset, max(3tCK,50ns) */
+
+/*
+ * Description: calculate all LPDDR2 spec timings into "pdram_timing",
+ * based on the input parameter "timing_config".
+ * parameters:
+ * input: timing_config
+ * output: pdram_timing
+ */
+static void lpddr2_get_parameter(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ uint32_t nmhz = timing_config->freq;
+ uint32_t ddr_capability_per_die = get_max_die_capability(timing_config);
+ uint32_t tmp, trp_tmp, trppb_tmp, tras_tmp, twr_tmp, bl_tmp;
+
+ zeromem((void *)pdram_timing, sizeof(struct dram_timing_t));
+ pdram_timing->mhz = nmhz;
+ pdram_timing->al = 0;
+ pdram_timing->bl = timing_config->bl;
+
+ /* 1066 933 800 667 533 400 333
+ * RL, 8 7 6 5 4 3 3
+ * WL, 4 4 3 2 2 1 1
+ */
+ if (nmhz <= 266) {
+ pdram_timing->cl = 4;
+ pdram_timing->cwl = 2;
+ pdram_timing->mr[2] = LPDDR2_RL4_WL2;
+ } else if (nmhz <= 333) {
+ pdram_timing->cl = 5;
+ pdram_timing->cwl = 2;
+ pdram_timing->mr[2] = LPDDR2_RL5_WL2;
+ } else if (nmhz <= 400) {
+ pdram_timing->cl = 6;
+ pdram_timing->cwl = 3;
+ pdram_timing->mr[2] = LPDDR2_RL6_WL3;
+ } else if (nmhz <= 466) {
+ pdram_timing->cl = 7;
+ pdram_timing->cwl = 4;
+ pdram_timing->mr[2] = LPDDR2_RL7_WL4;
+ } else {
+ pdram_timing->cl = 8;
+ pdram_timing->cwl = 4;
+ pdram_timing->mr[2] = LPDDR2_RL8_WL4;
+ }
+ switch (timing_config->dramds) {
+ case 120:
+ pdram_timing->mr[3] = LPDDR2_DS_120;
+ break;
+ case 80:
+ pdram_timing->mr[3] = LPDDR2_DS_80;
+ break;
+ case 60:
+ pdram_timing->mr[3] = LPDDR2_DS_60;
+ break;
+ case 48:
+ pdram_timing->mr[3] = LPDDR2_DS_48;
+ break;
+ case 40:
+ pdram_timing->mr[3] = LPDDR2_DS_40;
+ break;
+ case 34:
+ default:
+ pdram_timing->mr[3] = LPDDR2_DS_34;
+ break;
+ }
+ pdram_timing->mr[0] = 0;
+
+ pdram_timing->tinit1 = (LPDDR2_TINIT1 * nmhz + 999) / 1000;
+ pdram_timing->tinit2 = LPDDR2_TINIT2;
+ pdram_timing->tinit3 = (LPDDR2_TINIT3 * nmhz + 999) / 1000;
+ pdram_timing->tinit4 = (LPDDR2_TINIT4 * nmhz + 999) / 1000;
+ pdram_timing->tinit5 = (LPDDR2_TINIT5 * nmhz + 999) / 1000;
+ pdram_timing->trstl = LPDDR2_TRSTL;
+ pdram_timing->trsth = (LPDDR2_TRSTH * nmhz + 999) / 1000;
+ /*
+ * tREFI, average periodic refresh interval,
+ * 15.6us(<256Mb) 7.8us(256Mb-1Gb) 3.9us(2Gb-8Gb)
+ */
+ if (ddr_capability_per_die >= 0x10000000)
+ pdram_timing->trefi = (LPDDR2_TREFI_3_9_US * nmhz + 999)
+ / 1000;
+ else
+ pdram_timing->trefi = (LPDDR2_TREFI_7_8_US * nmhz + 999)
+ / 1000;
+ /* base timing */
+ tmp = ((LPDDR2_TRCD * nmhz + 999) / 1000);
+ pdram_timing->trcd = max(3, tmp);
+ /*
+	 * tRPpb, max(3tCK, 15ns(Fast) 18ns(Typ) 24ns(Slow))
+ */
+ trppb_tmp = ((LPDDR2_TRP_PB * nmhz + 999) / 1000);
+ trppb_tmp = max(3, trppb_tmp);
+ pdram_timing->trppb = trppb_tmp;
+ /*
+ * tRPab, max(3tCK, 4-bank:15ns(Fast) 18ns(Typ) 24ns(Slow),
+ * 8-bank:18ns(Fast) 21ns(Typ) 27ns(Slow))
+ */
+ trp_tmp = ((LPDDR2_TRP_AB_8_BANK * nmhz + 999) / 1000);
+ trp_tmp = max(3, trp_tmp);
+ pdram_timing->trp = trp_tmp;
+ twr_tmp = ((LPDDR2_TWR * nmhz + 999) / 1000);
+ twr_tmp = max(3, twr_tmp);
+ pdram_timing->twr = twr_tmp;
+ bl_tmp = (pdram_timing->bl == 16) ? LPDDR2_BL16 :
+ ((pdram_timing->bl == 8) ? LPDDR2_BL8 : LPDDR2_BL4);
+ pdram_timing->mr[1] = bl_tmp | LPDDR2_N_WR(twr_tmp);
+ tmp = ((LPDDR2_TRTP * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->trtp = max(2, tmp);
+ tras_tmp = ((LPDDR2_TRAS * nmhz + 999) / 1000);
+ tras_tmp = max(3, tras_tmp);
+ pdram_timing->tras_min = tras_tmp;
+ pdram_timing->tras_max = ((LPDDR2_TRAS_MAX * nmhz + 999) / 1000);
+ pdram_timing->trc = (tras_tmp + trp_tmp);
+ tmp = ((LPDDR2_TRRD * nmhz + 999) / 1000);
+ pdram_timing->trrd = max(2, tmp);
+ pdram_timing->tccd = LPDDR2_TCCD;
+ /* tWTR, max(2tCK, 7.5ns(533-266MHz) 10ns(200-166MHz)) */
+ if (nmhz > 200)
+ tmp = ((LPDDR2_TWTR_GREAT_200MHZ * nmhz + (nmhz >> 1) +
+ 999) / 1000);
+ else
+ tmp = ((LPDDR2_TWTR_LITTLE_200MHZ * nmhz + 999) / 1000);
+ pdram_timing->twtr = max(2, tmp);
+ pdram_timing->trtw = LPDDR2_TRTW;
+ if (nmhz <= 200)
+ pdram_timing->tfaw = (LPDDR2_TFAW_LITTLE_200MHZ * nmhz + 999)
+ / 1000;
+ else
+ pdram_timing->tfaw = (LPDDR2_TFAW_GREAT_200MHZ * nmhz + 999)
+ / 1000;
+ /* tRFC, 90ns(<=512Mb) 130ns(1Gb-4Gb) 210ns(8Gb) */
+ if (ddr_capability_per_die >= 0x40000000) {
+ pdram_timing->trfc =
+ (LPDDR2_TRFC_8GBIT * nmhz + 999) / 1000;
+ tmp = (((LPDDR2_TRFC_8GBIT + 10) * nmhz + 999) / 1000);
+ } else {
+ pdram_timing->trfc =
+ (LPDDR2_TRFC_4GBIT * nmhz + 999) / 1000;
+ tmp = (((LPDDR2_TRFC_4GBIT + 10) * nmhz + 999) / 1000);
+ }
+ if (tmp < 2)
+ tmp = 2;
+ pdram_timing->txsr = tmp;
+ pdram_timing->txsnr = tmp;
+	/* tdqsck is rounded down */
+ pdram_timing->tdqsck = ((LPDDR2_TDQSCK_MIN * nmhz + (nmhz >> 1))
+ / 1000);
+ pdram_timing->tdqsck_max =
+ ((LPDDR2_TDQSCK_MAX * nmhz + (nmhz >> 1) + 999)
+ / 1000);
+ /* pd and sr */
+ tmp = ((LPDDR2_TXP * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->txp = max(2, tmp);
+ pdram_timing->txpdll = LPDDR2_TXPDLL;
+ pdram_timing->tdllk = LPDDR2_TDLLK;
+ pdram_timing->tcke = LPDDR2_TCKE;
+ tmp = ((LPDDR2_TCKESR * nmhz + 999) / 1000);
+ pdram_timing->tckesr = max(3, tmp);
+ pdram_timing->tcksre = LPDDR2_TCKSRE;
+ pdram_timing->tcksrx = LPDDR2_TCKSRX;
+ /* mode register timing */
+ pdram_timing->tmod = LPDDR2_TMOD;
+ pdram_timing->tmrd = LPDDR2_TMRD;
+ pdram_timing->tmrr = LPDDR2_TMRR;
+ /* ZQ */
+ pdram_timing->tzqinit = (LPDDR2_TZQINIT * nmhz + 999) / 1000;
+ tmp = ((LPDDR2_TZQCS * nmhz + 999) / 1000);
+ pdram_timing->tzqcs = max(6, tmp);
+ tmp = ((LPDDR2_TZQCL * nmhz + 999) / 1000);
+ pdram_timing->tzqoper = max(6, tmp);
+ tmp = ((LPDDR2_TZQRESET * nmhz + 999) / 1000);
+ pdram_timing->tzqreset = max(3, tmp);
+}
+
+#define LPDDR3_TINIT1 (100) /* ns */
+#define LPDDR3_TINIT2 (5) /* tCK */
+#define LPDDR3_TINIT3 (200000) /* 200us */
+#define LPDDR3_TINIT4 (1000) /* 1us */
+#define LPDDR3_TINIT5 (10000) /* 10us */
+#define LPDDR3_TRSTL (0)
+#define LPDDR3_TRSTH (0) /* 500us */
+#define LPDDR3_TREFI_3_9_US (3900) /* 3.9us */
+
+/* base timing */
+#define LPDDR3_TRCD (18) /* tRCD,15ns(Fast)18ns(Typ)24ns(Slow) */
+#define LPDDR3_TRP_PB (18) /* tRPpb, 15ns(Fast) 18ns(Typ) 24ns(Slow) */
+#define LPDDR3_TRP_AB (21) /* tRPab, 18ns(Fast) 21ns(Typ) 27ns(Slow) */
+#define LPDDR3_TWR (15) /* tWR, max(4tCK,15ns) */
+#define LPDDR3_TRTP (7) /* tRTP, max(4tCK, 7.5ns) */
+#define LPDDR3_TRRD (10) /* tRRD, max(2tCK,10ns) */
+#define LPDDR3_TCCD (4) /* tCK */
+#define LPDDR3_TWTR (7) /* tWTR, max(4tCK, 7.5ns) */
+#define LPDDR3_TRTW (0) /* tCK register min valid value */
+#define LPDDR3_TRAS_MAX (70000) /* 70us */
+#define LPDDR3_TRAS (42) /* tRAS, max(3tCK,42ns) */
+#define LPDDR3_TFAW (50) /* tFAW,max(8tCK, 50ns) */
+#define LPDDR3_TRFC_8GBIT (210) /* tRFC, 130ns(4Gb) 210ns(>4Gb) */
+#define LPDDR3_TRFC_4GBIT (130) /* ns */
+#define LPDDR3_TDQSCK_MIN (2) /* tDQSCKmin,2.5ns */
+#define LPDDR3_TDQSCK_MAX (5) /* tDQSCKmax,5.5ns */
+
+/* pd and sr */
+#define LPDDR3_TXP (7) /* tXP, max(3tCK,7.5ns) */
+#define LPDDR3_TXPDLL (0)
+#define LPDDR3_TCKE (7) /* tCKE, (max 7.5ns,3 tCK) */
+#define LPDDR3_TCKESR (15) /* tCKESR, max(3tCK,15ns) */
+#define LPDDR3_TCKSRE (2) /* tCKSRE=tCPDED, 2 tCK */
+#define LPDDR3_TCKSRX (2) /* tCKSRX, 2 tCK */
+
+/* mode register timing */
+#define LPDDR3_TMOD (0)
+#define LPDDR3_TMRD (14) /* tMRD, (=tMRW), max(14ns, 10 tCK) */
+#define LPDDR3_TMRR (4) /* tMRR, 4 tCK */
+#define LPDDR3_TMRRI LPDDR3_TRCD
+
+/* ODT */
+#define LPDDR3_TODTON (3) /* 3.5ns */
+
+/* ZQ */
+#define LPDDR3_TZQINIT (1000) /* 1us */
+#define LPDDR3_TZQCS (90) /* tZQCS, 90ns */
+#define LPDDR3_TZQCL (360) /* 360ns */
+#define LPDDR3_TZQRESET (50) /* ZQreset, max(3tCK,50ns) */
+/* write leveling */
+#define LPDDR3_TWLMRD (40) /* ns */
+#define LPDDR3_TWLO (20) /* ns */
+#define LPDDR3_TWLDQSEN (25) /* ns */
+/* CA training */
+#define LPDDR3_TCACKEL (10) /* tCK */
+#define LPDDR3_TCAENT (10) /* tCK */
+#define LPDDR3_TCAMRD (20) /* tCK */
+#define LPDDR3_TCACKEH (10) /* tCK */
+#define LPDDR3_TCAEXT (10) /* tCK */
+#define LPDDR3_TADR (20) /* ns */
+#define LPDDR3_TMRZ (3) /* ns */
+
+/* FSP */
+#define LPDDR3_TFC_LONG (250) /* ns */
+
+/*
+ * Description: calculate all LPDDR3 spec timings into "pdram_timing",
+ * based on the input parameter "timing_config".
+ * parameters:
+ * input: timing_config
+ * output: pdram_timing
+ */
+static void lpddr3_get_parameter(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ uint32_t nmhz = timing_config->freq;
+ uint32_t ddr_capability_per_die = get_max_die_capability(timing_config);
+ uint32_t tmp, trp_tmp, trppb_tmp, tras_tmp, twr_tmp, bl_tmp;
+
+ zeromem((void *)pdram_timing, sizeof(struct dram_timing_t));
+ pdram_timing->mhz = nmhz;
+ pdram_timing->al = 0;
+ pdram_timing->bl = timing_config->bl;
+
+ /*
+ * Only support Write Latency Set A here
+ * 1066 933 800 733 667 600 533 400 166
+ * RL, 16 14 12 11 10 9 8 6 3
+ * WL, 8 8 6 6 6 5 4 3 1
+ */
+ if (nmhz <= 400) {
+ pdram_timing->cl = 6;
+ pdram_timing->cwl = 3;
+ pdram_timing->mr[2] = LPDDR3_RL6_WL3;
+ } else if (nmhz <= 533) {
+ pdram_timing->cl = 8;
+ pdram_timing->cwl = 4;
+ pdram_timing->mr[2] = LPDDR3_RL8_WL4;
+ } else if (nmhz <= 600) {
+ pdram_timing->cl = 9;
+ pdram_timing->cwl = 5;
+ pdram_timing->mr[2] = LPDDR3_RL9_WL5;
+ } else if (nmhz <= 667) {
+ pdram_timing->cl = 10;
+ pdram_timing->cwl = 6;
+ pdram_timing->mr[2] = LPDDR3_RL10_WL6;
+ } else if (nmhz <= 733) {
+ pdram_timing->cl = 11;
+ pdram_timing->cwl = 6;
+ pdram_timing->mr[2] = LPDDR3_RL11_WL6;
+ } else if (nmhz <= 800) {
+ pdram_timing->cl = 12;
+ pdram_timing->cwl = 6;
+ pdram_timing->mr[2] = LPDDR3_RL12_WL6;
+ } else if (nmhz <= 933) {
+ pdram_timing->cl = 14;
+ pdram_timing->cwl = 8;
+ pdram_timing->mr[2] = LPDDR3_RL14_WL8;
+ } else {
+ pdram_timing->cl = 16;
+ pdram_timing->cwl = 8;
+ pdram_timing->mr[2] = LPDDR3_RL16_WL8;
+ }
+ switch (timing_config->dramds) {
+ case 80:
+ pdram_timing->mr[3] = LPDDR3_DS_80;
+ break;
+ case 60:
+ pdram_timing->mr[3] = LPDDR3_DS_60;
+ break;
+ case 48:
+ pdram_timing->mr[3] = LPDDR3_DS_48;
+ break;
+ case 40:
+ pdram_timing->mr[3] = LPDDR3_DS_40;
+ break;
+ case 3440:
+ pdram_timing->mr[3] = LPDDR3_DS_34D_40U;
+ break;
+ case 4048:
+ pdram_timing->mr[3] = LPDDR3_DS_40D_48U;
+ break;
+ case 3448:
+ pdram_timing->mr[3] = LPDDR3_DS_34D_48U;
+ break;
+ case 34:
+ default:
+ pdram_timing->mr[3] = LPDDR3_DS_34;
+ break;
+ }
+ pdram_timing->mr[0] = 0;
+ if (timing_config->odt)
+ switch (timing_config->dramodt) {
+ case 60:
+ pdram_timing->mr11 = LPDDR3_ODT_60;
+ break;
+ case 120:
+ pdram_timing->mr11 = LPDDR3_ODT_120;
+ break;
+ case 240:
+ default:
+ pdram_timing->mr11 = LPDDR3_ODT_240;
+ break;
+ }
+ else
+ pdram_timing->mr11 = LPDDR3_ODT_DIS;
+
+ pdram_timing->tinit1 = (LPDDR3_TINIT1 * nmhz + 999) / 1000;
+ pdram_timing->tinit2 = LPDDR3_TINIT2;
+ pdram_timing->tinit3 = (LPDDR3_TINIT3 * nmhz + 999) / 1000;
+ pdram_timing->tinit4 = (LPDDR3_TINIT4 * nmhz + 999) / 1000;
+ pdram_timing->tinit5 = (LPDDR3_TINIT5 * nmhz + 999) / 1000;
+ pdram_timing->trstl = LPDDR3_TRSTL;
+ pdram_timing->trsth = (LPDDR3_TRSTH * nmhz + 999) / 1000;
+ /* tREFI, average periodic refresh interval, 3.9us(4Gb-16Gb) */
+ pdram_timing->trefi = (LPDDR3_TREFI_3_9_US * nmhz + 999) / 1000;
+ /* base timing */
+ tmp = ((LPDDR3_TRCD * nmhz + 999) / 1000);
+ pdram_timing->trcd = max(3, tmp);
+ trppb_tmp = ((LPDDR3_TRP_PB * nmhz + 999) / 1000);
+ trppb_tmp = max(3, trppb_tmp);
+ pdram_timing->trppb = trppb_tmp;
+ trp_tmp = ((LPDDR3_TRP_AB * nmhz + 999) / 1000);
+ trp_tmp = max(3, trp_tmp);
+ pdram_timing->trp = trp_tmp;
+ twr_tmp = ((LPDDR3_TWR * nmhz + 999) / 1000);
+ twr_tmp = max(4, twr_tmp);
+ pdram_timing->twr = twr_tmp;
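+	/*
+	 * Round nWR up to the nearest value encodable by LPDDR3 MR1
+	 * (6, 8, 9-12, 14 or 16); values above 9 also need the
+	 * "nWR > 9" enable bit in MR2, set below.
+	 */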
+ if (twr_tmp <= 6)
+ twr_tmp = 6;
+ else if (twr_tmp <= 8)
+ twr_tmp = 8;
+ else if (twr_tmp <= 12)
+ twr_tmp = twr_tmp;
+ else if (twr_tmp <= 14)
+ twr_tmp = 14;
+ else
+ twr_tmp = 16;
+ if (twr_tmp > 9)
+ pdram_timing->mr[2] |= (1 << 4); /*enable nWR > 9*/
+ twr_tmp = (twr_tmp > 9) ? (twr_tmp - 10) : (twr_tmp - 2);
+ bl_tmp = LPDDR3_BL8;
+ pdram_timing->mr[1] = bl_tmp | LPDDR3_N_WR(twr_tmp);
+ tmp = ((LPDDR3_TRTP * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->trtp = max(4, tmp);
+ tras_tmp = ((LPDDR3_TRAS * nmhz + 999) / 1000);
+ tras_tmp = max(3, tras_tmp);
+ pdram_timing->tras_min = tras_tmp;
+ pdram_timing->trc = (tras_tmp + trp_tmp);
+ tmp = ((LPDDR3_TRRD * nmhz + 999) / 1000);
+ pdram_timing->trrd = max(2, tmp);
+ pdram_timing->tccd = LPDDR3_TCCD;
+ tmp = ((LPDDR3_TWTR * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->twtr = max(4, tmp);
+ pdram_timing->trtw = ((LPDDR3_TRTW * nmhz + 999) / 1000);
+ pdram_timing->tras_max = ((LPDDR3_TRAS_MAX * nmhz + 999) / 1000);
+ tmp = (LPDDR3_TFAW * nmhz + 999) / 1000;
+ pdram_timing->tfaw = max(8, tmp);
+ if (ddr_capability_per_die > 0x20000000) {
+ pdram_timing->trfc =
+ (LPDDR3_TRFC_8GBIT * nmhz + 999) / 1000;
+ tmp = (((LPDDR3_TRFC_8GBIT + 10) * nmhz + 999) / 1000);
+ } else {
+ pdram_timing->trfc =
+ (LPDDR3_TRFC_4GBIT * nmhz + 999) / 1000;
+ tmp = (((LPDDR3_TRFC_4GBIT + 10) * nmhz + 999) / 1000);
+ }
+ pdram_timing->txsr = max(2, tmp);
+ pdram_timing->txsnr = max(2, tmp);
+	/* tdqsck is rounded down */
+ pdram_timing->tdqsck =
+ ((LPDDR3_TDQSCK_MIN * nmhz + (nmhz >> 1))
+ / 1000);
+ pdram_timing->tdqsck_max =
+ ((LPDDR3_TDQSCK_MAX * nmhz + (nmhz >> 1) + 999)
+ / 1000);
+ /*pd and sr*/
+ tmp = ((LPDDR3_TXP * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->txp = max(3, tmp);
+ pdram_timing->txpdll = LPDDR3_TXPDLL;
+ tmp = ((LPDDR3_TCKE * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->tcke = max(3, tmp);
+ tmp = ((LPDDR3_TCKESR * nmhz + 999) / 1000);
+ pdram_timing->tckesr = max(3, tmp);
+ pdram_timing->tcksre = LPDDR3_TCKSRE;
+ pdram_timing->tcksrx = LPDDR3_TCKSRX;
+ /*mode register timing*/
+ pdram_timing->tmod = LPDDR3_TMOD;
+ tmp = ((LPDDR3_TMRD * nmhz + 999) / 1000);
+ pdram_timing->tmrd = max(10, tmp);
+ pdram_timing->tmrr = LPDDR3_TMRR;
+ tmp = ((LPDDR3_TRCD * nmhz + 999) / 1000);
+ pdram_timing->tmrri = max(3, tmp);
+ /* ODT */
+ pdram_timing->todton = (LPDDR3_TODTON * nmhz + (nmhz >> 1) + 999)
+ / 1000;
+ /* ZQ */
+ pdram_timing->tzqinit = (LPDDR3_TZQINIT * nmhz + 999) / 1000;
+ pdram_timing->tzqcs =
+ ((LPDDR3_TZQCS * nmhz + 999) / 1000);
+ pdram_timing->tzqoper =
+ ((LPDDR3_TZQCL * nmhz + 999) / 1000);
+ tmp = ((LPDDR3_TZQRESET * nmhz + 999) / 1000);
+ pdram_timing->tzqreset = max(3, tmp);
+ /* write leveling */
+ pdram_timing->twlmrd = (LPDDR3_TWLMRD * nmhz + 999) / 1000;
+ pdram_timing->twlo = (LPDDR3_TWLO * nmhz + 999) / 1000;
+ pdram_timing->twldqsen = (LPDDR3_TWLDQSEN * nmhz + 999) / 1000;
+ /* CA training */
+ pdram_timing->tcackel = LPDDR3_TCACKEL;
+ pdram_timing->tcaent = LPDDR3_TCAENT;
+ pdram_timing->tcamrd = LPDDR3_TCAMRD;
+ pdram_timing->tcackeh = LPDDR3_TCACKEH;
+ pdram_timing->tcaext = LPDDR3_TCAEXT;
+ pdram_timing->tadr = (LPDDR3_TADR * nmhz + 999) / 1000;
+ pdram_timing->tmrz = (LPDDR3_TMRZ * nmhz + 999) / 1000;
+ pdram_timing->tcacd = pdram_timing->tadr + 2;
+
+ /* FSP */
+ pdram_timing->tfc_long = (LPDDR3_TFC_LONG * nmhz + 999) / 1000;
+}
+
+#define LPDDR4_TINIT1 (200000) /* 200us */
+#define LPDDR4_TINIT2 (10) /* 10ns */
+#define LPDDR4_TINIT3 (2000000) /* 2ms */
+#define LPDDR4_TINIT4 (5) /* tCK */
+#define LPDDR4_TINIT5 (2000) /* 2us */
+#define LPDDR4_TRSTL LPDDR4_TINIT1
+#define LPDDR4_TRSTH LPDDR4_TINIT3
+#define LPDDR4_TREFI_3_9_US (3900) /* 3.9us */
+
+/* base timing */
+#define LPDDR4_TRCD (18) /* tRCD, max(18ns,4tCK) */
+#define LPDDR4_TRP_PB (18) /* tRPpb, max(18ns, 4tCK) */
+#define LPDDR4_TRP_AB (21) /* tRPab, max(21ns, 4tCK) */
+#define LPDDR4_TRRD (10) /* tRRD, max(4tCK,10ns) */
+#define LPDDR4_TCCD_BL16 (8) /* tCK */
+#define LPDDR4_TCCD_BL32 (16) /* tCK */
+#define LPDDR4_TWTR (10) /* tWTR, max(8tCK, 10ns) */
+#define LPDDR4_TRTW (0) /* tCK register min valid value */
+#define LPDDR4_TRAS_MAX (70000) /* 70us */
+#define LPDDR4_TRAS (42) /* tRAS, max(3tCK,42ns) */
+#define LPDDR4_TFAW (40) /* tFAW,min 40ns) */
+#define LPDDR4_TRFC_12GBIT (280) /* tRFC, 280ns(>=12Gb) */
+#define LPDDR4_TRFC_6GBIT (180) /* 6Gb/8Gb 180ns */
+#define LPDDR4_TRFC_4GBIT (130) /* 4Gb 130ns */
+#define LPDDR4_TDQSCK_MIN (1) /* tDQSCKmin,1.5ns */
+#define LPDDR4_TDQSCK_MAX (3) /* tDQSCKmax,3.5ns */
+#define LPDDR4_TPPD (4) /* tCK */
+
+/* pd and sr */
+#define LPDDR4_TXP (7) /* tXP, max(5tCK,7.5ns) */
+#define LPDDR4_TCKE (7) /* tCKE, max(7.5ns,4 tCK) */
+#define LPDDR4_TESCKE (1) /* tESCKE, max(1.75ns, 3tCK) */
+#define LPDDR4_TSR (15) /* tSR, max(15ns, 3tCK) */
+#define LPDDR4_TCMDCKE (1) /* max(1.75ns, 3tCK) */
+#define LPDDR4_TCSCKE (1) /* 1.75ns */
+#define LPDDR4_TCKELCS (5) /* max(5ns, 5tCK) */
+#define LPDDR4_TCSCKEH (1) /* 1.75ns */
+#define LPDDR4_TCKEHCS (7) /* max(7.5ns, 5tCK) */
+#define LPDDR4_TMRWCKEL (14) /* max(14ns, 10tCK) */
+#define LPDDR4_TCKELCMD (7) /* max(7.5ns, 3tCK) */
+#define LPDDR4_TCKEHCMD (7) /* max(7.5ns, 3tCK) */
+#define LPDDR4_TCKELPD (7) /* max(7.5ns, 3tCK) */
+#define LPDDR4_TCKCKEL (7) /* max(7.5ns, 3tCK) */
+
+/* mode register timing */
+#define LPDDR4_TMRD (14) /* tMRD, (=tMRW), max(14ns, 10 tCK) */
+#define LPDDR4_TMRR (8) /* tMRR, 8 tCK */
+
+/* ODT */
+#define LPDDR4_TODTON (3) /* 3.5ns */
+
+/* ZQ */
+#define LPDDR4_TZQCAL (1000) /* 1us */
+#define LPDDR4_TZQLAT (30) /* tZQLAT, max(30ns,8tCK) */
+#define LPDDR4_TZQRESET (50) /* ZQreset, max(3tCK,50ns) */
+#define LPDDR4_TZQCKE (1) /* tZQCKE, max(1.75ns, 3tCK) */
+
+/* write leveling */
+#define LPDDR4_TWLMRD (40) /* tCK */
+#define LPDDR4_TWLO (20) /* ns */
+#define LPDDR4_TWLDQSEN (20) /* tCK */
+
+/* CA training */
+#define LPDDR4_TCAENT (250) /* ns */
+#define LPDDR4_TADR (20) /* ns */
+#define LPDDR4_TMRZ (1) /* 1.5ns */
+#define LPDDR4_TVREF_LONG (250) /* ns */
+#define LPDDR4_TVREF_SHORT (100) /* ns */
+
+/* VRCG */
+#define LPDDR4_TVRCG_ENABLE (200) /* ns */
+#define LPDDR4_TVRCG_DISABLE (100) /* ns */
+
+/* FSP */
+#define LPDDR4_TFC_LONG (250) /* ns */
+#define LPDDR4_TCKFSPE (7) /* max(7.5ns, 4tCK) */
+#define LPDDR4_TCKFSPX (7) /* max(7.5ns, 4tCK) */
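+
+/*
+ * LPDDR4 timings with a 0.75ns fractional part (e.g. tESCKE = 1.75ns)
+ * add ((nmhz * 3) / 4) before the ceiling division, analogous to the
+ * (nmhz >> 1) term used for 0.5ns fractions.
+ */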
+
+/*
+ * Description: calculate all LPDDR4 spec timings into "pdram_timing",
+ * based on the input parameter "timing_config".
+ * parameters:
+ * input: timing_config
+ * output: pdram_timing
+ */
+static void lpddr4_get_parameter(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ uint32_t nmhz = timing_config->freq;
+ uint32_t ddr_capability_per_die = get_max_die_capability(timing_config);
+ uint32_t tmp, trp_tmp, trppb_tmp, tras_tmp;
+
+ zeromem((void *)pdram_timing, sizeof(struct dram_timing_t));
+ pdram_timing->mhz = nmhz;
+ pdram_timing->al = 0;
+ pdram_timing->bl = timing_config->bl;
+
+ /*
+ * Only support Write Latency Set A here
+ * 2133 1866 1600 1333 1066 800 533 266
+ * RL, 36 32 28 24 20 14 10 6
+ * WL, 18 16 14 12 10 8 6 4
+ * nWR, 40 34 30 24 20 16 10 6
+ * nRTP,16 14 12 10 8 8 8 8
+ */
+ tmp = (timing_config->bl == 32) ? 1 : 0;
+
+ /*
+ * we always use WR preamble = 2tCK
+ * RD preamble = Static
+ */
+ tmp |= (1 << 2);
+ if (nmhz <= 266) {
+ pdram_timing->cl = 6;
+ pdram_timing->cwl = 4;
+ pdram_timing->twr = 6;
+ pdram_timing->trtp = 8;
+ pdram_timing->mr[2] = LPDDR4_RL6_NRTP8 | LPDDR4_A_WL4;
+ } else if (nmhz <= 533) {
+ if (timing_config->rdbi) {
+ pdram_timing->cl = 12;
+ pdram_timing->mr[2] = LPDDR4_RL12_NRTP8 | LPDDR4_A_WL6;
+ } else {
+ pdram_timing->cl = 10;
+ pdram_timing->mr[2] = LPDDR4_RL10_NRTP8 | LPDDR4_A_WL6;
+ }
+ pdram_timing->cwl = 6;
+ pdram_timing->twr = 10;
+ pdram_timing->trtp = 8;
+ tmp |= (1 << 4);
+ } else if (nmhz <= 800) {
+ if (timing_config->rdbi) {
+ pdram_timing->cl = 16;
+ pdram_timing->mr[2] = LPDDR4_RL16_NRTP8 | LPDDR4_A_WL8;
+ } else {
+ pdram_timing->cl = 14;
+ pdram_timing->mr[2] = LPDDR4_RL14_NRTP8 | LPDDR4_A_WL8;
+ }
+ pdram_timing->cwl = 8;
+ pdram_timing->twr = 16;
+ pdram_timing->trtp = 8;
+ tmp |= (2 << 4);
+ } else if (nmhz <= 1066) {
+ if (timing_config->rdbi) {
+ pdram_timing->cl = 22;
+ pdram_timing->mr[2] = LPDDR4_RL22_NRTP8 | LPDDR4_A_WL10;
+ } else {
+ pdram_timing->cl = 20;
+ pdram_timing->mr[2] = LPDDR4_RL20_NRTP8 | LPDDR4_A_WL10;
+ }
+ pdram_timing->cwl = 10;
+ pdram_timing->twr = 20;
+ pdram_timing->trtp = 8;
+ tmp |= (3 << 4);
+ } else if (nmhz <= 1333) {
+ if (timing_config->rdbi) {
+ pdram_timing->cl = 28;
+ pdram_timing->mr[2] = LPDDR4_RL28_NRTP10 |
+ LPDDR4_A_WL12;
+ } else {
+ pdram_timing->cl = 24;
+ pdram_timing->mr[2] = LPDDR4_RL24_NRTP10 |
+ LPDDR4_A_WL12;
+ }
+ pdram_timing->cwl = 12;
+ pdram_timing->twr = 24;
+ pdram_timing->trtp = 10;
+ tmp |= (4 << 4);
+ } else if (nmhz <= 1600) {
+ if (timing_config->rdbi) {
+ pdram_timing->cl = 32;
+ pdram_timing->mr[2] = LPDDR4_RL32_NRTP12 |
+ LPDDR4_A_WL14;
+ } else {
+ pdram_timing->cl = 28;
+ pdram_timing->mr[2] = LPDDR4_RL28_NRTP12 |
+ LPDDR4_A_WL14;
+ }
+ pdram_timing->cwl = 14;
+ pdram_timing->twr = 30;
+ pdram_timing->trtp = 12;
+ tmp |= (5 << 4);
+ } else if (nmhz <= 1866) {
+ if (timing_config->rdbi) {
+ pdram_timing->cl = 36;
+ pdram_timing->mr[2] = LPDDR4_RL36_NRTP14 |
+ LPDDR4_A_WL16;
+ } else {
+ pdram_timing->cl = 32;
+ pdram_timing->mr[2] = LPDDR4_RL32_NRTP14 |
+ LPDDR4_A_WL16;
+ }
+ pdram_timing->cwl = 16;
+ pdram_timing->twr = 34;
+ pdram_timing->trtp = 14;
+ tmp |= (6 << 4);
+ } else {
+ if (timing_config->rdbi) {
+ pdram_timing->cl = 40;
+ pdram_timing->mr[2] = LPDDR4_RL40_NRTP16 |
+ LPDDR4_A_WL18;
+ } else {
+ pdram_timing->cl = 36;
+ pdram_timing->mr[2] = LPDDR4_RL36_NRTP16 |
+ LPDDR4_A_WL18;
+ }
+ pdram_timing->cwl = 18;
+ pdram_timing->twr = 40;
+ pdram_timing->trtp = 16;
+ tmp |= (7 << 4);
+ }
+ pdram_timing->mr[1] = tmp;
+ tmp = (timing_config->rdbi ? LPDDR4_DBI_RD_EN : 0) |
+ (timing_config->wdbi ? LPDDR4_DBI_WR_EN : 0);
+ switch (timing_config->dramds) {
+ case 240:
+ pdram_timing->mr[3] = LPDDR4_PDDS_240 | tmp;
+ break;
+ case 120:
+ pdram_timing->mr[3] = LPDDR4_PDDS_120 | tmp;
+ break;
+ case 80:
+ pdram_timing->mr[3] = LPDDR4_PDDS_80 | tmp;
+ break;
+ case 60:
+ pdram_timing->mr[3] = LPDDR4_PDDS_60 | tmp;
+ break;
+ case 48:
+ pdram_timing->mr[3] = LPDDR4_PDDS_48 | tmp;
+ break;
+ case 40:
+ default:
+ pdram_timing->mr[3] = LPDDR4_PDDS_40 | tmp;
+ break;
+ }
+ pdram_timing->mr[0] = 0;
+ if (timing_config->odt) {
+ switch (timing_config->dramodt) {
+ case 240:
+ tmp = LPDDR4_DQODT_240;
+ break;
+ case 120:
+ tmp = LPDDR4_DQODT_120;
+ break;
+ case 80:
+ tmp = LPDDR4_DQODT_80;
+ break;
+ case 60:
+ tmp = LPDDR4_DQODT_60;
+ break;
+ case 48:
+ tmp = LPDDR4_DQODT_48;
+ break;
+ case 40:
+ default:
+ tmp = LPDDR4_DQODT_40;
+ break;
+ }
+
+ switch (timing_config->caodt) {
+ case 240:
+ pdram_timing->mr11 = LPDDR4_CAODT_240 | tmp;
+ break;
+ case 120:
+ pdram_timing->mr11 = LPDDR4_CAODT_120 | tmp;
+ break;
+ case 80:
+ pdram_timing->mr11 = LPDDR4_CAODT_80 | tmp;
+ break;
+ case 60:
+ pdram_timing->mr11 = LPDDR4_CAODT_60 | tmp;
+ break;
+ case 48:
+ pdram_timing->mr11 = LPDDR4_CAODT_48 | tmp;
+ break;
+ case 40:
+ default:
+ pdram_timing->mr11 = LPDDR4_CAODT_40 | tmp;
+ break;
+ }
+ } else {
+ pdram_timing->mr11 = LPDDR4_CAODT_DIS | tmp;
+ }
+
+ pdram_timing->tinit1 = (LPDDR4_TINIT1 * nmhz + 999) / 1000;
+ pdram_timing->tinit2 = (LPDDR4_TINIT2 * nmhz + 999) / 1000;
+ pdram_timing->tinit3 = (LPDDR4_TINIT3 * nmhz + 999) / 1000;
+ pdram_timing->tinit4 = (LPDDR4_TINIT4 * nmhz + 999) / 1000;
+ pdram_timing->tinit5 = (LPDDR4_TINIT5 * nmhz + 999) / 1000;
+ pdram_timing->trstl = (LPDDR4_TRSTL * nmhz + 999) / 1000;
+ pdram_timing->trsth = (LPDDR4_TRSTH * nmhz + 999) / 1000;
+ /* tREFI, average periodic refresh interval, 3.9us(4Gb-16Gb) */
+ pdram_timing->trefi = (LPDDR4_TREFI_3_9_US * nmhz + 999) / 1000;
+ /* base timing */
+ tmp = ((LPDDR4_TRCD * nmhz + 999) / 1000);
+ pdram_timing->trcd = max(4, tmp);
+ trppb_tmp = ((LPDDR4_TRP_PB * nmhz + 999) / 1000);
+ trppb_tmp = max(4, trppb_tmp);
+ pdram_timing->trppb = trppb_tmp;
+ trp_tmp = ((LPDDR4_TRP_AB * nmhz + 999) / 1000);
+ trp_tmp = max(4, trp_tmp);
+ pdram_timing->trp = trp_tmp;
+ tras_tmp = ((LPDDR4_TRAS * nmhz + 999) / 1000);
+ tras_tmp = max(3, tras_tmp);
+ pdram_timing->tras_min = tras_tmp;
+ pdram_timing->trc = (tras_tmp + trp_tmp);
+ tmp = ((LPDDR4_TRRD * nmhz + 999) / 1000);
+ pdram_timing->trrd = max(4, tmp);
+	if (timing_config->bl == 32)
+		pdram_timing->tccd = LPDDR4_TCCD_BL32;
+	else
+		pdram_timing->tccd = LPDDR4_TCCD_BL16;
+ pdram_timing->tccdmw = 4 * pdram_timing->tccd;
+ tmp = ((LPDDR4_TWTR * nmhz + 999) / 1000);
+ pdram_timing->twtr = max(8, tmp);
+ pdram_timing->trtw = ((LPDDR4_TRTW * nmhz + 999) / 1000);
+ pdram_timing->tras_max = ((LPDDR4_TRAS_MAX * nmhz + 999) / 1000);
+ pdram_timing->tfaw = (LPDDR4_TFAW * nmhz + 999) / 1000;
+ if (ddr_capability_per_die > 0x60000000) {
+ /* >= 12Gb */
+ pdram_timing->trfc =
+ (LPDDR4_TRFC_12GBIT * nmhz + 999) / 1000;
+ tmp = (((LPDDR4_TRFC_12GBIT + 7) * nmhz + (nmhz >> 1) +
+ 999) / 1000);
+ } else if (ddr_capability_per_die > 0x30000000) {
+ pdram_timing->trfc =
+ (LPDDR4_TRFC_6GBIT * nmhz + 999) / 1000;
+ tmp = (((LPDDR4_TRFC_6GBIT + 7) * nmhz + (nmhz >> 1) +
+ 999) / 1000);
+ } else {
+ pdram_timing->trfc =
+ (LPDDR4_TRFC_4GBIT * nmhz + 999) / 1000;
+ tmp = (((LPDDR4_TRFC_4GBIT + 7) * nmhz + (nmhz >> 1) +
+ 999) / 1000);
+ }
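+ /*
+ * tXSR/tXSNR = tRFCab + 7.5ns: the "+ 7" adds 7ns and the "nmhz >> 1"
+ * term adds the remaining 0.5ns before rounding up to whole cycles.
+ */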
+ pdram_timing->txsr = max(2, tmp);
+ pdram_timing->txsnr = max(2, tmp);
+ /* tdqsck is rounded down */
+ pdram_timing->tdqsck = ((LPDDR4_TDQSCK_MIN * nmhz +
+ (nmhz >> 1)) / 1000);
+ pdram_timing->tdqsck_max = ((LPDDR4_TDQSCK_MAX * nmhz +
+ (nmhz >> 1) + 999) / 1000);
+ pdram_timing->tppd = LPDDR4_TPPD;
+ /* pd and sr */
+ tmp = ((LPDDR4_TXP * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->txp = max(5, tmp);
+ tmp = ((LPDDR4_TCKE * nmhz + (nmhz >> 1) + 999) / 1000);
+ pdram_timing->tcke = max(4, tmp);
+ tmp = ((LPDDR4_TESCKE * nmhz +
+ ((nmhz * 3) / 4) +
+ 999) / 1000);
+ pdram_timing->tescke = max(3, tmp);
+ tmp = ((LPDDR4_TSR * nmhz + 999) / 1000);
+ pdram_timing->tsr = max(3, tmp);
+ tmp = ((LPDDR4_TCMDCKE * nmhz +
+ ((nmhz * 3) / 4) +
+ 999) / 1000);
+ pdram_timing->tcmdcke = max(3, tmp);
+ pdram_timing->tcscke = ((LPDDR4_TCSCKE * nmhz +
+ ((nmhz * 3) / 4) +
+ 999) / 1000);
+ tmp = ((LPDDR4_TCKELCS * nmhz + 999) / 1000);
+ pdram_timing->tckelcs = max(5, tmp);
+ pdram_timing->tcsckeh = ((LPDDR4_TCSCKEH * nmhz +
+ ((nmhz * 3) / 4) +
+ 999) / 1000);
+ tmp = ((LPDDR4_TCKEHCS * nmhz +
+ (nmhz >> 1) + 999) / 1000);
+ pdram_timing->tckehcs = max(5, tmp);
+ tmp = ((LPDDR4_TMRWCKEL * nmhz + 999) / 1000);
+ pdram_timing->tmrwckel = max(10, tmp);
+ tmp = ((LPDDR4_TCKELCMD * nmhz + (nmhz >> 1) +
+ 999) / 1000);
+ pdram_timing->tckelcmd = max(3, tmp);
+ tmp = ((LPDDR4_TCKEHCMD * nmhz + (nmhz >> 1) +
+ 999) / 1000);
+ pdram_timing->tckehcmd = max(3, tmp);
+ tmp = ((LPDDR4_TCKELPD * nmhz + (nmhz >> 1) +
+ 999) / 1000);
+ pdram_timing->tckelpd = max(3, tmp);
+ tmp = ((LPDDR4_TCKCKEL * nmhz + (nmhz >> 1) +
+ 999) / 1000);
+ pdram_timing->tckckel = max(3, tmp);
+ /* mode register timing */
+ tmp = ((LPDDR4_TMRD * nmhz + 999) / 1000);
+ pdram_timing->tmrd = max(10, tmp);
+ pdram_timing->tmrr = LPDDR4_TMRR;
+ pdram_timing->tmrri = pdram_timing->trcd + 3;
+ /* ODT */
+ pdram_timing->todton = (LPDDR4_TODTON * nmhz + (nmhz >> 1) + 999)
+ / 1000;
+ /* ZQ */
+ pdram_timing->tzqcal = (LPDDR4_TZQCAL * nmhz + 999) / 1000;
+ tmp = ((LPDDR4_TZQLAT * nmhz + 999) / 1000);
+ pdram_timing->tzqlat = max(8, tmp);
+ tmp = ((LPDDR4_TZQRESET * nmhz + 999) / 1000);
+ pdram_timing->tzqreset = max(3, tmp);
+ tmp = ((LPDDR4_TZQCKE * nmhz +
+ ((nmhz * 3) / 4) +
+ 999) / 1000);
+ pdram_timing->tzqcke = max(3, tmp);
+ /* write leveling */
+ pdram_timing->twlmrd = LPDDR4_TWLMRD;
+ pdram_timing->twlo = (LPDDR4_TWLO * nmhz + 999) / 1000;
+ pdram_timing->twldqsen = LPDDR4_TWLDQSEN;
+ /* CA training */
+ pdram_timing->tcaent = (LPDDR4_TCAENT * nmhz + 999) / 1000;
+ pdram_timing->tadr = (LPDDR4_TADR * nmhz + 999) / 1000;
+ pdram_timing->tmrz = (LPDDR4_TMRZ * nmhz + (nmhz >> 1) + 999) / 1000;
+ pdram_timing->tvref_long = (LPDDR4_TVREF_LONG * nmhz + 999) / 1000;
+ pdram_timing->tvref_short = (LPDDR4_TVREF_SHORT * nmhz + 999) / 1000;
+ /* VRCG */
+ pdram_timing->tvrcg_enable = (LPDDR4_TVRCG_ENABLE * nmhz +
+ 999) / 1000;
+ pdram_timing->tvrcg_disable = (LPDDR4_TVRCG_DISABLE * nmhz +
+ 999) / 1000;
+ /* FSP */
+ pdram_timing->tfc_long = (LPDDR4_TFC_LONG * nmhz + 999) / 1000;
+ tmp = (LPDDR4_TCKFSPE * nmhz + (nmhz >> 1) + 999) / 1000;
+ pdram_timing->tckfspe = max(4, tmp);
+ tmp = (LPDDR4_TCKFSPX * nmhz + (nmhz >> 1) + 999) / 1000;
+ pdram_timing->tckfspx = max(4, tmp);
+}
+
+/*
+ * Description: calculate the spec timings of the DRAM type selected in
+ * "timing_config" and store them in "pdram_timing"
+ * parameters:
+ * input: timing_config
+ * output: pdram_timing
+ * NOTE: the MR ODT value is always programmed here; if ODT is not
+ * wanted, the controller has to disable it
+ */
+void dram_get_parameter(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing)
+{
+ switch (timing_config->dram_type) {
+ case DDR3:
+ ddr3_get_parameter(timing_config, pdram_timing);
+ break;
+ case LPDDR2:
+ lpddr2_get_parameter(timing_config, pdram_timing);
+ break;
+ case LPDDR3:
+ lpddr3_get_parameter(timing_config, pdram_timing);
+ break;
+ case LPDDR4:
+ lpddr4_get_parameter(timing_config, pdram_timing);
+ break;
+ }
+}
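+
+/*
+ * Minimal usage sketch (illustrative only; the field values below are
+ * assumptions, not taken from a real board configuration):
+ *
+ *	struct timing_related_config cfg = {
+ *		.dram_type = LPDDR4,
+ *		.freq = 800,
+ *		.bl = 16,
+ *		.odt = 1,
+ *	};
+ *	struct dram_timing_t t;
+ *
+ *	dram_get_parameter(&cfg, &t);
+ *	// t.trcd, t.mr[0..3], t.mr11, ... now hold the 800MHz LPDDR4 values
+ */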
diff --git a/plat/rockchip/rk3399/drivers/dram/dram_spec_timing.h b/plat/rockchip/rk3399/drivers/dram/dram_spec_timing.h
new file mode 100644
index 00000000..30d3aeab
--- /dev/null
+++ b/plat/rockchip/rk3399/drivers/dram/dram_spec_timing.h
@@ -0,0 +1,506 @@
+/*
+ * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef _DRAM_SPEC_TIMING_HEAD_
+#define _DRAM_SPEC_TIMING_HEAD_
+#include <stdint.h>
+
+enum ddr3_speed_rate {
+ /* 5-5-5 */
+ DDR3_800D = 0,
+ /* 6-6-6 */
+ DDR3_800E = 1,
+ /* 6-6-6 */
+ DDR3_1066E = 2,
+ /* 7-7-7 */
+ DDR3_1066F = 3,
+ /* 8-8-8 */
+ DDR3_1066G = 4,
+ /* 7-7-7 */
+ DDR3_1333F = 5,
+ /* 8-8-8 */
+ DDR3_1333G = 6,
+ /* 9-9-9 */
+ DDR3_1333H = 7,
+ /* 10-10-10 */
+ DDR3_1333J = 8,
+ /* 8-8-8 */
+ DDR3_1600G = 9,
+ /* 9-9-9 */
+ DDR3_1600H = 10,
+ /* 10-10-10 */
+ DDR3_1600J = 11,
+ /* 11-11-11 */
+ DDR3_1600K = 12,
+ /* 10-10-10 */
+ DDR3_1866J = 13,
+ /* 11-11-11 */
+ DDR3_1866K = 14,
+ /* 12-12-12 */
+ DDR3_1866L = 15,
+ /* 13-13-13 */
+ DDR3_1866M = 16,
+ /* 11-11-11 */
+ DDR3_2133K = 17,
+ /* 12-12-12 */
+ DDR3_2133L = 18,
+ /* 13-13-13 */
+ DDR3_2133M = 19,
+ /* 14-14-14 */
+ DDR3_2133N = 20,
+ DDR3_DEFAULT = 21,
+};
+
+#define max(a, b) (((a) > (b)) ? (a) : (b))
+#define range(mi, val, ma) (((ma) > (val)) ? (max(mi, val)) : (ma))
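+/* max() picks the larger value; range() clamps val into [mi, ma] */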
+
+struct dram_timing_t {
+ /* unit MHz */
+ uint32_t mhz;
+ /* some timing unit is us */
+ uint32_t tinit1;
+ uint32_t tinit2;
+ uint32_t tinit3;
+ uint32_t tinit4;
+ uint32_t tinit5;
+ /* reset low, DDR3:200us */
+ uint32_t trstl;
+ /* reset high to CKE high, DDR3:500us */
+ uint32_t trsth;
+ uint32_t trefi;
+ /* base */
+ uint32_t trcd;
+ /* trp per bank */
+ uint32_t trppb;
+ /* trp all bank */
+ uint32_t trp;
+ uint32_t twr;
+ uint32_t tdal;
+ uint32_t trtp;
+ uint32_t trc;
+ uint32_t trrd;
+ uint32_t tccd;
+ uint32_t twtr;
+ uint32_t trtw;
+ uint32_t tras_max;
+ uint32_t tras_min;
+ uint32_t tfaw;
+ uint32_t trfc;
+ uint32_t tdqsck;
+ uint32_t tdqsck_max;
+ /* pd or sr */
+ uint32_t txsr;
+ uint32_t txsnr;
+ uint32_t txp;
+ uint32_t txpdll;
+ uint32_t tdllk;
+ uint32_t tcke;
+ uint32_t tckesr;
+ uint32_t tcksre;
+ uint32_t tcksrx;
+ uint32_t tdpd;
+ /* mode register timing */
+ uint32_t tmod;
+ uint32_t tmrd;
+ uint32_t tmrr;
+ uint32_t tmrri;
+ /* ODT */
+ uint32_t todton;
+ /* ZQ */
+ uint32_t tzqinit;
+ uint32_t tzqcs;
+ uint32_t tzqoper;
+ uint32_t tzqreset;
+ /* Write Leveling */
+ uint32_t twlmrd;
+ uint32_t twlo;
+ uint32_t twldqsen;
+ /* CA Training */
+ uint32_t tcackel;
+ uint32_t tcaent;
+ uint32_t tcamrd;
+ uint32_t tcackeh;
+ uint32_t tcaext;
+ uint32_t tadr;
+ uint32_t tmrz;
+ uint32_t tcacd;
+ /* mode register */
+ uint32_t mr[4];
+ uint32_t mr11;
+ /* lpddr4 spec */
+ uint32_t mr12;
+ uint32_t mr13;
+ uint32_t mr14;
+ uint32_t mr16;
+ uint32_t mr17;
+ uint32_t mr20;
+ uint32_t mr22;
+ uint32_t tccdmw;
+ uint32_t tppd;
+ uint32_t tescke;
+ uint32_t tsr;
+ uint32_t tcmdcke;
+ uint32_t tcscke;
+ uint32_t tckelcs;
+ uint32_t tcsckeh;
+ uint32_t tckehcs;
+ uint32_t tmrwckel;
+ uint32_t tzqcal;
+ uint32_t tzqlat;
+ uint32_t tzqcke;
+ uint32_t tvref_long;
+ uint32_t tvref_short;
+ uint32_t tvrcg_enable;
+ uint32_t tvrcg_disable;
+ uint32_t tfc_long;
+ uint32_t tckfspe;
+ uint32_t tckfspx;
+ uint32_t tckehcmd;
+ uint32_t tckelcmd;
+ uint32_t tckelpd;
+ uint32_t tckckel;
+ /* other */
+ uint32_t al;
+ uint32_t cl;
+ uint32_t cwl;
+ uint32_t bl;
+};
+
+struct dram_info_t {
+ /* speed_rate only used when DDR3 */
+ enum ddr3_speed_rate speed_rate;
+ /* 1: use CS0, 2: use CS0 and CS1 */
+ uint32_t cs_cnt;
+ /* give the max per-die capability on each rank/cs */
+ uint32_t per_die_capability[2];
+};
+
+struct timing_related_config {
+ struct dram_info_t dram_info[2];
+ uint32_t dram_type;
+ /* MHz */
+ uint32_t freq;
+ uint32_t ch_cnt;
+ uint32_t bl;
+ /* 1:auto precharge, 0:never auto precharge */
+ uint32_t ap;
+ /*
+ * 1:dll bypass, 0:dll normal
+ * dram and controller dll bypass at the same time
+ */
+ uint32_t dllbp;
+ /* 1:odt enable, 0:odt disable */
+ uint32_t odt;
+ /* 1:enable, 0:disable */
+ uint32_t rdbi;
+ uint32_t wdbi;
+ /* dram driver strength */
+ uint32_t dramds;
+ /* dram ODT, if odt=0, this parameter is not used */
+ uint32_t dramodt;
+ /*
+ * ca ODT, if odt=0, this parameter is not used
+ * it is only used by LPDDR4
+ */
+ uint32_t caodt;
+};
+
+/* mr0 for ddr3 */
+#define DDR3_BL8 (0)
+#define DDR3_BC4_8 (1)
+#define DDR3_BC4 (2)
+#define DDR3_CL(n) (((((n) - 4) & 0x7) << 4)\
+ | ((((n) - 4) & 0x8) >> 1))
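+/*
+ * DDR3 MR0 encodes CAS latency as (CL - 4): the low three bits go to
+ * A[6:4] and bit 3 goes to A2, which is what DDR3_CL() builds.
+ */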
+#define DDR3_WR(n) (((n) & 0x7) << 9)
+#define DDR3_DLL_RESET (1 << 8)
+#define DDR3_DLL_DERESET (0 << 8)
+
+/* mr1 for ddr3 */
+#define DDR3_DLL_ENABLE (0)
+#define DDR3_DLL_DISABLE (1)
+#define DDR3_MR1_AL(n) (((n) & 0x3) << 3)
+
+#define DDR3_DS_40 (0)
+#define DDR3_DS_34 (1 << 1)
+#define DDR3_RTT_NOM_DIS (0)
+#define DDR3_RTT_NOM_60 (1 << 2)
+#define DDR3_RTT_NOM_120 (1 << 6)
+#define DDR3_RTT_NOM_40 ((1 << 2) | (1 << 6))
+#define DDR3_TDQS (1 << 11)
+
+/* mr2 for ddr3 */
+#define DDR3_MR2_CWL(n) ((((n) - 5) & 0x7) << 3)
+#define DDR3_RTT_WR_DIS (0)
+#define DDR3_RTT_WR_60 (1 << 9)
+#define DDR3_RTT_WR_120 (2 << 9)
+
+/*
+ * MR0 (Device Information)
+ * 0:DAI complete, 1:DAI still in progress
+ */
+#define LPDDR2_DAI (0x1)
+/* 0:S2 or S4 SDRAM, 1:NVM */
+#define LPDDR2_DI (0x1 << 1)
+/* 0:DNV not supported, 1:DNV supported */
+#define LPDDR2_DNVI (0x1 << 2)
+#define LPDDR2_RZQI (0x3 << 3)
+
+/*
+ * 00:RZQ self test not supported,
+ * 01:ZQ-pin may connect to VDDCA or float
+ * 10:ZQ-pin may short to GND.
+ * 11:ZQ-pin self test completed, no error condition detected.
+ */
+
+/* MR1 (Device Feature) */
+#define LPDDR2_BL4 (0x2)
+#define LPDDR2_BL8 (0x3)
+#define LPDDR2_BL16 (0x4)
+#define LPDDR2_N_WR(n) (((n) - 2) << 5)
+
+/* MR2 (Device Feature 2) */
+#define LPDDR2_RL3_WL1 (0x1)
+#define LPDDR2_RL4_WL2 (0x2)
+#define LPDDR2_RL5_WL2 (0x3)
+#define LPDDR2_RL6_WL3 (0x4)
+#define LPDDR2_RL7_WL4 (0x5)
+#define LPDDR2_RL8_WL4 (0x6)
+
+/* MR3 (IO Configuration 1) */
+#define LPDDR2_DS_34 (0x1)
+#define LPDDR2_DS_40 (0x2)
+#define LPDDR2_DS_48 (0x3)
+#define LPDDR2_DS_60 (0x4)
+#define LPDDR2_DS_80 (0x6)
+/* optional */
+#define LPDDR2_DS_120 (0x7)
+
+/* MR4 (Device Temperature) */
+#define LPDDR2_TREF_MASK (0x7)
+#define LPDDR2_4_TREF (0x1)
+#define LPDDR2_2_TREF (0x2)
+#define LPDDR2_1_TREF (0x3)
+#define LPDDR2_025_TREF (0x5)
+#define LPDDR2_025_TREF_DERATE (0x6)
+
+#define LPDDR2_TUF (0x1 << 7)
+
+/* MR8 (Basic configuration 4) */
+#define LPDDR2_S4 (0x0)
+#define LPDDR2_S2 (0x1)
+#define LPDDR2_N (0x2)
+/* Unit:MB */
+#define LPDDR2_DENSITY(mr8) (8 << (((mr8) >> 2) & 0xf))
+#define LPDDR2_IO_WIDTH(mr8) (32 >> (((mr8) >> 6) & 0x3))
+
+/* MR10 (Calibration) */
+#define LPDDR2_ZQINIT (0xff)
+#define LPDDR2_ZQCL (0xab)
+#define LPDDR2_ZQCS (0x56)
+#define LPDDR2_ZQRESET (0xc3)
+
+/* MR16 (PASR Bank Mask), S2 SDRAM Only */
+#define LPDDR2_PASR_FULL (0x0)
+#define LPDDR2_PASR_1_2 (0x1)
+#define LPDDR2_PASR_1_4 (0x2)
+#define LPDDR2_PASR_1_8 (0x3)
+
+/*
+ * MR0 (Device Information)
+ * 0:DAI complete,
+ * 1:DAI still in progress
+ */
+#define LPDDR3_DAI (0x1)
+/*
+ * 00:RZQ self test not supported,
+ * 01:ZQ-pin may connect to VDDCA or float
+ * 10:ZQ-pin may short to GND.
+ * 11:ZQ-pin self test completed, no error condition detected.
+ */
+#define LPDDR3_RZQI (0x3 << 3)
+/*
+ * 0:DRAM does not support WL(Set B),
+ * 1:DRAM supports WL(Set B)
+ */
+#define LPDDR3_WL_SUPOT (1 << 6)
+/*
+ * 0:DRAM does not support RL=3,nWR=3,WL=1;
+ * 1:DRAM supports RL=3,nWR=3,WL=1 for frequencies <=166
+ */
+#define LPDDR3_RL3_SUPOT (1 << 7)
+
+/* MR1 (Device Feature) */
+#define LPDDR3_BL8 (0x3)
+#define LPDDR3_N_WR(n) ((n) << 5)
+
+/* MR2 (Device Feature 2), WL Set A,default */
+/* <=166MHz,optional*/
+#define LPDDR3_RL3_WL1 (0x1)
+/* <=400MHz*/
+#define LPDDR3_RL6_WL3 (0x4)
+/* <=533MHz*/
+#define LPDDR3_RL8_WL4 (0x6)
+/* <=600MHz*/
+#define LPDDR3_RL9_WL5 (0x7)
+/* <=667MHz,default*/
+#define LPDDR3_RL10_WL6 (0x8)
+/* <=733MHz*/
+#define LPDDR3_RL11_WL6 (0x9)
+/* <=800MHz*/
+#define LPDDR3_RL12_WL6 (0xa)
+/* <=933MHz*/
+#define LPDDR3_RL14_WL8 (0xc)
+/* <=1066MHz*/
+#define LPDDR3_RL16_WL8 (0xe)
+
+/* WL Set B, optional */
+/* <=667MHz,default*/
+#define LPDDR3_RL10_WL8 (0x8)
+/* <=733MHz*/
+#define LPDDR3_RL11_WL9 (0x9)
+/* <=800MHz*/
+#define LPDDR3_RL12_WL9 (0xa)
+/* <=933MHz*/
+#define LPDDR3_RL14_WL11 (0xc)
+/* <=1066MHz*/
+#define LPDDR3_RL16_WL13 (0xe)
+
+/* 1:enable nWR programming > 9(default)*/
+#define LPDDR3_N_WRE (1 << 4)
+/* 1:Select WL Set B*/
+#define LPDDR3_WL_S (1 << 6)
+/* 1:enable*/
+#define LPDDR3_WR_LEVEL (1 << 7)
+
+/* MR3 (IO Configuration 1) */
+#define LPDDR3_DS_34 (0x1)
+#define LPDDR3_DS_40 (0x2)
+#define LPDDR3_DS_48 (0x3)
+#define LPDDR3_DS_60 (0x4)
+#define LPDDR3_DS_80 (0x6)
+#define LPDDR3_DS_34D_40U (0x9)
+#define LPDDR3_DS_40D_48U (0xa)
+#define LPDDR3_DS_34D_48U (0xb)
+
+/* MR4 (Device Temperature) */
+#define LPDDR3_TREF_MASK (0x7)
+/* SDRAM Low temperature operating limit exceeded */
+#define LPDDR3_LT_EXED (0x0)
+#define LPDDR3_4_TREF (0x1)
+#define LPDDR3_2_TREF (0x2)
+#define LPDDR3_1_TREF (0x3)
+#define LPDDR3_05_TREF (0x4)
+#define LPDDR3_025_TREF (0x5)
+#define LPDDR3_025_TREF_DERATE (0x6)
+/* SDRAM High temperature operating limit exceeded */
+#define LPDDR3_HT_EXED (0x7)
+
+/* 1:value has changed since last read of MR4 */
+#define LPDDR3_TUF (0x1 << 7)
+
+/* MR8 (Basic configuration 4) */
+#define LPDDR3_S8 (0x3)
+#define LPDDR3_DENSITY(mr8) (8 << (((mr8) >> 2) & 0xf))
+#define LPDDR3_IO_WIDTH(mr8) (32 >> (((mr8) >> 6) & 0x3))
+
+/* MR10 (Calibration) */
+#define LPDDR3_ZQINIT (0xff)
+#define LPDDR3_ZQCL (0xab)
+#define LPDDR3_ZQCS (0x56)
+#define LPDDR3_ZQRESET (0xc3)
+
+/* MR11 (ODT Control) */
+#define LPDDR3_ODT_60 (1)
+#define LPDDR3_ODT_120 (2)
+#define LPDDR3_ODT_240 (3)
+#define LPDDR3_ODT_DIS (0)
+
+/* MR2 (Device Feature 2) */
+/* RL & nRTP for DBI-RD Disabled */
+#define LPDDR4_RL6_NRTP8 (0x0)
+#define LPDDR4_RL10_NRTP8 (0x1)
+#define LPDDR4_RL14_NRTP8 (0x2)
+#define LPDDR4_RL20_NRTP8 (0x3)
+#define LPDDR4_RL24_NRTP10 (0x4)
+#define LPDDR4_RL28_NRTP12 (0x5)
+#define LPDDR4_RL32_NRTP14 (0x6)
+#define LPDDR4_RL36_NRTP16 (0x7)
+/* RL & nRTP for DBI-RD Enabled */
+#define LPDDR4_RL12_NRTP8 (0x1)
+#define LPDDR4_RL16_NRTP8 (0x2)
+#define LPDDR4_RL22_NRTP8 (0x3)
+#define LPDDR4_RL28_NRTP10 (0x4)
+#define LPDDR4_RL32_NRTP12 (0x5)
+#define LPDDR4_RL36_NRTP14 (0x6)
+#define LPDDR4_RL40_NRTP16 (0x7)
+/* WL Set A,default */
+#define LPDDR4_A_WL4 (0x0)
+#define LPDDR4_A_WL6 (0x1)
+#define LPDDR4_A_WL8 (0x2)
+#define LPDDR4_A_WL10 (0x3)
+#define LPDDR4_A_WL12 (0x4)
+#define LPDDR4_A_WL14 (0x5)
+#define LPDDR4_A_WL16 (0x6)
+#define LPDDR4_A_WL18 (0x7)
+/* WL Set B, optional */
+#define LPDDR4_B_WL4 (0x0 << 3)
+#define LPDDR4_B_WL8 (0x1 << 3)
+#define LPDDR4_B_WL12 (0x2 << 3)
+#define LPDDR4_B_WL18 (0x3 << 3)
+#define LPDDR4_B_WL22 (0x4 << 3)
+#define LPDDR4_B_WL26 (0x5 << 3)
+#define LPDDR4_B_WL30 (0x6 << 3)
+#define LPDDR4_B_WL34 (0x7 << 3)
+/* 1:Select WL Set B*/
+#define LPDDR4_WL_B (1 << 6)
+/* 1:enable*/
+#define LPDDR4_WR_LEVEL (1 << 7)
+
+/* MR3 */
+#define LPDDR4_VDDQ_2_5 (0)
+#define LPDDR4_VDDQ_3 (1)
+#define LPDDR4_WRPST_0_5_TCK (0 << 1)
+#define LPDDR4_WRPST_1_5_TCK (1 << 1)
+#define LPDDR4_PPR_EN (1 << 2)
+/* PDDS */
+#define LPDDR4_PDDS_240 (0x1 << 3)
+#define LPDDR4_PDDS_120 (0x2 << 3)
+#define LPDDR4_PDDS_80 (0x3 << 3)
+#define LPDDR4_PDDS_60 (0x4 << 3)
+#define LPDDR4_PDDS_48 (0x5 << 3)
+#define LPDDR4_PDDS_40 (0x6 << 3)
+#define LPDDR4_DBI_RD_EN (1 << 6)
+#define LPDDR4_DBI_WR_EN (1 << 7)
+
+/* MR11 (ODT Control) */
+#define LPDDR4_DQODT_240 (1)
+#define LPDDR4_DQODT_120 (2)
+#define LPDDR4_DQODT_80 (3)
+#define LPDDR4_DQODT_60 (4)
+#define LPDDR4_DQODT_48 (5)
+#define LPDDR4_DQODT_40 (6)
+#define LPDDR4_DQODT_DIS (0)
+#define LPDDR4_CAODT_240 (1 << 4)
+#define LPDDR4_CAODT_120 (2 << 4)
+#define LPDDR4_CAODT_80 (3 << 4)
+#define LPDDR4_CAODT_60 (4 << 4)
+#define LPDDR4_CAODT_48 (5 << 4)
+#define LPDDR4_CAODT_40 (6 << 4)
+#define LPDDR4_CAODT_DIS (0 << 4)
+
+/*
+ * Description: calculate the spec timings of the DRAM type selected in
+ * "timing_config" and store them in "pdram_timing"
+ * parameters:
+ * input: timing_config
+ * output: pdram_timing
+ * NOTE: the MR ODT value is always programmed here; if ODT is not
+ * wanted, the controller has to disable it
+ */
+void dram_get_parameter(struct timing_related_config *timing_config,
+ struct dram_timing_t *pdram_timing);
+
+#endif /* _DRAM_SPEC_TIMING_HEAD_ */
diff --git a/plat/rockchip/rk3399/drivers/dram/suspend.c b/plat/rockchip/rk3399/drivers/dram/suspend.c
new file mode 100644
index 00000000..f66150ae
--- /dev/null
+++ b/plat/rockchip/rk3399/drivers/dram/suspend.c
@@ -0,0 +1,772 @@
+/*
+ * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch_helpers.h>
+#include <debug.h>
+#include <dram.h>
+#include <plat_private.h>
+#include <platform_def.h>
+#include <pmu_regs.h>
+#include <rk3399_def.h>
+#include <secure.h>
+#include <soc.h>
+#include <suspend.h>
+
+#define PMUGRF_OS_REG0 0x300
+#define PMUGRF_OS_REG1 0x304
+#define PMUGRF_OS_REG2 0x308
+#define PMUGRF_OS_REG3 0x30c
+
+#define CRU_SFTRST_DDR_CTRL(ch, n) ((0x1 << (8 + 16 + (ch) * 4)) | \
+ ((n) << (8 + (ch) * 4)))
+#define CRU_SFTRST_DDR_PHY(ch, n) ((0x1 << (9 + 16 + (ch) * 4)) | \
+ ((n) << (9 + (ch) * 4)))
+
+#define FBDIV_ENC(n) ((n) << 16)
+#define FBDIV_DEC(n) (((n) >> 16) & 0xfff)
+#define POSTDIV2_ENC(n) ((n) << 12)
+#define POSTDIV2_DEC(n) (((n) >> 12) & 0x7)
+#define POSTDIV1_ENC(n) ((n) << 8)
+#define POSTDIV1_DEC(n) (((n) >> 8) & 0x7)
+#define REFDIV_ENC(n) (n)
+#define REFDIV_DEC(n) ((n) & 0x3f)
+
+/* PMU CRU */
+#define PMUCRU_RSTNHOLD_CON0 0x120
+#define PMUCRU_RSTNHOLD_CON1 0x124
+
+#define PRESET_GPIO0_HOLD(n) (((n) << 7) | WMSK_BIT(7))
+#define PRESET_GPIO1_HOLD(n) (((n) << 8) | WMSK_BIT(8))
+
+#define SYS_COUNTER_FREQ_IN_MHZ (SYS_COUNTER_FREQ_IN_TICKS / 1000000)
+
+__pmusramdata uint32_t dpll_data[PLL_CON_COUNT];
+__pmusramdata uint32_t cru_clksel_con6;
+
+/*
+ * Copy @num registers from @src to @dst
+ */
+static __pmusramfunc void sram_regcpy(uintptr_t dst, uintptr_t src,
+ uint32_t num)
+{
+ while (num--) {
+ mmio_write_32(dst, mmio_read_32(src));
+ dst += sizeof(uint32_t);
+ src += sizeof(uint32_t);
+ }
+}
+
+/*
+ * Copy @num registers from @src to @dst
+ * This is intentionally a copy of the sram_regcpy function. PMUSRAM functions
+ * cannot be called from code running in DRAM.
+ */
+static void dram_regcpy(uintptr_t dst, uintptr_t src, uint32_t num)
+{
+ while (num--) {
+ mmio_write_32(dst, mmio_read_32(src));
+ dst += sizeof(uint32_t);
+ src += sizeof(uint32_t);
+ }
+}
+
+static __pmusramfunc uint32_t sram_get_timer_value(void)
+{
+ /*
+ * Generic delay timer implementation expects the timer to be a down
+ * counter. We apply bitwise NOT operator to the tick values returned
+ * by read_cntpct_el0() to simulate the down counter.
+ */
+ return (uint32_t)(~read_cntpct_el0());
+}
+
+static __pmusramfunc void sram_udelay(uint32_t usec)
+{
+ uint32_t start, cnt, delta, delta_us;
+
+ /* counter is decreasing */
+ start = sram_get_timer_value();
+ do {
+ cnt = sram_get_timer_value();
+ if (cnt > start) {
+ delta = UINT32_MAX - cnt;
+ delta += start;
+ } else
+ delta = start - cnt;
+ delta_us = (delta * SYS_COUNTER_FREQ_IN_MHZ);
+ } while (delta_us < usec);
+}
+
+static __pmusramfunc void configure_sgrf(void)
+{
+ /*
+ * SGRF_DDR_RGN_DPLL_CLK and SGRF_DDR_RGN_RTC_CLK:
+ * IC ECO bug, need to set this register.
+ *
+ * SGRF_DDR_RGN_BYPS:
+ * After the PD_CENTER suspend/resume, the DDR region
+ * related registers in the SGRF will be reset, we
+ * need to re-initialize them.
+ */
+ mmio_write_32(SGRF_BASE + SGRF_DDRRGN_CON0_16(16),
+ SGRF_DDR_RGN_DPLL_CLK |
+ SGRF_DDR_RGN_RTC_CLK |
+ SGRF_DDR_RGN_BYPS);
+}
+
+static __pmusramfunc void rkclk_ddr_reset(uint32_t channel, uint32_t ctl,
+ uint32_t phy)
+{
+ channel &= 0x1;
+ ctl &= 0x1;
+ phy &= 0x1;
+ mmio_write_32(CRU_BASE + CRU_SOFTRST_CON(4),
+ CRU_SFTRST_DDR_CTRL(channel, ctl) |
+ CRU_SFTRST_DDR_PHY(channel, phy));
+}
+
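+/*
+ * Reset sequence: assert both the controller and PHY resets, release the
+ * PHY reset first, then the controller reset, with a 10us settle time
+ * between each step.
+ */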
+static __pmusramfunc void phy_pctrl_reset(uint32_t ch)
+{
+ rkclk_ddr_reset(ch, 1, 1);
+ sram_udelay(10);
+ rkclk_ddr_reset(ch, 1, 0);
+ sram_udelay(10);
+ rkclk_ddr_reset(ch, 0, 0);
+ sram_udelay(10);
+}
+
+static __pmusramfunc void set_cs_training_index(uint32_t ch, uint32_t rank)
+{
+ uint32_t byte;
+
+ /* PHY_8/136/264/392 phy_per_cs_training_index_X 1bit offset_24 */
+ for (byte = 0; byte < 4; byte++)
+ mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 24,
+ rank << 24);
+}
+
+static __pmusramfunc void select_per_cs_training_index(uint32_t ch,
+ uint32_t rank)
+{
+ /* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
+ if ((mmio_read_32(PHY_REG(ch, 84)) >> 16) & 1)
+ set_cs_training_index(ch, rank);
+}
+
+static __pmusramfunc void override_write_leveling_value(uint32_t ch)
+{
+ uint32_t byte;
+
+ for (byte = 0; byte < 4; byte++) {
+ /*
+ * PHY_8/136/264/392
+ * phy_per_cs_training_multicast_en_X 1bit offset_16
+ */
+ mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 16,
+ 1 << 16);
+ mmio_clrsetbits_32(PHY_REG(ch, 63 + (128 * byte)),
+ 0xffff << 16,
+ 0x200 << 16);
+ }
+
+ /* CTL_200 ctrlupd_req 1bit offset_8 */
+ mmio_clrsetbits_32(CTL_REG(ch, 200), 0x1 << 8, 0x1 << 8);
+}
+
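+/*
+ * Run the trainings selected in training_flag on every rank of the given
+ * channel: each training is kicked off through the PI request registers,
+ * then PI_INT_STATUS is polled for the matching "done" bit, bailing out
+ * on the "error" bit or on a bad PHY observation value.
+ */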
+static __pmusramfunc int data_training(uint32_t ch,
+ struct rk3399_sdram_params *sdram_params,
+ uint32_t training_flag)
+{
+ uint32_t obs_0, obs_1, obs_2, obs_3, obs_err = 0;
+ uint32_t rank = sdram_params->ch[ch].rank;
+ uint32_t rank_mask;
+ uint32_t i, tmp;
+
+ if (sdram_params->dramtype == LPDDR4)
+ rank_mask = (rank == 1) ? 0x5 : 0xf;
+ else
+ rank_mask = (rank == 1) ? 0x1 : 0x3;
+
+ /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
+ mmio_setbits_32(PHY_REG(ch, 927), (1 << 22));
+
+ if (training_flag == PI_FULL_TRAINING) {
+ if (sdram_params->dramtype == LPDDR4) {
+ training_flag = PI_WRITE_LEVELING |
+ PI_READ_GATE_TRAINING |
+ PI_READ_LEVELING |
+ PI_WDQ_LEVELING;
+ } else if (sdram_params->dramtype == LPDDR3) {
+ training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
+ PI_READ_GATE_TRAINING;
+ } else if (sdram_params->dramtype == DDR3) {
+ training_flag = PI_WRITE_LEVELING |
+ PI_READ_GATE_TRAINING |
+ PI_READ_LEVELING;
+ }
+ }
+
+ /* ca training(LPDDR4,LPDDR3 support) */
+ if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
+ for (i = 0; i < 4; i++) {
+ if (!(rank_mask & (1 << i)))
+ continue;
+
+ select_per_cs_training_index(ch, i);
+ /* PI_100 PI_CALVL_EN:RW:8:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 100), 0x3 << 8, 0x2 << 8);
+
+ /* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 92),
+ (0x1 << 16) | (0x3 << 24),
+ (0x1 << 16) | (i << 24));
+ while (1) {
+ /* PI_174 PI_INT_STATUS:RD:8:18 */
+ tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
+
+ /*
+ * check status obs
+ * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
+ */
+ obs_0 = mmio_read_32(PHY_REG(ch, 532));
+ obs_1 = mmio_read_32(PHY_REG(ch, 660));
+ obs_2 = mmio_read_32(PHY_REG(ch, 788));
+ if (((obs_0 >> 30) & 0x3) ||
+ ((obs_1 >> 30) & 0x3) ||
+ ((obs_2 >> 30) & 0x3))
+ obs_err = 1;
+ if ((((tmp >> 11) & 0x1) == 0x1) &&
+ (((tmp >> 13) & 0x1) == 0x1) &&
+ (((tmp >> 5) & 0x1) == 0x0) &&
+ (obs_err == 0))
+ break;
+ else if ((((tmp >> 5) & 0x1) == 0x1) ||
+ (obs_err == 1))
+ return -1;
+ }
+ /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
+ mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
+ }
+ mmio_clrbits_32(PI_REG(ch, 100), 0x3 << 8);
+ }
+
+ /* write leveling(LPDDR4,LPDDR3,DDR3 support) */
+ if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
+ for (i = 0; i < rank; i++) {
+ select_per_cs_training_index(ch, i);
+ /* PI_60 PI_WRLVL_EN:RW:8:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 60), 0x3 << 8, 0x2 << 8);
+ /* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 59),
+ (0x1 << 8) | (0x3 << 16),
+ (0x1 << 8) | (i << 16));
+
+ while (1) {
+ /* PI_174 PI_INT_STATUS:RD:8:18 */
+ tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
+
+ /*
+ * check the status obs; on an error, leveling
+ * may never complete. PHY_40/168/296/424
+ * phy_wrlvl_status_obs_X:0:13
+ */
+ obs_0 = mmio_read_32(PHY_REG(ch, 40));
+ obs_1 = mmio_read_32(PHY_REG(ch, 168));
+ obs_2 = mmio_read_32(PHY_REG(ch, 296));
+ obs_3 = mmio_read_32(PHY_REG(ch, 424));
+ if (((obs_0 >> 12) & 0x1) ||
+ ((obs_1 >> 12) & 0x1) ||
+ ((obs_2 >> 12) & 0x1) ||
+ ((obs_3 >> 12) & 0x1))
+ obs_err = 1;
+ if ((((tmp >> 10) & 0x1) == 0x1) &&
+ (((tmp >> 13) & 0x1) == 0x1) &&
+ (((tmp >> 4) & 0x1) == 0x0) &&
+ (obs_err == 0))
+ break;
+ else if ((((tmp >> 4) & 0x1) == 0x1) ||
+ (obs_err == 1))
+ return -1;
+ }
+
+ /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
+ mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
+ }
+ override_write_leveling_value(ch);
+ mmio_clrbits_32(PI_REG(ch, 60), 0x3 << 8);
+ }
+
+ /* read gate training(LPDDR4,LPDDR3,DDR3 support) */
+ if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
+ for (i = 0; i < rank; i++) {
+ select_per_cs_training_index(ch, i);
+ /* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 24,
+ 0x2 << 24);
+ /*
+ * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
+ * PI_RDLVL_CS:RW:24:2
+ */
+ mmio_clrsetbits_32(PI_REG(ch, 74),
+ (0x1 << 16) | (0x3 << 24),
+ (0x1 << 16) | (i << 24));
+
+ while (1) {
+ /* PI_174 PI_INT_STATUS:RD:8:18 */
+ tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
+
+ /*
+ * check status obs
+ * PHY_43/171/299/427
+ * PHY_GTLVL_STATUS_OBS_x:16:8
+ */
+ obs_0 = mmio_read_32(PHY_REG(ch, 43));
+ obs_1 = mmio_read_32(PHY_REG(ch, 171));
+ obs_2 = mmio_read_32(PHY_REG(ch, 299));
+ obs_3 = mmio_read_32(PHY_REG(ch, 427));
+ if (((obs_0 >> (16 + 6)) & 0x3) ||
+ ((obs_1 >> (16 + 6)) & 0x3) ||
+ ((obs_2 >> (16 + 6)) & 0x3) ||
+ ((obs_3 >> (16 + 6)) & 0x3))
+ obs_err = 1;
+ if ((((tmp >> 9) & 0x1) == 0x1) &&
+ (((tmp >> 13) & 0x1) == 0x1) &&
+ (((tmp >> 3) & 0x1) == 0x0) &&
+ (obs_err == 0))
+ break;
+ else if ((((tmp >> 3) & 0x1) == 0x1) ||
+ (obs_err == 1))
+ return -1;
+ }
+ /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
+ mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
+ }
+ mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 24);
+ }
+
+ /* read leveling(LPDDR4,LPDDR3,DDR3 support) */
+ if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
+ for (i = 0; i < rank; i++) {
+ select_per_cs_training_index(ch, i);
+ /* PI_80 PI_RDLVL_EN:RW:16:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 16,
+ 0x2 << 16);
+ /* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 74),
+ (0x1 << 8) | (0x3 << 24),
+ (0x1 << 8) | (i << 24));
+ while (1) {
+ /* PI_174 PI_INT_STATUS:RD:8:18 */
+ tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
+
+ /*
+ * make sure status obs not report error bit
+ * PHY_46/174/302/430
+ * phy_rdlvl_status_obs_X:16:8
+ */
+ if ((((tmp >> 8) & 0x1) == 0x1) &&
+ (((tmp >> 13) & 0x1) == 0x1) &&
+ (((tmp >> 2) & 0x1) == 0x0))
+ break;
+ else if (((tmp >> 2) & 0x1) == 0x1)
+ return -1;
+ }
+ /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
+ mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
+ }
+ mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 16);
+ }
+
+ /* wdq leveling(LPDDR4 support) */
+ if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
+ for (i = 0; i < 4; i++) {
+ if (!(rank_mask & (1 << i)))
+ continue;
+
+ select_per_cs_training_index(ch, i);
+ /*
+ * disable PI_WDQLVL_VREF_EN before wdq leveling?
+ * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
+ */
+ mmio_clrbits_32(PI_REG(ch, 181), 0x1 << 8);
+ /* PI_124 PI_WDQLVL_EN:RW:16:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 124), 0x3 << 16,
+ 0x2 << 16);
+ /* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
+ mmio_clrsetbits_32(PI_REG(ch, 121),
+ (0x1 << 8) | (0x3 << 16),
+ (0x1 << 8) | (i << 16));
+ while (1) {
+ /* PI_174 PI_INT_STATUS:RD:8:18 */
+ tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
+ if ((((tmp >> 12) & 0x1) == 0x1) &&
+ (((tmp >> 13) & 0x1) == 0x1) &&
+ (((tmp >> 6) & 0x1) == 0x0))
+ break;
+ else if (((tmp >> 6) & 0x1) == 0x1)
+ return -1;
+ }
+ /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
+ mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
+ }
+ mmio_clrbits_32(PI_REG(ch, 124), 0x3 << 16);
+ }
+
+ /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
+ mmio_clrbits_32(PHY_REG(ch, 927), (1 << 22));
+
+ return 0;
+}
+
+static __pmusramfunc void set_ddrconfig(
+ struct rk3399_sdram_params *sdram_params,
+ unsigned char channel, uint32_t ddrconfig)
+{
+ /* only need to set ddrconfig */
+ struct rk3399_sdram_channel *ch = &sdram_params->ch[channel];
+ unsigned int cs0_cap = 0;
+ unsigned int cs1_cap = 0;
+
+ cs0_cap = (1 << (ch->cs0_row + ch->col + ch->bk + ch->bw - 20));
+ if (ch->rank > 1)
+ cs1_cap = cs0_cap >> (ch->cs0_row - ch->cs1_row);
+ if (ch->row_3_4) {
+ cs0_cap = cs0_cap * 3 / 4;
+ cs1_cap = cs1_cap * 3 / 4;
+ }
+
+ mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICECONF,
+ ddrconfig | (ddrconfig << 6));
+ mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICESIZE,
+ ((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8));
+}
+
+static __pmusramfunc void dram_all_config(
+ struct rk3399_sdram_params *sdram_params)
+{
+ unsigned int i;
+
+ for (i = 0; i < 2; i++) {
+ struct rk3399_sdram_channel *info = &sdram_params->ch[i];
+ struct rk3399_msch_timings *noc = &info->noc_timings;
+
+ if (sdram_params->ch[i].col == 0)
+ continue;
+
+ mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGA0,
+ noc->ddrtiminga0.d32);
+ mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGB0,
+ noc->ddrtimingb0.d32);
+ mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGC0,
+ noc->ddrtimingc0.d32);
+ mmio_write_32(MSCH_BASE(i) + MSCH_DEVTODEV0,
+ noc->devtodev0.d32);
+ mmio_write_32(MSCH_BASE(i) + MSCH_DDRMODE, noc->ddrmode.d32);
+
+ /* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
+ if (sdram_params->ch[i].rank == 1)
+ mmio_setbits_32(CTL_REG(i, 276), 1 << 17);
+ }
+
+ DDR_STRIDE(sdram_params->stride);
+
+ /* reboot hold register set */
+ mmio_write_32(PMUCRU_BASE + CRU_PMU_RSTHOLD_CON(1),
+ CRU_PMU_SGRF_RST_RLS |
+ PRESET_GPIO0_HOLD(1) |
+ PRESET_GPIO1_HOLD(1));
+ mmio_clrsetbits_32(CRU_BASE + CRU_GLB_RST_CON, 0x3, 0x3);
+}
+
+static __pmusramfunc void pctl_cfg(uint32_t ch,
+ struct rk3399_sdram_params *sdram_params)
+{
+ const uint32_t *params_ctl = sdram_params->pctl_regs.denali_ctl;
+ const uint32_t *params_pi = sdram_params->pi_regs.denali_pi;
+ const struct rk3399_ddr_publ_regs *phy_regs = &sdram_params->phy_regs;
+ uint32_t tmp, tmp1, tmp2, i;
+
+ /*
+ * Workaround controller bug:
+ * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
+ */
+ sram_regcpy(CTL_REG(ch, 1), (uintptr_t)&params_ctl[1],
+ CTL_REG_NUM - 1);
+ mmio_write_32(CTL_REG(ch, 0), params_ctl[0]);
+ sram_regcpy(PI_REG(ch, 0), (uintptr_t)&params_pi[0],
+ PI_REG_NUM);
+
+ sram_regcpy(PHY_REG(ch, 910), (uintptr_t)&phy_regs->phy896[910 - 896],
+ 3);
+
+ mmio_clrsetbits_32(CTL_REG(ch, 68), PWRUP_SREFRESH_EXIT,
+ PWRUP_SREFRESH_EXIT);
+
+ /* PHY_DLL_RST_EN */
+ mmio_clrsetbits_32(PHY_REG(ch, 957), 0x3 << 24, 1 << 24);
+ dmbst();
+
+ mmio_setbits_32(PI_REG(ch, 0), START);
+ mmio_setbits_32(CTL_REG(ch, 0), START);
+
+ /* wait lock */
+ while (1) {
+ tmp = mmio_read_32(PHY_REG(ch, 920));
+ tmp1 = mmio_read_32(PHY_REG(ch, 921));
+ tmp2 = mmio_read_32(PHY_REG(ch, 922));
+ if ((((tmp >> 16) & 0x1) == 0x1) &&
+ (((tmp1 >> 16) & 0x1) == 0x1) &&
+ (((tmp1 >> 0) & 0x1) == 0x1) &&
+ (((tmp2 >> 0) & 0x1) == 0x1))
+ break;
+ /* if the PLL is bypassed, there is no need to wait for lock */
+ if (mmio_read_32(PHY_REG(ch, 911)) & 0x1)
+ break;
+ }
+
+ sram_regcpy(PHY_REG(ch, 896), (uintptr_t)&phy_regs->phy896[0], 63);
+
+ for (i = 0; i < 4; i++)
+ sram_regcpy(PHY_REG(ch, 128 * i),
+ (uintptr_t)&phy_regs->phy0[0], 91);
+
+ for (i = 0; i < 3; i++)
+ sram_regcpy(PHY_REG(ch, 512 + 128 * i),
+ (uintptr_t)&phy_regs->phy512[i][0], 38);
+}
+
+static __pmusramfunc int dram_switch_to_next_index(
+ struct rk3399_sdram_params *sdram_params)
+{
+ uint32_t ch, ch_count;
+ uint32_t fn = ((mmio_read_32(CTL_REG(0, 111)) >> 16) + 1) & 0x1;
+
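+ /*
+ * CIC_CTRL0 appears to use the usual Rockchip write-mask layout (the
+ * upper 16 bits enable writes to the matching lower bits): request a
+ * switch to frequency index "fn", then poll CIC_STATUS0 for the ack
+ * and done flags.
+ */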
+ mmio_write_32(CIC_BASE + CIC_CTRL0,
+ (((0x3 << 4) | (1 << 2) | 1) << 16) |
+ (fn << 4) | (1 << 2) | 1);
+ while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 2)))
+ ;
+
+ mmio_write_32(CIC_BASE + CIC_CTRL0, 0x20002);
+ while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 0)))
+ ;
+
+ ch_count = sdram_params->num_channels;
+
+ /* LPDDR4 f2 can't do training, all training will fail */
+ for (ch = 0; ch < ch_count; ch++) {
+ mmio_clrsetbits_32(PHY_REG(ch, 896), (0x3 << 8) | 1,
+ fn << 8);
+
+ /* data_training failed */
+ if (data_training(ch, sdram_params, PI_FULL_TRAINING))
+ return -1;
+ }
+
+ return 0;
+}
+
+/*
+ * Needs to be done for both channels at once in case of a shared reset signal
+ * between channels.
+ */
+static __pmusramfunc int pctl_start(uint32_t channel_mask,
+ struct rk3399_sdram_params *sdram_params)
+{
+ uint32_t count;
+ uint32_t byte;
+
+ mmio_setbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
+ mmio_setbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
+
+ /* the IO retention must be released before the controller START */
+ if (channel_mask & (1 << 0))
+ mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 19));
+ if (channel_mask & (1 << 1))
+ mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 23));
+
+ /* PHY_DLL_RST_EN */
+ if (channel_mask & (1 << 0))
+ mmio_clrsetbits_32(PHY_REG(0, 957), 0x3 << 24,
+ 0x2 << 24);
+ if (channel_mask & (1 << 1))
+ mmio_clrsetbits_32(PHY_REG(1, 957), 0x3 << 24,
+ 0x2 << 24);
+
+ /* check ERROR bit */
+ if (channel_mask & (1 << 0)) {
+ count = 0;
+ while (!(mmio_read_32(CTL_REG(0, 203)) & (1 << 3))) {
+ /* CKE is still low; poll every 100us, time out after 10ms */
+ if (count > 100)
+ return -1;
+
+ sram_udelay(100);
+ count++;
+ }
+
+ mmio_clrbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
+
+ /* Restore the PHY_RX_CAL_DQS value */
+ for (byte = 0; byte < 4; byte++)
+ mmio_clrsetbits_32(PHY_REG(0, 57 + 128 * byte),
+ 0xfff << 16,
+ sdram_params->rx_cal_dqs[0][byte]);
+ }
+ if (channel_mask & (1 << 1)) {
+ count = 0;
+ while (!(mmio_read_32(CTL_REG(1, 203)) & (1 << 3))) {
+ /* CKE is still low; poll every 100us, time out after 10ms */
+ if (count > 100)
+ return -1;
+
+ sram_udelay(100);
+ count++;
+ }
+
+ mmio_clrbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
+
+ /* Restore the PHY_RX_CAL_DQS value */
+ for (byte = 0; byte < 4; byte++)
+ mmio_clrsetbits_32(PHY_REG(1, 57 + 128 * byte),
+ 0xfff << 16,
+ sdram_params->rx_cal_dqs[1][byte]);
+ }
+
+ return 0;
+}
+
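+/*
+ * Restore a PLL from the register snapshot in @src: drop it into slow
+ * mode first, re-program the divider registers, write CON3 (which holds
+ * the mode bits) last, then wait for the lock flag in CON2.
+ */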
+__pmusramfunc static void pmusram_restore_pll(int pll_id, uint32_t *src)
+{
+ mmio_write_32((CRU_BASE + CRU_PLL_CON(pll_id, 3)), PLL_SLOW_MODE);
+
+ mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 0), src[0] | REG_SOC_WMSK);
+ mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 1), src[1] | REG_SOC_WMSK);
+ mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 2), src[2]);
+ mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 4), src[4] | REG_SOC_WMSK);
+ mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 5), src[5] | REG_SOC_WMSK);
+
+ mmio_write_32(CRU_BASE + CRU_PLL_CON(pll_id, 3), src[3] | REG_SOC_WMSK);
+
+ while ((mmio_read_32(CRU_BASE + CRU_PLL_CON(pll_id, 2)) &
+ (1 << 31)) == 0x0)
+ ;
+}
+
+void dmc_suspend(void)
+{
+ struct rk3399_sdram_params *sdram_params = &sdram_config;
+ struct rk3399_ddr_publ_regs *phy_regs;
+ uint32_t *params_ctl;
+ uint32_t *params_pi;
+ uint32_t refdiv, postdiv2, postdiv1, fbdiv;
+ uint32_t ch, byte, i;
+
+ phy_regs = &sdram_params->phy_regs;
+ params_ctl = sdram_params->pctl_regs.denali_ctl;
+ params_pi = sdram_params->pi_regs.denali_pi;
+
+ /* save dpll register and ddr clock register value to pmusram */
+ cru_clksel_con6 = mmio_read_32(CRU_BASE + CRU_CLKSEL_CON6);
+ for (i = 0; i < PLL_CON_COUNT; i++)
+ dpll_data[i] = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, i));
+
+ fbdiv = dpll_data[0] & 0xfff;
+ postdiv2 = POSTDIV2_DEC(dpll_data[1]);
+ postdiv1 = POSTDIV1_DEC(dpll_data[1]);
+ refdiv = REFDIV_DEC(dpll_data[1]);
+
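+ /* DPLL rate = 24MHz OSC * fbdiv / (refdiv * postdiv1 * postdiv2) */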
+ sdram_params->ddr_freq = ((fbdiv * 24) /
+ (refdiv * postdiv1 * postdiv2)) * MHz;
+
+ INFO("sdram_params->ddr_freq = %d\n", sdram_params->ddr_freq);
+ sdram_params->odt = (((mmio_read_32(PHY_REG(0, 5)) >> 16) &
+ 0x7) != 0) ? 1 : 0;
+
+ /* copy the registers CTL PI and PHY */
+ dram_regcpy((uintptr_t)&params_ctl[0], CTL_REG(0, 0), CTL_REG_NUM);
+
+ /* mask DENALI_CTL_00_DATA.START; only copied here, it is triggered later */
+ params_ctl[0] &= ~(0x1 << 0);
+
+ dram_regcpy((uintptr_t)&params_pi[0], PI_REG(0, 0),
+ PI_REG_NUM);
+
+ /* mask DENALI_PI_00_DATA.START; only copied here, it is triggered later */
+ params_pi[0] &= ~(0x1 << 0);
+
+ dram_regcpy((uintptr_t)&phy_regs->phy0[0],
+ PHY_REG(0, 0), 91);
+
+ for (i = 0; i < 3; i++)
+ dram_regcpy((uintptr_t)&phy_regs->phy512[i][0],
+ PHY_REG(0, 512 + 128 * i), 38);
+
+ dram_regcpy((uintptr_t)&phy_regs->phy896[0], PHY_REG(0, 896), 63);
+
+ for (ch = 0; ch < sdram_params->num_channels; ch++) {
+ for (byte = 0; byte < 4; byte++)
+ sdram_params->rx_cal_dqs[ch][byte] = (0xfff << 16) &
+ mmio_read_32(PHY_REG(ch, 57 + byte * 128));
+ }
+
+ /* set DENALI_PHY_957_DATA.PHY_DLL_RST_EN = 0x1 */
+ phy_regs->phy896[957 - 896] &= ~(0x3 << 24);
+ phy_regs->phy896[957 - 896] |= 1 << 24;
+ phy_regs->phy896[0] |= 1;
+ phy_regs->phy896[0] &= ~(0x3 << 8);
+}
+
+__pmusramfunc void dmc_resume(void)
+{
+ struct rk3399_sdram_params *sdram_params = &sdram_config;
+ uint32_t channel_mask = 0;
+ uint32_t channel;
+
+ sram_secure_timer_init();
+
+ /*
+ * the ddr clock was switched to abpll for suspend,
+ * switch it back to dpll here
+ */
+ mmio_write_32(CRU_BASE + CRU_CLKSEL_CON6,
+ cru_clksel_con6 | REG_SOC_WMSK);
+ pmusram_restore_pll(DPLL_ID, dpll_data);
+
+ configure_sgrf();
+
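+ /*
+ * Bring both channels back from the register snapshots saved in
+ * dmc_suspend(); if the controller fails to bring CKE up or any
+ * training fails, restart the whole sequence from the resets.
+ */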
+retry:
+ for (channel = 0; channel < sdram_params->num_channels; channel++) {
+ phy_pctrl_reset(channel);
+ pctl_cfg(channel, sdram_params);
+ }
+
+ for (channel = 0; channel < 2; channel++) {
+ if (sdram_params->ch[channel].col)
+ channel_mask |= 1 << channel;
+ }
+
+ if (pctl_start(channel_mask, sdram_params) < 0)
+ goto retry;
+
+ for (channel = 0; channel < sdram_params->num_channels; channel++) {
+ /* LPDDR2/LPDDR3 need to wait for DAI to complete, max 10us */
+ if (sdram_params->dramtype == LPDDR3)
+ sram_udelay(10);
+
+ /* If training fails, retry from the top. */
+ if (data_training(channel, sdram_params, PI_FULL_TRAINING))
+ goto retry;
+
+ set_ddrconfig(sdram_params, channel,
+ sdram_params->ch[channel].ddrconfig);
+ }
+
+ dram_all_config(sdram_params);
+
+ /* Switch to index 1 and prepare for DDR frequency switch. */
+ dram_switch_to_next_index(sdram_params);
+}
diff --git a/plat/rockchip/rk3399/drivers/dram/suspend.h b/plat/rockchip/rk3399/drivers/dram/suspend.h
new file mode 100644
index 00000000..a8a86410
--- /dev/null
+++ b/plat/rockchip/rk3399/drivers/dram/suspend.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef __SOC_ROCKCHIP_RK3399_SUSPEND_H__
+#define __SOC_ROCKCHIP_RK3399_SUSPEND_H__
+#include <dram.h>
+
+#define KHz (1000)
+#define MHz (1000 * KHz)
+#define GHz (1000 * MHz)
+
+#define PI_CA_TRAINING (1 << 0)
+#define PI_WRITE_LEVELING (1 << 1)
+#define PI_READ_GATE_TRAINING (1 << 2)
+#define PI_READ_LEVELING (1 << 3)
+#define PI_WDQ_LEVELING (1 << 4)
+#define PI_FULL_TRAINING (0xff)
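+/* PI_FULL_TRAINING is expanded per DRAM type inside data_training() */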
+
+void dmc_suspend(void);
+__pmusramfunc void dmc_resume(void);
+
+#endif /* __SOC_ROCKCHIP_RK3399_SUSPEND_H__ */