AMC13
Firmware for the different applications of the AMC13 uTCA board, made at Boston University
mig_7series_v1_9_ddr_phy_init.v
1  //*****************************************************************************
2 // (c) Copyright 2009 - 2013 Xilinx, Inc. All rights reserved.
3 //
4 // This file contains confidential and proprietary information
5 // of Xilinx, Inc. and is protected under U.S. and
6 // international copyright and other intellectual property
7 // laws.
8 //
9 // DISCLAIMER
10 // This disclaimer is not a license and does not grant any
11 // rights to the materials distributed herewith. Except as
12 // otherwise provided in a valid license issued to you by
13 // Xilinx, and to the maximum extent permitted by applicable
14 // law: (1) THESE MATERIALS ARE MADE AVAILABLE "AS IS" AND
15 // WITH ALL FAULTS, AND XILINX HEREBY DISCLAIMS ALL WARRANTIES
16 // AND CONDITIONS, EXPRESS, IMPLIED, OR STATUTORY, INCLUDING
17 // BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, NON-
18 // INFRINGEMENT, OR FITNESS FOR ANY PARTICULAR PURPOSE; and
19 // (2) Xilinx shall not be liable (whether in contract or tort,
20 // including negligence, or under any other theory of
21 // liability) for any loss or damage of any kind or nature
22 // related to, arising under or in connection with these
23 // materials, including for any direct, or any indirect,
24 // special, incidental, or consequential loss or damage
25 // (including loss of data, profits, goodwill, or any type of
26 // loss or damage suffered as a result of any action brought
27 // by a third party) even if such damage or loss was
28 // reasonably foreseeable or Xilinx had been advised of the
29 // possibility of the same.
30 //
31 // CRITICAL APPLICATIONS
32 // Xilinx products are not designed or intended to be fail-
33 // safe, or for use in any application requiring fail-safe
34 // performance, such as life-support or safety devices or
35 // systems, Class III medical devices, nuclear facilities,
36 // applications related to the deployment of airbags, or any
37 // other applications that could lead to death, personal
38 // injury, or severe property or environmental damage
39 // (individually and collectively, "Critical
40 // Applications"). Customer assumes the sole risk and
41 // liability of any use of Xilinx products in Critical
42 // Applications, subject only to applicable laws and
43 // regulations governing limitations on product liability.
44 //
45 // THIS COPYRIGHT NOTICE AND DISCLAIMER MUST BE RETAINED AS
46 // PART OF THIS FILE AT ALL TIMES.
47 //
48 //*****************************************************************************
 49 //   ____  ____
 50 //  /   /\/   /
 51 // /___/  \  /    Vendor: Xilinx
 52 // \   \   \/     Version: %version
 53 //  \   \         Application: MIG
 54 //  /   /         Filename: ddr_phy_init.v
 55 // /___/   /\     Date Last Modified: $Date: 2011/06/02 08:35:09 $
 56 // \   \  /  \    Date Created:
 57 //  \___\/\___\
58 //
59 //Device: 7 Series
60 //Design Name: DDR3 SDRAM
61 //Purpose:
62 // Memory initialization and overall master state control during
63 // initialization and calibration. Specifically, the following functions
64 // are performed:
65 // 1. Memory initialization (initial AR, mode register programming, etc.)
66 // 2. Initiating write leveling
67 // 3. Generate training pattern writes for read leveling. Generate
68 // memory readback for read leveling.
69 // This module has an interface for providing control/address and write
70 // data to the PHY Control Block during initialization/calibration.
71 // Once initialization and calibration are complete, control is passed to the MC.
72 //
73 //Reference:
74 //Revision History:
75 //
76 //*****************************************************************************
77 
78 /******************************************************************************
79 **$Id: ddr_phy_init.v,v 1.1 2011/06/02 08:35:09 mishra Exp $
80 **$Date: 2011/06/02 08:35:09 $
81 **$Author: mishra $
82 **$Revision: 1.1 $
83 **$Source: /devl/xcs/repo/env/Databases/ip/src2/O/mig_7series_v1_3/data/dlib/7series/ddr3_sdram/verilog/rtl/phy/ddr_phy_init.v,v $
84 *******************************************************************************/
85 
86 `timescale 1ps/1ps
87 
88 
 89 module mig_7series_v1_9_ddr_phy_init #
 90  (
91  parameter TCQ = 100,
92  parameter nCK_PER_CLK = 4, // # of memory clocks per CLK
93  parameter CLK_PERIOD = 3000, // Logic (internal) clk period (in ps)
94  parameter USE_ODT_PORT = 0, // 0 - No ODT output from FPGA
95  // 1 - ODT output from FPGA
96  parameter PRBS_WIDTH = 8, // PRBS sequence = 2^PRBS_WIDTH
97  parameter BANK_WIDTH = 2,
98  parameter CA_MIRROR = "OFF", // C/A mirror opt for DDR3 dual rank
99  parameter COL_WIDTH = 10,
100  parameter nCS_PER_RANK = 1, // # of CS bits per rank e.g. for
101  // component I/F with CS_WIDTH=1,
102  // nCS_PER_RANK=# of components
103  parameter DQ_WIDTH = 64,
104  parameter DQS_WIDTH = 8,
105  parameter DQS_CNT_WIDTH = 3, // = ceil(log2(DQS_WIDTH))
106  parameter ROW_WIDTH = 14,
107  parameter CS_WIDTH = 1,
108  parameter RANKS = 1, // # of memory ranks in the interface
109  parameter CKE_WIDTH = 1, // # of cke outputs
110  parameter DRAM_TYPE = "DDR3",
111  parameter REG_CTRL = "ON",
112  parameter ADDR_CMD_MODE= "1T",
113 
114  // calibration Address
115  parameter CALIB_ROW_ADD = 16'h0000,// Calibration row address
116  parameter CALIB_COL_ADD = 12'h000, // Calibration column address
117  parameter CALIB_BA_ADD = 3'h0, // Calibration bank address
118 
119  // DRAM mode settings
120  parameter AL = "0", // Additive Latency option
121  parameter BURST_MODE = "8", // Burst length
122  parameter BURST_TYPE = "SEQ", // Burst type
123 // parameter nAL = 0, // Additive latency (in clk cyc)
124  parameter nCL = 5, // Read CAS latency (in clk cyc)
125  parameter nCWL = 5, // Write CAS latency (in clk cyc)
126  parameter tRFC = 110000, // Refresh-to-command delay (in ps)
127  parameter OUTPUT_DRV = "HIGH", // DRAM reduced output drive option
128  parameter RTT_NOM = "60", // Nominal ODT termination value
129  parameter RTT_WR = "60", // Write ODT termination value
130  parameter WRLVL = "ON", // Enable write leveling
131 // parameter PHASE_DETECT = "ON", // Enable read phase detector
132  parameter DDR2_DQSN_ENABLE = "YES", // Enable differential DQS for DDR2
133  parameter nSLOTS = 1, // Number of DIMM SLOTs in the system
134  parameter SIM_INIT_OPTION = "NONE", // "NONE", "SKIP_PU_DLY", "SKIP_INIT"
135  parameter SIM_CAL_OPTION = "NONE", // "NONE", "FAST_CAL", "SKIP_CAL"
136  parameter CKE_ODT_AUX = "FALSE",
137  parameter PRE_REV3ES = "OFF", // Enable TG error detection during calibration
138  parameter TEST_AL = "0" // Internal use for ICM verification
139  )
140  (
141  input clk,
142  input rst,
143  input [2*8*nCK_PER_CLK-1:0] prbs_o,
144  input delay_incdec_done,
145  input ck_addr_cmd_delay_done,
146  input pi_phase_locked_all,
147  input pi_dqs_found_done,
148  input dqsfound_retry,
149  input dqs_found_prech_req,
150  output reg pi_phaselock_start,
151  output pi_phase_locked_err,
152  output pi_calib_done,
153  input phy_if_empty,
154  // Read/write calibration interface
155  input wrlvl_done,
156  input wrlvl_rank_done,
157  input wrlvl_byte_done,
158  input wrlvl_byte_redo,
159  input wrlvl_final,
160  output reg wrlvl_final_if_rst,
161  input oclkdelay_calib_done,
162  input oclk_prech_req,
163  input oclk_calib_resume,
164  output reg oclkdelay_calib_start,
165  input done_dqs_tap_inc,
166  input [5:0] rd_data_offset_0,
167  input [5:0] rd_data_offset_1,
168  input [5:0] rd_data_offset_2,
169  input [6*RANKS-1:0] rd_data_offset_ranks_0,
170  input [6*RANKS-1:0] rd_data_offset_ranks_1,
171  input [6*RANKS-1:0] rd_data_offset_ranks_2,
172  input pi_dqs_found_rank_done,
173  input wrcal_done,
174  input wrcal_prech_req,
175  input wrcal_read_req,
176  input wrcal_act_req,
177  input temp_wrcal_done,
178  input [7:0] slot_0_present,
179  input [7:0] slot_1_present,
180  output reg wl_sm_start,
181  output reg wr_lvl_start,
182  output reg wrcal_start,
183  output reg wrcal_rd_wait,
184  output reg wrcal_sanity_chk,
185  output reg tg_timer_done,
186  output reg no_rst_tg_mc,
187  input rdlvl_stg1_done,
188  input rdlvl_stg1_rank_done,
189  output reg rdlvl_stg1_start,
190  output reg pi_dqs_found_start,
191  output reg detect_pi_found_dqs,
192  // rdlvl stage 1 precharge requested after each DQS
193  input rdlvl_prech_req,
194  input rdlvl_last_byte_done,
195  input wrcal_resume,
196  input wrcal_sanity_chk_done,
197  // MPR read leveling
198  input mpr_rdlvl_done,
199  input mpr_rnk_done,
200  input mpr_last_byte_done,
201  output reg mpr_rdlvl_start,
202  output reg mpr_end_if_reset,
203 
204  // PRBS Read Leveling
205  input prbs_rdlvl_done,
206  input prbs_last_byte_done,
207  input prbs_rdlvl_prech_req,
208  output reg prbs_rdlvl_start,
209  output reg prbs_gen_clk_en,
210 
211  // Signals shared btw multiple calibration stages
212  output reg prech_done,
213  // Data select / status
214  output reg init_calib_complete,
215  // Signal to mask memory model error for Invalid latching edge
216  output reg calib_writes,
217  // PHY address/control
218  // 2 commands to PHY Control Block per div 2 clock in 2:1 mode
219  // 4 commands to PHY Control Block per div 4 clock in 4:1 mode
220  output reg [nCK_PER_CLK*ROW_WIDTH-1:0] phy_address,
221  output reg [nCK_PER_CLK*BANK_WIDTH-1:0]phy_bank,
222  output reg [nCK_PER_CLK-1:0] phy_ras_n,
223  output reg [nCK_PER_CLK-1:0] phy_cas_n,
224  output reg [nCK_PER_CLK-1:0] phy_we_n,
225  output reg phy_reset_n,
226  output [CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK-1:0] phy_cs_n,
227 
228  // Hard PHY Interface signals
229  input phy_ctl_ready,
230  input phy_ctl_full,
231  input phy_cmd_full,
232  input phy_data_full,
233  output reg calib_ctl_wren,
234  output reg calib_cmd_wren,
235  output reg [1:0] calib_seq,
236  output reg write_calib,
237  output reg read_calib,
238  // PHY_Ctl_Wd
239  output reg [2:0] calib_cmd,
240  // calib_aux_out used for CKE and ODT
241  output reg [3:0] calib_aux_out,
242  output reg [1:0] calib_odt ,
243  output reg [nCK_PER_CLK-1:0] calib_cke ,
244  output [1:0] calib_rank_cnt,
245  output reg [1:0] calib_cas_slot,
246  output reg [5:0] calib_data_offset_0,
247  output reg [5:0] calib_data_offset_1,
248  output reg [5:0] calib_data_offset_2,
249  // PHY OUT_FIFO
250  output reg calib_wrdata_en,
251  output reg [2*nCK_PER_CLK*DQ_WIDTH-1:0] phy_wrdata,
252  // PHY Read
253  output phy_rddata_en,
254  output phy_rddata_valid,
255  output [255:0] dbg_phy_init
256  );
257 
258 //*****************************************************************************
259 // Assertions to be added
260 //*****************************************************************************
261 // The phy_ctl_full signal must never be asserted in synchronous mode of
262 // operation either 4:1 or 2:1
263 //
264 // The RANKS parameter must never be set to '0' by the user
265 // valid values: 1 to 4
266 //
267 //*****************************************************************************
268 
269  //***************************************************************************
270 
271  // Number of Read level stage 1 writes limited to a SDRAM row
272  // The address of Read Level stage 1 reads must also be limited
273  // to a single SDRAM row
274  // (2^COL_WIDTH)/BURST_MODE = (2^10)/8 = 128
275  localparam NUM_STG1_WR_RD = (BURST_MODE == "8") ? 4 :
276  (BURST_MODE == "4") ? 8 : 4;
277 
278 
279  localparam ADDR_INC = (BURST_MODE == "8") ? 8 :
280  (BURST_MODE == "4") ? 4 : 8;
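  // Illustration (added, not in the original source), assuming the default
  // BURST_MODE = "8" and COL_WIDTH = 10: NUM_STG1_WR_RD = 4 bursts and
  // ADDR_INC = 8 columns per BL8 burst, so the stage 1 pattern occupies
  // 4*8 = 32 column addresses out of the 2^10 = 1024 columns in one row.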
281 
 282  // In a 2-slot, dual-rank-per-slot system, the RTT_NOM values
 283  // for Rank2 and Rank3 default to 40 ohms
284  localparam RTT_NOM2 = "40";
285  localparam RTT_NOM3 = "40";
286 
287  localparam RTT_NOM_int = (USE_ODT_PORT == 1) ? RTT_NOM : RTT_WR;
288 
289  // Specifically for use with half-frequency controller (nCK_PER_CLK=2)
290  // = 1 if burst length = 4, = 0 if burst length = 8. Determines how
291  // often row command needs to be issued during read-leveling
292  // For DDR3 the burst length is fixed during calibration
293  localparam BURST4_FLAG = (DRAM_TYPE == "DDR3")? 1'b0 :
294  (BURST_MODE == "8") ? 1'b0 :
295  ((BURST_MODE == "4") ? 1'b1 : 1'b0);
296 
297 
298 
299 
300  //***************************************************************************
301  // Counter values used to determine bus timing
302  // NOTE on all counter terminal counts - these can/should be one less than
303  // the actual delay to take into account extra clock cycle delay in
304  // generating the corresponding "done" signal
305  //***************************************************************************
306 
307  localparam CLK_MEM_PERIOD = CLK_PERIOD / nCK_PER_CLK;
308 
309  // Calculate initial delay required in number of CLK clock cycles
310  // to delay initially. The counter is clocked by [CLK/1024] - which
311  // is approximately division by 1000 - note that the formulas below will
312  // result in more than the minimum wait time because of this approximation.
313  // NOTE: For DDR3 JEDEC specifies to delay reset
314  // by 200us, and CKE by an additional 500us after power-up
315  // For DDR2 CKE is delayed by 200us after power up.
316  localparam DDR3_RESET_DELAY_NS = 200000;
317  localparam DDR3_CKE_DELAY_NS = 500000 + DDR3_RESET_DELAY_NS;
318  localparam DDR2_CKE_DELAY_NS = 200000;
319  localparam PWRON_RESET_DELAY_CNT =
320  ((DDR3_RESET_DELAY_NS+CLK_PERIOD-1)/CLK_PERIOD);
321  localparam PWRON_CKE_DELAY_CNT = (DRAM_TYPE == "DDR3") ?
322  (((DDR3_CKE_DELAY_NS+CLK_PERIOD-1)/CLK_PERIOD)) :
323  (((DDR2_CKE_DELAY_NS+CLK_PERIOD-1)/CLK_PERIOD));
 324  // For DDR2 the -1 term is taken out; with the -1 the 200us minimum is
 325  // not met. The equation needs to be reworked.
326  localparam DDR2_INIT_PRE_DELAY_PS = 400000;
327  localparam DDR2_INIT_PRE_CNT =
328  ((DDR2_INIT_PRE_DELAY_PS+CLK_PERIOD-1)/CLK_PERIOD)-1;
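  // Worked example (added illustration, not in the original source),
  // assuming the default CLK_PERIOD = 3000 ps and DRAM_TYPE = "DDR3":
  //   PWRON_RESET_DELAY_CNT = ceil(200000/3000) = 67; the power-on counter
  //     ticks once per 1024 CLK cycles, so ~67*1024*3 ns = ~206 us >= 200 us
  //   PWRON_CKE_DELAY_CNT   = ceil(700000/3000) = 234, ~719 us >= 700 us
  //   DDR2_INIT_PRE_CNT (DDR2 only) = ceil(400000/3000) - 1 = 133, counted
  //     directly in CLK cycles, ~134*3 ns = ~402 ns >= 400 ns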
329 
330  // Calculate tXPR time: reset from CKE HIGH to valid command after power-up
 331  // tXPR = max(5 nCK, tRFC(min) + 10 ns). Add a few extra clock
332  // cycles because this counter actually starts up before CKE is asserted
333  // to memory.
334  localparam TXPR_DELAY_CNT =
335  (5*CLK_MEM_PERIOD > tRFC+10000) ?
336  (((5+nCK_PER_CLK-1)/nCK_PER_CLK)-1)+11 :
337  (((tRFC+10000+CLK_PERIOD-1)/CLK_PERIOD)-1)+11;
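  // Worked example (added illustration, not in the original source),
  // assuming the defaults tRFC = 110000 ps, CLK_PERIOD = 3000 ps,
  // nCK_PER_CLK = 4: 5*CLK_MEM_PERIOD = 3750 ps < tRFC + 10000 = 120000 ps,
  // so TXPR_DELAY_CNT = ceil(120000/3000) - 1 + 11 = 50 CLK cycles (~150 ns),
  // comfortably above tXPR = tRFC(min) + 10 ns = 120 ns.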
338 
339  // tDLLK/tZQINIT time = 512*tCK = 256*tCLKDIV
340  localparam TDLLK_TZQINIT_DELAY_CNT = 255;
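  // Note (added illustration, not in the original source): 512 tCK is
  // 512/nCK_PER_CLK CLK cycles, i.e. 256 CLK cycles in 2:1 mode or 128 in
  // 4:1 mode; the terminal count of 255 covers the worst (2:1) case.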
341 
 342  // tWR value in ps. Both DDR2 and DDR3 use the same 15 ns (15000 ps) value.
 343  // TWR_CYC = ceil(15000 ps / tCK)
344  localparam TWR_CYC = ((15000) % CLK_MEM_PERIOD) ?
345  (15000/CLK_MEM_PERIOD) + 1 : 15000/CLK_MEM_PERIOD;
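  // Worked example (added illustration, not in the original source),
  // assuming the defaults CLK_PERIOD = 3000 ps, nCK_PER_CLK = 4:
  // CLK_MEM_PERIOD = 750 ps and 15000 % 750 == 0, so
  // TWR_CYC = 15000/750 = 20 memory clocks (15 ns of write recovery).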
346 
347  // time to wait between consecutive commands in PHY_INIT - this is a
348  // generic number, and must be large enough to account for worst case
349  // timing parameter (tRFC - refresh-to-active) across all memory speed
350  // grades and operating frequencies. Expressed in clk
351  // (Divided by 4 or Divided by 2) clock cycles.
352  localparam CNTNEXT_CMD = 7'b1111111;
353 
354  // Counter values to keep track of which MR register to load during init
355  // Set value of INIT_CNT_MR_DONE to equal value of counter for last mode
356  // register configured during initialization.
357  // NOTE: Reserve more bits for DDR2 - more MR accesses for DDR2 init
358  localparam INIT_CNT_MR2 = 2'b00;
359  localparam INIT_CNT_MR3 = 2'b01;
360  localparam INIT_CNT_MR1 = 2'b10;
361  localparam INIT_CNT_MR0 = 2'b11;
362  localparam INIT_CNT_MR_DONE = 2'b11;
363 
364  // Register chip programmable values for DDR3
365  // The register chip for the registered DIMM needs to be programmed
366  // before the initialization of the registered DIMM.
367  // Address for the control word is in : DBA2, DA2, DA1, DA0
368  // Data for the control word is in: DBA1 DBA0, DA4, DA3
369  // The values will be stored in the local param in the following format
370  // {DBA[2:0], DA[4:0]}
371 
372  // RC0 is global features control word. Address == 000
373 
374  localparam REG_RC0 = 8'b00000000;
375 
376  // RC1 Clock driver enable control word. Enables or disables the four
377  // output clocks in the register chip. For single rank and dual rank
378  // two clocks will be enabled and for quad rank all the four clocks
379  // will be enabled. Address == 000. Data = 0110 for single and dual rank.
380  // = 0000 for quad rank
381  localparam REG_RC1 = (RANKS <= 2) ? 8'b00110001 : 8'b00000001;
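  // Decode example (added illustration, not in the original source):
  // REG_RC1 = 8'b00110001 packs {DBA[2:0], DA[4:0]} = {001, 10001}, giving
  // control-word address {DBA2,DA2,DA1,DA0} = 0001 (RC1) and data
  // {DBA1,DBA0,DA4,DA3} = 0110 (two output clocks enabled, single/dual rank).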
382 
383  // RC2 timing control word. Set in 1T timing mode
384  // Address = 010. Data = 0000
385  localparam REG_RC2 = 8'b00000010;
386 
387  // RC3 timing control word. Setting the data to 0000
388  localparam REG_RC3 = 8'b00000011;
389 
 390  // RC4 timing control word. Setting the data to 0000
391  localparam REG_RC4 = 8'b00000100;
392 
 393  // RC5 timing control word. Setting the data to 0000
394  localparam REG_RC5 = 8'b00000101;
395 
396 // For non-zero AL values
397  localparam nAL = (AL == "CL-1") ? nCL - 1 : 0;
398 
399 // Adding the register dimm latency to write latency
400  localparam CWL_M = (REG_CTRL == "ON") ? nCWL + nAL + 1 : nCWL + nAL;
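  // Worked example (added illustration, not in the original source), with
  // the defaults AL = "0" (so nAL = 0), REG_CTRL = "ON" and nCWL = 5:
  // CWL_M = nCWL + nAL + 1 = 6, i.e. one extra cycle of write latency to
  // account for the registered-DIMM register chip.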
401 
402 // Count value to generate pi_phase_locked_err signal
403  localparam PHASELOCKED_TIMEOUT = (SIM_CAL_OPTION == "NONE") ? 16383 : 1000;
404 
405  // Timeout interval for detecting error with Traffic Generator
406  localparam [13:0] TG_TIMER_TIMEOUT
407  = (SIM_CAL_OPTION == "NONE") ? 14'h3FFF : 14'h0001;
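  // Illustration (added, not in the original source): in hardware
  // (SIM_CAL_OPTION = "NONE") both timeouts are 16383 CLK cycles, roughly
  // 49 us at the default CLK_PERIOD = 3000 ps; in simulation they collapse
  // to 1000 and 1 cycles respectively to keep run times short.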
408 
409  // Master state machine encoding
410  localparam INIT_IDLE = 6'b000000; //0
411  localparam INIT_WAIT_CKE_EXIT = 6'b000001; //1
412  localparam INIT_LOAD_MR = 6'b000010; //2
413  localparam INIT_LOAD_MR_WAIT = 6'b000011; //3
414  localparam INIT_ZQCL = 6'b000100; //4
415  localparam INIT_WAIT_DLLK_ZQINIT = 6'b000101; //5
416  localparam INIT_WRLVL_START = 6'b000110; //6
417  localparam INIT_WRLVL_WAIT = 6'b000111; //7
418  localparam INIT_WRLVL_LOAD_MR = 6'b001000; //8
419  localparam INIT_WRLVL_LOAD_MR_WAIT = 6'b001001; //9
420  localparam INIT_WRLVL_LOAD_MR2 = 6'b001010; //A
421  localparam INIT_WRLVL_LOAD_MR2_WAIT = 6'b001011; //B
422  localparam INIT_RDLVL_ACT = 6'b001100; //C
423  localparam INIT_RDLVL_ACT_WAIT = 6'b001101; //D
424  localparam INIT_RDLVL_STG1_WRITE = 6'b001110; //E
425  localparam INIT_RDLVL_STG1_WRITE_READ = 6'b001111; //F
426  localparam INIT_RDLVL_STG1_READ = 6'b010000; //10
427  localparam INIT_RDLVL_STG2_READ = 6'b010001; //11
428  localparam INIT_RDLVL_STG2_READ_WAIT = 6'b010010; //12
429  localparam INIT_PRECHARGE_PREWAIT = 6'b010011; //13
430  localparam INIT_PRECHARGE = 6'b010100; //14
431  localparam INIT_PRECHARGE_WAIT = 6'b010101; //15
432  localparam INIT_DONE = 6'b010110; //16
433  localparam INIT_DDR2_PRECHARGE = 6'b010111; //17
434  localparam INIT_DDR2_PRECHARGE_WAIT = 6'b011000; //18
435  localparam INIT_REFRESH = 6'b011001; //19
436  localparam INIT_REFRESH_WAIT = 6'b011010; //1A
437  localparam INIT_REG_WRITE = 6'b011011; //1B
438  localparam INIT_REG_WRITE_WAIT = 6'b011100; //1C
439  localparam INIT_DDR2_MULTI_RANK = 6'b011101; //1D
440  localparam INIT_DDR2_MULTI_RANK_WAIT = 6'b011110; //1E
441  localparam INIT_WRCAL_ACT = 6'b011111; //1F
442  localparam INIT_WRCAL_ACT_WAIT = 6'b100000; //20
443  localparam INIT_WRCAL_WRITE = 6'b100001; //21
444  localparam INIT_WRCAL_WRITE_READ = 6'b100010; //22
445  localparam INIT_WRCAL_READ = 6'b100011; //23
446  localparam INIT_WRCAL_READ_WAIT = 6'b100100; //24
447  localparam INIT_WRCAL_MULT_READS = 6'b100101; //25
448  localparam INIT_PI_PHASELOCK_READS = 6'b100110; //26
449  localparam INIT_MPR_RDEN = 6'b100111; //27
450  localparam INIT_MPR_WAIT = 6'b101000; //28
451  localparam INIT_MPR_READ = 6'b101001; //29
452  localparam INIT_MPR_DISABLE_PREWAIT = 6'b101010; //2A
453  localparam INIT_MPR_DISABLE = 6'b101011; //2B
454  localparam INIT_MPR_DISABLE_WAIT = 6'b101100; //2C
455  localparam INIT_OCLKDELAY_ACT = 6'b101101; //2D
456  localparam INIT_OCLKDELAY_ACT_WAIT = 6'b101110; //2E
457  localparam INIT_OCLKDELAY_WRITE = 6'b101111; //2F
458  localparam INIT_OCLKDELAY_WRITE_WAIT = 6'b110000; //30
459  localparam INIT_OCLKDELAY_READ = 6'b110001; //31
460  localparam INIT_OCLKDELAY_READ_WAIT = 6'b110010; //32
461  localparam INIT_REFRESH_RNK2_WAIT = 6'b110011; //33
462 
463  integer i, j, k, l, m, n, p, q;
464 
465  reg pi_dqs_found_all_r;
466  (* ASYNC_REG = "TRUE" *) reg pi_phase_locked_all_r1;
467  (* ASYNC_REG = "TRUE" *) reg pi_phase_locked_all_r2;
468  (* ASYNC_REG = "TRUE" *) reg pi_phase_locked_all_r3;
469  (* ASYNC_REG = "TRUE" *) reg pi_phase_locked_all_r4;
470  reg pi_calib_rank_done_r;
471  reg [13:0] pi_phaselock_timer;
472  reg stg1_wr_done;
473  reg rnk_ref_cnt;
474  reg pi_dqs_found_done_r1;
475  reg pi_dqs_found_rank_done_r;
476  reg read_calib_int;
477  reg read_calib_r;
478  reg pi_calib_done_r;
479  reg pi_calib_done_r1;
480  reg burst_addr_r;
481  reg [1:0] chip_cnt_r;
482  reg [6:0] cnt_cmd_r;
483  reg cnt_cmd_done_r;
484  reg cnt_cmd_done_m7_r;
485  reg [7:0] cnt_dllk_zqinit_r;
486  reg cnt_dllk_zqinit_done_r;
487  reg cnt_init_af_done_r;
488  reg [1:0] cnt_init_af_r;
489  reg [1:0] cnt_init_data_r;
490  reg [1:0] cnt_init_mr_r;
491  reg cnt_init_mr_done_r;
492  reg cnt_init_pre_wait_done_r;
493  reg [7:0] cnt_init_pre_wait_r;
494  reg [9:0] cnt_pwron_ce_r;
495  reg cnt_pwron_cke_done_r;
496  reg cnt_pwron_cke_done_r1;
497  reg [8:0] cnt_pwron_r;
498  reg cnt_pwron_reset_done_r;
499  reg cnt_txpr_done_r;
500  reg [7:0] cnt_txpr_r;
501  reg ddr2_pre_flag_r;
502  reg ddr2_refresh_flag_r;
503  reg ddr3_lm_done_r;
504  reg [4:0] enable_wrlvl_cnt;
505  reg init_complete_r;
506  reg init_complete_r1;
507  reg init_complete_r2;
508 (* keep = "true" *) reg init_complete_r_timing;
509 (* keep = "true" *) reg init_complete_r1_timing;
510  reg [5:0] init_next_state;
511  reg [5:0] init_state_r;
512  reg [5:0] init_state_r1;
513  wire [15:0] load_mr0;
514  wire [15:0] load_mr1;
515  wire [15:0] load_mr2;
516  wire [15:0] load_mr3;
517  reg mem_init_done_r;
518  reg [1:0] mr2_r [0:3];
519  reg [2:0] mr1_r [0:3];
520  reg new_burst_r;
521  reg [15:0] wrcal_start_dly_r;
522  wire wrcal_start_pre;
523  reg wrcal_resume_r;
524  // Only one ODT signal per rank in PHY Control Block
525  reg [nCK_PER_CLK-1:0] phy_tmp_odt_r;
526  reg [nCK_PER_CLK-1:0] phy_tmp_odt_r1;
527 
528  reg [CS_WIDTH*nCS_PER_RANK-1:0] phy_tmp_cs1_r;
529  reg [CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK-1:0] phy_int_cs_n;
530  wire prech_done_pre;
531  reg [15:0] prech_done_dly_r;
532  reg prech_pending_r;
533  reg prech_req_posedge_r;
534  reg prech_req_r;
535  reg pwron_ce_r;
536  reg first_rdlvl_pat_r;
537  reg first_wrcal_pat_r;
538  reg phy_wrdata_en;
539  reg phy_wrdata_en_r1;
540  reg [1:0] wrdata_pat_cnt;
541  reg [1:0] wrcal_pat_cnt;
542  reg [ROW_WIDTH-1:0] address_w;
543  reg [BANK_WIDTH-1:0] bank_w;
544  reg rdlvl_stg1_done_r1;
545  reg rdlvl_stg1_start_int;
546  reg [15:0] rdlvl_start_dly0_r;
547  reg rdlvl_start_pre;
548  reg rdlvl_last_byte_done_r;
549  wire rdlvl_rd;
550  wire rdlvl_wr;
551  reg rdlvl_wr_r;
552  wire rdlvl_wr_rd;
553  reg [2:0] reg_ctrl_cnt_r;
554  reg [1:0] tmp_mr2_r [0:3];
555  reg [2:0] tmp_mr1_r [0:3];
556  reg wrlvl_done_r;
557  reg wrlvl_done_r1;
558  reg wrlvl_rank_done_r1;
559  reg wrlvl_rank_done_r2;
560  reg wrlvl_rank_done_r3;
561  reg wrlvl_rank_done_r4;
562  reg wrlvl_rank_done_r5;
563  reg wrlvl_rank_done_r6;
564  reg wrlvl_rank_done_r7;
565  reg [2:0] wrlvl_rank_cntr;
566  reg wrlvl_odt_ctl;
567  reg wrlvl_odt;
568  reg wrlvl_active;
569  reg wrlvl_active_r1;
570  reg [2:0] num_reads;
571  reg temp_wrcal_done_r;
572  reg temp_lmr_done;
573  reg extend_cal_pat;
574  reg [13:0] tg_timer;
575  reg tg_timer_go;
576  reg cnt_wrcal_rd;
577  reg [3:0] cnt_wait;
578  reg [7:0] wrcal_reads;
579  reg [8:0] stg1_wr_rd_cnt;
580  reg phy_data_full_r;
581  reg wr_level_dqs_asrt;
582  reg wr_level_dqs_asrt_r1;
583  reg [1:0] dqs_asrt_cnt;
584 
585 
586  reg [3:0] num_refresh;
587  wire oclkdelay_calib_start_pre;
588  reg [15:0] oclkdelay_start_dly_r;
589  reg [3:0] oclk_wr_cnt;
590  reg [3:0] wrcal_wr_cnt;
591  reg wrlvl_final_r;
592 
593 
594  reg prbs_rdlvl_done_r1;
595  reg prbs_last_byte_done_r;
596  reg phy_if_empty_r;
597 
598  reg wrcal_final_chk;
599  //***************************************************************************
600  // Debug
601  //***************************************************************************
602 
603  //synthesis translate_off
604  always @(posedge mem_init_done_r) begin
605  if (!rst)
606  $display ("PHY_INIT: Memory Initialization completed at %t", $time);
607  end
608 
609  always @(posedge wrlvl_done) begin
610  if (!rst && (WRLVL == "ON"))
611  $display ("PHY_INIT: Write Leveling completed at %t", $time);
612  end
613 
614  always @(posedge rdlvl_stg1_done) begin
615  if (!rst)
616  $display ("PHY_INIT: Read Leveling Stage 1 completed at %t", $time);
617  end
618 
619  always @(posedge mpr_rdlvl_done) begin
620  if (!rst)
621  $display ("PHY_INIT: MPR Read Leveling completed at %t", $time);
622  end
623 
624  always @(posedge oclkdelay_calib_done) begin
625  if (!rst)
626  $display ("PHY_INIT: OCLKDELAY calibration completed at %t", $time);
627  end
628 
629  always @(posedge pi_calib_done_r1) begin
630  if (!rst)
631  $display ("PHY_INIT: Phaser_In Phase Locked at %t", $time);
632  end
633 
634  always @(posedge pi_dqs_found_done) begin
635  if (!rst)
636  $display ("PHY_INIT: Phaser_In DQSFOUND completed at %t", $time);
637  end
638 
639  always @(posedge wrcal_done) begin
640  if (!rst && (WRLVL == "ON"))
641  $display ("PHY_INIT: Write Calibration completed at %t", $time);
642  end
643 
644  //synthesis translate_on
645 
646  assign dbg_phy_init[5:0] = init_state_r;
647  //***************************************************************************
648  // DQS count to be sent to hard PHY during Phaser_IN Phase Locking stage
649  //***************************************************************************
650 
651 // assign pi_phaselock_calib_cnt = dqs_cnt_r;
652 
653  assign pi_calib_done = pi_calib_done_r1;
654 
655  always @(posedge clk) begin
656  if (rst)
657  wrcal_final_chk <= #TCQ 1'b0;
658  else if ((init_next_state == INIT_WRCAL_ACT) && wrcal_done &&
659  (DRAM_TYPE == "DDR3"))
660  wrcal_final_chk <= #TCQ 1'b1;
661  end
662 
663  always @(posedge clk) begin
664  rdlvl_stg1_done_r1 <= #TCQ rdlvl_stg1_done;
665  prbs_rdlvl_done_r1 <= #TCQ prbs_rdlvl_done;
666  wrcal_resume_r <= #TCQ wrcal_resume;
667  wrcal_sanity_chk <= #TCQ wrcal_final_chk;
668  end
669 
670  always @(posedge clk) begin
671  if (rst)
672  mpr_end_if_reset <= #TCQ 1'b0;
673  else if (mpr_last_byte_done && (num_refresh != 'd0))
674  mpr_end_if_reset <= #TCQ 1'b1;
675  else
676  mpr_end_if_reset <= #TCQ 1'b0;
677  end
678 
 679  // Signal to mask memory model error for Invalid latching edge
680 
681  always @(posedge clk)
682  if (rst)
683  calib_writes <= #TCQ 1'b0;
684  else if ((init_state_r == INIT_OCLKDELAY_WRITE) ||
685  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
686  (init_state_r == INIT_RDLVL_STG1_WRITE_READ) ||
687  (init_state_r == INIT_WRCAL_WRITE) ||
688  (init_state_r == INIT_WRCAL_WRITE_READ))
689  calib_writes <= #TCQ 1'b1;
690  else
691  calib_writes <= #TCQ 1'b0;
692 
693  always @(posedge clk)
694  if (rst)
695  wrcal_rd_wait <= #TCQ 1'b0;
696  else if (init_state_r == INIT_WRCAL_READ_WAIT)
697  wrcal_rd_wait <= #TCQ 1'b1;
698  else
699  wrcal_rd_wait <= #TCQ 1'b0;
700 
701  //***************************************************************************
702  // Signal PHY completion when calibration is finished
703  // Signal assertion is delayed by four clock cycles to account for the
 704  // multi-cycle path constraint on the phy_init_data_sel signal.
705  //***************************************************************************
706 
707  always @(posedge clk)
708  if (rst) begin
709  init_complete_r <= #TCQ 1'b0;
710  init_complete_r_timing <= #TCQ 1'b0;
711  init_complete_r1 <= #TCQ 1'b0;
712  init_complete_r1_timing <= #TCQ 1'b0;
713  init_complete_r2 <= #TCQ 1'b0;
714  init_calib_complete <= #TCQ 1'b0;
715  end else begin
716  if (init_state_r == INIT_DONE) begin
717  init_complete_r <= #TCQ 1'b1;
718  init_complete_r_timing <= #TCQ 1'b1;
719  end
720  init_complete_r1 <= #TCQ init_complete_r;
721  init_complete_r1_timing <= #TCQ init_complete_r_timing;
722  init_complete_r2 <= #TCQ init_complete_r1;
723  init_calib_complete <= #TCQ init_complete_r2;
724  end
725 
726  //***************************************************************************
727  // Instantiate FF for the phy_init_data_sel signal. A multi cycle path
728  // constraint will be assigned to this signal. This signal will only be
729  // used within the PHY
730  //***************************************************************************
731 
732 // FDRSE u_ff_phy_init_data_sel
733 // (
734 // .Q (phy_init_data_sel),
735 // .C (clk),
736 // .CE (1'b1),
737 // .D (init_complete_r),
738 // .R (1'b0),
739 // .S (1'b0)
740 // ) /* synthesis syn_preserve=1 */
741 // /* synthesis syn_replicate = 0 */;
742 
743 
744  //***************************************************************************
745  // Mode register programming
746  //***************************************************************************
747 
748  //*****************************************************************
749  // DDR3 Load mode reg0
750  // Mode Register (MR0):
751  // [15:13] - unused - 000
752  // [12] - Precharge Power-down DLL usage - 0 (DLL frozen, slow-exit),
753  // 1 (DLL maintained)
754  // [11:9] - write recovery for Auto Precharge (tWR/tCK = 6)
755  // [8] - DLL reset - 0 or 1
756  // [7] - Test Mode - 0 (normal)
757  // [6:4],[2] - CAS latency - CAS_LAT
758  // [3] - Burst Type - BURST_TYPE
759  // [1:0] - Burst Length - BURST_LEN
760  // DDR2 Load mode register
761  // Mode Register (MR):
762  // [15:14] - unused - 00
763  // [13] - reserved - 0
764  // [12] - Power-down mode - 0 (normal)
765  // [11:9] - write recovery - write recovery for Auto Precharge
766  // (tWR/tCK = 6)
767  // [8] - DLL reset - 0 or 1
768  // [7] - Test Mode - 0 (normal)
769  // [6:4] - CAS latency - CAS_LAT
770  // [3] - Burst Type - BURST_TYPE
771  // [2:0] - Burst Length - BURST_LEN
772 
773  //*****************************************************************
774  generate
775  if(DRAM_TYPE == "DDR3") begin: gen_load_mr0_DDR3
776  assign load_mr0[1:0] = (BURST_MODE == "8") ? 2'b00 :
777  (BURST_MODE == "OTF") ? 2'b01 :
778  (BURST_MODE == "4") ? 2'b10 : 2'b11;
779  assign load_mr0[2] = (nCL >= 12) ? 1'b1 : 1'b0; // LSb of CAS latency
780  assign load_mr0[3] = (BURST_TYPE == "SEQ") ? 1'b0 : 1'b1;
781  assign load_mr0[6:4] = ((nCL == 5) || (nCL == 13)) ? 3'b001 :
782  ((nCL == 6) || (nCL == 14)) ? 3'b010 :
783  (nCL == 7) ? 3'b011 :
784  (nCL == 8) ? 3'b100 :
785  (nCL == 9) ? 3'b101 :
786  (nCL == 10) ? 3'b110 :
787  (nCL == 11) ? 3'b111 :
788  (nCL == 12) ? 3'b000 : 3'b111;
789  assign load_mr0[7] = 1'b0;
790  assign load_mr0[8] = 1'b1; // Reset DLL (init only)
791  assign load_mr0[11:9] = (TWR_CYC == 5) ? 3'b001 :
792  (TWR_CYC == 6) ? 3'b010 :
793  (TWR_CYC == 7) ? 3'b011 :
794  (TWR_CYC == 8) ? 3'b100 :
795  (TWR_CYC == 9) ? 3'b101 :
796  (TWR_CYC == 10) ? 3'b101 :
797  (TWR_CYC == 11) ? 3'b110 :
798  (TWR_CYC == 12) ? 3'b110 :
799  (TWR_CYC == 13) ? 3'b111 :
800  (TWR_CYC == 14) ? 3'b111 :
801  (TWR_CYC == 15) ? 3'b000 :
802  (TWR_CYC == 16) ? 3'b000 : 3'b010;
803  assign load_mr0[12] = 1'b0; // Precharge Power-Down DLL 'slow-exit'
804  assign load_mr0[15:13] = 3'b000;
805  end else if (DRAM_TYPE == "DDR2") begin: gen_load_mr0_DDR2 // block: gen
806  assign load_mr0[2:0] = (BURST_MODE == "8") ? 3'b011 :
807  (BURST_MODE == "4") ? 3'b010 : 3'b111;
808  assign load_mr0[3] = (BURST_TYPE == "SEQ") ? 1'b0 : 1'b1;
809  assign load_mr0[6:4] = (nCL == 3) ? 3'b011 :
810  (nCL == 4) ? 3'b100 :
811  (nCL == 5) ? 3'b101 :
812  (nCL == 6) ? 3'b110 : 3'b111;
813  assign load_mr0[7] = 1'b0;
814  assign load_mr0[8] = 1'b1; // Reset DLL (init only)
815  assign load_mr0[11:9] = (TWR_CYC == 2) ? 3'b001 :
816  (TWR_CYC == 3) ? 3'b010 :
817  (TWR_CYC == 4) ? 3'b011 :
818  (TWR_CYC == 5) ? 3'b100 :
819  (TWR_CYC == 6) ? 3'b101 : 3'b010;
820  assign load_mr0[15:12]= 4'b0000; // Reserved
821  end
822  endgenerate
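  // Worked example (added illustration only; these values are assumed, not
  // the file defaults): for a DDR3-1600 setting with BURST_MODE = "8",
  // BURST_TYPE = "SEQ", nCL = 11 and TWR_CYC = 12 (tCK = 1250 ps,
  // tWR = 15 ns), the expressions above give
  // load_mr0 = 16'b0000_1101_0111_0000 = 16'h0D70
  // (BL8, sequential, CL = 11, DLL reset = 1, write recovery = 12).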
823 
824  //*****************************************************************
825  // DDR3 Load mode reg1
826  // Mode Register (MR1):
827  // [15:13] - unused - 00
828  // [12] - output enable - 0 (enabled for DQ, DQS, DQS#)
829  // [11] - TDQS enable - 0 (TDQS disabled and DM enabled)
830  // [10] - reserved - 0 (must be '0')
831  // [9] - RTT[2] - 0
832  // [8] - reserved - 0 (must be '0')
833  // [7] - write leveling - 0 (disabled), 1 (enabled)
834  // [6] - RTT[1] - RTT[1:0] = 0(no ODT), 1(75), 2(150), 3(50)
835  // [5] - Output driver impedance[1] - 0 (RZQ/6 and RZQ/7)
836  // [4:3] - Additive CAS - ADDITIVE_CAS
837  // [2] - RTT[0]
838  // [1] - Output driver impedance[0] - 0(RZQ/6), or 1 (RZQ/7)
839  // [0] - DLL enable - 0 (normal)
840  // DDR2 ext mode register
841  // Extended Mode Register (MR):
842  // [15:14] - unused - 00
843  // [13] - reserved - 0
844  // [12] - output enable - 0 (enabled)
845  // [11] - RDQS enable - 0 (disabled)
846  // [10] - DQS# enable - 0 (enabled)
847  // [9:7] - OCD Program - 111 or 000 (first 111, then 000 during init)
848  // [6] - RTT[1] - RTT[1:0] = 0(no ODT), 1(75), 2(150), 3(50)
849  // [5:3] - Additive CAS - ADDITIVE_CAS
850  // [2] - RTT[0]
851  // [1] - Output drive - REDUCE_DRV (= 0(full), = 1 (reduced)
852  // [0] - DLL enable - 0 (normal)
853  //*****************************************************************
854 
855  generate
856  if(DRAM_TYPE == "DDR3") begin: gen_load_mr1_DDR3
 857  assign load_mr1[0] = 1'b0; // DLL enabled during Initialization
858  assign load_mr1[1] = (OUTPUT_DRV == "LOW") ? 1'b0 : 1'b1;
859  assign load_mr1[2] = ((RTT_NOM_int == "30") || (RTT_NOM_int == "40") ||
860  (RTT_NOM_int == "60")) ? 1'b1 : 1'b0;
861  assign load_mr1[4:3] = (AL == "0") ? 2'b00 :
862  (AL == "CL-1") ? 2'b01 :
863  (AL == "CL-2") ? 2'b10 : 2'b11;
864  assign load_mr1[5] = 1'b0;
865  assign load_mr1[6] = ((RTT_NOM_int == "40") || (RTT_NOM_int == "120")) ?
866  1'b1 : 1'b0;
867  assign load_mr1[7] = 1'b0; // Enable write lvl after init sequence
868  assign load_mr1[8] = 1'b0;
869  assign load_mr1[9] = ((RTT_NOM_int == "20") || (RTT_NOM_int == "30")) ?
870  1'b1 : 1'b0;
871  assign load_mr1[10] = 1'b0;
872  assign load_mr1[15:11] = 5'b00000;
873  end else if (DRAM_TYPE == "DDR2") begin: gen_load_mr1_DDR2
 874  assign load_mr1[0] = 1'b0; // DLL enabled during Initialization
875  assign load_mr1[1] = (OUTPUT_DRV == "LOW") ? 1'b1 : 1'b0;
876  assign load_mr1[2] = ((RTT_NOM_int == "75") || (RTT_NOM_int == "50")) ?
877  1'b1 : 1'b0;
878  assign load_mr1[5:3] = (AL == "0") ? 3'b000 :
879  (AL == "1") ? 3'b001 :
880  (AL == "2") ? 3'b010 :
881  (AL == "3") ? 3'b011 :
882  (AL == "4") ? 3'b100 : 3'b111;
883  assign load_mr1[6] = ((RTT_NOM_int == "50") ||
884  (RTT_NOM_int == "150")) ? 1'b1 : 1'b0;
885  assign load_mr1[9:7] = 3'b000;
886  assign load_mr1[10] = (DDR2_DQSN_ENABLE == "YES") ? 1'b0 : 1'b1;
887  assign load_mr1[15:11] = 5'b00000;
888 
889  end
890  endgenerate
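  // Worked example (added illustration, not in the original source),
  // assuming the defaults USE_ODT_PORT = 0 (so RTT_NOM_int = RTT_WR = "60"),
  // OUTPUT_DRV = "HIGH" and AL = "0" for DDR3: only bit [2]
  // (RTT_NOM = RZQ/4, 60 ohm) and bit [1] (output drive RZQ/7) are set,
  // giving load_mr1 = 16'h0006.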
891 
892  //*****************************************************************
893  // DDR3 Load mode reg2
894  // Mode Register (MR2):
895  // [15:11] - unused - 00
896  // [10:9] - RTT_WR - 00 (Dynamic ODT off)
897  // [8] - reserved - 0 (must be '0')
898  // [7] - self-refresh temperature range -
899  // 0 (normal), 1 (extended)
900  // [6] - Auto Self-Refresh - 0 (manual), 1(auto)
901  // [5:3] - CAS Write Latency (CWL) -
902  // 000 (5 for 400 MHz device),
903  // 001 (6 for 400 MHz to 533 MHz devices),
904  // 010 (7 for 533 MHz to 667 MHz devices),
905  // 011 (8 for 667 MHz to 800 MHz)
906  // [2:0] - Partial Array Self-Refresh (Optional) -
907  // 000 (full array)
908  // Not used for DDR2
909  //*****************************************************************
910  generate
911  if(DRAM_TYPE == "DDR3") begin: gen_load_mr2_DDR3
912  assign load_mr2[2:0] = 3'b000;
913  assign load_mr2[5:3] = (nCWL == 5) ? 3'b000 :
914  (nCWL == 6) ? 3'b001 :
915  (nCWL == 7) ? 3'b010 :
916  (nCWL == 8) ? 3'b011 :
917  (nCWL == 9) ? 3'b100 :
918  (nCWL == 10) ? 3'b101 :
919  (nCWL == 11) ? 3'b110 : 3'b111;
920  assign load_mr2[6] = 1'b0;
921  assign load_mr2[7] = 1'b0;
922  assign load_mr2[8] = 1'b0;
923  // Dynamic ODT disabled
924  assign load_mr2[10:9] = 2'b00;
925  assign load_mr2[15:11] = 5'b00000;
926  end else begin: gen_load_mr2_DDR2
927  assign load_mr2[15:0] = 16'd0;
928  end
929  endgenerate
930 
931  //*****************************************************************
932  // DDR3 Load mode reg3
933  // Mode Register (MR3):
934  // [15:3] - unused - All zeros
935  // [2] - MPR Operation - 0(normal operation), 1(data flow from MPR)
936  // [1:0] - MPR location - 00 (Predefined pattern)
937  //*****************************************************************
938 
939  assign load_mr3[1:0] = 2'b00;
940  assign load_mr3[2] = 1'b0;
941  assign load_mr3[15:3] = 13'b0000000000000;
942 
943  // For multi-rank systems the rank being accessed during writes in
944  // Read Leveling must be sent to phy_write for the bitslip logic
945  assign calib_rank_cnt = chip_cnt_r;
946 
947  //***************************************************************************
948  // Logic to begin initial calibration, and to handle precharge requests
949  // during read-leveling (to avoid tRAS violations if individual read
 950  // leveling calibration stages take more than max(tRAS) to complete).
951  //***************************************************************************
952 
953  // Assert when readback for each stage of read-leveling begins. However,
954  // note this indicates only when the read command is issued and when
955  // Phaser_IN has phase aligned FREQ_REF clock to read DQS. It does not
956  // indicate when the read data is present on the bus (when this happens
957  // after the read command is issued depends on CAS LATENCY) - there will
958  // need to be some delay before valid data is present on the bus.
959 // assign rdlvl_start_pre = (init_state_r == INIT_PI_PHASELOCK_READS);
960 
961  // Assert when read back for oclkdelay calibration begins
962  assign oclkdelay_calib_start_pre = (init_state_r == INIT_OCLKDELAY_READ);
963 
964  // Assert when read back for write calibration begins
965  assign wrcal_start_pre = (init_state_r == INIT_WRCAL_READ) || (init_state_r == INIT_WRCAL_MULT_READS);
966 
 967  // Common precharge done signal - pulses only when there has been
968  // a precharge issued as a result of a PRECH_REQ pulse. Note also a common
969  // PRECH_DONE signal is used for all blocks
970  assign prech_done_pre = (((init_state_r == INIT_RDLVL_STG1_READ) ||
971  ((rdlvl_last_byte_done_r || prbs_last_byte_done_r) && (init_state_r == INIT_RDLVL_ACT_WAIT) && cnt_cmd_done_r) ||
972  (dqs_found_prech_req && (init_state_r == INIT_RDLVL_ACT_WAIT)) ||
973  (init_state_r == INIT_MPR_RDEN) ||
974  ((init_state_r == INIT_WRCAL_ACT_WAIT) && cnt_cmd_done_r) ||
975  ((init_state_r == INIT_OCLKDELAY_ACT_WAIT) && cnt_cmd_done_r) ||
976  (wrlvl_final && (init_state_r == INIT_REFRESH_WAIT) && cnt_cmd_done_r && ~oclkdelay_calib_done)) &&
977  prech_pending_r &&
978  !prech_req_posedge_r);
979 
980  always @(posedge clk)
981  if (rst)
982  pi_phaselock_start <= #TCQ 1'b0;
983  else if (init_state_r == INIT_PI_PHASELOCK_READS)
984  pi_phaselock_start <= #TCQ 1'b1;
985 
986  // Delay start of each calibration by 16 clock cycles to ensure that when
987  // calibration logic begins, read data is already appearing on the bus.
988  // Each circuit should synthesize using an SRL16. Assume that reset is
989  // long enough to clear contents of SRL16.
990  always @(posedge clk) begin
991  rdlvl_last_byte_done_r <= #TCQ rdlvl_last_byte_done;
992  prbs_last_byte_done_r <= #TCQ prbs_last_byte_done;
993  rdlvl_start_dly0_r <= #TCQ {rdlvl_start_dly0_r[14:0],
994  rdlvl_start_pre};
995  wrcal_start_dly_r <= #TCQ {wrcal_start_dly_r[14:0],
996  wrcal_start_pre};
997  oclkdelay_start_dly_r <= #TCQ {oclkdelay_start_dly_r[14:0],
998  oclkdelay_calib_start_pre};
999  prech_done_dly_r <= #TCQ {prech_done_dly_r[14:0],
1000  prech_done_pre};
1001  end
1002 
1003  always @(posedge clk)
1004  prech_done <= #TCQ prech_done_dly_r[15];
1005 
1006  always @(posedge clk)
1007  if (rst)
1008  mpr_rdlvl_start <= #TCQ 1'b0;
1009  else if (pi_dqs_found_done &&
1010  (init_state_r == INIT_MPR_READ))
1011  mpr_rdlvl_start <= #TCQ 1'b1;
1012 
1013  always @(posedge clk)
1014  phy_if_empty_r <= #TCQ phy_if_empty;
1015 
1016  always @(posedge clk)
1017  if (rst || (phy_if_empty_r && prbs_rdlvl_prech_req) ||
1018  ((stg1_wr_rd_cnt == 'd1) && ~stg1_wr_done) || prbs_rdlvl_done)
1019  prbs_gen_clk_en <= #TCQ 1'b0;
1020  else if ((~phy_if_empty_r && rdlvl_stg1_done_r1 && ~prbs_rdlvl_done) ||
1021  ((init_state_r == INIT_RDLVL_ACT_WAIT) && rdlvl_stg1_done_r1 && (cnt_cmd_r == 'd0)))
1022  prbs_gen_clk_en <= #TCQ 1'b1;
1023 
1024 generate
1025 if (RANKS < 2) begin
1026  always @(posedge clk)
1027  if (rst) begin
1028  rdlvl_stg1_start <= #TCQ 1'b0;
1029  rdlvl_stg1_start_int <= #TCQ 1'b0;
1030  rdlvl_start_pre <= #TCQ 1'b0;
1031  prbs_rdlvl_start <= #TCQ 1'b0;
1032  end else begin
1033  if (pi_dqs_found_done && cnt_cmd_done_r &&
1034  (init_state_r == INIT_RDLVL_ACT_WAIT))
1035  rdlvl_stg1_start_int <= #TCQ 1'b1;
1036  if (pi_dqs_found_done &&
1037  (init_state_r == INIT_RDLVL_STG1_READ))begin
1038  rdlvl_start_pre <= #TCQ 1'b1;
1039  rdlvl_stg1_start <= #TCQ rdlvl_start_dly0_r[14];
1040  end
1041  if (pi_dqs_found_done && rdlvl_stg1_done &&
1042  (init_state_r == INIT_RDLVL_STG1_READ) && (WRLVL == "ON")) begin
1043  prbs_rdlvl_start <= #TCQ 1'b1;
1044  end
1045  end
1046 end else begin
1047  always @(posedge clk)
1048  if (rst || rdlvl_stg1_rank_done) begin
1049  rdlvl_stg1_start <= #TCQ 1'b0;
1050  rdlvl_stg1_start_int <= #TCQ 1'b0;
1051  rdlvl_start_pre <= #TCQ 1'b0;
1052  prbs_rdlvl_start <= #TCQ 1'b0;
1053  end else begin
1054  if (pi_dqs_found_done && cnt_cmd_done_r &&
1055  (init_state_r == INIT_RDLVL_ACT_WAIT))
1056  rdlvl_stg1_start_int <= #TCQ 1'b1;
1057  if (pi_dqs_found_done &&
1058  (init_state_r == INIT_RDLVL_STG1_READ))begin
1059  rdlvl_start_pre <= #TCQ 1'b1;
1060  rdlvl_stg1_start <= #TCQ rdlvl_start_dly0_r[14];
1061  end
1062  if (pi_dqs_found_done && rdlvl_stg1_done &&
1063  (init_state_r == INIT_RDLVL_STG1_READ) && (WRLVL == "ON")) begin
1064  prbs_rdlvl_start <= #TCQ 1'b1;
1065  end
1066  end
1067 end
1068 endgenerate
1069 
1070 
1071  always @(posedge clk) begin
1072  if (rst || dqsfound_retry || wrlvl_byte_redo) begin
1073  pi_dqs_found_start <= #TCQ 1'b0;
1074  wrcal_start <= #TCQ 1'b0;
1075  end else begin
1076  if (!pi_dqs_found_done && init_state_r == INIT_RDLVL_STG2_READ)
1077  pi_dqs_found_start <= #TCQ 1'b1;
1078  if (wrcal_start_dly_r[5])
1079  wrcal_start <= #TCQ 1'b1;
1080  end
1081  end // else: !if(rst)
1082 
1083 
1084  always @(posedge clk)
1085  if (rst)
1086  oclkdelay_calib_start <= #TCQ 1'b0;
1087  else if (oclkdelay_start_dly_r[5])
1088  oclkdelay_calib_start <= #TCQ 1'b1;
1089 
1090  always @(posedge clk)
1091  if (rst)
1092  pi_dqs_found_done_r1 <= #TCQ 1'b0;
1093  else
1094  pi_dqs_found_done_r1 <= #TCQ pi_dqs_found_done;
1095 
1096 
1097  always @(posedge clk)
1098  wrlvl_final_r <= #TCQ wrlvl_final;
1099 
1100  // Reset IN_FIFO after final write leveling to make sure the FIFO
1101  // pointers are initialized
1102  always @(posedge clk)
1103  if (rst || (init_state_r == INIT_WRCAL_WRITE) || (init_state_r == INIT_REFRESH))
1104  wrlvl_final_if_rst <= #TCQ 1'b0;
1105  else if (wrlvl_done_r && //(wrlvl_final_r && wrlvl_done_r &&
1106  (init_state_r == INIT_WRLVL_LOAD_MR2))
1107  wrlvl_final_if_rst <= #TCQ 1'b1;
1108 
1109  // Constantly enable DQS while write leveling is enabled in the memory
1110  // This is more to get rid of warnings in simulation, can later change
1111  // this code to only enable WRLVL_ACTIVE when WRLVL_START is asserted
1112 
1113  always @(posedge clk)
1114  if (rst ||
1115  ((init_state_r1 != INIT_WRLVL_START) &&
1116  (init_state_r == INIT_WRLVL_START)))
1117  wrlvl_odt_ctl <= #TCQ 1'b0;
1118  else if (wrlvl_rank_done && ~wrlvl_rank_done_r1)
1119  wrlvl_odt_ctl <= #TCQ 1'b1;
1120 
1121  generate
1122  if (nCK_PER_CLK == 4) begin: en_cnt_div4
1123  always @ (posedge clk)
1124  if (rst)
1125  enable_wrlvl_cnt <= #TCQ 5'd0;
1126  else if ((init_state_r == INIT_WRLVL_START) ||
1127  (wrlvl_odt && (enable_wrlvl_cnt == 5'd0)))
1128  enable_wrlvl_cnt <= #TCQ 5'd12;
1129  else if ((enable_wrlvl_cnt > 5'd0) && ~(phy_ctl_full || phy_cmd_full))
1130  enable_wrlvl_cnt <= #TCQ enable_wrlvl_cnt - 1;
1131 
1132  // ODT stays asserted as long as write_calib
1133  // signal is asserted
1134  always @(posedge clk)
1135  if (rst || wrlvl_odt_ctl)
1136  wrlvl_odt <= #TCQ 1'b0;
1137  else if (enable_wrlvl_cnt == 5'd1)
1138  wrlvl_odt <= #TCQ 1'b1;
1139 
1140  end else begin: en_cnt_div2
1141  always @ (posedge clk)
1142  if (rst)
1143  enable_wrlvl_cnt <= #TCQ 5'd0;
1144  else if ((init_state_r == INIT_WRLVL_START) ||
1145  (wrlvl_odt && (enable_wrlvl_cnt == 5'd0)))
1146  enable_wrlvl_cnt <= #TCQ 5'd21;
1147  else if ((enable_wrlvl_cnt > 5'd0) && ~(phy_ctl_full || phy_cmd_full))
1148  enable_wrlvl_cnt <= #TCQ enable_wrlvl_cnt - 1;
1149 
1150  // ODT stays asserted as long as write_calib
1151  // signal is asserted
1152  always @(posedge clk)
1153  if (rst || wrlvl_odt_ctl)
1154  wrlvl_odt <= #TCQ 1'b0;
1155  else if (enable_wrlvl_cnt == 5'd1)
1156  wrlvl_odt <= #TCQ 1'b1;
1157 
1158  end
1159  endgenerate
1160 
1161  always @(posedge clk)
1162  if (rst || wrlvl_rank_done || done_dqs_tap_inc)
1163  wrlvl_active <= #TCQ 1'b0;
1164  else if ((enable_wrlvl_cnt == 5'd1) && wrlvl_odt && !wrlvl_active)
1165  wrlvl_active <= #TCQ 1'b1;
1166 
1167 // signal used to assert DQS for write leveling.
1168 // the DQS will be asserted once every 16 clock cycles.
1169  always @(posedge clk)begin
1170  if(rst || (enable_wrlvl_cnt != 5'd1)) begin
1171  wr_level_dqs_asrt <= #TCQ 1'd0;
1172  end else if ((enable_wrlvl_cnt == 5'd1) && (wrlvl_active_r1)) begin
1173  wr_level_dqs_asrt <= #TCQ 1'd1;
1174  end
1175  end
1176 
1177  always @ (posedge clk) begin
1178  if (rst || (wrlvl_done_r && ~wrlvl_done_r1))
1179  dqs_asrt_cnt <= #TCQ 2'd0;
1180  else if (wr_level_dqs_asrt && dqs_asrt_cnt != 2'd3)
1181  dqs_asrt_cnt <= #TCQ (dqs_asrt_cnt + 1);
1182  end
1183 
1184  always @ (posedge clk) begin
1185  if (rst || ~wrlvl_active)
1186  wr_lvl_start <= #TCQ 1'd0;
1187  else if (dqs_asrt_cnt == 2'd3)
1188  wr_lvl_start <= #TCQ 1'd1;
1189  end
1190 
1191 
1192  always @(posedge clk) begin
1193  if (rst)
1194  wl_sm_start <= #TCQ 1'b0;
1195  else
1196  wl_sm_start <= #TCQ wr_level_dqs_asrt_r1;
1197  end
1198 
1199 
1200  always @(posedge clk) begin
1201  wrlvl_active_r1 <= #TCQ wrlvl_active;
1202  wr_level_dqs_asrt_r1 <= #TCQ wr_level_dqs_asrt;
1203  wrlvl_done_r <= #TCQ wrlvl_done;
1204  wrlvl_done_r1 <= #TCQ wrlvl_done_r;
1205  wrlvl_rank_done_r1 <= #TCQ wrlvl_rank_done;
1206  wrlvl_rank_done_r2 <= #TCQ wrlvl_rank_done_r1;
1207  wrlvl_rank_done_r3 <= #TCQ wrlvl_rank_done_r2;
1208  wrlvl_rank_done_r4 <= #TCQ wrlvl_rank_done_r3;
1209  wrlvl_rank_done_r5 <= #TCQ wrlvl_rank_done_r4;
1210  wrlvl_rank_done_r6 <= #TCQ wrlvl_rank_done_r5;
1211  wrlvl_rank_done_r7 <= #TCQ wrlvl_rank_done_r6;
1212  end
1213 
1214  always @ (posedge clk) begin
1215  //if (rst)
1216  wrlvl_rank_cntr <= #TCQ 3'd0;
1217  //else if (wrlvl_rank_done)
1218  // wrlvl_rank_cntr <= #TCQ wrlvl_rank_cntr + 1'b1;
1219  end
1220 
1221  //*****************************************************************
1222  // Precharge request logic - those calibration logic blocks
1223  // that require greater than tRAS(max) to finish must break up
1224  // their calibration into smaller units of time, with precharges
1225  // issued in between. This is done using the XXX_PRECH_REQ and
1226  // PRECH_DONE handshaking between PHY_INIT and those blocks
1227  //*****************************************************************
1228 
1229  // Shared request from multiple sources
1230  assign prech_req = oclk_prech_req | rdlvl_prech_req | wrcal_prech_req | prbs_rdlvl_prech_req |
1231  (dqs_found_prech_req & (init_state_r == INIT_RDLVL_STG2_READ_WAIT));
1232 
1233  // Handshaking logic to force precharge during read leveling, and to
1234  // notify read leveling logic when precharge has been initiated and
1235  // it's okay to proceed with leveling again
1236  always @(posedge clk)
1237  if (rst) begin
1238  prech_req_r <= #TCQ 1'b0;
1239  prech_req_posedge_r <= #TCQ 1'b0;
1240  prech_pending_r <= #TCQ 1'b0;
1241  end else begin
1242  prech_req_r <= #TCQ prech_req;
1243  prech_req_posedge_r <= #TCQ prech_req & ~prech_req_r;
1244  if (prech_req_posedge_r)
1245  prech_pending_r <= #TCQ 1'b1;
1246  // Clear after we've finished with the precharge and have
1247  // returned to issuing read leveling calibration reads
1248  else if (prech_done_pre)
1249  prech_pending_r <= #TCQ 1'b0;
1250  end
1251 
1252  //***************************************************************************
1253  // Various timing counters
1254  //***************************************************************************
1255 
1256  //*****************************************************************
1257  // Generic delay for various states that require it (e.g. for turnaround
1258  // between read and write). Make this a sufficiently large number of clock
 1259  // cycles to cover all possible frequencies and memory components.
1260  // Requirements for this counter:
1261  // 1. Greater than tMRD
1262  // 2. tRFC (refresh-active) for DDR2
1263  // 3. (list the other requirements, slacker...)
1264  //*****************************************************************
1265 
1266  always @(posedge clk) begin
1267  case (init_state_r)
1268  INIT_LOAD_MR_WAIT,
1269  INIT_WRLVL_LOAD_MR_WAIT,
1270  INIT_WRLVL_LOAD_MR2_WAIT,
1271  INIT_MPR_WAIT,
1272  INIT_MPR_DISABLE_PREWAIT,
1273  INIT_MPR_DISABLE_WAIT,
1274  INIT_OCLKDELAY_ACT_WAIT,
1275  INIT_OCLKDELAY_WRITE_WAIT,
1276  INIT_RDLVL_ACT_WAIT,
1277  INIT_RDLVL_STG1_WRITE_READ,
1278  INIT_RDLVL_STG2_READ_WAIT,
1279  INIT_WRCAL_ACT_WAIT,
1280  INIT_WRCAL_WRITE_READ,
1281  INIT_WRCAL_READ_WAIT,
1282  INIT_PRECHARGE_PREWAIT,
1283  INIT_PRECHARGE_WAIT,
1284  INIT_DDR2_PRECHARGE_WAIT,
1285  INIT_REG_WRITE_WAIT,
1286  INIT_REFRESH_WAIT,
1287  INIT_REFRESH_RNK2_WAIT: begin
1288  if (phy_ctl_full || phy_cmd_full)
1289  cnt_cmd_r <= #TCQ cnt_cmd_r;
1290  else
1291  cnt_cmd_r <= #TCQ cnt_cmd_r + 1;
1292  end
1293  INIT_WRLVL_WAIT:
1294  cnt_cmd_r <= #TCQ 'b0;
1295  default:
1296  cnt_cmd_r <= #TCQ 'b0;
1297  endcase
1298  end
1299 
1300  // pulse when count reaches terminal count
1301  always @(posedge clk)
1302  cnt_cmd_done_r <= #TCQ (cnt_cmd_r == CNTNEXT_CMD);
1303 
1304  // For ODT deassertion - hold throughout post read/write wait stage, but
1305  // deassert before next command. The post read/write stage is very long, so
1306  // we simply address the longest case here plus some margin.
1307  always @(posedge clk)
1308  cnt_cmd_done_m7_r <= #TCQ (cnt_cmd_r == (CNTNEXT_CMD - 7));
1309 
1310 //************************************************************************
1311 // Added to support PO fine delay inc when TG errors
1312  always @(posedge clk) begin
1313  case (init_state_r)
1314  INIT_WRCAL_READ_WAIT: begin
1315  if (phy_ctl_full || phy_cmd_full)
1316  cnt_wait <= #TCQ cnt_wait;
1317  else
1318  cnt_wait <= #TCQ cnt_wait + 1;
1319  end
1320  default:
1321  cnt_wait <= #TCQ 'b0;
1322  endcase
1323  end
1324 
1325  always @(posedge clk)
1326  cnt_wrcal_rd <= #TCQ (cnt_wait == 'd4);
1327 
1328  always @(posedge clk) begin
1329  if (rst || ~temp_wrcal_done)
1330  temp_lmr_done <= #TCQ 1'b0;
1331  else if (temp_wrcal_done && (init_state_r == INIT_LOAD_MR))
1332  temp_lmr_done <= #TCQ 1'b1;
1333  end
1334 
1335  always @(posedge clk)
1336  temp_wrcal_done_r <= #TCQ temp_wrcal_done;
1337 
1338  always @(posedge clk)
1339  if (rst) begin
1340  tg_timer_go <= #TCQ 1'b0;
1341  end else if ((PRE_REV3ES == "ON") && temp_wrcal_done && temp_lmr_done &&
1342  (init_state_r == INIT_WRCAL_READ_WAIT)) begin
1343  tg_timer_go <= #TCQ 1'b1;
1344  end else begin
1345  tg_timer_go <= #TCQ 1'b0;
1346  end
1347 
1348  always @(posedge clk) begin
1349  if (rst || (temp_wrcal_done && ~temp_wrcal_done_r) ||
1350  (init_state_r == INIT_PRECHARGE_PREWAIT))
1351  tg_timer <= #TCQ 'd0;
1352  else if ((pi_phaselock_timer == PHASELOCKED_TIMEOUT) &&
1353  tg_timer_go &&
1354  (tg_timer != TG_TIMER_TIMEOUT))
1355  tg_timer <= #TCQ tg_timer + 1;
1356  end
1357 
1358  always @(posedge clk) begin
1359  if (rst)
1360  tg_timer_done <= #TCQ 1'b0;
1361  else if (tg_timer == TG_TIMER_TIMEOUT)
1362  tg_timer_done <= #TCQ 1'b1;
1363  else
1364  tg_timer_done <= #TCQ 1'b0;
1365  end
1366 
1367  always @(posedge clk) begin
1368  if (rst)
1369  no_rst_tg_mc <= #TCQ 1'b0;
1370  else if ((init_state_r == INIT_WRCAL_ACT) && wrcal_read_req)
1371  no_rst_tg_mc <= #TCQ 1'b1;
1372  else
1373  no_rst_tg_mc <= #TCQ 1'b0;
1374  end
1375 
1376 //************************************************************************
1377 
1378  always @(posedge clk) begin
1379  if (rst)
1380  detect_pi_found_dqs <= #TCQ 1'b0;
1381  else if ((cnt_cmd_r == 7'b0111111) &&
1382  (init_state_r == INIT_RDLVL_STG2_READ_WAIT))
1383  detect_pi_found_dqs <= #TCQ 1'b1;
1384  else
1385  detect_pi_found_dqs <= #TCQ 1'b0;
1386  end
1387 
1388  //*****************************************************************
1389  // Initial delay after power-on for RESET, CKE
1390  // NOTE: Could reduce power consumption by turning off these counters
1391  // after initial power-up (at expense of more logic)
1392  // NOTE: Likely can combine multiple counters into single counter
1393  //*****************************************************************
1394 
1395  // Create divided by 1024 version of clock
1396  always @(posedge clk)
1397  if (rst) begin
1398  cnt_pwron_ce_r <= #TCQ 10'h000;
1399  pwron_ce_r <= #TCQ 1'b0;
1400  end else begin
1401  cnt_pwron_ce_r <= #TCQ cnt_pwron_ce_r + 1;
1402  pwron_ce_r <= #TCQ (cnt_pwron_ce_r == 10'h3FF);
1403  end
1404 
1405  // "Main" power-on counter - ticks every CLKDIV/1024 cycles
1406  always @(posedge clk)
1407  if (rst)
1408  cnt_pwron_r <= #TCQ 'b0;
1409  else if (pwron_ce_r)
1410  cnt_pwron_r <= #TCQ cnt_pwron_r + 1;
1411 
1412  always @(posedge clk)
1413  if (rst || ~phy_ctl_ready) begin
1414  cnt_pwron_reset_done_r <= #TCQ 1'b0;
1415  cnt_pwron_cke_done_r <= #TCQ 1'b0;
1416  end else begin
1417  // skip power-up count for simulation purposes only
1418  if ((SIM_INIT_OPTION == "SKIP_PU_DLY") ||
1419  (SIM_INIT_OPTION == "SKIP_INIT")) begin
1420  cnt_pwron_reset_done_r <= #TCQ 1'b1;
1421  cnt_pwron_cke_done_r <= #TCQ 1'b1;
1422  end else begin
1423  // otherwise, create latched version of done signal for RESET, CKE
1424  if (DRAM_TYPE == "DDR3") begin
1425  if (!cnt_pwron_reset_done_r)
1426  cnt_pwron_reset_done_r
1427  <= #TCQ (cnt_pwron_r == PWRON_RESET_DELAY_CNT);
1428  if (!cnt_pwron_cke_done_r)
1429  cnt_pwron_cke_done_r
1430  <= #TCQ (cnt_pwron_r == PWRON_CKE_DELAY_CNT);
1431  end else begin // DDR2
1432  cnt_pwron_reset_done_r <= #TCQ 1'b1; // not needed
1433  if (!cnt_pwron_cke_done_r)
1434  cnt_pwron_cke_done_r
1435  <= #TCQ (cnt_pwron_r == PWRON_CKE_DELAY_CNT);
1436  end
1437  end
1438  end // else: !if(rst || ~phy_ctl_ready)
1439 
1440 
1441  always @(posedge clk)
1442  cnt_pwron_cke_done_r1 <= #TCQ cnt_pwron_cke_done_r;
1443 
1444  // Keep RESET asserted and CKE deasserted until after power-on delay
1445  always @(posedge clk or posedge rst) begin
1446  if (rst)
1447  phy_reset_n <= #TCQ 1'b0;
1448  else
1449  phy_reset_n <= #TCQ cnt_pwron_reset_done_r;
1450 // phy_cke <= #TCQ {CKE_WIDTH{cnt_pwron_cke_done_r}};
1451  end
1452 
1453  //*****************************************************************
 1454  // Counter for tXPR (pronounced "Tax-Payer") - wait time after
 1455  // CKE assertion before the first MRS command can be issued
1456  //*****************************************************************
1457 
1458  always @(posedge clk)
1459  if (!cnt_pwron_cke_done_r) begin
1460  cnt_txpr_r <= #TCQ 'b0;
1461  cnt_txpr_done_r <= #TCQ 1'b0;
1462  end else begin
1463  cnt_txpr_r <= #TCQ cnt_txpr_r + 1;
1464  if (!cnt_txpr_done_r)
1465  cnt_txpr_done_r <= #TCQ (cnt_txpr_r == TXPR_DELAY_CNT);
1466  end
1467 
1468  //*****************************************************************
1469  // Counter for the initial 400ns wait for issuing precharge all
1470  // command after CKE assertion. Only for DDR2.
1471  //*****************************************************************
1472 
1473  always @(posedge clk)
1474  if (!cnt_pwron_cke_done_r) begin
1475  cnt_init_pre_wait_r <= #TCQ 'b0;
1476  cnt_init_pre_wait_done_r <= #TCQ 1'b0;
1477  end else begin
1478  cnt_init_pre_wait_r <= #TCQ cnt_init_pre_wait_r + 1;
1479  if (!cnt_init_pre_wait_done_r)
1480  cnt_init_pre_wait_done_r
1481  <= #TCQ (cnt_init_pre_wait_r >= DDR2_INIT_PRE_CNT);
1482  end
1483 
1484  //*****************************************************************
1485  // Wait for both DLL to lock (tDLLK) and ZQ calibration to finish
1486  // (tZQINIT). Both take the same amount of time (512*tCK)
1487  //*****************************************************************
1488 
1489  always @(posedge clk)
1490  if (init_state_r == INIT_ZQCL) begin
1491  cnt_dllk_zqinit_r <= #TCQ 'b0;
1492  cnt_dllk_zqinit_done_r <= #TCQ 1'b0;
1493  end else if (~(phy_ctl_full || phy_cmd_full)) begin
1494  cnt_dllk_zqinit_r <= #TCQ cnt_dllk_zqinit_r + 1;
1495  if (!cnt_dllk_zqinit_done_r)
1496  cnt_dllk_zqinit_done_r
1497  <= #TCQ (cnt_dllk_zqinit_r == TDLLK_TZQINIT_DELAY_CNT);
1498  end
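 // This counter only advances while neither the PHY control FIFO nor the
 // PHY command FIFO is full, so the tDLLK/tZQinit wait is measured in
 // cycles during which commands can actually be issued.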
1499 
1500  //*****************************************************************
1501  // Keep track of which mode register (MRS) needs to be programmed during
1502  // memory initialization
1503  // The counter and the done signal are reset an additional time
1504  // for DDR2. The same signals are used for the additional DDR2
1505  // initialization sequence.
1506  //*****************************************************************
1507 
1508  always @(posedge clk)
1509  if ((init_state_r == INIT_IDLE)||
1510  ((init_state_r == INIT_REFRESH)
1511  && (~mem_init_done_r))) begin
1512  cnt_init_mr_r <= #TCQ 'b0;
1513  cnt_init_mr_done_r <= #TCQ 1'b0;
1514  end else if (init_state_r == INIT_LOAD_MR) begin
1515  cnt_init_mr_r <= #TCQ cnt_init_mr_r + 1;
1516  cnt_init_mr_done_r <= #TCQ (cnt_init_mr_r == INIT_CNT_MR_DONE);
1517  end
1518 
1519 
1520  //*****************************************************************
1521  // Flag to tell if the first precharge for DDR2 init sequence is
1522  // done
1523  //*****************************************************************
1524 
1525  always @(posedge clk)
1526  if (init_state_r == INIT_IDLE)
1527  ddr2_pre_flag_r<= #TCQ 'b0;
1528  else if (init_state_r == INIT_LOAD_MR)
1529  ddr2_pre_flag_r<= #TCQ 1'b1;
1530  // reset the flag for multi rank case
1531  else if ((ddr2_refresh_flag_r) &&
1532  (init_state_r == INIT_LOAD_MR_WAIT)&&
1533  (cnt_cmd_done_r) && (cnt_init_mr_done_r))
1534  ddr2_pre_flag_r <= #TCQ 'b0;
1535 
1536  //*****************************************************************
1537  // Flag to tell if the refresh state for the DDR2 init sequence
1538  // has been reached
1539  //*****************************************************************
1540 
1541  always @(posedge clk)
1542  if (init_state_r == INIT_IDLE)
1543  ddr2_refresh_flag_r<= #TCQ 'b0;
1544  else if ((init_state_r == INIT_REFRESH) && (~mem_init_done_r))
1545  // reset the flag for multi rank case
1546  ddr2_refresh_flag_r<= #TCQ 1'b1;
1547  else if ((ddr2_refresh_flag_r) &&
1548  (init_state_r == INIT_LOAD_MR_WAIT)&&
1549  (cnt_cmd_done_r) && (cnt_init_mr_done_r))
1550  ddr2_refresh_flag_r <= #TCQ 'b0;
1551 
1552  //*****************************************************************
1553  // Keep track of the number of auto refreshes for DDR2
1554  // initialization. The spec asks for a minimum of two refreshes.
1555  // Four refreshes are performed here. The two extra refreshes are to
1556  // account for the 200 clock cycle wait between step h and l.
1557  // Without the two extra refreshes we would have to have a
1558  // wait state.
1559  //*****************************************************************
1560 
1561  always @(posedge clk)
1562  if (init_state_r == INIT_IDLE) begin
1563  cnt_init_af_r <= #TCQ 'b0;
1564  cnt_init_af_done_r <= #TCQ 1'b0;
1565  end else if ((init_state_r == INIT_REFRESH) && (~mem_init_done_r))begin
1566  cnt_init_af_r <= #TCQ cnt_init_af_r + 1;
1567  cnt_init_af_done_r <= #TCQ (cnt_init_af_r == 2'b11);
1568  end
1569 
1570  //*****************************************************************
1571  // Keep track of the register control word programming for
1572  // DDR3 RDIMM
1573  //*****************************************************************
1574 
1575  always @(posedge clk)
1576  if (init_state_r == INIT_IDLE)
1577  reg_ctrl_cnt_r <= #TCQ 'b0;
1578  else if (init_state_r == INIT_REG_WRITE)
1579  reg_ctrl_cnt_r <= #TCQ reg_ctrl_cnt_r + 1;
1580 
1581  generate
1582  if (RANKS < 2) begin: one_rank
1583  always @(posedge clk)
1584  if ((init_state_r == INIT_IDLE) || rdlvl_last_byte_done)
1585  stg1_wr_done <= #TCQ 1'b0;
1586  else if (init_state_r == INIT_RDLVL_STG1_WRITE_READ)
1587  stg1_wr_done <= #TCQ 1'b1;
1588  end else begin: two_ranks
1589  always @(posedge clk)
1590  if ((init_state_r == INIT_IDLE) || rdlvl_last_byte_done ||
1591  (rdlvl_stg1_rank_done ))
1592  stg1_wr_done <= #TCQ 1'b0;
1593  else if (init_state_r == INIT_RDLVL_STG1_WRITE_READ)
1594  stg1_wr_done <= #TCQ 1'b1;
1595  end
1596  endgenerate
1597 
1598  always @(posedge clk)
1599  if (rst)
1600  rnk_ref_cnt <= #TCQ 1'b0;
1601  else if (stg1_wr_done &&
1602  (init_state_r == INIT_REFRESH_WAIT) && cnt_cmd_done_r)
1603  rnk_ref_cnt <= #TCQ ~rnk_ref_cnt;
1604 
1605 
1606  always @(posedge clk)
1607  if (rst || (init_state_r == INIT_MPR_RDEN) ||
1608  (init_state_r == INIT_OCLKDELAY_ACT) || (init_state_r == INIT_RDLVL_ACT))
1609  num_refresh <= #TCQ 'd0;
1610  else if ((init_state_r == INIT_REFRESH) &&
1611  (~pi_dqs_found_done || ((DRAM_TYPE == "DDR3") && ~oclkdelay_calib_done) ||
1612  (rdlvl_stg1_done && ~prbs_rdlvl_done) ||
1613  ((CLK_PERIOD/nCK_PER_CLK <= 2500) && wrcal_done && ~rdlvl_stg1_done) ||
1614  ((CLK_PERIOD/nCK_PER_CLK > 2500) && wrlvl_done_r1 && ~rdlvl_stg1_done)))
1615  num_refresh <= #TCQ num_refresh + 1;
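 // num_refresh counts the refresh commands issued during the calibration
 // refresh bursts. It is cleared on entry to INIT_MPR_RDEN,
 // INIT_OCLKDELAY_ACT or INIT_RDLVL_ACT, and the INIT_REFRESH_WAIT state
 // below waits until it reaches 'd8 before moving on to the next
 // calibration step.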
1616 
1617 
1618  //***************************************************************************
1619  // Initialization state machine
1620  //***************************************************************************
1621 
1622  //*****************************************************************
1623  // Next-state logic
1624  //*****************************************************************
1625 
1626  always @(posedge clk)
1627  if (rst)begin
1628  init_state_r <= #TCQ INIT_IDLE;
1629  init_state_r1 <= #TCQ INIT_IDLE;
1630  end else begin
1631  init_state_r <= #TCQ init_next_state;
1632  init_state_r1 <= #TCQ init_state_r;
1633  end
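 // init_state_r1 holds a one-clock-delayed copy of init_state_r for logic
 // that needs to look at the previous state.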
1634 
1635  always @(burst_addr_r or chip_cnt_r or cnt_cmd_done_r
1636  or cnt_dllk_zqinit_done_r or cnt_init_af_done_r
1637  or cnt_init_mr_done_r or phy_ctl_ready or phy_ctl_full
1638  or stg1_wr_done or rdlvl_last_byte_done
1639  or phy_cmd_full or num_reads or rnk_ref_cnt or mpr_last_byte_done
1640  or oclk_wr_cnt or mpr_rdlvl_done or mpr_rnk_done or num_refresh
1641  or oclkdelay_calib_done or oclk_prech_req or oclk_calib_resume
1642  or wrlvl_byte_redo or wrlvl_byte_done or wrlvl_final or wrlvl_final_r
1643  or cnt_init_pre_wait_done_r or cnt_pwron_cke_done_r
1644  or delay_incdec_done or wrcal_wr_cnt
1645  or ck_addr_cmd_delay_done or wrcal_read_req or wrcal_reads or cnt_wrcal_rd
1646  or wrcal_act_req or temp_wrcal_done or temp_lmr_done
1647  or cnt_txpr_done_r or ddr2_pre_flag_r
1648  or ddr2_refresh_flag_r or ddr3_lm_done_r
1649  or init_state_r or mem_init_done_r or dqsfound_retry or dqs_found_prech_req
1650  or prech_req_posedge_r or prech_req_r or wrcal_done or wrcal_resume_r
1651  or rdlvl_stg1_done or rdlvl_stg1_done_r1 or rdlvl_stg1_rank_done or rdlvl_stg1_start_int
1652  or prbs_rdlvl_done or prbs_last_byte_done or prbs_rdlvl_done_r1
1653  or stg1_wr_rd_cnt or rdlvl_prech_req or wrcal_prech_req
1654  or read_calib_int or read_calib_r or pi_calib_done_r1
1655  or pi_phase_locked_all_r3 or pi_phase_locked_all_r4
1656  or pi_dqs_found_done or pi_dqs_found_rank_done or pi_dqs_found_start
1657  or reg_ctrl_cnt_r or wrlvl_done_r1 or wrlvl_rank_done_r7
1658  or wrcal_final_chk or wrcal_sanity_chk_done) begin
1659  init_next_state = init_state_r;
1660  (* full_case, parallel_case *) case (init_state_r)
1661 
1662  //*******************************************************
1663  // DRAM initialization
1664  //*******************************************************
1665 
1666  // Initial state - wait for:
1667  // 1. Power-on delays to pass
1668  // 2. PHY Control Block to assert phy_ctl_ready
1669  // 3. PHY Control FIFO must not be FULL
1670  // 4. Read path initialization to finish
1671  INIT_IDLE:
1672  if (cnt_pwron_cke_done_r && phy_ctl_ready && ck_addr_cmd_delay_done && delay_incdec_done
1673  && ~(phy_ctl_full || phy_cmd_full) ) begin
1674  // If skipping memory initialization (simulation only)
1675  if (SIM_INIT_OPTION == "SKIP_INIT")
1676  //if (WRLVL == "ON")
1677  // Proceed to write leveling
1678  // init_next_state = INIT_WRLVL_START;
1679  //else //if (SIM_CAL_OPTION != "SKIP_CAL")
1680  // Proceed to Phaser_In phase lock
1681  init_next_state = INIT_RDLVL_ACT;
1682  // else
1683  // Skip read leveling
1684  //init_next_state = INIT_DONE;
1685  else
1686  init_next_state = INIT_WAIT_CKE_EXIT;
1687  end
1688 
1689  // Wait minimum of Reset CKE exit time (tXPR = max(tXS, 5 tCK))
1690  INIT_WAIT_CKE_EXIT:
1691  if ((cnt_txpr_done_r) && (DRAM_TYPE == "DDR3")
1692  && ~(phy_ctl_full || phy_cmd_full)) begin
1693  if((REG_CTRL == "ON") && ((nCS_PER_RANK > 1) ||
1694  (RANKS > 1)))
1695  // register write for registered DIMMs. Some registered DIMMs have
1696  // the register chip in a pre-programmed state; in that case
1697  // nCS_PER_RANK == 1 && RANKS == 1
1698  init_next_state = INIT_REG_WRITE;
1699  else
1700  // Load mode register - this state is repeated multiple times
1701  init_next_state = INIT_LOAD_MR;
1702  end else if ((cnt_init_pre_wait_done_r) && (DRAM_TYPE == "DDR2")
1703  && ~(phy_ctl_full || phy_cmd_full))
1704  // DDR2 start with a precharge all command
1705  init_next_state = INIT_DDR2_PRECHARGE;
1706 
1707  INIT_REG_WRITE:
1708  init_next_state = INIT_REG_WRITE_WAIT;
1709 
1710  INIT_REG_WRITE_WAIT:
1711  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full)) begin
1712  if(reg_ctrl_cnt_r == 3'd5)
1713  init_next_state = INIT_LOAD_MR;
1714  else
1715  init_next_state = INIT_REG_WRITE;
1716  end
1717 
1718  INIT_LOAD_MR:
1719  init_next_state = INIT_LOAD_MR_WAIT;
1720  // After loading MR, wait at least tMRD
1721 
1722  INIT_LOAD_MR_WAIT:
1723  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full)) begin
1724  // If finished loading all mode registers, proceed to next step
1725  if (prbs_rdlvl_done && pi_dqs_found_done && rdlvl_stg1_done)
1726  // for DDR3, when the correct burst length is written at the end
1727  init_next_state = INIT_PRECHARGE;
1728  else if (~wrcal_done && temp_lmr_done)
1729  init_next_state = INIT_PRECHARGE_PREWAIT;
1730  else if (cnt_init_mr_done_r)begin
1731  if(DRAM_TYPE == "DDR3")
1732  init_next_state = INIT_ZQCL;
1733  else begin //DDR2
1734  if(ddr2_refresh_flag_r)begin
1735  // memory initialization per rank for multi-rank case
1736  if (!mem_init_done_r && (chip_cnt_r <= RANKS-1))
1737  init_next_state = INIT_DDR2_MULTI_RANK;
1738  else
1739  init_next_state = INIT_RDLVL_ACT;
1740  // DDR2 initialization done. Load mode state after refresh
1741  end else
1742  init_next_state = INIT_DDR2_PRECHARGE;
1743  end
1744  end else
1745  init_next_state = INIT_LOAD_MR;
1746  end
1747 
1748  // DDR2 multi rank transition state
1749  INIT_DDR2_MULTI_RANK:
1750  init_next_state = INIT_DDR2_MULTI_RANK_WAIT;
1751 
1752  INIT_DDR2_MULTI_RANK_WAIT:
1753  init_next_state = INIT_DDR2_PRECHARGE;
1754 
1755  // Initial ZQ calibration
1756  INIT_ZQCL:
1757  init_next_state = INIT_WAIT_DLLK_ZQINIT;
1758 
1759  // Wait until both the DLL has locked and ZQ calibration is done
1760  INIT_WAIT_DLLK_ZQINIT:
1761  if (cnt_dllk_zqinit_done_r && ~(phy_ctl_full || phy_cmd_full))
1762  // memory initialization per rank for multi-rank case
1763  if (!mem_init_done_r && (chip_cnt_r <= RANKS-1))
1764  init_next_state = INIT_LOAD_MR;
1765  //else if (WRLVL == "ON")
1766  // init_next_state = INIT_WRLVL_START;
1767  else
1768  // skip write-leveling (e.g. for DDR2 interface)
1769  init_next_state = INIT_RDLVL_ACT;
1770 
1771  // Initial precharge for DDR2
1772  INIT_DDR2_PRECHARGE:
1773  init_next_state = INIT_DDR2_PRECHARGE_WAIT;
1774 
1775  INIT_DDR2_PRECHARGE_WAIT:
1776  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full)) begin
1777  if (ddr2_pre_flag_r)
1778  init_next_state = INIT_REFRESH;
1779  else // from precharge state initially go to load mode
1780  init_next_state = INIT_LOAD_MR;
1781  end
1782 
1783  INIT_REFRESH:
1784  if ((RANKS == 2) && (chip_cnt_r == RANKS - 1))
1785  init_next_state = INIT_REFRESH_RNK2_WAIT;
1786  else
1787  init_next_state = INIT_REFRESH_WAIT;
1788 
1789  INIT_REFRESH_RNK2_WAIT:
1790  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full))
1791  init_next_state = INIT_PRECHARGE;
1792 
1793  INIT_REFRESH_WAIT:
1794  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full))begin
1795  if(cnt_init_af_done_r && (~mem_init_done_r))
1796  // go to lm state as part of DDR2 init sequence
1797  init_next_state = INIT_LOAD_MR;
1798  else if (pi_dqs_found_done && ~wrlvl_done_r1 && ~wrlvl_final && ~wrlvl_byte_redo && (WRLVL == "ON"))
1799  init_next_state = INIT_WRLVL_START;
1800  else if (~pi_dqs_found_done ||
1801  (rdlvl_stg1_done && ~prbs_rdlvl_done) ||
1802  ((CLK_PERIOD/nCK_PER_CLK <= 2500) && wrcal_done && ~rdlvl_stg1_done) ||
1803  ((CLK_PERIOD/nCK_PER_CLK > 2500) && wrlvl_done_r1 && ~rdlvl_stg1_done)) begin
1804  if (num_refresh == 'd8)
1805  init_next_state = INIT_RDLVL_ACT;
1806  else
1807  init_next_state = INIT_REFRESH;
1808  end else if ((~wrcal_done && wrlvl_byte_redo)&& (DRAM_TYPE == "DDR3")
1809  && (CLK_PERIOD/nCK_PER_CLK > 2500))
1810  init_next_state = INIT_WRLVL_LOAD_MR2;
1811  else if (((prbs_rdlvl_done && rdlvl_stg1_done && pi_dqs_found_done) && (WRLVL == "ON"))
1812  && mem_init_done_r && (CLK_PERIOD/nCK_PER_CLK > 2500))
1813  init_next_state = INIT_WRCAL_ACT;
1814  else if (pi_dqs_found_done && (DRAM_TYPE == "DDR3") && ~(mpr_last_byte_done || mpr_rdlvl_done)) begin
1815  if (num_refresh == 'd8)
1816  init_next_state = INIT_MPR_RDEN;
1817  else
1818  init_next_state = INIT_REFRESH;
1819  end else if (((~oclkdelay_calib_done && wrlvl_final) ||
1820  (~wrcal_done && wrlvl_byte_redo)) && (DRAM_TYPE == "DDR3"))
1821  init_next_state = INIT_WRLVL_LOAD_MR2;
1822  else if (~oclkdelay_calib_done && (mpr_last_byte_done || mpr_rdlvl_done) && (DRAM_TYPE == "DDR3")) begin
1823  if (num_refresh == 'd8)
1824  init_next_state = INIT_OCLKDELAY_ACT;
1825  else
1826  init_next_state = INIT_REFRESH;
1827  end else if ((~wrcal_done && (WRLVL == "ON") && (CLK_PERIOD/nCK_PER_CLK <= 2500))
1828  && pi_dqs_found_done)
1829  init_next_state = INIT_WRCAL_ACT;
1830  else if (mem_init_done_r) begin
1831  if (RANKS < 2)
1832  init_next_state = INIT_RDLVL_ACT;
1833  else if (stg1_wr_done && ~rnk_ref_cnt && ~rdlvl_stg1_done)
1834  init_next_state = INIT_PRECHARGE;
1835  else
1836  init_next_state = INIT_RDLVL_ACT;
1837  end else // to DDR2 init state as part of DDR2 init sequence
1838  init_next_state = INIT_REFRESH;
1839  end
1840 
1841  //******************************************************
1842  // Write Leveling
1843  //*******************************************************
1844 
1845  // Enable write leveling in MR1 and start write leveling
1846  // for current rank
1847  INIT_WRLVL_START:
1848  init_next_state = INIT_WRLVL_WAIT;
1849 
1850  // Wait for both MR load and write leveling to complete
1851  // (write leveling should take much longer than MR load..)
1852  INIT_WRLVL_WAIT:
1853  if (wrlvl_rank_done_r7 && ~(phy_ctl_full || phy_cmd_full))
1854  init_next_state = INIT_WRLVL_LOAD_MR;
1855 
1856  // Disable write leveling in MR1 for current rank
1857  INIT_WRLVL_LOAD_MR:
1858  init_next_state = INIT_WRLVL_LOAD_MR_WAIT;
1859 
1860  INIT_WRLVL_LOAD_MR_WAIT:
1861  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full))
1862  init_next_state = INIT_WRLVL_LOAD_MR2;
1863 
1864  // Load MR2 to set ODT: Dynamic ODT for single rank case
1865  // and ODTs for the multi-rank case as well
1866  INIT_WRLVL_LOAD_MR2:
1867  init_next_state = INIT_WRLVL_LOAD_MR2_WAIT;
1868 
1869  // Wait tMRD before proceeding
1870  INIT_WRLVL_LOAD_MR2_WAIT:
1871  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full)) begin
1872  //if (wrlvl_byte_done)
1873  // init_next_state = INIT_PRECHARGE_PREWAIT;
1874  // else if ((RANKS == 2) && wrlvl_rank_done_r2)
1875  // init_next_state = INIT_WRLVL_LOAD_MR2_WAIT;
1876  if (~wrlvl_done_r1)
1877  init_next_state = INIT_WRLVL_START;
1878  else if (SIM_CAL_OPTION == "SKIP_CAL")
1879  // If skipping rdlvl, then we're done
1880  init_next_state = INIT_DONE;
1881  else
1882  // Otherwise, proceed to read leveling
1883  //init_next_state = INIT_RDLVL_ACT;
1884  init_next_state = INIT_PRECHARGE_PREWAIT;
1885  end
1886 
1887  //*******************************************************
1888  // Read Leveling
1889  //*******************************************************
1890 
1891  // single row activate. All subsequent read leveling writes and
1892  // reads will take place in this row
1893  INIT_RDLVL_ACT:
1894  init_next_state = INIT_RDLVL_ACT_WAIT;
1895 
1896  // hang out for a while before issuing subsequent column commands
1897  // it's also possible to reach this state at various points
1898  // during read leveling - determine what the current stage is
1899  INIT_RDLVL_ACT_WAIT:
1900  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full)) begin
1901  // Just finished an activate. Now either write, read, or precharge
1902  // depending on where we are in the training sequence
1903  if (!pi_calib_done_r1)
1904  init_next_state = INIT_PI_PHASELOCK_READS;
1905  else if (!pi_dqs_found_done)
1906  // (!pi_dqs_found_start || pi_dqs_found_rank_done))
1907  init_next_state = INIT_RDLVL_STG2_READ;
1908  else if (~wrcal_done && (WRLVL == "ON") && (CLK_PERIOD/nCK_PER_CLK <= 2500))
1909  init_next_state = INIT_WRCAL_ACT_WAIT;
1910  else if ((!rdlvl_stg1_done && ~stg1_wr_done && ~rdlvl_last_byte_done) ||
1911  (!prbs_rdlvl_done && ~stg1_wr_done && ~prbs_last_byte_done)) begin
1912  // Added to avoid rdlvl_stg1 write data pattern at the start of PRBS rdlvl
1913  if (!prbs_rdlvl_done && ~stg1_wr_done && rdlvl_last_byte_done)
1914  init_next_state = INIT_RDLVL_ACT_WAIT;
1915  else
1916  init_next_state = INIT_RDLVL_STG1_WRITE;
1917  end else if ((!rdlvl_stg1_done && rdlvl_stg1_start_int) || !prbs_rdlvl_done) begin
1918  if (rdlvl_last_byte_done || prbs_last_byte_done)
1919  // Added to avoid extra reads at the end of read leveling
1920  init_next_state = INIT_RDLVL_ACT_WAIT;
1921  else
1922  // Case 2: If in stage 1, and just precharged after training
1923  // previous byte, then continue reading
1924  init_next_state = INIT_RDLVL_STG1_READ;
1925  end else if ((prbs_rdlvl_done && rdlvl_stg1_done && (RANKS == 1)) && (WRLVL == "ON") &&
1926  (CLK_PERIOD/nCK_PER_CLK > 2500))
1927  init_next_state = INIT_WRCAL_ACT_WAIT;
1928  else
1929  // Otherwise, if we're finished with calibration, then precharge
1930  // the row - silly, because we just opened it - possible to take
1931  // this out by adding logic to avoid the ACT in the first place. Make
1932  // sure that cnt_cmd_done will handle tRAS(min)
1933  init_next_state = INIT_PRECHARGE_PREWAIT;
1934  end
1935 
1936  //**************************************************
1937  // Back-to-back reads for Phaser_IN Phase locking
1938  // DQS to FREQ_REF clock
1939  //**************************************************
1940 
1941  INIT_PI_PHASELOCK_READS:
1942  if (pi_phase_locked_all_r3 && ~pi_phase_locked_all_r4)
1943  init_next_state = INIT_PRECHARGE_PREWAIT;
1944 
1945  //*********************************************
1946  // Stage 1 read-leveling (write and continuous read)
1947  //*********************************************
1948 
1949  // Write training pattern for stage 1
1950  // PRBS pattern of TBD length
1951  INIT_RDLVL_STG1_WRITE:
1952  // 4:1 DDR3 BL8 will require all 8 words in 1 DIV4 clock cycle
1953  // 2:1 DDR2/DDR3 BL8 will require 2 DIV2 clock cycles for 8 words
1954  // 2:1 DDR2 BL4 will require 1 DIV2 clock cycle for 4 words
1955  // An entire row's worth of writes is issued before proceeding to reads.
1956  // The number of writes is (2^column width)/burst length, to accommodate the
1957  // PRBS pattern for window detection.
1958  if (stg1_wr_rd_cnt == 9'd1)
1959  init_next_state = INIT_RDLVL_STG1_WRITE_READ;
1960 
1961  // Write-read turnaround
1962  INIT_RDLVL_STG1_WRITE_READ:
1963  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full))
1964  init_next_state = INIT_RDLVL_STG1_READ;
1965 
1966  // Continuous read, interruptible by a precharge request from the
1967  // calibration logic. Also precharges when stage 1 is complete.
1968  // No precharges when reads are provided to the Phaser_IN for phase
1969  // locking FREQ_REF to the read DQS, since data integrity is not important.
1970  INIT_RDLVL_STG1_READ:
1971  if (rdlvl_stg1_rank_done || (rdlvl_stg1_done && ~rdlvl_stg1_done_r1) ||
1972  prech_req_posedge_r || (prbs_rdlvl_done && ~prbs_rdlvl_done_r1))
1973  init_next_state = INIT_PRECHARGE_PREWAIT;
1974 
1975  //*********************************************
1976  // DQSFOUND calibration (set of 4 reads with gaps)
1977  //*********************************************
1978 
1979  // Read of training data. Note that Stage 2 is not a constant read;
1980  // instead there is a large gap between each set of back-to-back reads
1981  INIT_RDLVL_STG2_READ:
1982  // 4 read commands issued back-to-back
1983  if (num_reads == 'b1)
1984  init_next_state = INIT_RDLVL_STG2_READ_WAIT;
1985 
1986  // Wait before issuing the next set of reads. If a precharge request
1987  // comes in, then handle it - this can occur after stage 2 calibration is
1988  // completed for a DQS group
1989  INIT_RDLVL_STG2_READ_WAIT:
1990  if (~(phy_ctl_full || phy_cmd_full)) begin
1991  if (pi_dqs_found_rank_done ||
1992  pi_dqs_found_done || prech_req_posedge_r)
1993  init_next_state = INIT_PRECHARGE_PREWAIT;
1994  else if (cnt_cmd_done_r)
1995  init_next_state = INIT_RDLVL_STG2_READ;
1996  end
1997 
1998 
1999  //******************************************************************
2000  // MPR Read Leveling for DDR3 OCLK_DELAYED calibration
2001  //******************************************************************
2002 
2003  // Issue Load Mode Register 3 command with A[2]=1, A[1:0]=2'b00
2004  // to enable Multi Purpose Register (MPR) Read
2005  INIT_MPR_RDEN:
2006  init_next_state = INIT_MPR_WAIT;
2007 
2008  //Wait tMRD, tMOD
2009  INIT_MPR_WAIT:
2010  if (cnt_cmd_done_r) begin
2011  init_next_state = INIT_MPR_READ;
2012  end
2013 
2014  // Issue back-to-back read commands to read from MPR with
2015  // Address bus 0x0000 for BL=8. DQ[0] will output the pre-defined
2016  // MPR pattern of 01010101 (Rise0 = 1'b0, Fall0 = 1'b1 ...)
2017  INIT_MPR_READ:
2018  if (mpr_rdlvl_done || mpr_rnk_done || rdlvl_prech_req)
2019  init_next_state = INIT_MPR_DISABLE_PREWAIT;
2020 
2021  INIT_MPR_DISABLE_PREWAIT:
2022  if (cnt_cmd_done_r)
2023  init_next_state = INIT_MPR_DISABLE;
2024 
2025  // Issue Load Mode Register 3 command with A[2]=0 to disable
2026  // MPR read
2027  INIT_MPR_DISABLE:
2028  init_next_state = INIT_MPR_DISABLE_WAIT;
2029 
2030  INIT_MPR_DISABLE_WAIT:
2031  init_next_state = INIT_PRECHARGE_PREWAIT;
2032 
2033 
2034  //***********************************************************************
2035  // OCLKDELAY Calibration
2036  //***********************************************************************
2037 
2038  // This calibration requires a single write followed by a single read to
2039  // determine the Phaser_Out stage 3 delay required to center write DQS
2040  // in write DQ valid window.
2041 
2042  // Single Row Activate command before issuing Write command
2043  INIT_OCLKDELAY_ACT:
2044  init_next_state = INIT_OCLKDELAY_ACT_WAIT;
2045 
2046  INIT_OCLKDELAY_ACT_WAIT:
2047  if (cnt_cmd_done_r && ~oclk_prech_req)
2048  init_next_state = INIT_OCLKDELAY_WRITE;
2049  else if (oclkdelay_calib_done || prech_req_posedge_r)
2050  init_next_state = INIT_PRECHARGE_PREWAIT;
2051 
2052  INIT_OCLKDELAY_WRITE:
2053  if (oclk_wr_cnt == 4'd1)
2054  init_next_state = INIT_OCLKDELAY_WRITE_WAIT;
2055 
2056  INIT_OCLKDELAY_WRITE_WAIT:
2057  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full))
2058  init_next_state = INIT_OCLKDELAY_READ;
2059 
2060  INIT_OCLKDELAY_READ:
2061  init_next_state = INIT_OCLKDELAY_READ_WAIT;
2062 
2063  INIT_OCLKDELAY_READ_WAIT:
2064  if (~(phy_ctl_full || phy_cmd_full)) begin
2065  if (oclk_calib_resume)
2066  init_next_state = INIT_OCLKDELAY_WRITE;
2067  else if (oclkdelay_calib_done || prech_req_posedge_r ||
2068  wrlvl_final)
2069  init_next_state = INIT_PRECHARGE_PREWAIT;
2070  end
2071 
2072 
2073  //*********************************************
2074  // Write calibration
2075  //*********************************************
2076 
2077  // single row activate
2078  INIT_WRCAL_ACT:
2079  init_next_state = INIT_WRCAL_ACT_WAIT;
2080 
2081  // hang out for a while before issuing the subsequent column command
2082  INIT_WRCAL_ACT_WAIT:
2083  if (cnt_cmd_done_r && ~wrcal_prech_req)
2084  init_next_state = INIT_WRCAL_WRITE;
2085  else if (wrcal_done || prech_req_posedge_r)
2086  init_next_state = INIT_PRECHARGE_PREWAIT;
2087 
2088  // Write training pattern for write calibration
2089  INIT_WRCAL_WRITE:
2090  // Once we've issued enough commands for 8 words - proceed to reads
2091  //if (burst_addr_r == 1'b1)
2092  if (wrcal_wr_cnt == 4'd1)
2093  init_next_state = INIT_WRCAL_WRITE_READ;
2094 
2095  // Write-read turnaround
2096  INIT_WRCAL_WRITE_READ:
2097  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full))
2098  init_next_state = INIT_WRCAL_READ;
2099  else if (dqsfound_retry)
2100  init_next_state = INIT_RDLVL_STG2_READ_WAIT;
2101 
2102 
2103  INIT_WRCAL_READ:
2104  if (burst_addr_r == 1'b1)
2105  init_next_state = INIT_WRCAL_READ_WAIT;
2106 
2107  INIT_WRCAL_READ_WAIT:
2108  if (~(phy_ctl_full || phy_cmd_full)) begin
2109  if (wrcal_resume_r) begin
2110  if (wrcal_final_chk)
2111  init_next_state = INIT_WRCAL_READ;
2112  else
2113  init_next_state = INIT_WRCAL_WRITE;
2114  end else if (wrcal_done || prech_req_posedge_r || wrcal_act_req ||
2115  // Added to support PO fine delay inc when TG errors
2116  wrlvl_byte_redo || (temp_wrcal_done && ~temp_lmr_done))
2117  init_next_state = INIT_PRECHARGE_PREWAIT;
2118  else if (dqsfound_retry)
2119  init_next_state = INIT_RDLVL_STG2_READ_WAIT;
2120  else if (wrcal_read_req && cnt_wrcal_rd)
2121  init_next_state = INIT_WRCAL_MULT_READS;
2122  end
2123 
2124  INIT_WRCAL_MULT_READS:
2125  // multiple read commands issued back-to-back
2126  if (wrcal_reads == 'b1)
2127  init_next_state = INIT_WRCAL_READ_WAIT;
2128 
2129  //*********************************************
2130  // Handling of precharge during and in between read-level stages
2131  //*********************************************
2132 
2133  // Make sure we aren't violating any timing specs by precharging
2134  // immediately
2135  INIT_PRECHARGE_PREWAIT:
2136  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full))
2137  init_next_state = INIT_PRECHARGE;
2138 
2139  // Initiate precharge
2140  INIT_PRECHARGE:
2141  init_next_state = INIT_PRECHARGE_WAIT;
2142 
2143  INIT_PRECHARGE_WAIT:
2144  if (cnt_cmd_done_r && ~(phy_ctl_full || phy_cmd_full)) begin
2145  if ((wrcal_sanity_chk_done && (DRAM_TYPE == "DDR3")) ||
2146  (rdlvl_stg1_done && prbs_rdlvl_done && pi_dqs_found_done &&
2147  (DRAM_TYPE == "DDR2")))
2148  init_next_state = INIT_DONE;
2149  else if ((wrcal_done || (WRLVL == "OFF")) && rdlvl_stg1_done && prbs_rdlvl_done &&
2150  pi_dqs_found_done && ((ddr3_lm_done_r) || (DRAM_TYPE == "DDR2")))
2151  // If read leveling and phase detection calibration are complete,
2152  // and the correct burst length has been programmed, then we're finished
2153  init_next_state = INIT_WRCAL_ACT;
2154  else if ((wrcal_done || (WRLVL == "OFF") || (~wrcal_done && temp_wrcal_done && ~temp_lmr_done))
2155  && (rdlvl_stg1_done || (~wrcal_done && temp_wrcal_done && ~temp_lmr_done))
2156  && prbs_rdlvl_done && rdlvl_stg1_done && pi_dqs_found_done) begin
2157  // after all calibration, program the correct burst length
2158  init_next_state = INIT_LOAD_MR;
2159  // Added to support PO fine delay inc when TG errors
2160  end else if (~wrcal_done && temp_wrcal_done && temp_lmr_done)
2161  init_next_state = INIT_WRCAL_READ_WAIT;
2162  else if (rdlvl_stg1_done && pi_dqs_found_done && (WRLVL == "ON"))
2163  // If read leveling finished, proceed to write calibration
2164  init_next_state = INIT_REFRESH;
2165  else
2166  // Otherwise, open row for read-leveling purposes
2167  init_next_state = INIT_REFRESH;
2168  end
2169 
2170  //*******************************************************
2171  // Initialization/Calibration done. Take a long rest, relax
2172  //*******************************************************
2173 
2174  INIT_DONE:
2175  init_next_state = INIT_DONE;
2176 
2177  endcase
2178  end
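 // Because init_next_state defaults to init_state_r at the top of this
 // block, any state whose transition condition is not met simply holds.
 // The full_case/parallel_case attributes are synthesis directives only
 // and do not change this behavior in simulation.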
2179 
2180  //*****************************************************************
2181  // Initialization done signal - asserted before leveling starts
2182  //*****************************************************************
2183 
2184 
2185  always @(posedge clk)
2186  if (rst)
2187  mem_init_done_r <= #TCQ 1'b0;
2188  else if ((!cnt_dllk_zqinit_done_r &&
2189  (cnt_dllk_zqinit_r == TDLLK_TZQINIT_DELAY_CNT) &&
2190  (chip_cnt_r == RANKS-1) && (DRAM_TYPE == "DDR3"))
2191  || ( (init_state_r == INIT_LOAD_MR_WAIT) &&
2192  (ddr2_refresh_flag_r) && (chip_cnt_r == RANKS-1)
2193  && (cnt_init_mr_done_r) && (DRAM_TYPE == "DDR2")))
2194  mem_init_done_r <= #TCQ 1'b1;
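 // Once set, mem_init_done_r stays high until reset: for DDR3 it latches
 // when the last rank reaches the end of the tDLLK/tZQinit wait, and for
 // DDR2 when the last rank completes its load-mode sequence after the
 // initialization refreshes.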
2195 
2196  //*****************************************************************
2197  // Write Calibration signal to PHY Control Block - asserted before
2198  // Write Leveling starts
2199  //*****************************************************************
2200 
2201  //generate
2202  //if (RANKS < 2) begin: ranks_one
2203  always @(posedge clk) begin
2204  if (rst || (done_dqs_tap_inc &&
2205  (init_state_r == INIT_WRLVL_LOAD_MR2)))
2206  write_calib <= #TCQ 1'b0;
2207  else if (wrlvl_active_r1)
2208  write_calib <= #TCQ 1'b1;
2209  end
2210  //end else begin: ranks_two
2211  // always @(posedge clk) begin
2212  // if (rst ||
2213  // ((init_state_r1 == INIT_WRLVL_LOAD_MR_WAIT) &&
2214  // ((wrlvl_rank_done_r2 && (chip_cnt_r == RANKS-1)) ||
2215  // (SIM_CAL_OPTION == "FAST_CAL"))))
2216  // write_calib <= #TCQ 1'b0;
2217  // else if (wrlvl_active_r1)
2218  // write_calib <= #TCQ 1'b1;
2219  // end
2220  //end
2221  //endgenerate
2222 
2223  //*****************************************************************
2224  // Read Calibration signal to PHY Control Block - asserted after
2225  // Write Leveling during PHASER_IN phase locking stage.
2226  // Must be de-asserted before Read Leveling
2227  //*****************************************************************
2228 
2229  always @(posedge clk) begin
2230  if (rst || pi_calib_done_r1)
2231  read_calib_int <= #TCQ 1'b0;
2232  else if (~pi_calib_done_r1 && (init_state_r == INIT_RDLVL_ACT_WAIT) &&
2233  (cnt_cmd_r == CNTNEXT_CMD))
2234  read_calib_int <= #TCQ 1'b1;
2235  end
2236 
2237  always @(posedge clk)
2238  read_calib_r <= #TCQ read_calib_int;
2239 
2240 
2241  always @(posedge clk) begin
2242  if (rst || pi_calib_done_r1)
2243  read_calib <= #TCQ 1'b0;
2244  else if (~pi_calib_done_r1 && (init_state_r == INIT_PI_PHASELOCK_READS))
2245  read_calib <= #TCQ 1'b1;
2246  end
2247 
2248 
2249  always @(posedge clk)
2250  if (rst)
2251  pi_calib_done_r <= #TCQ 1'b0;
2252  else if (pi_calib_rank_done_r)// && (chip_cnt_r == RANKS-1))
2253  pi_calib_done_r <= #TCQ 1'b1;
2254 
2255  always @(posedge clk)
2256  if (rst)
2257  pi_calib_rank_done_r <= #TCQ 1'b0;
2258  else if (pi_phase_locked_all_r3 && ~pi_phase_locked_all_r4)
2259  pi_calib_rank_done_r <= #TCQ 1'b1;
2260  else
2261  pi_calib_rank_done_r <= #TCQ 1'b0;
2262 
2263  always @(posedge clk) begin
2264  if (rst || ((PRE_REV3ES == "ON") && temp_wrcal_done && ~temp_wrcal_done_r))
2265  pi_phaselock_timer <= #TCQ 'd0;
2266  else if (((init_state_r == INIT_PI_PHASELOCK_READS) &&
2267  (pi_phaselock_timer != PHASELOCKED_TIMEOUT)) ||
2268  tg_timer_go)
2269  pi_phaselock_timer <= #TCQ pi_phaselock_timer + 1;
2270  else
2271  pi_phaselock_timer <= #TCQ pi_phaselock_timer;
2272  end
2273 
2274  assign pi_phase_locked_err = (pi_phaselock_timer == PHASELOCKED_TIMEOUT) ? 1'b1 : 1'b0;
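 // pi_phaselock_timer counts clk cycles spent in INIT_PI_PHASELOCK_READS
 // (it also runs while tg_timer_go is asserted); pi_phase_locked_err is
 // asserted when the count reaches PHASELOCKED_TIMEOUT, i.e. when phase
 // locking has taken too long.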
2275 
2276  //*****************************************************************
2277  // DDR3 final burst length programming done. For DDR3 during
2278  // calibration the burst length is fixed to BL8. After calibration
2279  // the correct burst length is programmed.
2280  //*****************************************************************
2281  always @(posedge clk)
2282  if (rst)
2283  ddr3_lm_done_r <= #TCQ 1'b0;
2284  else if ((init_state_r == INIT_LOAD_MR_WAIT) &&
2285  (chip_cnt_r == RANKS-1) && wrcal_done)
2286  ddr3_lm_done_r <= #TCQ 1'b1;
2287 
2288  always @(posedge clk) begin
2289  pi_dqs_found_rank_done_r <= #TCQ pi_dqs_found_rank_done;
2290  pi_phase_locked_all_r1 <= #TCQ pi_phase_locked_all;
2291  pi_phase_locked_all_r2 <= #TCQ pi_phase_locked_all_r1;
2292  pi_phase_locked_all_r3 <= #TCQ pi_phase_locked_all_r2;
2293  pi_phase_locked_all_r4 <= #TCQ pi_phase_locked_all_r3;
2294  pi_dqs_found_all_r <= #TCQ pi_dqs_found_done;
2295  pi_calib_done_r1 <= #TCQ pi_calib_done_r;
2296  end
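 // Registered copies of the Phaser_IN status inputs. The delayed versions
 // are used for edge detection - e.g. pi_phase_locked_all_r3 &&
 // ~pi_phase_locked_all_r4 above, and pi_dqs_found_rank_done versus its
 // _r copy in the chip_cnt_r logic below.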
2297 
2298  //***************************************************************************
2299  // Logic for deep memory (multi-rank) configurations
2300  //***************************************************************************
2301 
2302  // For DDR3 asserted when
2303 
2304 generate
2305  if (RANKS < 2) begin: single_rank
2306  always @(posedge clk)
2307  chip_cnt_r <= #TCQ 2'b00;
2308  end else begin: dual_rank
2309  always @(posedge clk)
2310  if (rst ||
2311  // Set chip_cnt_r to 2'b00 after both Ranks are read leveled
2312  (rdlvl_stg1_done && prbs_rdlvl_done && ~wrcal_done) ||
2313  // Set chip_cnt_r to 2'b00 after both Ranks are write leveled
2314  (wrlvl_done_r &&
2315  (init_state_r==INIT_WRLVL_LOAD_MR2_WAIT)))begin
2316  chip_cnt_r <= #TCQ 2'b00;
2317  end else if ((((init_state_r == INIT_WAIT_DLLK_ZQINIT) &&
2318  (cnt_dllk_zqinit_r == TDLLK_TZQINIT_DELAY_CNT)) &&
2319  (DRAM_TYPE == "DDR3")) ||
2320  ((init_state_r==INIT_REFRESH_RNK2_WAIT) &&
2321  (cnt_cmd_r=='d36)) ||
2322  //mpr_rnk_done ||
2323  //(rdlvl_stg1_rank_done && ~rdlvl_last_byte_done) ||
2324  //(stg1_wr_done && (init_state_r == INIT_REFRESH) &&
2325  //~(rnk_ref_cnt && rdlvl_last_byte_done)) ||
2326 
2327  // Increment chip_cnt_r to issue Refresh to second rank
2328  (~pi_dqs_found_all_r &&
2329  (init_state_r==INIT_PRECHARGE_PREWAIT) &&
2330  (cnt_cmd_r=='d36)) ||
2331 
2332  // Increment chip_cnt_r when DQSFOUND done for the Rank
2333  (pi_dqs_found_rank_done && ~pi_dqs_found_rank_done_r) ||
2334  ((init_state_r == INIT_LOAD_MR_WAIT)&& cnt_cmd_done_r
2335  && wrcal_done) ||
2336  ((init_state_r == INIT_DDR2_MULTI_RANK)
2337  && (DRAM_TYPE == "DDR2"))) begin
2338  if ((~mem_init_done_r || ~rdlvl_stg1_done || ~pi_dqs_found_done ||
2339  // condition to increment chip_cnt during
2340  // final burst length programming for DDR3
2341  ~pi_calib_done_r || wrcal_done) //~mpr_rdlvl_done ||
2342  && (chip_cnt_r != RANKS-1))
2343  chip_cnt_r <= #TCQ chip_cnt_r + 1;
2344  else
2345  chip_cnt_r <= #TCQ 2'b00;
2346  end
2347  end
2348  endgenerate
2349 
2350 generate
2351  if ((REG_CTRL == "ON") && (RANKS == 1)) begin: DDR3_RDIMM_1rank
2352  always @(posedge clk) begin
2353  if (rst)
2354  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2355  else if (init_state_r == INIT_REG_WRITE) begin
2356  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2357  if(!(CWL_M%2)) begin
2358  phy_int_cs_n[0%nCK_PER_CLK] <= #TCQ 1'b0;
2359  phy_int_cs_n[1%nCK_PER_CLK] <= #TCQ 1'b0;
2360  end else begin
2361  phy_int_cs_n[2%nCK_PER_CLK] <= #TCQ 1'b0;
2362  phy_int_cs_n[3%nCK_PER_CLK] <= #TCQ 1'b0;
2363  end
2364  end else if ((init_state_r == INIT_LOAD_MR) ||
2365  (init_state_r == INIT_MPR_RDEN) ||
2366  (init_state_r == INIT_MPR_DISABLE) ||
2367  (init_state_r == INIT_WRLVL_START) ||
2368  (init_state_r == INIT_WRLVL_LOAD_MR) ||
2369  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
2370  (init_state_r == INIT_ZQCL) ||
2371  (init_state_r == INIT_RDLVL_ACT) ||
2372  (init_state_r == INIT_WRCAL_ACT) ||
2373  (init_state_r == INIT_OCLKDELAY_ACT) ||
2374  (init_state_r == INIT_PRECHARGE) ||
2375  (init_state_r == INIT_DDR2_PRECHARGE) ||
2376  (init_state_r == INIT_REFRESH) ||
2377  (rdlvl_wr_rd && new_burst_r)) begin
2378  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2379  if (!(CWL_M % 2)) //even CWL
2380  phy_int_cs_n[0] <= #TCQ 1'b0;
2381  else // odd CWL
2382  phy_int_cs_n[1*nCS_PER_RANK] <= #TCQ 1'b0;
2383  end else
2384  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2385  end
2386  end else if (RANKS == 1) begin: DDR3_1rank
2387  always @(posedge clk) begin
2388  if (rst)
2389  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2390  else if ((init_state_r == INIT_LOAD_MR) ||
2391  (init_state_r == INIT_MPR_RDEN) ||
2392  (init_state_r == INIT_MPR_DISABLE) ||
2393  (init_state_r == INIT_WRLVL_START) ||
2394  (init_state_r == INIT_WRLVL_LOAD_MR) ||
2395  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
2396  (init_state_r == INIT_ZQCL) ||
2397  (init_state_r == INIT_RDLVL_ACT) ||
2398  (init_state_r == INIT_WRCAL_ACT) ||
2399  (init_state_r == INIT_OCLKDELAY_ACT) ||
2400  (init_state_r == INIT_PRECHARGE) ||
2401  (init_state_r == INIT_DDR2_PRECHARGE) ||
2402  (init_state_r == INIT_REFRESH) ||
2403  (rdlvl_wr_rd && new_burst_r)) begin
2404  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2405  if (!(CWL_M % 2)) begin //even CWL
2406  for (n = 0; n < nCS_PER_RANK; n = n + 1) begin
2407  phy_int_cs_n[n] <= #TCQ 1'b0;
2408  end
2409  end else begin //odd CWL
2410  for (p = nCS_PER_RANK; p < 2*nCS_PER_RANK; p = p + 1) begin
2411  phy_int_cs_n[p] <= #TCQ 1'b0;
2412  end
2413  end
2414  end else
2415  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2416  end
2417  end else if ((REG_CTRL == "ON") && (RANKS == 2)) begin: DDR3_2rank
2418  always @(posedge clk) begin
2419  if (rst)
2420  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2421  else if (init_state_r == INIT_REG_WRITE) begin
2422  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2423  if(!(CWL_M%2)) begin
2424  phy_int_cs_n[0%nCK_PER_CLK] <= #TCQ 1'b0;
2425  phy_int_cs_n[1%nCK_PER_CLK] <= #TCQ 1'b0;
2426  end else begin
2427  phy_int_cs_n[2%nCK_PER_CLK] <= #TCQ 1'b0;
2428  phy_int_cs_n[3%nCK_PER_CLK] <= #TCQ 1'b0;
2429  end
2430  end else begin
2431  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2432  case (chip_cnt_r)
2433  2'b00:begin
2434  if ((init_state_r == INIT_LOAD_MR) ||
2435  (init_state_r == INIT_MPR_RDEN) ||
2436  (init_state_r == INIT_MPR_DISABLE) ||
2437  (init_state_r == INIT_WRLVL_START) ||
2438  (init_state_r == INIT_WRLVL_LOAD_MR) ||
2439  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
2440  (init_state_r == INIT_ZQCL) ||
2441  (init_state_r == INIT_RDLVL_ACT) ||
2442  (init_state_r == INIT_WRCAL_ACT) ||
2443  (init_state_r == INIT_OCLKDELAY_ACT) ||
2444  (init_state_r == INIT_PRECHARGE) ||
2445  (init_state_r == INIT_DDR2_PRECHARGE) ||
2446  (init_state_r == INIT_REFRESH) ||
2447  (rdlvl_wr_rd && new_burst_r)) begin
2448  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2449  if (!(CWL_M % 2)) //even CWL
2450  phy_int_cs_n[0] <= #TCQ 1'b0;
2451  else // odd CWL
2452  phy_int_cs_n[1*CS_WIDTH*nCS_PER_RANK] <= #TCQ 1'b0;
2453  end else
2454  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2455  //for (n = 0; n < nCS_PER_RANK*nCK_PER_CLK*2; n = n + (nCS_PER_RANK*2)) begin
2456  //
2457  // phy_int_cs_n[n+:nCS_PER_RANK] <= #TCQ {nCS_PER_RANK{1'b0}};
2458  //end
2459  end
2460  2'b01:begin
2461  if ((init_state_r == INIT_LOAD_MR) ||
2462  (init_state_r == INIT_MPR_RDEN) ||
2463  (init_state_r == INIT_MPR_DISABLE) ||
2464  (init_state_r == INIT_WRLVL_START) ||
2465  (init_state_r == INIT_WRLVL_LOAD_MR) ||
2466  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
2467  (init_state_r == INIT_ZQCL) ||
2468  (init_state_r == INIT_RDLVL_ACT) ||
2469  (init_state_r == INIT_WRCAL_ACT) ||
2470  (init_state_r == INIT_OCLKDELAY_ACT) ||
2471  (init_state_r == INIT_PRECHARGE) ||
2472  (init_state_r == INIT_DDR2_PRECHARGE) ||
2473  (init_state_r == INIT_REFRESH) ||
2474  (rdlvl_wr_rd && new_burst_r)) begin
2475  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2476  if (!(CWL_M % 2)) //even CWL
2477  phy_int_cs_n[1] <= #TCQ 1'b0;
2478  else // odd CWL
2479  phy_int_cs_n[1+1*CS_WIDTH*nCS_PER_RANK] <= #TCQ 1'b0;
2480  end else
2481  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2482  //for (p = nCS_PER_RANK; p < nCS_PER_RANK*nCK_PER_CLK*2; p = p + (nCS_PER_RANK*2)) begin
2483  //
2484  // phy_int_cs_n[p+:nCS_PER_RANK] <= #TCQ {nCS_PER_RANK{1'b0}};
2485  //end
2486  end
2487  endcase
2488  end
2489  end
2490  end else if (RANKS == 2) begin: DDR3_2rank
2491  always @(posedge clk) begin
2492  if (rst)
2493  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2494  else if (init_state_r == INIT_REG_WRITE) begin
2495  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2496  if(!(CWL_M%2)) begin
2497  phy_int_cs_n[0%nCK_PER_CLK] <= #TCQ 1'b0;
2498  phy_int_cs_n[1%nCK_PER_CLK] <= #TCQ 1'b0;
2499  end else begin
2500  phy_int_cs_n[2%nCK_PER_CLK] <= #TCQ 1'b0;
2501  phy_int_cs_n[3%nCK_PER_CLK] <= #TCQ 1'b0;
2502  end
2503  end else begin
2504  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2505  case (chip_cnt_r)
2506  2'b00:begin
2507  if ((init_state_r == INIT_LOAD_MR) ||
2508  (init_state_r == INIT_MPR_RDEN) ||
2509  (init_state_r == INIT_MPR_DISABLE) ||
2510  (init_state_r == INIT_WRLVL_START) ||
2511  (init_state_r == INIT_WRLVL_LOAD_MR) ||
2512  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
2513  (init_state_r == INIT_ZQCL) ||
2514  (init_state_r == INIT_RDLVL_ACT) ||
2515  (init_state_r == INIT_WRCAL_ACT) ||
2516  (init_state_r == INIT_OCLKDELAY_ACT) ||
2517  (init_state_r == INIT_PRECHARGE) ||
2518  (init_state_r == INIT_DDR2_PRECHARGE) ||
2519  (init_state_r == INIT_REFRESH) ||
2520  (rdlvl_wr_rd && new_burst_r)) begin
2521  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2522  if (!(CWL_M % 2)) begin //even CWL
2523  for (n = 0; n < nCS_PER_RANK; n = n + 1) begin
2524  phy_int_cs_n[n] <= #TCQ 1'b0;
2525  end
2526  end else begin // odd CWL
2527  for (p = CS_WIDTH*nCS_PER_RANK; p < (CS_WIDTH*nCS_PER_RANK + nCS_PER_RANK); p = p + 1) begin
2528  phy_int_cs_n[p] <= #TCQ 1'b0;
2529  end
2530  end
2531  end else
2532  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2533  //for (n = 0; n < nCS_PER_RANK*nCK_PER_CLK*2; n = n + (nCS_PER_RANK*2)) begin
2534  //
2535  // phy_int_cs_n[n+:nCS_PER_RANK] <= #TCQ {nCS_PER_RANK{1'b0}};
2536  //end
2537  end
2538  2'b01:begin
2539  if ((init_state_r == INIT_LOAD_MR) ||
2540  (init_state_r == INIT_MPR_RDEN) ||
2541  (init_state_r == INIT_MPR_DISABLE) ||
2542  (init_state_r == INIT_WRLVL_START) ||
2543  (init_state_r == INIT_WRLVL_LOAD_MR) ||
2544  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
2545  (init_state_r == INIT_ZQCL) ||
2546  (init_state_r == INIT_RDLVL_ACT) ||
2547  (init_state_r == INIT_WRCAL_ACT) ||
2548  (init_state_r == INIT_OCLKDELAY_ACT) ||
2549  (init_state_r == INIT_PRECHARGE) ||
2550  (init_state_r == INIT_DDR2_PRECHARGE) ||
2551  (init_state_r == INIT_REFRESH) ||
2552  (rdlvl_wr_rd && new_burst_r)) begin
2553  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2554  if (!(CWL_M % 2)) begin //even CWL
2555  for (q = nCS_PER_RANK; q < (2 * nCS_PER_RANK); q = q + 1) begin
2556  phy_int_cs_n[q] <= #TCQ 1'b0;
2557  end
2558  end else begin // odd CWL
2559  for (m = (nCS_PER_RANK*CS_WIDTH + nCS_PER_RANK); m < (nCS_PER_RANK*CS_WIDTH + 2*nCS_PER_RANK); m = m + 1) begin
2560  phy_int_cs_n[m] <= #TCQ 1'b0;
2561  end
2562  end
2563  end else
2564  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2565  //for (p = nCS_PER_RANK; p < nCS_PER_RANK*nCK_PER_CLK*2; p = p + (nCS_PER_RANK*2)) begin
2566  //
2567  // phy_int_cs_n[p+:nCS_PER_RANK] <= #TCQ {nCS_PER_RANK{1'b0}};
2568  //end
2569  end
2570  endcase
2571  end
2572  end // always @ (posedge clk)
2573  end
2574 
2575  // commented out for now. Need it for DDR2 2T timing
2576  /* end else begin: DDR2
2577  always @(posedge clk)
2578  if (rst) begin
2579  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2580  end else begin
2581  if (init_state_r == INIT_REG_WRITE) begin
2582  // All ranks selected simultaneously
2583  phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b0}};
2584  end else if ((wrlvl_odt) ||
2585  (init_state_r == INIT_LOAD_MR) ||
2586  (init_state_r == INIT_ZQCL) ||
2587  (init_state_r == INIT_WRLVL_START) ||
2588  (init_state_r == INIT_WRLVL_LOAD_MR) ||
2589  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
2590  (init_state_r == INIT_RDLVL_ACT) ||
2591  (init_state_r == INIT_PI_PHASELOCK_READS) ||
2592  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
2593  (init_state_r == INIT_RDLVL_STG1_READ) ||
2594  (init_state_r == INIT_PRECHARGE) ||
2595  (init_state_r == INIT_RDLVL_STG2_READ) ||
2596  (init_state_r == INIT_WRCAL_ACT) ||
2597  (init_state_r == INIT_WRCAL_READ) ||
2598  (init_state_r == INIT_WRCAL_WRITE) ||
2599  (init_state_r == INIT_DDR2_PRECHARGE) ||
2600  (init_state_r == INIT_REFRESH)) begin
2601  phy_int_cs_n[0] <= #TCQ 1'b0;
2602  end
2603  else phy_int_cs_n <= #TCQ {CS_WIDTH*nCS_PER_RANK*nCK_PER_CLK{1'b1}};
2604  end // else: !if(rst)
2605  end // block: DDR2 **/
2606 endgenerate
2607 
2608  assign phy_cs_n = phy_int_cs_n;
2609 
2610  //***************************************************************************
2611  // Write/read burst logic for calibration
2612  //***************************************************************************
2613 
2614  assign rdlvl_wr = (init_state_r == INIT_OCLKDELAY_WRITE) ||
2615  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
2616  (init_state_r == INIT_WRCAL_WRITE);
2617  assign rdlvl_rd = (init_state_r == INIT_PI_PHASELOCK_READS) ||
2618  (init_state_r == INIT_RDLVL_STG1_READ) ||
2619  (init_state_r == INIT_RDLVL_STG2_READ) ||
2620  (init_state_r == INIT_OCLKDELAY_READ) ||
2621  (init_state_r == INIT_WRCAL_READ) ||
2622  (init_state_r == INIT_MPR_READ) ||
2623  (init_state_r == INIT_WRCAL_MULT_READS);
2624  assign rdlvl_wr_rd = rdlvl_wr | rdlvl_rd;
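 // rdlvl_wr and rdlvl_rd decode the calibration states that issue write
 // and read data bursts; rdlvl_wr_rd gates the burst address toggle below
 // and the chip select generation above (rdlvl_wr_rd && new_burst_r).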
2625 
2626  //***************************************************************************
2627  // Address generation and logic to count # of writes/reads issued during
2628  // certain stages of calibration
2629  //***************************************************************************
2630 
2631  // Column address generation logic:
2632  // Keep track of the current column address - since all bursts are in
2633  // increments of 8 only during calibration, we need to keep track of
2634  // addresses [COL_WIDTH-1:3]; the lower-order address bits will always be 0
2635 
2636  always @(posedge clk)
2637  if (rst || wrcal_done)
2638  burst_addr_r <= #TCQ 1'b0;
2639  else if ((init_state_r == INIT_WRCAL_ACT_WAIT) ||
2640  (init_state_r == INIT_OCLKDELAY_ACT_WAIT) ||
2641  (init_state_r == INIT_OCLKDELAY_WRITE) ||
2642  (init_state_r == INIT_OCLKDELAY_READ) ||
2643  (init_state_r == INIT_WRCAL_WRITE) ||
2644  (init_state_r == INIT_WRCAL_WRITE_READ) ||
2645  (init_state_r == INIT_WRCAL_READ) ||
2646  (init_state_r == INIT_WRCAL_MULT_READS) ||
2647  (init_state_r == INIT_WRCAL_READ_WAIT))
2648  burst_addr_r <= #TCQ 1'b1;
2649  else if (rdlvl_wr_rd && new_burst_r)
2650  burst_addr_r <= #TCQ ~burst_addr_r;
2651  else
2652  burst_addr_r <= #TCQ 1'b0;
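 // burst_addr_r is a 1-bit column address toggle: it is forced to 1 in
 // the write calibration and OCLKDELAY states listed above, flips on each
 // new calibration burst otherwise, and is held at 0 once wrcal_done is
 // set.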
2653 
2654  // Read Level Stage 1 requires writes to the entire row since
2655  // a PRBS pattern is being written. This counter keeps track
2656  // of the number of writes, which depends on the column width.
2657  // The (stg1_wr_rd_cnt==9'd0) condition was added so the col
2658  // address wraps around during stage1 reads
2659  always @(posedge clk)
2660  if (rst || ((init_state_r == INIT_RDLVL_STG1_WRITE_READ) &&
2661  ~rdlvl_stg1_done))
2662  stg1_wr_rd_cnt <= #TCQ NUM_STG1_WR_RD;
2663  else if (rdlvl_last_byte_done || (stg1_wr_rd_cnt == 9'd1) ||
2664  (prbs_rdlvl_prech_req && (init_state_r == INIT_RDLVL_ACT_WAIT)))
2665  stg1_wr_rd_cnt <= #TCQ 'd128;
2666  else if (((init_state_r == INIT_RDLVL_STG1_WRITE) && new_burst_r && ~phy_data_full)
2667  ||((init_state_r == INIT_RDLVL_STG1_READ) && rdlvl_stg1_done))
2668  stg1_wr_rd_cnt <= #TCQ stg1_wr_rd_cnt - 1;
2669 
2670  // OCLKDELAY calibration requires multiple writes because
2671  // the write can be up to 2 cycles early since the OCLKDELAY tap
2672  // can go down to 0
2673  always @(posedge clk)
2674  if (rst || (init_state_r == INIT_OCLKDELAY_WRITE_WAIT) ||
2675  (oclk_wr_cnt == 4'd0))
2676  oclk_wr_cnt <= #TCQ NUM_STG1_WR_RD;
2677  else if ((init_state_r == INIT_OCLKDELAY_WRITE) &&
2678  new_burst_r && ~phy_data_full)
2679  oclk_wr_cnt <= #TCQ oclk_wr_cnt - 1;
2680 
2681  // Write calibration requires multiple writes because
2682  // the write can be up to 2 cycles early due to the new write-
2683  // leveling algorithm that avoids late writes
2684  always @(posedge clk)
2685  if (rst || (init_state_r == INIT_WRCAL_WRITE_READ) ||
2686  (wrcal_wr_cnt == 4'd0))
2687  wrcal_wr_cnt <= #TCQ NUM_STG1_WR_RD;
2688  else if ((init_state_r == INIT_WRCAL_WRITE) &&
2689  new_burst_r && ~phy_data_full)
2690  wrcal_wr_cnt <= #TCQ wrcal_wr_cnt - 1;
2691 
2692 
2693 generate
2694 if(nCK_PER_CLK == 4) begin:back_to_back_reads_4_1
2695  // 4 back-to-back reads with gaps for
2696  // read data_offset calibration (rdlvl stage 2)
2697  always @(posedge clk)
2698  if (rst || (init_state_r == INIT_RDLVL_STG2_READ_WAIT))
2699  num_reads <= #TCQ 3'b000;
2700  else if ((num_reads > 3'b000) && ~(phy_ctl_full || phy_cmd_full))
2701  num_reads <= #TCQ num_reads - 1;
2702  else if ((init_state_r == INIT_RDLVL_STG2_READ) || phy_ctl_full ||
2703  phy_cmd_full && new_burst_r)
2704  num_reads <= #TCQ 3'b011;
2705 end else if(nCK_PER_CLK == 2) begin: back_to_back_reads_2_1
2706  // 4 back-to-back reads with gaps for
2707  // read data_offset calibration (rdlvl stage 2)
2708  always @(posedge clk)
2709  if (rst || (init_state_r == INIT_RDLVL_STG2_READ_WAIT))
2710  num_reads <= #TCQ 3'b000;
2711  else if ((num_reads > 3'b000) && ~(phy_ctl_full || phy_cmd_full))
2712  num_reads <= #TCQ num_reads - 1;
2713  else if ((init_state_r == INIT_RDLVL_STG2_READ) || phy_ctl_full ||
2714  phy_cmd_full && new_burst_r)
2715  num_reads <= #TCQ 3'b111;
2716 end
2717 endgenerate
2718 
2719  // back-to-back reads during write calibration
2720  always @(posedge clk)
2721  if (rst ||(init_state_r == INIT_WRCAL_READ_WAIT))
2722  wrcal_reads <= #TCQ 2'b00;
2723  else if ((wrcal_reads > 2'b00) && ~(phy_ctl_full || phy_cmd_full))
2724  wrcal_reads <= #TCQ wrcal_reads - 1;
2725  else if ((init_state_r == INIT_WRCAL_MULT_READS) || phy_ctl_full ||
2726  phy_cmd_full && new_burst_r)
2727  wrcal_reads <= #TCQ 'd255;
2728 
2729  // determine how often to issue row command during read leveling writes
2730  // and reads
2731  always @(posedge clk)
2732  if (rdlvl_wr_rd) begin
2733  // 2:1 mode - every other command issued is a data command
2734  // 4:1 mode - every command issued is a data command
2735  if (nCK_PER_CLK == 2) begin
2736  if (!phy_ctl_full)
2737  new_burst_r <= #TCQ ~new_burst_r;
2738  end else
2739  new_burst_r <= #TCQ 1'b1;
2740  end else
2741  new_burst_r <= #TCQ 1'b1;
2742 
2743  // indicate when a write is occurring. PHY_WRDATA_EN must be asserted
2744  // simultaneously with the corresponding command/address for CWL = 5,6
2745  always @(posedge clk) begin
2746  rdlvl_wr_r <= #TCQ rdlvl_wr;
2747  calib_wrdata_en <= #TCQ phy_wrdata_en;
2748  end
2749 
2750  always @(posedge clk) begin
2751  if (rst || wrcal_done)
2752  extend_cal_pat <= #TCQ 1'b0;
2753  else if (temp_lmr_done && (PRE_REV3ES == "ON"))
2754  extend_cal_pat <= #TCQ 1'b1;
2755  end
2756 
2757 
2758  generate
2759  if ((nCK_PER_CLK == 4) || (BURST_MODE == "4")) begin: wrdqen_div4
2760  // Write data enable asserted for one DIV4 clock cycle
2761  // Only BL8 supported with DIV4. DDR2 BL4 will use DIV2.
2762  always @(rst or phy_data_full or init_state_r) begin
2763  if (~phy_data_full && ((init_state_r == INIT_RDLVL_STG1_WRITE) ||
2764  (init_state_r == INIT_OCLKDELAY_WRITE) ||
2765  (init_state_r == INIT_WRCAL_WRITE)))
2766  phy_wrdata_en = 1'b1;
2767  else
2768  phy_wrdata_en = 1'b0;
2769  end
2770  end else begin: wrdqen_div2 // block: wrdqen_div4
2771  always @(rdlvl_wr or phy_ctl_full or new_burst_r or phy_wrdata_en_r1
2772  or phy_data_full)
2773  if((rdlvl_wr & ~phy_ctl_full & new_burst_r & ~phy_data_full)
2774  | phy_wrdata_en_r1)
2775  phy_wrdata_en = 1'b1;
2776  else
2777  phy_wrdata_en = 1'b0;
2778 
2779  always @(posedge clk)
2780  phy_wrdata_en_r1 <= #TCQ rdlvl_wr & ~phy_ctl_full & new_burst_r
2781  & ~phy_data_full;
2782 
2783  always @(posedge clk) begin
2784  if (!phy_wrdata_en & first_rdlvl_pat_r)
2785  wrdata_pat_cnt <= #TCQ 2'b00;
2786  else if (wrdata_pat_cnt == 2'b11)
2787  wrdata_pat_cnt <= #TCQ 2'b10;
2788  else
2789  wrdata_pat_cnt <= #TCQ wrdata_pat_cnt + 1;
2790  end
2791 
2792  always @(posedge clk) begin
2793  if (!phy_wrdata_en & first_wrcal_pat_r)
2794  wrcal_pat_cnt <= #TCQ 2'b00;
2795  else if (extend_cal_pat && (wrcal_pat_cnt == 2'b01))
2796  wrcal_pat_cnt <= #TCQ 2'b00;
2797  else if (wrcal_pat_cnt == 2'b11)
2798  wrcal_pat_cnt <= #TCQ 2'b10;
2799  else
2800  wrcal_pat_cnt <= #TCQ wrcal_pat_cnt + 1;
2801  end
2802 
2803  end
2804  endgenerate
2805 
2806 
2807  // indicate when a read is occurring. PHY_RDDATA_EN must be asserted
2808  // simultaneously with the corresponding command/address. PHY_RDDATA_EN
2809  // is used during read-leveling to determine read latency
2810  assign phy_rddata_en = ~phy_if_empty;
2811 
2812  // Read data valid generation for MC and User Interface after calibration is
2813  // complete
2814  assign phy_rddata_valid = init_complete_r1_timing ? phy_rddata_en : 1'b0;
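 // phy_rddata_valid is gated off until calibration completes
 // (init_complete_r1_timing); before that, returned read data is only used
 // internally by the calibration logic and is not flagged as valid to the
 // MC or user interface.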
2815 
2816  //***************************************************************************
2817  // Generate training data written at start of each read-leveling stage
2818  // For every stage of read leveling, 8 words are written into memory
2819  // The format is as follows (shown as {rise,fall}):
2820  // Stage 1: 0xF, 0x0, 0xF, 0x0, 0xF, 0x0, 0xF, 0x0
2821  // Stage 2: 0xF, 0x0, 0xA, 0x5, 0x5, 0xA, 0x9, 0x6
2822  //***************************************************************************
2823 
2824 
2825  always @(posedge clk)
2826  if ((init_state_r == INIT_IDLE) ||
2827  (init_state_r == INIT_RDLVL_STG1_WRITE))
2828  cnt_init_data_r <= #TCQ 2'b00;
2829  else if (phy_wrdata_en)
2830  cnt_init_data_r <= #TCQ cnt_init_data_r + 1;
2831  else if (init_state_r == INIT_WRCAL_WRITE)
2832  cnt_init_data_r <= #TCQ 2'b10;
2833 
2834 
2835  // write a different sequence for the very
2836  // first write to memory only. Used to help us differentiate
2837  // whether the writes are "early" or "on-time" during read leveling
2838  always @(posedge clk)
2839  if (rst || rdlvl_stg1_rank_done)
2840  first_rdlvl_pat_r <= #TCQ 1'b1;
2841  else if (phy_wrdata_en && (init_state_r == INIT_RDLVL_STG1_WRITE))
2842  first_rdlvl_pat_r <= #TCQ 1'b0;
2843 
2844 
2845  always @(posedge clk)
2846  if (rst || wrcal_resume ||
2847  (init_state_r == INIT_WRCAL_ACT_WAIT))
2848  first_wrcal_pat_r <= #TCQ 1'b1;
2849  else if (phy_wrdata_en && (init_state_r == INIT_WRCAL_WRITE))
2850  first_wrcal_pat_r <= #TCQ 1'b0;
2851 
2852 generate
2853  if ((CLK_PERIOD/nCK_PER_CLK > 2500) && (nCK_PER_CLK == 2)) begin: wrdq_div2_2to1_rdlvl_first
2854 
2855  always @(posedge clk)
2856  if (~oclkdelay_calib_done)
2857  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'hF}},
2858  {DQ_WIDTH/4{4'h0}},
2859  {DQ_WIDTH/4{4'hF}},
2860  {DQ_WIDTH/4{4'h0}}};
2861  else if (!rdlvl_stg1_done) begin
2862  // The 16 words of stage 1 write data in 2:1 mode are written
2863  // over 4 consecutive controller clock cycles. Note that write
2864  // data follows phy_wrdata_en by one clock cycle
2865  case (wrdata_pat_cnt)
2866  2'b00: begin
2867  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'hE}},
2868  {DQ_WIDTH/4{4'h7}},
2869  {DQ_WIDTH/4{4'h3}},
2870  {DQ_WIDTH/4{4'h9}}};
2871  end
2872 
2873  2'b01: begin
2874  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},
2875  {DQ_WIDTH/4{4'h2}},
2876  {DQ_WIDTH/4{4'h9}},
2877  {DQ_WIDTH/4{4'hC}}};
2878  end
2879 
2880  2'b10: begin
2881  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'hE}},
2882  {DQ_WIDTH/4{4'h7}},
2883  {DQ_WIDTH/4{4'h1}},
2884  {DQ_WIDTH/4{4'hB}}};
2885  end
2886 
2887  2'b11: begin
2888  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},
2889  {DQ_WIDTH/4{4'h2}},
2890  {DQ_WIDTH/4{4'h9}},
2891  {DQ_WIDTH/4{4'hC}}};
2892  end
2893  endcase
2894  end else if (!prbs_rdlvl_done && ~phy_data_full) begin
2895  // Each 8-bit slice of prbs_o is replicated DQ_WIDTH/8 times so that
2896  // every training word spans the full DQ_WIDTH
2897  phy_wrdata <= #TCQ {{DQ_WIDTH/8{prbs_o[4*8-1:3*8]}},
2898  {DQ_WIDTH/8{prbs_o[3*8-1:2*8]}},
2899  {DQ_WIDTH/8{prbs_o[2*8-1:8]}},
2900  {DQ_WIDTH/8{prbs_o[8-1:0]}}};
2901  end else if (!wrcal_done) begin
2902  case (wrcal_pat_cnt)
2903  2'b00: begin
2904  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h5}},
2905  {DQ_WIDTH/4{4'hA}},
2906  {DQ_WIDTH/4{4'h0}},
2907  {DQ_WIDTH/4{4'hF}}};
2908  end
2909  2'b01: begin
2910  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h6}},
2911  {DQ_WIDTH/4{4'h9}},
2912  {DQ_WIDTH/4{4'hA}},
2913  {DQ_WIDTH/4{4'h5}}};
2914  end
2915  2'b10: begin
2916  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},
2917  {DQ_WIDTH/4{4'hE}},
2918  {DQ_WIDTH/4{4'h1}},
2919  {DQ_WIDTH/4{4'hB}}};
2920  end
2921  2'b11: begin
2922  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h8}},
2923  {DQ_WIDTH/4{4'hD}},
2924  {DQ_WIDTH/4{4'hE}},
2925  {DQ_WIDTH/4{4'h4}}};
2926  end
2927  endcase
2928  end
2929 
2930  end else if ((CLK_PERIOD/nCK_PER_CLK > 2500) && (nCK_PER_CLK == 4)) begin: wrdq_div2_4to1_rdlvl_first
2931 
2932  always @(posedge clk)
2933  if (~oclkdelay_calib_done)
2934  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'hF}},{DQ_WIDTH/4{4'h0}},
2935  {DQ_WIDTH/4{4'hF}},{DQ_WIDTH/4{4'h0}},
2936  {DQ_WIDTH/4{4'hF}},{DQ_WIDTH/4{4'h0}},
2937  {DQ_WIDTH/4{4'hF}},{DQ_WIDTH/4{4'h0}}};
2938  else if (!rdlvl_stg1_done && ~phy_data_full)
2939  // Write a different sequence for the very first write to memory only.
2940  // It is used to differentiate between "early" and "on-time" writes
2941  // during read leveling
2942  if (first_rdlvl_pat_r)
2943  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},{DQ_WIDTH/4{4'h2}},
2944  {DQ_WIDTH/4{4'h9}},{DQ_WIDTH/4{4'hC}},
2945  {DQ_WIDTH/4{4'hE}},{DQ_WIDTH/4{4'h7}},
2946  {DQ_WIDTH/4{4'h3}},{DQ_WIDTH/4{4'h9}}};
2947  else
2948  // For all others, change the first two words written in order
2949  // to differentiate the "early write" and "on-time write"
2950  // readback patterns during read leveling
2951  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},{DQ_WIDTH/4{4'h2}},
2952  {DQ_WIDTH/4{4'h9}},{DQ_WIDTH/4{4'hC}},
2953  {DQ_WIDTH/4{4'hE}},{DQ_WIDTH/4{4'h7}},
2954  {DQ_WIDTH/4{4'h1}},{DQ_WIDTH/4{4'hB}}};
2955  else if (!prbs_rdlvl_done && ~phy_data_full)
2956  // Each 8-bit slice of prbs_o is replicated DQ_WIDTH/8 times so that
2957  // every training word spans the full DQ_WIDTH
2958  phy_wrdata <= #TCQ {{DQ_WIDTH/8{prbs_o[8*8-1:7*8]}},{DQ_WIDTH/8{prbs_o[7*8-1:6*8]}},
2959  {DQ_WIDTH/8{prbs_o[6*8-1:5*8]}},{DQ_WIDTH/8{prbs_o[5*8-1:4*8]}},
2960  {DQ_WIDTH/8{prbs_o[4*8-1:3*8]}},{DQ_WIDTH/8{prbs_o[3*8-1:2*8]}},
2961  {DQ_WIDTH/8{prbs_o[2*8-1:8]}},{DQ_WIDTH/8{prbs_o[8-1:0]}}};
2962  else if (!wrcal_done)
2963  if (first_wrcal_pat_r)
2964  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h6}},{DQ_WIDTH/4{4'h9}},
2965  {DQ_WIDTH/4{4'hA}},{DQ_WIDTH/4{4'h5}},
2966  {DQ_WIDTH/4{4'h5}},{DQ_WIDTH/4{4'hA}},
2967  {DQ_WIDTH/4{4'h0}},{DQ_WIDTH/4{4'hF}}};
2968  else
2969  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h8}},{DQ_WIDTH/4{4'hD}},
2970  {DQ_WIDTH/4{4'hE}},{DQ_WIDTH/4{4'h4}},
2971  {DQ_WIDTH/4{4'h4}},{DQ_WIDTH/4{4'hE}},
2972  {DQ_WIDTH/4{4'h1}},{DQ_WIDTH/4{4'hB}}};
2973 
2974 
2975  end else if (nCK_PER_CLK == 4) begin: wrdq_div1_4to1_wrcal_first
2976 
2977  always @(posedge clk)
2978  if ((~oclkdelay_calib_done) && (DRAM_TYPE == "DDR3"))
2979  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'hF}},{DQ_WIDTH/4{4'h0}},
2980  {DQ_WIDTH/4{4'hF}},{DQ_WIDTH/4{4'h0}},
2981  {DQ_WIDTH/4{4'hF}},{DQ_WIDTH/4{4'h0}},
2982  {DQ_WIDTH/4{4'hF}},{DQ_WIDTH/4{4'h0}}};
2983  else if ((!wrcal_done)&& (DRAM_TYPE == "DDR3")) begin
2984  if (extend_cal_pat)
2985  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h6}},{DQ_WIDTH/4{4'h9}},
2986  {DQ_WIDTH/4{4'hA}},{DQ_WIDTH/4{4'h5}},
2987  {DQ_WIDTH/4{4'h5}},{DQ_WIDTH/4{4'hA}},
2988  {DQ_WIDTH/4{4'h0}},{DQ_WIDTH/4{4'hF}}};
2989  else if (first_wrcal_pat_r)
2990  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h6}},{DQ_WIDTH/4{4'h9}},
2991  {DQ_WIDTH/4{4'hA}},{DQ_WIDTH/4{4'h5}},
2992  {DQ_WIDTH/4{4'h5}},{DQ_WIDTH/4{4'hA}},
2993  {DQ_WIDTH/4{4'h0}},{DQ_WIDTH/4{4'hF}}};
2994  else
2995  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h8}},{DQ_WIDTH/4{4'hD}},
2996  {DQ_WIDTH/4{4'hE}},{DQ_WIDTH/4{4'h4}},
2997  {DQ_WIDTH/4{4'h4}},{DQ_WIDTH/4{4'hE}},
2998  {DQ_WIDTH/4{4'h1}},{DQ_WIDTH/4{4'hB}}};
2999  end else if (!rdlvl_stg1_done && ~phy_data_full) begin
3000  // Write a different sequence for the very first write to memory only.
3001  // It is used to differentiate between "early" and "on-time" writes
3002  // during read leveling
3003  if (first_rdlvl_pat_r)
3004  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},{DQ_WIDTH/4{4'h2}},
3005  {DQ_WIDTH/4{4'h9}},{DQ_WIDTH/4{4'hC}},
3006  {DQ_WIDTH/4{4'hE}},{DQ_WIDTH/4{4'h7}},
3007  {DQ_WIDTH/4{4'h3}},{DQ_WIDTH/4{4'h9}}};
3008  else
3009  // For all others, change the first two words written in order
3010  // to differentiate the "early write" and "on-time write"
3011  // readback patterns during read leveling
3012  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},{DQ_WIDTH/4{4'h2}},
3013  {DQ_WIDTH/4{4'h9}},{DQ_WIDTH/4{4'hC}},
3014  {DQ_WIDTH/4{4'hE}},{DQ_WIDTH/4{4'h7}},
3015  {DQ_WIDTH/4{4'h1}},{DQ_WIDTH/4{4'hB}}};
3016  end else if (!prbs_rdlvl_done && ~phy_data_full)
3017  // Each 8-bit slice of prbs_o is replicated DQ_WIDTH/8 times so that
3018  // every training word spans the full DQ_WIDTH
3019  phy_wrdata <= #TCQ {{DQ_WIDTH/8{prbs_o[8*8-1:7*8]}},{DQ_WIDTH/8{prbs_o[7*8-1:6*8]}},
3020  {DQ_WIDTH/8{prbs_o[6*8-1:5*8]}},{DQ_WIDTH/8{prbs_o[5*8-1:4*8]}},
3021  {DQ_WIDTH/8{prbs_o[4*8-1:3*8]}},{DQ_WIDTH/8{prbs_o[3*8-1:2*8]}},
3022  {DQ_WIDTH/8{prbs_o[2*8-1:8]}},{DQ_WIDTH/8{prbs_o[8-1:0]}}};
3023 
3024  end else begin: wrdq_div1_2to1_wrcal_first
3025 
3026  always @(posedge clk)
3027  if ((~oclkdelay_calib_done)&& (DRAM_TYPE == "DDR3"))
3028  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'hF}},
3029  {DQ_WIDTH/4{4'h0}},
3030  {DQ_WIDTH/4{4'hF}},
3031  {DQ_WIDTH/4{4'h0}}};
3032  else if ((!wrcal_done) && (DRAM_TYPE == "DDR3"))begin
3033  case (wrcal_pat_cnt)
3034  2'b00: begin
3035  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h5}},
3036  {DQ_WIDTH/4{4'hA}},
3037  {DQ_WIDTH/4{4'h0}},
3038  {DQ_WIDTH/4{4'hF}}};
3039  end
3040  2'b01: begin
3041  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h6}},
3042  {DQ_WIDTH/4{4'h9}},
3043  {DQ_WIDTH/4{4'hA}},
3044  {DQ_WIDTH/4{4'h5}}};
3045  end
3046  2'b10: begin
3047  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},
3048  {DQ_WIDTH/4{4'hE}},
3049  {DQ_WIDTH/4{4'h1}},
3050  {DQ_WIDTH/4{4'hB}}};
3051  end
3052  2'b11: begin
3053  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h8}},
3054  {DQ_WIDTH/4{4'hD}},
3055  {DQ_WIDTH/4{4'hE}},
3056  {DQ_WIDTH/4{4'h4}}};
3057  end
3058  endcase
3059  end else if (!rdlvl_stg1_done) begin
3060  // The 16 words of stage 1 write data in 2:1 mode are written
3061  // over 4 consecutive controller clock cycles. Note that write
3062  // data follows phy_wrdata_en by one clock cycle
3063  case (wrdata_pat_cnt)
3064  2'b00: begin
3065  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'hE}},
3066  {DQ_WIDTH/4{4'h7}},
3067  {DQ_WIDTH/4{4'h3}},
3068  {DQ_WIDTH/4{4'h9}}};
3069  end
3070 
3071  2'b01: begin
3072  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},
3073  {DQ_WIDTH/4{4'h2}},
3074  {DQ_WIDTH/4{4'h9}},
3075  {DQ_WIDTH/4{4'hC}}};
3076  end
3077 
3078  2'b10: begin
3079  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'hE}},
3080  {DQ_WIDTH/4{4'h7}},
3081  {DQ_WIDTH/4{4'h1}},
3082  {DQ_WIDTH/4{4'hB}}};
3083  end
3084 
3085  2'b11: begin
3086  phy_wrdata <= #TCQ {{DQ_WIDTH/4{4'h4}},
3087  {DQ_WIDTH/4{4'h2}},
3088  {DQ_WIDTH/4{4'h9}},
3089  {DQ_WIDTH/4{4'hC}}};
3090  end
3091  endcase
3092  end else if (!prbs_rdlvl_done && ~phy_data_full) begin
3093  // Each 8-bit slice of prbs_o is replicated DQ_WIDTH/8 times so that
3094  // every training word spans the full DQ_WIDTH
3095  phy_wrdata <= #TCQ {{DQ_WIDTH/8{prbs_o[4*8-1:3*8]}},
3096  {DQ_WIDTH/8{prbs_o[3*8-1:2*8]}},
3097  {DQ_WIDTH/8{prbs_o[2*8-1:8]}},
3098  {DQ_WIDTH/8{prbs_o[8-1:0]}}};
3099  end
3100 
3101  end
3102 endgenerate
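 // Editor's sketch (hypothetical value, DQ_WIDTH = 32 assumed only for
 // illustration): in the 2:1 branches above, if prbs_o[31:0] = 32'hA53C_0F96
 // then each byte is replicated DQ_WIDTH/8 = 4 times per word, so one
 // phy_wrdata beat is
 //   {{4{8'hA5}}, {4{8'h3C}}, {4{8'h0F}}, {4{8'h96}}}
 // and every byte lane of the memory receives the same PRBS byte per word.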
3103 
3104  //***************************************************************************
3105  // Memory control/address
3106  //***************************************************************************
3107 
3108 
3109  // Phases [2] and [3] are always deasserted for 4:1 mode
3110  generate
3111  if (nCK_PER_CLK == 4) begin: gen_div4_ca_tieoff
3112  always @(posedge clk) begin
3113  phy_ras_n[3:2] <= #TCQ 2'b11;
3114  phy_cas_n[3:2] <= #TCQ 2'b11;
3115  phy_we_n[3:2] <= #TCQ 2'b11;
3116  end
3117  end
3118  endgenerate
3119 
3120  // Assert RAS when: (1) Loading MRS, (2) Activating Row, (3) Precharging
3121  // (4) auto refresh
3122  generate
3123  if (!(CWL_M % 2)) begin: even_cwl
3124  always @(posedge clk) begin
3125  if ((init_state_r == INIT_LOAD_MR) ||
3126  (init_state_r == INIT_MPR_RDEN) ||
3127  (init_state_r == INIT_MPR_DISABLE) ||
3128  (init_state_r == INIT_REG_WRITE) ||
3129  (init_state_r == INIT_WRLVL_START) ||
3130  (init_state_r == INIT_WRLVL_LOAD_MR) ||
3131  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
3132  (init_state_r == INIT_RDLVL_ACT) ||
3133  (init_state_r == INIT_WRCAL_ACT) ||
3134  (init_state_r == INIT_OCLKDELAY_ACT) ||
3135  (init_state_r == INIT_PRECHARGE) ||
3136  (init_state_r == INIT_DDR2_PRECHARGE) ||
3137  (init_state_r == INIT_REFRESH))begin
3138  phy_ras_n[0] <= #TCQ 1'b0;
3139  phy_ras_n[1] <= #TCQ 1'b1;
3140  end else begin
3141  phy_ras_n[0] <= #TCQ 1'b1;
3142  phy_ras_n[1] <= #TCQ 1'b1;
3143  end
3144  end
3145 
3146  // Assert CAS when: (1) Loading MRS, (2) Issuing Read/Write command
3147  // (3) auto refresh
3148  always @(posedge clk) begin
3149  if ((init_state_r == INIT_LOAD_MR) ||
3150  (init_state_r == INIT_MPR_RDEN) ||
3151  (init_state_r == INIT_MPR_DISABLE) ||
3152  (init_state_r == INIT_REG_WRITE) ||
3153  (init_state_r == INIT_WRLVL_START) ||
3154  (init_state_r == INIT_WRLVL_LOAD_MR) ||
3155  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
3156  (init_state_r == INIT_REFRESH) ||
3157  (rdlvl_wr_rd && new_burst_r))begin
3158  phy_cas_n[0] <= #TCQ 1'b0;
3159  phy_cas_n[1] <= #TCQ 1'b1;
3160  end else begin
3161  phy_cas_n[0] <= #TCQ 1'b1;
3162  phy_cas_n[1] <= #TCQ 1'b1;
3163  end
3164  end
3165  // Assert WE when: (1) Loading MRS, (2) Issuing a Write command (which
3166  // only occurs during read leveling), (3) Issuing a ZQ Long Calib command,
3167  // (4) Precharge
3168  always @(posedge clk) begin
3169  if ((init_state_r == INIT_LOAD_MR) ||
3170  (init_state_r == INIT_MPR_RDEN) ||
3171  (init_state_r == INIT_MPR_DISABLE) ||
3172  (init_state_r == INIT_REG_WRITE) ||
3173  (init_state_r == INIT_ZQCL) ||
3174  (init_state_r == INIT_WRLVL_START) ||
3175  (init_state_r == INIT_WRLVL_LOAD_MR) ||
3176  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
3177  (init_state_r == INIT_PRECHARGE) ||
3178  (init_state_r == INIT_DDR2_PRECHARGE)||
3179  (rdlvl_wr && new_burst_r))begin
3180  phy_we_n[0] <= #TCQ 1'b0;
3181  phy_we_n[1] <= #TCQ 1'b1;
3182  end else begin
3183  phy_we_n[0] <= #TCQ 1'b1;
3184  phy_we_n[1] <= #TCQ 1'b1;
3185  end
3186  end
3187  end else begin: odd_cwl
3188  always @(posedge clk) begin
3189  if ((init_state_r == INIT_LOAD_MR) ||
3190  (init_state_r == INIT_MPR_RDEN) ||
3191  (init_state_r == INIT_MPR_DISABLE) ||
3192  (init_state_r == INIT_REG_WRITE) ||
3193  (init_state_r == INIT_WRLVL_START) ||
3194  (init_state_r == INIT_WRLVL_LOAD_MR) ||
3195  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
3196  (init_state_r == INIT_RDLVL_ACT) ||
3197  (init_state_r == INIT_WRCAL_ACT) ||
3198  (init_state_r == INIT_OCLKDELAY_ACT) ||
3199  (init_state_r == INIT_PRECHARGE) ||
3200  (init_state_r == INIT_DDR2_PRECHARGE) ||
3201  (init_state_r == INIT_REFRESH))begin
3202  phy_ras_n[0] <= #TCQ 1'b1;
3203  phy_ras_n[1] <= #TCQ 1'b0;
3204  end else begin
3205  phy_ras_n[0] <= #TCQ 1'b1;
3206  phy_ras_n[1] <= #TCQ 1'b1;
3207  end
3208  end
3209  // Assert CAS when: (1) Loading MRS, (2) Issuing Read/Write command
3210  // (3) auto refresh
3211  always @(posedge clk) begin
3212  if ((init_state_r == INIT_LOAD_MR) ||
3213  (init_state_r == INIT_MPR_RDEN) ||
3214  (init_state_r == INIT_MPR_DISABLE) ||
3215  (init_state_r == INIT_REG_WRITE) ||
3216  (init_state_r == INIT_WRLVL_START) ||
3217  (init_state_r == INIT_WRLVL_LOAD_MR) ||
3218  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
3219  (init_state_r == INIT_REFRESH) ||
3220  (rdlvl_wr_rd && new_burst_r))begin
3221  phy_cas_n[0] <= #TCQ 1'b1;
3222  phy_cas_n[1] <= #TCQ 1'b0;
3223  end else begin
3224  phy_cas_n[0] <= #TCQ 1'b1;
3225  phy_cas_n[1] <= #TCQ 1'b1;
3226  end
3227  end
3228  // Assert WE when: (1) Loading MRS, (2) Issuing a Write command (which
3229  // only occurs during read leveling), (3) Issuing a ZQ Long Calib command,
3230  // (4) Precharge
3231  always @(posedge clk) begin
3232  if ((init_state_r == INIT_LOAD_MR) ||
3233  (init_state_r == INIT_MPR_RDEN) ||
3234  (init_state_r == INIT_MPR_DISABLE) ||
3235  (init_state_r == INIT_REG_WRITE) ||
3236  (init_state_r == INIT_ZQCL) ||
3237  (init_state_r == INIT_WRLVL_START) ||
3238  (init_state_r == INIT_WRLVL_LOAD_MR) ||
3239  (init_state_r == INIT_WRLVL_LOAD_MR2) ||
3240  (init_state_r == INIT_PRECHARGE) ||
3241  (init_state_r == INIT_DDR2_PRECHARGE)||
3242  (rdlvl_wr && new_burst_r))begin
3243  phy_we_n[0] <= #TCQ 1'b1;
3244  phy_we_n[1] <= #TCQ 1'b0;
3245  end else begin
3246  phy_we_n[0] <= #TCQ 1'b1;
3247  phy_we_n[1] <= #TCQ 1'b1;
3248  end
3249  end
3250  end
3251  endgenerate
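 // Editor's note (summarising the generate blocks above, not new behaviour):
 // for an even CAS write latency the command is issued on control phase 0
 // (e.g. CWL_M = 8 drives phy_ras_n[0]/phy_cas_n[0]/phy_we_n[0] low), while
 // for an odd CWL it is issued on phase 1 (e.g. CWL_M = 9 drives
 // phy_ras_n[1]/phy_cas_n[1]/phy_we_n[1] low); the unused phase stays high.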
3252 
3253 
3254 
3255  // Assign calib_cmd for the command field in PHY_Ctl_Word
3256  always @(posedge clk) begin
3257  if (wr_level_dqs_asrt) begin
3258  // Request to toggle DQS during write leveling
3259  calib_cmd <= #TCQ 3'b001;
3260  if (CWL_M % 2) begin // odd write latency
3261  calib_data_offset_0 <= #TCQ CWL_M + 3;
3262  calib_data_offset_1 <= #TCQ CWL_M + 3;
3263  calib_data_offset_2 <= #TCQ CWL_M + 3;
3264  calib_cas_slot <= #TCQ 2'b01;
3265  end else begin // even write latency
3266  calib_data_offset_0 <= #TCQ CWL_M + 2;
3267  calib_data_offset_1 <= #TCQ CWL_M + 2;
3268  calib_data_offset_2 <= #TCQ CWL_M + 2;
3269  calib_cas_slot <= #TCQ 2'b00;
3270  end
3271  end else if (rdlvl_wr && new_burst_r) begin
3272  // Write Command
3273  calib_cmd <= #TCQ 3'b001;
3274  if (CWL_M % 2) begin // odd write latency
3275  calib_data_offset_0 <= #TCQ (nCK_PER_CLK == 4) ? CWL_M + 3 : CWL_M - 1;
3276  calib_data_offset_1 <= #TCQ (nCK_PER_CLK == 4) ? CWL_M + 3 : CWL_M - 1;
3277  calib_data_offset_2 <= #TCQ (nCK_PER_CLK == 4) ? CWL_M + 3 : CWL_M - 1;
3278  calib_cas_slot <= #TCQ 2'b01;
3279  end else begin // even write latency
3280  calib_data_offset_0 <= #TCQ (nCK_PER_CLK == 4) ? CWL_M + 2 : CWL_M - 2 ;
3281  calib_data_offset_1 <= #TCQ (nCK_PER_CLK == 4) ? CWL_M + 2 : CWL_M - 2 ;
3282  calib_data_offset_2 <= #TCQ (nCK_PER_CLK == 4) ? CWL_M + 2 : CWL_M - 2 ;
3283  calib_cas_slot <= #TCQ 2'b00;
3284  end
3285  end else if (rdlvl_rd && new_burst_r) begin
3286  // Read Command
3287  calib_cmd <= #TCQ 3'b011;
3288  if (CWL_M % 2)
3289  calib_cas_slot <= #TCQ 2'b01;
3290  else
3291  calib_cas_slot <= #TCQ 2'b00;
3292  if (~pi_calib_done_r1) begin
3293  calib_data_offset_0 <= #TCQ 6'd0;
3294  calib_data_offset_1 <= #TCQ 6'd0;
3295  calib_data_offset_2 <= #TCQ 6'd0;
3296  end else if (~pi_dqs_found_done_r1) begin
3297  calib_data_offset_0 <= #TCQ rd_data_offset_0;
3298  calib_data_offset_1 <= #TCQ rd_data_offset_1;
3299  calib_data_offset_2 <= #TCQ rd_data_offset_2;
3300  end else begin
3301  calib_data_offset_0 <= #TCQ rd_data_offset_ranks_0[6*chip_cnt_r+:6];
3302  calib_data_offset_1 <= #TCQ rd_data_offset_ranks_1[6*chip_cnt_r+:6];
3303  calib_data_offset_2 <= #TCQ rd_data_offset_ranks_2[6*chip_cnt_r+:6];
3304  end
3305  end else begin
3306  // Non-Data Commands like NOP, MRS, ZQ Long Cal, Precharge,
3307  // Active, Refresh
3308  calib_cmd <= #TCQ 3'b100;
3309  calib_data_offset_0 <= #TCQ 6'd0;
3310  calib_data_offset_1 <= #TCQ 6'd0;
3311  calib_data_offset_2 <= #TCQ 6'd0;
3312  if (CWL_M % 2)
3313  calib_cas_slot <= #TCQ 2'b01;
3314  else
3315  calib_cas_slot <= #TCQ 2'b00;
3316  end
3317  end
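 // Editor's worked example (parameter values assumed for illustration only):
 // with nCK_PER_CLK = 4 and CWL_M = 9 (odd), a calibration write above uses
 // calib_data_offset_* = CWL_M + 3 = 12 and calib_cas_slot = 2'b01; with
 // CWL_M = 8 (even) it would be CWL_M + 2 = 10 on slot 2'b00. In 2:1 mode
 // the same branches select CWL_M - 1 or CWL_M - 2 instead.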
3318 
3319  // Write Enable to PHY_Control FIFO always asserted
3320  // No danger of this FIFO being Full with 4:1 sync clock ratio
3321  // This is also the write enable to the command OUT_FIFO
3322  always @(posedge clk) begin
3323  if (rst) begin
3324  calib_ctl_wren <= #TCQ 1'b0;
3325  calib_cmd_wren <= #TCQ 1'b0;
3326  calib_seq <= #TCQ 2'b00;
3327  end else if (cnt_pwron_cke_done_r && phy_ctl_ready
3328  && ~(phy_ctl_full || phy_cmd_full )) begin
3329  calib_ctl_wren <= #TCQ 1'b1;
3330  calib_cmd_wren <= #TCQ 1'b1;
3331  calib_seq <= #TCQ calib_seq + 1;
3332  end else begin
3333  calib_ctl_wren <= #TCQ 1'b0;
3334  calib_cmd_wren <= #TCQ 1'b0;
3335  calib_seq <= #TCQ calib_seq;
3336  end
3337  end
3338 
3339  generate
3340  genvar rnk_i;
3341  for (rnk_i = 0; rnk_i < 4; rnk_i = rnk_i + 1) begin: gen_rnk
3342  always @(posedge clk) begin
3343  if (rst) begin
3344  mr2_r[rnk_i] <= #TCQ 2'b00;
3345  mr1_r[rnk_i] <= #TCQ 3'b000;
3346  end else begin
3347  mr2_r[rnk_i] <= #TCQ tmp_mr2_r[rnk_i];
3348  mr1_r[rnk_i] <= #TCQ tmp_mr1_r[rnk_i];
3349  end
3350  end
3351  end
3352  endgenerate
3353 
3354  // ODT assignment based on the slot configuration and slot-present inputs
3355  // For single-slot systems the slot_1_present input is ignored
3356  // Component interfaces are assumed to be single-slot systems
3357  generate
3358  if (nSLOTS == 1) begin: gen_single_slot_odt
3359  always @(posedge clk) begin
3360  if (rst) begin
3361  tmp_mr2_r[1] <= #TCQ 2'b00;
3362  tmp_mr2_r[2] <= #TCQ 2'b00;
3363  tmp_mr2_r[3] <= #TCQ 2'b00;
3364  tmp_mr1_r[1] <= #TCQ 3'b000;
3365  tmp_mr1_r[2] <= #TCQ 3'b000;
3366  tmp_mr1_r[3] <= #TCQ 3'b000;
3367  phy_tmp_cs1_r <= #TCQ {CS_WIDTH*nCS_PER_RANK{1'b1}};
3368  phy_tmp_odt_r <= #TCQ 4'b0000;
3369  phy_tmp_odt_r1 <= #TCQ phy_tmp_odt_r;
3370  end else begin
3371  case ({slot_0_present[0],slot_0_present[1],
3372  slot_0_present[2],slot_0_present[3]})
3373  // Single slot configuration with quad rank
3374  // Assuming same behavior as single slot dual rank for now
3375  // DDR2 does not have quad rank parts
3376  4'b1111: begin
3377  if ((RTT_WR == "OFF") ||
3378  ((WRLVL=="ON") && ~wrlvl_done &&
3379  (wrlvl_rank_cntr==3'd0))) begin
3380  //Rank0 Dynamic ODT disabled
3381  tmp_mr2_r[0] <= #TCQ 2'b00;
3382  //Rank0 RTT_NOM
3383  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3384  (RTT_NOM_int == "60") ? 3'b001 :
3385  (RTT_NOM_int == "120") ? 3'b010 :
3386  3'b000;
3387  end else begin
3388  //Rank0 Dynamic ODT defaults to 120 ohms
3389  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3390  2'b10;
3391  //Rank0 RTT_NOM after write leveling completes
3392  tmp_mr1_r[0] <= #TCQ 3'b000;
3393  end
3394  phy_tmp_odt_r <= #TCQ 4'b0001;
3395  // Chip Select assignments
3396  phy_tmp_cs1_r[((chip_cnt_r*nCS_PER_RANK)
3397  ) +: nCS_PER_RANK] <= #TCQ 'b0;
3398  end
3399 
3400  // Single slot configuration with single rank
3401  4'b1000: begin
3402  phy_tmp_odt_r <= #TCQ 4'b0001;
3403  if ((REG_CTRL == "ON") && (nCS_PER_RANK > 1)) begin
3404  phy_tmp_cs1_r[chip_cnt_r] <= #TCQ 1'b0;
3405  end else begin
3406  phy_tmp_cs1_r <= #TCQ {CS_WIDTH*nCS_PER_RANK{1'b0}};
3407  end
3408  if ((RTT_WR == "OFF") ||
3409  ((WRLVL=="ON") && ~wrlvl_done &&
3410  ((cnt_init_mr_r == 2'd0) || (USE_ODT_PORT == 1)))) begin
3411  //Rank0 Dynamic ODT disabled
3412  tmp_mr2_r[0] <= #TCQ 2'b00;
3413  //Rank0 RTT_NOM
3414  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3415  (RTT_NOM_int == "60") ? 3'b001 :
3416  (RTT_NOM_int == "120") ? 3'b010 :
3417  3'b000;
3418  end else begin
3419  //Rank0 Dynamic ODT defaults to 120 ohms
3420  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3421  2'b10;
3422  //Rank0 RTT_NOM after write leveling completes
3423  tmp_mr1_r[0] <= #TCQ 3'b000;
3424  end
3425  end
3426 
3427  // Single slot configuration with dual rank
3428  4'b1100: begin
3429  phy_tmp_odt_r <= #TCQ 4'b0001;
3430  // Chip Select assignments
3431 
3432  phy_tmp_cs1_r[((chip_cnt_r*nCS_PER_RANK)
3433  ) +: nCS_PER_RANK] <= #TCQ 'b0;
3434  if ((RTT_WR == "OFF") ||
3435  ((WRLVL=="ON") && ~wrlvl_done &&
3436  (wrlvl_rank_cntr==3'd0))) begin
3437  //Rank0 Dynamic ODT disabled
3438  tmp_mr2_r[0] <= #TCQ 2'b00;
3439  //Rank0 Rtt_NOM
3440  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3441  (RTT_NOM_int == "60") ? 3'b001 :
3442  (RTT_NOM_int == "120") ? 3'b010 :
3443  3'b000;
3444  end else begin
3445  //Rank0 Dynamic ODT defaults to 120 ohms
3446  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3447  2'b10;
3448  //Rank0 Rtt_NOM after write leveling completes
3449  tmp_mr1_r[0] <= #TCQ 3'b000;
3450  end
3451  end
3452 
3453  default: begin
3454  phy_tmp_odt_r <= #TCQ 4'b0001;
3455  phy_tmp_cs1_r <= #TCQ {CS_WIDTH*nCS_PER_RANK{1'b0}};
3456  if ((RTT_WR == "OFF") ||
3457  ((WRLVL=="ON") && ~wrlvl_done)) begin
3458  //Rank0 Dynamic ODT disabled
3459  tmp_mr2_r[0] <= #TCQ 2'b00;
3460  //Rank0 Rtt_NOM
3461  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3462  (RTT_NOM_int == "60") ? 3'b001 :
3463  (RTT_NOM_int == "120") ? 3'b010 :
3464  3'b000;
3465  end else begin
3466  //Rank0 Dynamic ODT defaults to 120 ohms
3467  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3468  2'b10;
3469  //Rank0 Rtt_NOM after write leveling completes
3470  tmp_mr1_r[0] <= #TCQ 3'b000;
3471  end
3472  end
3473  endcase
3474  end
3475  end
3476  end else if (nSLOTS == 2) begin: gen_dual_slot_odt
3477  always @ (posedge clk) begin
3478  if (rst) begin
3479  tmp_mr2_r[1] <= #TCQ 2'b00;
3480  tmp_mr2_r[2] <= #TCQ 2'b00;
3481  tmp_mr2_r[3] <= #TCQ 2'b00;
3482  tmp_mr1_r[1] <= #TCQ 3'b000;
3483  tmp_mr1_r[2] <= #TCQ 3'b000;
3484  tmp_mr1_r[3] <= #TCQ 3'b000;
3485  phy_tmp_odt_r <= #TCQ 4'b0000;
3486  phy_tmp_cs1_r <= #TCQ {CS_WIDTH*nCS_PER_RANK{1'b1}};
3487  phy_tmp_odt_r1 <= #TCQ phy_tmp_odt_r;
3488  end else begin
3489  case ({slot_0_present[0],slot_0_present[1],
3490  slot_1_present[0],slot_1_present[1]})
3491  // Two slot configuration, one slot present, single rank
3492  4'b10_00: begin
3493  if (//wrlvl_odt ||
3494  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
3495  (init_state_r == INIT_WRCAL_WRITE) ||
3496  (init_state_r == INIT_OCLKDELAY_WRITE)) begin
3497  // odt turned on only during write
3498  phy_tmp_odt_r <= #TCQ 4'b0001;
3499  end
3500  phy_tmp_cs1_r <= #TCQ {nCS_PER_RANK{1'b0}};
3501  if ((RTT_WR == "OFF") ||
3502  ((WRLVL=="ON") && ~wrlvl_done)) begin
3503  //Rank0 Dynamic ODT disabled
3504  tmp_mr2_r[0] <= #TCQ 2'b00;
3505  //Rank0 Rtt_NOM
3506  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3507  (RTT_NOM_int == "60") ? 3'b001 :
3508  (RTT_NOM_int == "120") ? 3'b010 :
3509  3'b000;
3510  end else begin
3511  //Rank0 Dynamic ODT defaults to 120 ohms
3512  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3513  2'b10;
3514  //Rank0 Rtt_NOM after write leveling completes
3515  tmp_mr1_r[0] <= #TCQ 3'b000;
3516  end
3517  end
3518  4'b00_10: begin
3519 
3520  //Rank1 ODT enabled
3521  if (//wrlvl_odt ||
3522  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
3523  (init_state_r == INIT_WRCAL_WRITE) ||
3524  (init_state_r == INIT_OCLKDELAY_WRITE)) begin
3525  // odt turned on only during write
3526  phy_tmp_odt_r <= #TCQ 4'b0001;
3527  end
3528  phy_tmp_cs1_r <= #TCQ {nCS_PER_RANK{1'b0}};
3529  if ((RTT_WR == "OFF") ||
3530  ((WRLVL=="ON") && ~wrlvl_done)) begin
3531  //Rank1 Dynamic ODT disabled
3532  tmp_mr2_r[0] <= #TCQ 2'b00;
3533  //Rank1 Rtt_NOM defaults to 120 ohms
3534  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3535  (RTT_NOM_int == "60") ? 3'b001 :
3536  (RTT_NOM_int == "120") ? 3'b010 :
3537  3'b000;
3538  end else begin
3539  //Rank1 Dynamic ODT defaults to 120 ohms
3540  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3541  2'b10;
3542  //Rank1 Rtt_NOM after write leveling completes
3543  tmp_mr1_r[0] <= #TCQ 3'b000;
3544  end
3545  end
3546  // Two slot configuration, one slot present, dual rank
3547  4'b00_11: begin
3548  if (//wrlvl_odt ||
3549  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
3550  (init_state_r == INIT_WRCAL_WRITE) ||
3551  (init_state_r == INIT_OCLKDELAY_WRITE)) begin
3552  // odt turned on only during write
3553  phy_tmp_odt_r
3554  <= #TCQ 4'b0001;
3555  end
3556 
3557  // Chip Select assignments
3558  phy_tmp_cs1_r[(chip_cnt_r*nCS_PER_RANK) +: nCS_PER_RANK]
3559  <= #TCQ {nCS_PER_RANK{1'b0}};
3560 
3561  if ((RTT_WR == "OFF") ||
3562  ((WRLVL=="ON") && ~wrlvl_done &&
3563  (wrlvl_rank_cntr==3'd0))) begin
3564  //Rank0 Dynamic ODT disabled
3565  tmp_mr2_r[0] <= #TCQ 2'b00;
3566  //Rank0 Rtt_NOM
3567  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3568  (RTT_NOM_int == "60") ? 3'b001 :
3569  (RTT_NOM_int == "120") ? 3'b010 :
3570  3'b000;
3571  end else begin
3572  //Rank0 Dynamic ODT defaults to 120 ohms
3573  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3574  2'b10;
3575  //Rank0 Rtt_NOM after write leveling completes
3576  tmp_mr1_r[0] <= #TCQ 3'b000;
3577  end
3578  end
3579  4'b11_00: begin
3580  if (//wrlvl_odt ||
3581  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
3582  (init_state_r == INIT_WRCAL_WRITE) ||
3583  (init_state_r == INIT_OCLKDELAY_WRITE)) begin
3584  // odt turned on only during write
3585  phy_tmp_odt_r <= #TCQ 4'b0001;
3586  end
3587 
3588  // Chip Select assignments
3589  phy_tmp_cs1_r[(chip_cnt_r*nCS_PER_RANK) +: nCS_PER_RANK]
3590  <= #TCQ {nCS_PER_RANK{1'b0}};
3591 
3592  if ((RTT_WR == "OFF") ||
3593  ((WRLVL=="ON") && ~wrlvl_done &&
3594  (wrlvl_rank_cntr==3'd0))) begin
3595  //Rank1 Dynamic ODT disabled
3596  tmp_mr2_r[0] <= #TCQ 2'b00;
3597  //Rank1 Rtt_NOM
3598  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3599  (RTT_NOM_int == "60") ? 3'b001 :
3600  (RTT_NOM_int == "120") ? 3'b010 :
3601  3'b000;
3602  end else begin
3603  //Rank1 Dynamic ODT defaults to 120 ohms
3604  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3605  2'b10;
3606  //Rank1 Rtt_NOM after write leveling completes
3607  tmp_mr1_r[0] <= #TCQ 3'b000;
3608  end
3609  end
3610  // Two slot configuration, one rank per slot
3611  4'b10_10: begin
3612  if(DRAM_TYPE == "DDR2")begin
3613  if(chip_cnt_r == 2'b00)begin
3614  phy_tmp_odt_r
3615  <= #TCQ 4'b0010; //bit0 for rank0
3616  end else begin
3617  phy_tmp_odt_r
3618  <= #TCQ 4'b0001; //bit0 for rank0
3619  end
3620  end else begin
3621  if(init_state_r == INIT_WRLVL_WAIT)
3622  phy_tmp_odt_r <= #TCQ 4'b0011; // rank 0/1 odt0
3623  else if((init_next_state == INIT_RDLVL_STG1_WRITE) ||
3624  (init_next_state == INIT_WRCAL_WRITE) ||
3625  (init_next_state == INIT_OCLKDELAY_WRITE))
3626  phy_tmp_odt_r <= #TCQ 4'b0011; // bit0 for rank0/1 (write)
3627  else if ((init_next_state == INIT_PI_PHASELOCK_READS) ||
3628  (init_next_state == INIT_MPR_READ) ||
3629  (init_next_state == INIT_RDLVL_STG1_READ) ||
3630  (init_next_state == INIT_RDLVL_STG2_READ) ||
3631  (init_next_state == INIT_OCLKDELAY_READ) ||
3632  (init_next_state == INIT_WRCAL_READ) ||
3633  (init_next_state == INIT_WRCAL_MULT_READS))
3634  phy_tmp_odt_r <= #TCQ 4'b0010; // bit0 for rank1 (rank 0 rd)
3635  end
3636 
3637  // Chip Select assignments
3638  phy_tmp_cs1_r[(chip_cnt_r*nCS_PER_RANK) +: nCS_PER_RANK]
3639  <= #TCQ {nCS_PER_RANK{1'b0}};
3640 
3641  if ((RTT_WR == "OFF") ||
3642  ((WRLVL=="ON") && ~wrlvl_done &&
3643  (wrlvl_rank_cntr==3'd0))) begin
3644  //Rank0 Dynamic ODT disabled
3645  tmp_mr2_r[0] <= #TCQ 2'b00;
3646  //Rank0 Rtt_NOM
3647  tmp_mr1_r[0] <= #TCQ (RTT_WR == "60") ? 3'b001 :
3648  (RTT_WR == "120") ? 3'b010 :
3649  3'b000;
3650  //Rank1 Dynamic ODT disabled
3651  tmp_mr2_r[1] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3652  2'b10;
3653  //Rank1 Rtt_NOM
3654  tmp_mr1_r[1] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3655  (RTT_NOM_int == "60") ? 3'b001 :
3656  (RTT_NOM_int == "120") ? 3'b010 :
3657  3'b000;
3658  end else begin
3659  //Rank0 Dynamic ODT defaults to 120 ohms
3660  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3661  2'b10;
3662  //Rank0 Rtt_NOM
3663  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "60") ? 3'b001 :
3664  (RTT_NOM_int == "120") ? 3'b010 :
3665  (RTT_NOM_int == "20") ? 3'b100 :
3666  (RTT_NOM_int == "30") ? 3'b101 :
3667  (RTT_NOM_int == "40") ? 3'b011 :
3668  3'b000;
3669  //Rank1 Dynamic ODT defaults to 120 ohms
3670  tmp_mr2_r[1] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3671  2'b10;
3672  //Rank1 Rtt_NOM
3673  tmp_mr1_r[1] <= #TCQ (RTT_NOM_int == "60") ? 3'b001 :
3674  (RTT_NOM_int == "120") ? 3'b010 :
3675  (RTT_NOM_int == "20") ? 3'b100 :
3676  (RTT_NOM_int == "30") ? 3'b101 :
3677  (RTT_NOM_int == "40") ? 3'b011 :
3678  3'b000;
3679  end
3680  end
3681  // Two Slots - One slot with dual rank and other with single rank
3682  4'b10_11: begin
3683 
3684  //Rank3 Rtt_NOM
3685  tmp_mr1_r[2] <= #TCQ (RTT_NOM_int == "60") ? 3'b001 :
3686  (RTT_NOM_int == "120") ? 3'b010 :
3687  (RTT_NOM_int == "20") ? 3'b100 :
3688  (RTT_NOM_int == "30") ? 3'b101 :
3689  (RTT_NOM_int == "40") ? 3'b011 :
3690  3'b000;
3691  tmp_mr2_r[2] <= #TCQ 2'b00;
3692  if ((RTT_WR == "OFF") ||
3693  ((WRLVL=="ON") && ~wrlvl_done &&
3694  (wrlvl_rank_cntr==3'd0))) begin
3695  //Rank0 Dynamic ODT disabled
3696  tmp_mr2_r[0] <= #TCQ 2'b00;
3697  //Rank0 Rtt_NOM
3698  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3699  (RTT_NOM_int == "60") ? 3'b001 :
3700  (RTT_NOM_int == "120") ? 3'b010 :
3701  3'b000;
3702  //Rank1 Dynamic ODT disabled
3703  tmp_mr2_r[1] <= #TCQ 2'b00;
3704  //Rank1 Rtt_NOM
3705  tmp_mr1_r[1] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3706  (RTT_NOM_int == "60") ? 3'b001 :
3707  (RTT_NOM_int == "120") ? 3'b010 :
3708  3'b000;
3709  end else begin
3710  //Rank0 Dynamic ODT defaults to 120 ohms
3711  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3712  2'b10;
3713  //Rank0 Rtt_NOM after write leveling completes
3714  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "60") ? 3'b001 :
3715  (RTT_NOM_int == "120") ? 3'b010 :
3716  (RTT_NOM_int == "20") ? 3'b100 :
3717  (RTT_NOM_int == "30") ? 3'b101 :
3718  (RTT_NOM_int == "40") ? 3'b011 :
3719  3'b000;
3720  //Rank1 Dynamic ODT defaults to 120 ohms
3721  tmp_mr2_r[1] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3722  2'b10;
3723  //Rank1 Rtt_NOM after write leveling completes
3724  tmp_mr1_r[1] <= #TCQ 3'b000;
3725  end
3726  //Slot1 Rank1 or Rank3 is being written
3727  if(DRAM_TYPE == "DDR2")begin
3728  if(chip_cnt_r == 2'b00)begin
3729  phy_tmp_odt_r
3730  <= #TCQ 4'b0010;
3731  end else begin
3732  phy_tmp_odt_r
3733  <= #TCQ 4'b0001;
3734  end
3735  end else begin
3736  if (//wrlvl_odt ||
3737  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
3738  (init_state_r == INIT_WRCAL_WRITE) ||
3739  (init_state_r == INIT_OCLKDELAY_WRITE)) begin
3740  if (chip_cnt_r[0] == 1'b1) begin
3741  phy_tmp_odt_r
3742  <= #TCQ 4'b0011;
3743  //Slot0 Rank0 is being written
3744  end else begin
3745  phy_tmp_odt_r
3746  <= #TCQ 4'b0101; // ODT for ranks 0 and 2 asserted
3747  end
3748  end else if ((init_state_r == INIT_RDLVL_STG1_READ)
3749  || (init_state_r == INIT_PI_PHASELOCK_READS) ||
3750  (init_state_r == INIT_RDLVL_STG2_READ) ||
3751  (init_state_r == INIT_OCLKDELAY_READ) ||
3752  (init_state_r == INIT_WRCAL_READ) ||
3753  (init_state_r == INIT_WRCAL_MULT_READS))begin
3754  if (chip_cnt_r == 2'b00) begin
3755  phy_tmp_odt_r
3756  <= #TCQ 4'b0100;
3757  end else begin
3758  phy_tmp_odt_r
3759  <= #TCQ 4'b0001;
3760  end
3761  end
3762  end
3763 
3764  // Chip Select assignments
3765  phy_tmp_cs1_r[(chip_cnt_r*nCS_PER_RANK) +: nCS_PER_RANK]
3766  <= #TCQ {nCS_PER_RANK{1'b0}};
3767 
3768  end
3769  // Two Slots - One slot with dual rank and other with single rank
3770  4'b11_10: begin
3771 
3772  //Rank2 Rtt_NOM
3773  tmp_mr1_r[2] <= #TCQ (RTT_NOM2 == "60") ? 3'b001 :
3774  (RTT_NOM2 == "120") ? 3'b010 :
3775  (RTT_NOM2 == "20") ? 3'b100 :
3776  (RTT_NOM2 == "30") ? 3'b101 :
3777  (RTT_NOM2 == "40") ? 3'b011:
3778  3'b000;
3779  tmp_mr2_r[2] <= #TCQ 2'b00;
3780  if ((RTT_WR == "OFF") ||
3781  ((WRLVL=="ON") && ~wrlvl_done &&
3782  (wrlvl_rank_cntr==3'd0))) begin
3783  //Rank0 Dynamic ODT disabled
3784  tmp_mr2_r[0] <= #TCQ 2'b00;
3785  //Rank0 Rtt_NOM
3786  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3787  (RTT_NOM_int == "60") ? 3'b001 :
3788  (RTT_NOM_int == "120") ? 3'b010 :
3789  3'b000;
3790  //Rank1 Dynamic ODT disabled
3791  tmp_mr2_r[1] <= #TCQ 2'b00;
3792  //Rank1 Rtt_NOM
3793  tmp_mr1_r[1] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3794  (RTT_NOM_int == "60") ? 3'b001 :
3795  (RTT_NOM_int == "120") ? 3'b010 :
3796  3'b000;
3797  end else begin
3798  //Rank1 Dynamic ODT defaults to 120 ohms
3799  tmp_mr2_r[1] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3800  2'b10;
3801  //Rank1 Rtt_NOM
3802  tmp_mr1_r[1] <= #TCQ (RTT_NOM_int == "60") ? 3'b001 :
3803  (RTT_NOM_int == "120") ? 3'b010 :
3804  (RTT_NOM_int == "20") ? 3'b100 :
3805  (RTT_NOM_int == "30") ? 3'b101 :
3806  (RTT_NOM_int == "40") ? 3'b011:
3807  3'b000;
3808  //Rank0 Dynamic ODT defaults to 120 ohms
3809  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3810  2'b10;
3811  //Rank0 Rtt_NOM after write leveling completes
3812  tmp_mr1_r[0] <= #TCQ 3'b000;
3813  end
3814 
3815  if(DRAM_TYPE == "DDR2")begin
3816  if(chip_cnt_r[1] == 1'b1)begin
3817  phy_tmp_odt_r <=
3818  #TCQ 4'b0001;
3819  end else begin
3820  phy_tmp_odt_r
3821  <= #TCQ 4'b0100; // rank 2 ODT asserted
3822  end
3823  end else begin
3824  if (// wrlvl_odt ||
3825  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
3826  (init_state_r == INIT_WRCAL_WRITE) ||
3827  (init_state_r == INIT_OCLKDELAY_WRITE)) begin
3828 
3829  if (chip_cnt_r[1] == 1'b1) begin
3830  phy_tmp_odt_r
3831  <= #TCQ 4'b0110;
3832  end else begin
3833  phy_tmp_odt_r <=
3834  #TCQ 4'b0101;
3835  end
3836  end else if ((init_state_r == INIT_RDLVL_STG1_READ) ||
3837  (init_state_r == INIT_PI_PHASELOCK_READS) ||
3838  (init_state_r == INIT_RDLVL_STG2_READ) ||
3839  (init_state_r == INIT_OCLKDELAY_READ) ||
3840  (init_state_r == INIT_WRCAL_READ) ||
3841  (init_state_r == INIT_WRCAL_MULT_READS)) begin
3842 
3843  if (chip_cnt_r[1] == 1'b1) begin
3844  phy_tmp_odt_r[(1*nCS_PER_RANK) +: nCS_PER_RANK]
3845  <= #TCQ 4'b0010;
3846  end else begin
3847  phy_tmp_odt_r
3848  <= #TCQ 4'b0100;
3849  end
3850  end
3851  end
3852 
3853  // Chip Select assignments
3854  phy_tmp_cs1_r[(chip_cnt_r*nCS_PER_RANK) +: nCS_PER_RANK]
3855  <= #TCQ {nCS_PER_RANK{1'b0}};
3856  end
3857  // Two Slots - two ranks per slot
3858  4'b11_11: begin
3859  //Rank2 Rtt_NOM
3860  tmp_mr1_r[2] <= #TCQ (RTT_NOM2 == "60") ? 3'b001 :
3861  (RTT_NOM2 == "120") ? 3'b010 :
3862  (RTT_NOM2 == "20") ? 3'b100 :
3863  (RTT_NOM2 == "30") ? 3'b101 :
3864  (RTT_NOM2 == "40") ? 3'b011 :
3865  3'b000;
3866  //Rank3 Rtt_NOM
3867  tmp_mr1_r[3] <= #TCQ (RTT_NOM3 == "60") ? 3'b001 :
3868  (RTT_NOM3 == "120") ? 3'b010 :
3869  (RTT_NOM3 == "20") ? 3'b100 :
3870  (RTT_NOM3 == "30") ? 3'b101 :
3871  (RTT_NOM3 == "40") ? 3'b011 :
3872  3'b000;
3873  tmp_mr2_r[2] <= #TCQ 2'b00;
3874  tmp_mr2_r[3] <= #TCQ 2'b00;
3875  if ((RTT_WR == "OFF") ||
3876  ((WRLVL=="ON") && ~wrlvl_done &&
3877  (wrlvl_rank_cntr==3'd0))) begin
3878  //Rank0 Dynamic ODT disabled
3879  tmp_mr2_r[0] <= #TCQ 2'b00;
3880  //Rank0 Rtt_NOM
3881  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3882  (RTT_NOM_int == "60") ? 3'b001 :
3883  (RTT_NOM_int == "120") ? 3'b010 :
3884  3'b000;
3885  //Rank1 Dynamic ODT disabled
3886  tmp_mr2_r[1] <= #TCQ 2'b00;
3887  //Rank1 Rtt_NOM
3888  tmp_mr1_r[1] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3889  (RTT_NOM_int == "60") ? 3'b001 :
3890  (RTT_NOM_int == "120") ? 3'b010 :
3891  3'b000;
3892  end else begin
3893  //Rank1 Dynamic ODT defaults to 120 ohms
3894  tmp_mr2_r[1] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3895  2'b10;
3896  //Rank1 Rtt_NOM after write leveling completes
3897  tmp_mr1_r[1] <= #TCQ 3'b000;
3898  //Rank0 Dynamic ODT defaults to 120 ohms
3899  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3900  2'b10;
3901  //Rank0 Rtt_NOM after write leveling completes
3902  tmp_mr1_r[0] <= #TCQ 3'b000;
3903  end
3904 
3905  if(DRAM_TYPE == "DDR2")begin
3906  if(chip_cnt_r[1] == 1'b1)begin
3907  phy_tmp_odt_r
3908  <= #TCQ 4'b0001;
3909  end else begin
3910  phy_tmp_odt_r
3911  <= #TCQ 4'b0100;
3912  end
3913  end else begin
3914  if (//wrlvl_odt ||
3915  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
3916  (init_state_r == INIT_WRCAL_WRITE) ||
3917  (init_state_r == INIT_OCLKDELAY_WRITE)) begin
3918  //Slot1 Rank1 or Rank3 is being written
3919  if (chip_cnt_r[0] == 1'b1) begin
3920  phy_tmp_odt_r
3921  <= #TCQ 4'b0110;
3922  //Slot0 Rank0 or Rank2 is being written
3923  end else begin
3924  phy_tmp_odt_r
3925  <= #TCQ 4'b1001;
3926  end
3927  end else if ((init_state_r == INIT_RDLVL_STG1_READ) ||
3928  (init_state_r == INIT_PI_PHASELOCK_READS) ||
3929  (init_state_r == INIT_RDLVL_STG2_READ) ||
3930  (init_state_r == INIT_OCLKDELAY_READ) ||
3931  (init_state_r == INIT_WRCAL_READ) ||
3932  (init_state_r == INIT_WRCAL_MULT_READS))begin
3933  //Slot1 Rank1 or Rank3 is being read
3934  if (chip_cnt_r[0] == 1'b1) begin
3935  phy_tmp_odt_r
3936  <= #TCQ 4'b0100;
3937  //Slot0 Rank0 or Rank2 is being read
3938  end else begin
3939  phy_tmp_odt_r
3940  <= #TCQ 4'b1000;
3941  end
3942  end
3943  end
3944 
3945  // Chip Select assignments
3946  phy_tmp_cs1_r[(chip_cnt_r*nCS_PER_RANK) +: nCS_PER_RANK]
3947  <= #TCQ {nCS_PER_RANK{1'b0}};
3948  end
3949  default: begin
3950  phy_tmp_odt_r <= #TCQ 4'b1111;
3951  // Chip Select assignments
3952  phy_tmp_cs1_r[(chip_cnt_r*nCS_PER_RANK) +: nCS_PER_RANK]
3953  <= #TCQ {nCS_PER_RANK{1'b0}};
3954  if ((RTT_WR == "OFF") ||
3955  ((WRLVL=="ON") && ~wrlvl_done)) begin
3956  //Rank0 Dynamic ODT disabled
3957  tmp_mr2_r[0] <= #TCQ 2'b00;
3958  //Rank0 Rtt_NOM
3959  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3960  (RTT_NOM_int == "60") ? 3'b001 :
3961  (RTT_NOM_int == "120") ? 3'b010 :
3962  3'b000;
3963  //Rank1 Dynamic ODT disabled
3964  tmp_mr2_r[1] <= #TCQ 2'b00;
3965  //Rank1 Rtt_NOM
3966  tmp_mr1_r[1] <= #TCQ (RTT_NOM_int == "40") ? 3'b011 :
3967  (RTT_NOM_int == "60") ? 3'b001 :
3968  (RTT_NOM_int == "120") ? 3'b010 :
3969  3'b000;
3970  end else begin
3971  //Rank0 Dynamic ODT defaults to 120 ohms
3972  tmp_mr2_r[0] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3973  2'b10;
3974  //Rank0 Rtt_NOM
3975  tmp_mr1_r[0] <= #TCQ (RTT_NOM_int == "60") ? 3'b001 :
3976  (RTT_NOM_int == "120") ? 3'b010 :
3977  (RTT_NOM_int == "20") ? 3'b100 :
3978  (RTT_NOM_int == "30") ? 3'b101 :
3979  (RTT_NOM_int == "40") ? 3'b011 :
3980  3'b000;
3981  //Rank1 Dynamic ODT defaults to 120 ohms
3982  tmp_mr2_r[1] <= #TCQ (RTT_WR == "60") ? 2'b01 :
3983  2'b10;
3984  //Rank1 Rtt_NOM
3985  tmp_mr1_r[1] <= #TCQ (RTT_NOM_int == "60") ? 3'b001 :
3986  (RTT_NOM_int == "120") ? 3'b010 :
3987  (RTT_NOM_int == "20") ? 3'b100 :
3988  (RTT_NOM_int == "30") ? 3'b101 :
3989  (RTT_NOM_int == "40") ? 3'b011 :
3990  3'b000;
3991  end
3992  end
3993  endcase
3994  end
3995  end
3996  end
3997  endgenerate
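 // Editor's note (restating the case items above for one configuration):
 // in the dual-slot, one-rank-per-slot DDR3 case (slot presence 4'b10_10),
 // writes assert ODT on both ranks (phy_tmp_odt_r = 4'b0011) while
 // calibration reads of rank 0 assert only the idle rank's ODT
 // (phy_tmp_odt_r = 4'b0010); the DDR2 branch instead asserts the other
 // rank's ODT bit for every access.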
3998 
3999 
4000  // PHY only supports two ranks.
4001  // calib_aux_out[0] is CKE for rank 0 and calib_aux_out[1] is ODT for rank 0
4002  // calib_aux_out[2] is CKE for rank 1 and calib_aux_out[3] is ODT for rank 1
4003 
4004 generate
4005 if(CKE_ODT_AUX == "FALSE") begin
4006  if ((nSLOTS == 1) && (RANKS < 2)) begin
4007  always @(posedge clk)
4008  if (rst) begin
4009  calib_cke <= #TCQ {nCK_PER_CLK{1'b0}} ;
4010  calib_odt <= 2'b00 ;
4011  end else begin
4012  if (cnt_pwron_cke_done_r /*&& ~cnt_pwron_cke_done_r1**/)begin
4013  calib_cke <= #TCQ {nCK_PER_CLK{1'b1}};
4014  end else begin
4015  calib_cke <= #TCQ {nCK_PER_CLK{1'b0}};
4016  end
4017  if ((((RTT_NOM == "DISABLED") && (RTT_WR == "OFF"))/* ||
4018  wrlvl_rank_done || wrlvl_rank_done_r1 ||
4019  (wrlvl_done && !wrlvl_done_r)**/) && (DRAM_TYPE == "DDR3")) begin
4020  calib_odt[0] <= #TCQ 1'b0;
4021  calib_odt[1] <= #TCQ 1'b0;
4022  end else if (((DRAM_TYPE == "DDR3")
4023  ||((RTT_NOM != "DISABLED") && (DRAM_TYPE == "DDR2")))
4024  && (((init_state_r == INIT_WRLVL_WAIT) && wrlvl_odt ) ||
4025  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
4026  (init_state_r == INIT_RDLVL_STG1_WRITE_READ) ||
4027  (init_state_r == INIT_WRCAL_WRITE) ||
4028  (init_state_r == INIT_WRCAL_WRITE_READ) ||
4029  (init_state_r == INIT_OCLKDELAY_WRITE)||
4030  (init_state_r == INIT_OCLKDELAY_WRITE_WAIT))) begin
4031  // Single rank in a single slot
4032  calib_odt[0] <= #TCQ phy_tmp_odt_r[0];
4033  calib_odt[1] <= #TCQ phy_tmp_odt_r[1];
4034  end else begin
4035  calib_odt[0] <= #TCQ 1'b0;
4036  calib_odt[1] <= #TCQ 1'b0;
4037  end
4038  end
4039  end else if ((nSLOTS == 1) && (RANKS <= 2)) begin
4040  always @(posedge clk)
4041  if (rst) begin
4042  calib_cke <= #TCQ {nCK_PER_CLK{1'b0}} ;
4043  calib_odt <= 2'b00 ;
4044  end else begin
4045  if (cnt_pwron_cke_done_r /*&& ~cnt_pwron_cke_done_r1**/)begin
4046  calib_cke <= #TCQ {nCK_PER_CLK{1'b1}};
4047  end else begin
4048  calib_cke <= #TCQ {nCK_PER_CLK{1'b0}};
4049  end
4050  if ((((RTT_NOM == "DISABLED") && (RTT_WR == "OFF"))/* ||
4051  wrlvl_rank_done_r2 ||
4052  (wrlvl_done && !wrlvl_done_r)**/) && (DRAM_TYPE == "DDR3")) begin
4053  calib_odt[0] <= #TCQ 1'b0;
4054  calib_odt[1] <= #TCQ 1'b0;
4055  end else if (((DRAM_TYPE == "DDR3")
4056  ||((RTT_NOM != "DISABLED") && (DRAM_TYPE == "DDR2")))
4057  && (((init_state_r == INIT_WRLVL_WAIT) && wrlvl_odt)||
4058  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
4059  (init_state_r == INIT_RDLVL_STG1_WRITE_READ) ||
4060  (init_state_r == INIT_WRCAL_WRITE) ||
4061  (init_state_r == INIT_WRCAL_WRITE_READ) ||
4062  (init_state_r == INIT_OCLKDELAY_WRITE)||
4063  (init_state_r == INIT_OCLKDELAY_WRITE_WAIT))) begin
4064  // Dual rank in a single slot
4065  calib_odt[0] <= #TCQ phy_tmp_odt_r[0];
4066  calib_odt[1] <= #TCQ phy_tmp_odt_r[1];
4067  end else begin
4068  calib_odt[0] <= #TCQ 1'b0;
4069  calib_odt[1] <= #TCQ 1'b0;
4070  end
4071  end
4072  end else if ((nSLOTS == 2) && (RANKS == 2)) begin
4073  always @(posedge clk)
4074  if (rst)begin
4075  calib_cke <= #TCQ {nCK_PER_CLK{1'b0}} ;
4076  calib_odt <= 2'b00 ;
4077  end else begin
4078  if (cnt_pwron_cke_done_r /*&& ~cnt_pwron_cke_done_r1**/)begin
4079  calib_cke <= #TCQ {nCK_PER_CLK{1'b1}};
4080  end else begin
4081  calib_cke <= #TCQ {nCK_PER_CLK{1'b0}};
4082  end
4083  if (((DRAM_TYPE == "DDR2") && (RTT_NOM == "DISABLED")) ||
4084  ((DRAM_TYPE == "DDR3") &&
4085  (RTT_NOM == "DISABLED") && (RTT_WR == "OFF"))) begin
4086  calib_odt[0] <= #TCQ 1'b0;
4087  calib_odt[1] <= #TCQ 1'b0;
4088  end else if (((init_state_r == INIT_WRLVL_WAIT) && wrlvl_odt) ||
4089  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
4090  (init_state_r == INIT_WRCAL_WRITE) ||
4091  (init_state_r == INIT_OCLKDELAY_WRITE)) begin
4092  // One rank per slot in a dual-slot system
4093  if (nCK_PER_CLK == 2) begin
4094  calib_odt[0]
4095  <= #TCQ (!calib_odt[0]) ? phy_tmp_odt_r[0] : 1'b0;
4096  calib_odt[1]
4097  <= #TCQ (!calib_odt[1]) ? phy_tmp_odt_r[1] : 1'b0;
4098  end else begin
4099  calib_odt[0] <= #TCQ phy_tmp_odt_r[0];
4100  calib_odt[1] <= #TCQ phy_tmp_odt_r[1];
4101  end
4102  // Turn on for idle rank during read if dynamic ODT is enabled in DDR3
4103  end else if(((DRAM_TYPE == "DDR3") && (RTT_WR != "OFF")) &&
4104  ((init_state_r == INIT_PI_PHASELOCK_READS) ||
4105  (init_state_r == INIT_MPR_READ) ||
4106  (init_state_r == INIT_RDLVL_STG1_READ) ||
4107  (init_state_r == INIT_RDLVL_STG2_READ) ||
4108  (init_state_r == INIT_OCLKDELAY_READ) ||
4109  (init_state_r == INIT_WRCAL_READ) ||
4110  (init_state_r == INIT_WRCAL_MULT_READS))) begin
4111  if (nCK_PER_CLK == 2) begin
4112  calib_odt[0]
4113  <= #TCQ (!calib_odt[0]) ? phy_tmp_odt_r[0] : 1'b0;
4114  calib_odt[1]
4115  <= #TCQ (!calib_odt[1]) ? phy_tmp_odt_r[1] : 1'b0;
4116  end else begin
4117  calib_odt[0] <= #TCQ phy_tmp_odt_r[0];
4118  calib_odt[1] <= #TCQ phy_tmp_odt_r[1];
4119  end
4120  // disable well before next command and before disabling write leveling
4121  end else if(cnt_cmd_done_m7_r ||
4122  (init_state_r == INIT_WRLVL_WAIT && ~wrlvl_odt))
4123  calib_odt <= #TCQ 2'b00;
4124  end
4125  end
4126 end else begin//USE AUX OUTPUT for routing CKE and ODT.
4127  if ((nSLOTS == 1) && (RANKS < 2)) begin
4128  always @(posedge clk)
4129  if (rst) begin
4130  calib_aux_out <= #TCQ 4'b0000;
4131  end else begin
4132  if (cnt_pwron_cke_done_r && ~cnt_pwron_cke_done_r1)begin
4133  calib_aux_out[0] <= #TCQ 1'b1;
4134  calib_aux_out[2] <= #TCQ 1'b1;
4135  end else begin
4136  calib_aux_out[0] <= #TCQ 1'b0;
4137  calib_aux_out[2] <= #TCQ 1'b0;
4138  end
4139  if ((((RTT_NOM == "DISABLED") && (RTT_WR == "OFF")) ||
4140  wrlvl_rank_done || wrlvl_rank_done_r1 ||
4141  (wrlvl_done && !wrlvl_done_r)) && (DRAM_TYPE == "DDR3")) begin
4142  calib_aux_out[1] <= #TCQ 1'b0;
4143  calib_aux_out[3] <= #TCQ 1'b0;
4144  end else if (((DRAM_TYPE == "DDR3")
4145  ||((RTT_NOM != "DISABLED") && (DRAM_TYPE == "DDR2")))
4146  && (((init_state_r == INIT_WRLVL_WAIT) && wrlvl_odt) ||
4147  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
4148  (init_state_r == INIT_WRCAL_WRITE) ||
4149  (init_state_r == INIT_OCLKDELAY_WRITE))) begin
4150  // Single rank in a single slot
4151  calib_aux_out[1] <= #TCQ phy_tmp_odt_r[0];
4152  calib_aux_out[3] <= #TCQ phy_tmp_odt_r[1];
4153  end else begin
4154  calib_aux_out[1] <= #TCQ 1'b0;
4155  calib_aux_out[3] <= #TCQ 1'b0;
4156  end
4157  end
4158  end else if ((nSLOTS == 1) && (RANKS <= 2)) begin
4159  always @(posedge clk)
4160  if (rst) begin
4161  calib_aux_out <= #TCQ 4'b0000;
4162  end else begin
4163  if (cnt_pwron_cke_done_r && ~cnt_pwron_cke_done_r1)begin
4164  calib_aux_out[0] <= #TCQ 1'b1;
4165  calib_aux_out[2] <= #TCQ 1'b1;
4166  end else begin
4167  calib_aux_out[0] <= #TCQ 1'b0;
4168  calib_aux_out[2] <= #TCQ 1'b0;
4169  end
4170  if ((((RTT_NOM == "DISABLED") && (RTT_WR == "OFF")) ||
4171  wrlvl_rank_done_r2 ||
4172  (wrlvl_done && !wrlvl_done_r)) && (DRAM_TYPE == "DDR3")) begin
4173  calib_aux_out[1] <= #TCQ 1'b0;
4174  calib_aux_out[3] <= #TCQ 1'b0;
4175  end else if (((DRAM_TYPE == "DDR3")
4176  ||((RTT_NOM != "DISABLED") && (DRAM_TYPE == "DDR2")))
4177  && (((init_state_r == INIT_WRLVL_WAIT) && wrlvl_odt) ||
4178  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
4179  (init_state_r == INIT_WRCAL_WRITE) ||
4180  (init_state_r == INIT_OCLKDELAY_WRITE))) begin
4181  // Dual rank in a single slot
4182  calib_aux_out[1] <= #TCQ phy_tmp_odt_r[0];
4183  calib_aux_out[3] <= #TCQ phy_tmp_odt_r[1];
4184  end else begin
4185  calib_aux_out[1] <= #TCQ 1'b0;
4186  calib_aux_out[3] <= #TCQ 1'b0;
4187  end
4188  end
4189  end else if ((nSLOTS == 2) && (RANKS == 2)) begin
4190  always @(posedge clk)
4191  if (rst)
4192  calib_aux_out <= #TCQ 4'b0000;
4193  else begin
4194  if (cnt_pwron_cke_done_r && ~cnt_pwron_cke_done_r1)begin
4195  calib_aux_out[0] <= #TCQ 1'b1;
4196  calib_aux_out[2] <= #TCQ 1'b1;
4197  end else begin
4198  calib_aux_out[0] <= #TCQ 1'b0;
4199  calib_aux_out[2] <= #TCQ 1'b0;
4200  end
4201  if ((((RTT_NOM == "DISABLED") && (RTT_WR == "OFF")) ||
4202  wrlvl_rank_done_r2 ||
4203  (wrlvl_done && !wrlvl_done_r)) && (DRAM_TYPE == "DDR3")) begin
4204  calib_aux_out[1] <= #TCQ 1'b0;
4205  calib_aux_out[3] <= #TCQ 1'b0;
4206  end else if (((DRAM_TYPE == "DDR3")
4207  ||((RTT_NOM != "DISABLED") && (DRAM_TYPE == "DDR2")))
4208  && (((init_state_r == INIT_WRLVL_WAIT) && wrlvl_odt) ||
4209  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
4210  (init_state_r == INIT_WRCAL_WRITE) ||
4211  (init_state_r == INIT_OCLKDELAY_WRITE))) begin
4212  // One rank per slot in a dual-slot system
4213  if (nCK_PER_CLK == 2) begin
4214  calib_aux_out[1]
4215  <= #TCQ (!calib_aux_out[1]) ? phy_tmp_odt_r[0] : 1'b0;
4216  calib_aux_out[3]
4217  <= #TCQ (!calib_aux_out[3]) ? phy_tmp_odt_r[1] : 1'b0;
4218  end else begin
4219  calib_aux_out[1] <= #TCQ phy_tmp_odt_r[0];
4220  calib_aux_out[3] <= #TCQ phy_tmp_odt_r[1];
4221  end
4222  end else begin
4223  calib_aux_out[1] <= #TCQ 1'b0;
4224  calib_aux_out[3] <= #TCQ 1'b0;
4225  end
4226  end
4227  end
4228 end
4229 endgenerate
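 // Editor's note: calib_aux_out[0]/[2] (CKE for ranks 0/1) pulse high for the
 // cycle in which cnt_pwron_cke_done_r first asserts, while calib_aux_out[1]/[3]
 // (ODT for ranks 0/1) follow phy_tmp_odt_r during write-leveling waits and
 // calibration writes. The "(!calib_odt[n]) ? ... : 1'b0" terms in the 2:1
 // branches alternate the request on successive controller cycles, which we
 // read as limiting how long ODT stays asserted per request (an editorial
 // interpretation, not stated in the source).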
4230 
4231  //*****************************************************************
4232  // memory address during init
4233  //*****************************************************************
4234 
4235  always @(posedge clk)
4236  phy_data_full_r <= #TCQ phy_data_full;
4237 
4238  always @(burst_addr_r or cnt_init_mr_r or chip_cnt_r or wrcal_wr_cnt
4239  or ddr2_refresh_flag_r or init_state_r or load_mr0 or phy_data_full_r
4240  or load_mr1 or load_mr2 or load_mr3 or new_burst_r or phy_address
4241  or mr1_r[0][0] or mr1_r[0][1] or mr1_r[0][2]
4242  or mr1_r[1][0] or mr1_r[1][1] or mr1_r[1][2]
4243  or mr1_r[2][0] or mr1_r[2][1] or mr1_r[2][2]
4244  or mr1_r[3][0] or mr1_r[3][1] or mr1_r[3][2]
4245  or mr2_r[chip_cnt_r] or reg_ctrl_cnt_r or stg1_wr_rd_cnt or oclk_wr_cnt
4246  or rdlvl_stg1_done or prbs_rdlvl_done or pi_dqs_found_done or rdlvl_wr_rd)begin
4247  // Bus 0 for address/bank never used
4248  address_w = 'b0;
4249  bank_w = 'b0;
4250  if ((init_state_r == INIT_PRECHARGE) ||
4251  (init_state_r == INIT_ZQCL) ||
4252  (init_state_r == INIT_DDR2_PRECHARGE)) begin
4253  // Set A10=1 for ZQ long calibration or Precharge All
4254  address_w = 'b0;
4255  address_w[10] = 1'b1;
4256  bank_w = 'b0;
4257  end else if (init_state_r == INIT_WRLVL_START) begin
4258  // Enable wrlvl in MR1
4259  bank_w[1:0] = 2'b01;
4260  address_w = load_mr1[ROW_WIDTH-1:0];
4261  address_w[2] = mr1_r[chip_cnt_r][0];
4262  address_w[6] = mr1_r[chip_cnt_r][1];
4263  address_w[9] = mr1_r[chip_cnt_r][2];
4264  address_w[7] = 1'b1;
4265  end else if (init_state_r == INIT_WRLVL_LOAD_MR) begin
4266  // Finished with write leveling, disable wrlvl in MR1
4267  // For single rank disable Rtt_Nom
4268  bank_w[1:0] = 2'b01;
4269  address_w = load_mr1[ROW_WIDTH-1:0];
4270  address_w[2] = mr1_r[chip_cnt_r][0];
4271  address_w[6] = mr1_r[chip_cnt_r][1];
4272  address_w[9] = mr1_r[chip_cnt_r][2];
4273  end else if (init_state_r == INIT_WRLVL_LOAD_MR2) begin
4274  // Set RTT_WR in MR2 after write leveling disabled
4275  bank_w[1:0] = 2'b10;
4276  address_w = load_mr2[ROW_WIDTH-1:0];
4277  address_w[10:9] = mr2_r[chip_cnt_r];
4278  end else if (init_state_r == INIT_MPR_READ) begin
4279  address_w = 'b0;
4280  bank_w = 'b0;
4281  end else if (init_state_r == INIT_MPR_RDEN) begin
4282  // Enable MPR read with LMR3 and A2=1
4283  bank_w[BANK_WIDTH-1:0] = 'd3;
4284  address_w = {ROW_WIDTH{1'b0}};
4285  address_w[2] = 1'b1;
4286  end else if (init_state_r == INIT_MPR_DISABLE) begin
4287  // Disable MPR read with LMR3 and A2=0
4288  bank_w[BANK_WIDTH-1:0] = 'd3;
4289  address_w = {ROW_WIDTH{1'b0}};
4290  end else if ((init_state_r == INIT_REG_WRITE)&
4291  (DRAM_TYPE == "DDR3"))begin
4292  // bank_w is assigned a 3-bit value, but some DDR2 configurations
4293  // have only two bank bits, so the condition is qualified
4294  // with DDR3
4295  bank_w = 'b0;
4296  address_w = 'b0;
4297  case (reg_ctrl_cnt_r)
4298  REG_RC0[2:0]: address_w[4:0] = REG_RC0[4:0];
4299  REG_RC1[2:0]:begin
4300  address_w[4:0] = REG_RC1[4:0];
4301  bank_w = REG_RC1[7:5];
4302  end
4303  REG_RC2[2:0]: address_w[4:0] = REG_RC2[4:0];
4304  REG_RC3[2:0]: address_w[4:0] = REG_RC3[4:0];
4305  REG_RC4[2:0]: address_w[4:0] = REG_RC4[4:0];
4306  REG_RC5[2:0]: address_w[4:0] = REG_RC5[4:0];
4307  endcase
4308  end else if (init_state_r == INIT_LOAD_MR) begin
4309  // If loading mode register, look at cnt_init_mr to determine
4310  // which MR is currently being programmed
4311  address_w = 'b0;
4312  bank_w = 'b0;
4313  if(DRAM_TYPE == "DDR3")begin
4314  if(rdlvl_stg1_done && prbs_rdlvl_done && pi_dqs_found_done)begin
4315  // At the end of calibration, program the correct
4316  // burst length
4317  if (TEST_AL == "0") begin
4318  bank_w[1:0] = 2'b00;
4319  address_w = load_mr0[ROW_WIDTH-1:0];
4320  address_w[8]= 1'b0; //Don't reset DLL
4321  end else begin
4322  // programming correct AL value
4323  bank_w[1:0] = 2'b01;
4324  address_w = load_mr1[ROW_WIDTH-1:0];
4325  if (TEST_AL == "CL-1")
4326  address_w[4:3]= 2'b01; // AL="CL-1"
4327  else
4328  address_w[4:3]= 2'b10; // AL="CL-2"
4329  end
4330  end else begin
4331  case (cnt_init_mr_r)
4332  INIT_CNT_MR2: begin
4333  bank_w[1:0] = 2'b10;
4334  address_w = load_mr2[ROW_WIDTH-1:0];
4335  address_w[10:9] = mr2_r[chip_cnt_r];
4336  end
4337  INIT_CNT_MR3: begin
4338  bank_w[1:0] = 2'b11;
4339  address_w = load_mr3[ROW_WIDTH-1:0];
4340  end
4341  INIT_CNT_MR1: begin
4342  bank_w[1:0] = 2'b01;
4343  address_w = load_mr1[ROW_WIDTH-1:0];
4344  address_w[2] = mr1_r[chip_cnt_r][0];
4345  address_w[6] = mr1_r[chip_cnt_r][1];
4346  address_w[9] = mr1_r[chip_cnt_r][2];
4347  end
4348  INIT_CNT_MR0: begin
4349  bank_w[1:0] = 2'b00;
4350  address_w = load_mr0[ROW_WIDTH-1:0];
4351  // fixing it to BL8 for calibration
4352  address_w[1:0] = 2'b00;
4353  end
4354  default: begin
4355  bank_w = {BANK_WIDTH{1'bx}};
4356  address_w = {ROW_WIDTH{1'bx}};
4357  end
4358  endcase
4359  end
4360  end else begin // DDR2
4361  case (cnt_init_mr_r)
4362  INIT_CNT_MR2: begin
4363  if(~ddr2_refresh_flag_r)begin
4364  bank_w[1:0] = 2'b10;
4365  address_w = load_mr2[ROW_WIDTH-1:0];
4366  end else begin // second set of lm commands
4367  bank_w[1:0] = 2'b00;
4368  address_w = load_mr0[ROW_WIDTH-1:0];
4369  address_w[8]= 1'b0;
4370  //MRS command without resetting DLL
4371  end
4372  end
4373  INIT_CNT_MR3: begin
4374  if(~ddr2_refresh_flag_r)begin
4375  bank_w[1:0] = 2'b11;
4376  address_w = load_mr3[ROW_WIDTH-1:0];
4377  end else begin // second set of lm commands
4378  bank_w[1:0] = 2'b00;
4379  address_w = load_mr0[ROW_WIDTH-1:0];
4380  address_w[8]= 1'b0;
4381  //MRS command without resetting DLL. Repeated again
4382  // because there is an extra state.
4383  end
4384  end
4385  INIT_CNT_MR1: begin
4386  bank_w[1:0] = 2'b01;
4387  if(~ddr2_refresh_flag_r)begin
4388  address_w = load_mr1[ROW_WIDTH-1:0];
4389  end else begin // second set of lm commands
4390  address_w = load_mr1[ROW_WIDTH-1:0];
4391  address_w[9:7] = 3'b111;
4392  //OCD default state
4393  end
4394  end
4395  INIT_CNT_MR0: begin
4396  if(~ddr2_refresh_flag_r)begin
4397  bank_w[1:0] = 2'b00;
4398  address_w = load_mr0[ROW_WIDTH-1:0];
4399  end else begin // second set of lm commands
4400  bank_w[1:0] = 2'b01;
4401  address_w = load_mr1[ROW_WIDTH-1:0];
4402  if((chip_cnt_r == 2'd1) || (chip_cnt_r == 2'd3))begin
4403  // always disable odt for rank 1 and rank 3 as per SPEC
4404  address_w[2] = 'b0;
4405  address_w[6] = 'b0;
4406  end
4407  //OCD exit
4408  end
4409  end
4410  default: begin
4411  bank_w = {BANK_WIDTH{1'bx}};
4412  address_w = {ROW_WIDTH{1'bx}};
4413  end
4414  endcase
4415  end
4416  end else if ((init_state_r == INIT_PI_PHASELOCK_READS) ||
4417  (init_state_r == INIT_RDLVL_STG1_WRITE) ||
4418  (init_state_r == INIT_RDLVL_STG1_READ)) begin
4419  // Writing and reading PRBS pattern for read leveling stage 1
4420  // Need to support burst length 4 or 8. PRBS pattern will be
4421  // written to entire row and read back from the same row repeatedly
4422  bank_w = CALIB_BA_ADD[BANK_WIDTH-1:0];
4423  address_w[ROW_WIDTH-1:COL_WIDTH] = {ROW_WIDTH-COL_WIDTH{1'b0}};
4424  if (((stg1_wr_rd_cnt == NUM_STG1_WR_RD) && ~rdlvl_stg1_done) || (stg1_wr_rd_cnt == 'd128))
4425  address_w[COL_WIDTH-1:0] = {COL_WIDTH{1'b0}};
4426  else if (phy_data_full_r || (!new_burst_r))
4427  address_w[COL_WIDTH-1:0] = phy_address[COL_WIDTH-1:0];
4428  else if ((stg1_wr_rd_cnt >= 9'd0) && new_burst_r && ~phy_data_full_r)
4429  address_w[COL_WIDTH-1:0] = phy_address[COL_WIDTH-1:0] + ADDR_INC;
4430  end else if ((init_state_r == INIT_OCLKDELAY_WRITE) ||
4431  (init_state_r == INIT_OCLKDELAY_READ)) begin
4432  bank_w = CALIB_BA_ADD[BANK_WIDTH-1:0];
4433  address_w[ROW_WIDTH-1:COL_WIDTH] = {ROW_WIDTH-COL_WIDTH{1'b0}};
4434  if (oclk_wr_cnt == NUM_STG1_WR_RD)
4435  address_w[COL_WIDTH-1:0] = {COL_WIDTH{1'b0}};
4436  else if (phy_data_full_r || (!new_burst_r))
4437  address_w[COL_WIDTH-1:0] = phy_address[COL_WIDTH-1:0];
4438  else if ((oclk_wr_cnt >= 4'd0) && new_burst_r && ~phy_data_full_r)
4439  address_w[COL_WIDTH-1:0] = phy_address[COL_WIDTH-1:0] + ADDR_INC;
4440  end else if ((init_state_r == INIT_WRCAL_WRITE) ||
4441  (init_state_r == INIT_WRCAL_READ)) begin
4442  bank_w = CALIB_BA_ADD[BANK_WIDTH-1:0];
4443  address_w[ROW_WIDTH-1:COL_WIDTH] = {ROW_WIDTH-COL_WIDTH{1'b0}};
4444  if (wrcal_wr_cnt == NUM_STG1_WR_RD)
4445  address_w[COL_WIDTH-1:0] = {COL_WIDTH{1'b0}};
4446  else if (phy_data_full_r || (!new_burst_r))
4447  address_w[COL_WIDTH-1:0] = phy_address[COL_WIDTH-1:0];
4448  else if ((wrcal_wr_cnt >= 4'd0) && new_burst_r && ~phy_data_full_r)
4449  address_w[COL_WIDTH-1:0] = phy_address[COL_WIDTH-1:0] + ADDR_INC;
4450  end else if ((init_state_r == INIT_WRCAL_MULT_READS) ||
4451  (init_state_r == INIT_RDLVL_STG2_READ)) begin
4452  // when writing or reading back training pattern for read leveling stage2
4453  // need to support burst length of 4 or 8. This may mean issuing
4454  // multiple commands to cover the entire range of addresses accessed
4455  // during read leveling.
4456  // Hard coding A[12] to 1 so that it will always be burst length of 8
4457  // for DDR3. Does not have any effect on DDR2.
4458  bank_w = CALIB_BA_ADD[BANK_WIDTH-1:0];
4459  address_w[ROW_WIDTH-1:COL_WIDTH] = {ROW_WIDTH-COL_WIDTH{1'b0}};
4460  address_w[COL_WIDTH-1:0] =
4461  {CALIB_COL_ADD[COL_WIDTH-1:3],burst_addr_r, 3'b000};
4462  address_w[12] = 1'b1;
4463  end else if ((init_state_r == INIT_RDLVL_ACT) ||
4464  (init_state_r == INIT_WRCAL_ACT) ||
4465  (init_state_r == INIT_OCLKDELAY_ACT)) begin
4466 
4467  bank_w = CALIB_BA_ADD[BANK_WIDTH-1:0];
4468  address_w = CALIB_ROW_ADD[ROW_WIDTH-1:0];
4469  end else begin
4470  bank_w = {BANK_WIDTH{1'bx}};
4471  address_w = {ROW_WIDTH{1'bx}};
4472  end
4473  end
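 // Editor's worked example (address bits taken from the branches above):
 // during INIT_WRLVL_START the block selects MR1 (bank_w[1:0] = 2'b01),
 // starts from load_mr1, forces A7 = 1 to enable write leveling and
 // overrides {A9,A6,A2} with mr1_r[chip_cnt_r] so each rank gets its own
 // Rtt_Nom; INIT_WRLVL_LOAD_MR repeats this with A7 left at its load_mr1
 // value (write leveling disabled).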
4474 
4475  // Register the address/bank before sending them out
4476  generate
4477  genvar r,s;
4478  if ((DRAM_TYPE != "DDR3") || (CA_MIRROR != "ON")) begin: gen_no_mirror
4479  for (r = 0; r < nCK_PER_CLK; r = r + 1) begin: div_clk_loop
4480  always @(posedge clk) begin
4481  phy_address[(r*ROW_WIDTH) +: ROW_WIDTH] <= #TCQ address_w;
4482  phy_bank[(r*BANK_WIDTH) +: BANK_WIDTH] <= #TCQ bank_w;
4483  end
4484  end
4485  end else begin: gen_mirror
4486  // Control/addressing mirroring (optional for DDR3 dual rank DIMMs)
4487  // Mirror for the 2nd rank only. Logic needs to be enhanced to account
4488  // for multiple slots, currently only supports one slot, 2-rank config
4489 
4490  for (r = 0; r < nCK_PER_CLK; r = r + 1) begin: gen_ba_div_clk_loop
4491  for (s = 0; s < BANK_WIDTH; s = s + 1) begin: gen_ba
4492 
4493  always @(posedge clk)
4494  if (chip_cnt_r == 2'b00) begin
4495  phy_bank[(r*BANK_WIDTH) + s] <= #TCQ bank_w[s];
4496  end else begin
4497  phy_bank[(r*BANK_WIDTH) + s] <= #TCQ bank_w[(s == 0) ? 1 : ((s == 1) ? 0 : s)];
4498  end
4499 
4500  end
4501  end
4502 
4503  for (r = 0; r < nCK_PER_CLK; r = r + 1) begin: gen_addr_div_clk_loop
4504  for (s = 0; s < ROW_WIDTH; s = s + 1) begin: gen_addr
4505  always @(posedge clk)
4506  if (chip_cnt_r == 2'b00) begin
4507  phy_address[(r*ROW_WIDTH) + s] <= #TCQ address_w[s];
4508  end else begin
4509  phy_address[(r*ROW_WIDTH) + s] <= #TCQ address_w[
4510  (s == 3) ? 4 :
4511  ((s == 4) ? 3 :
4512  ((s == 5) ? 6 :
4513  ((s == 6) ? 5 :
4514  ((s == 7) ? 8 :
4515  ((s == 8) ? 7 : s)))))];
4516  end
4517  end
4518  end
4519 
4520  end
4521  endgenerate
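 // Editor's summary of the mirroring above (DDR3 address mirroring for the
 // second rank of a dual-rank DIMM): BA0 and BA1 are swapped, as are the
 // address pairs A3/A4, A5/A6 and A7/A8, while all other bits pass through
 // unchanged; rank 0 (chip_cnt_r == 2'b00) is never mirrored.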
4522 
4523 endmodule