@@ -5,7 +5,7 @@
  *
  * GPL LICENSE SUMMARY
  *
- * Copyright(c) 2018 Intel Corporation
+ * Copyright(c) 2018 - 2021 Intel Corporation
  *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of version 2 of the GNU General Public License as
@@ -18,7 +18,7 @@
  *
  * BSD LICENSE
  *
- * Copyright(c) 2018 Intel Corporation
+ * Copyright(c) 2018 - 2020 Intel Corporation
  * All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
@@ -55,6 +55,69 @@
 #include "internal.h"
 #include "iwl-prph.h"
 
+static void
+iwl_pcie_ctxt_info_dbg_enable(struct iwl_trans *trans,
+			      struct iwl_prph_scratch_hwm_cfg *dbg_cfg,
+			      u32 *control_flags)
+{
+	enum iwl_fw_ini_allocation_id alloc_id = IWL_FW_INI_ALLOCATION_ID_DBGC1;
+	struct iwl_fw_ini_allocation_tlv *fw_mon_cfg;
+	u32 dbg_flags = 0;
+
+	if (!iwl_trans_dbg_ini_valid(trans)) {
+		struct iwl_dram_data *fw_mon = &trans->dbg.fw_mon;
+
+		iwl_pcie_alloc_fw_monitor(trans, 0);
+
+		if (fw_mon->size) {
+			dbg_flags |= IWL_PRPH_SCRATCH_EDBG_DEST_DRAM;
+
+			IWL_DEBUG_FW(trans,
+				     "WRT: Applying DRAM buffer destination\n");
+
+			dbg_cfg->hwm_base_addr = cpu_to_le64(fw_mon->physical);
+			dbg_cfg->hwm_size = cpu_to_le32(fw_mon->size);
+		}
+
+		goto out;
+	}
+
+	fw_mon_cfg = &trans->dbg.fw_mon_cfg[alloc_id];
+
+	switch (le32_to_cpu(fw_mon_cfg->buf_location)) {
+	case IWL_FW_INI_LOCATION_SRAM_PATH:
+		dbg_flags |= IWL_PRPH_SCRATCH_EDBG_DEST_INTERNAL;
+		IWL_DEBUG_FW(trans,
+			     "WRT: Applying SMEM buffer destination\n");
+		break;
+
+	case IWL_FW_INI_LOCATION_NPK_PATH:
+		dbg_flags |= IWL_PRPH_SCRATCH_EDBG_DEST_TB22DTF;
+		IWL_DEBUG_FW(trans,
+			     "WRT: Applying NPK buffer destination\n");
+		break;
+
+	case IWL_FW_INI_LOCATION_DRAM_PATH:
+		if (trans->dbg.fw_mon_ini[alloc_id].num_frags) {
+			struct iwl_dram_data *frag =
+				&trans->dbg.fw_mon_ini[alloc_id].frags[0];
+			dbg_flags |= IWL_PRPH_SCRATCH_EDBG_DEST_DRAM;
+			dbg_cfg->hwm_base_addr = cpu_to_le64(frag->physical);
+			dbg_cfg->hwm_size = cpu_to_le32(frag->size);
+			IWL_DEBUG_FW(trans,
+				     "WRT: Applying DRAM destination (alloc_id=%u, num_frags=%u)\n",
+				     alloc_id,
+				     trans->dbg.fw_mon_ini[alloc_id].num_frags);
+		}
+		break;
+	default:
+		IWL_ERR(trans, "WRT: Invalid buffer destination\n");
+	}
+out:
+	if (dbg_flags)
+		*control_flags |= IWL_PRPH_SCRATCH_EARLY_DEBUG_EN | dbg_flags;
+}
+
 int iwl_pcie_ctxt_info_gen3_init(struct iwl_trans *trans,
 				 const struct fw_img *fw)
 {
@@ -65,6 +128,28 @@
 	struct iwl_prph_info *prph_info;
 	u32 control_flags = 0;
 	int ret;
+	int cmdq_size = max_t(u32, IWL_CMD_QUEUE_SIZE,
+			      trans->cfg->min_txq_size);
+
+	switch (trans_pcie->rx_buf_size) {
+	case IWL_AMSDU_DEF:
+		return -EINVAL;
+	case IWL_AMSDU_2K:
+		break;
+	case IWL_AMSDU_4K:
+		control_flags |= IWL_PRPH_SCRATCH_RB_SIZE_4K;
+		break;
+	case IWL_AMSDU_8K:
+		control_flags |= IWL_PRPH_SCRATCH_RB_SIZE_4K;
+		/* if firmware supports the ext size, tell it */
+		control_flags |= IWL_PRPH_SCRATCH_RB_SIZE_EXT_8K;
+		break;
+	case IWL_AMSDU_12K:
+		control_flags |= IWL_PRPH_SCRATCH_RB_SIZE_4K;
+		/* if firmware supports the ext size, tell it */
+		control_flags |= IWL_PRPH_SCRATCH_RB_SIZE_EXT_12K;
+		break;
+	}
 
 	/* Allocate prph scratch */
 	prph_scratch = dma_alloc_coherent(trans->dev, sizeof(*prph_scratch),
@@ -80,24 +165,16 @@
 		cpu_to_le16((u16)iwl_read32(trans, CSR_HW_REV));
 	prph_sc_ctrl->version.size = cpu_to_le16(sizeof(*prph_scratch) / 4);
 
-	control_flags = IWL_PRPH_SCRATCH_RB_SIZE_4K |
-			IWL_PRPH_SCRATCH_MTR_MODE |
-			(IWL_PRPH_MTR_FORMAT_256B &
-			 IWL_PRPH_SCRATCH_MTR_FORMAT) |
-			IWL_PRPH_SCRATCH_EARLY_DEBUG_EN |
-			IWL_PRPH_SCRATCH_EDBG_DEST_DRAM;
-	prph_sc_ctrl->control.control_flags = cpu_to_le32(control_flags);
+	control_flags |= IWL_PRPH_SCRATCH_MTR_MODE;
+	control_flags |= IWL_PRPH_MTR_FORMAT_256B & IWL_PRPH_SCRATCH_MTR_FORMAT;
 
 	/* initialize RX default queue */
 	prph_sc_ctrl->rbd_cfg.free_rbd_addr =
 		cpu_to_le64(trans_pcie->rxq->bd_dma);
 
-	/* Configure debug, for integration */
-	iwl_pcie_alloc_fw_monitor(trans, 0);
-	prph_sc_ctrl->hwm_cfg.hwm_base_addr =
-		cpu_to_le64(trans_pcie->fw_mon_phys);
-	prph_sc_ctrl->hwm_cfg.hwm_size =
-		cpu_to_le32(trans_pcie->fw_mon_size);
+	iwl_pcie_ctxt_info_dbg_enable(trans, &prph_sc_ctrl->hwm_cfg,
+				      &control_flags);
+	prph_sc_ctrl->control.control_flags = cpu_to_le32(control_flags);
 
 	/* allocate ucode sections in dram and set addresses */
 	ret = iwl_pcie_init_fw_sec(trans, fw, &prph_scratch->dram);
@@ -143,13 +220,13 @@
 	ctxt_info_gen3->tr_idx_arr_size =
 		cpu_to_le16(IWL_NUM_OF_TRANSFER_RINGS);
 	ctxt_info_gen3->mtr_base_addr =
-		cpu_to_le64(trans_pcie->txq[trans_pcie->cmd_queue]->dma_addr);
+		cpu_to_le64(trans->txqs.txq[trans->txqs.cmd.q_id]->dma_addr);
 	ctxt_info_gen3->mcr_base_addr =
 		cpu_to_le64(trans_pcie->rxq->used_bd_dma);
 	ctxt_info_gen3->mtr_size =
-		cpu_to_le16(TFD_QUEUE_CB_SIZE(TFD_CMD_SLOTS));
+		cpu_to_le16(TFD_QUEUE_CB_SIZE(cmdq_size));
 	ctxt_info_gen3->mcr_size =
-		cpu_to_le16(RX_QUEUE_CB_SIZE(MQ_RX_TABLE_SIZE));
+		cpu_to_le16(RX_QUEUE_CB_SIZE(trans->cfg->num_rbds));
 
 	trans_pcie->ctxt_info_gen3 = ctxt_info_gen3;
 	trans_pcie->prph_info = prph_info;
@@ -174,8 +251,9 @@
 	iwl_write64(trans, CSR_IML_DATA_ADDR,
 		    trans_pcie->iml_dma_addr);
 	iwl_write32(trans, CSR_IML_SIZE_ADDR, trans->iml_len);
-	iwl_set_bit(trans, CSR_CTXT_INFO_BOOT_CTRL, CSR_AUTO_FUNC_BOOT_ENA);
-	iwl_set_bit(trans, CSR_GP_CNTRL, CSR_AUTO_FUNC_INIT);
+
+	iwl_set_bit(trans, CSR_CTXT_INFO_BOOT_CTRL,
+		    CSR_AUTO_FUNC_BOOT_ENA);
 
 	return 0;
 
@@ -231,3 +309,36 @@
 	trans_pcie->prph_info_dma_addr = 0;
 	trans_pcie->prph_info = NULL;
 }
+
+int iwl_trans_pcie_ctx_info_gen3_set_pnvm(struct iwl_trans *trans,
+					  const void *data, u32 len)
+{
+	struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
+	struct iwl_prph_scratch_ctrl_cfg *prph_sc_ctrl =
+		&trans_pcie->prph_scratch->ctrl_cfg;
+	int ret;
+
+	if (trans->trans_cfg->device_family < IWL_DEVICE_FAMILY_AX210)
+		return 0;
+
+	/* only allocate the DRAM if not allocated yet */
+	if (!trans->pnvm_loaded) {
+		if (WARN_ON(prph_sc_ctrl->pnvm_cfg.pnvm_size))
+			return -EBUSY;
+
+		ret = iwl_pcie_ctxt_info_alloc_dma(trans, data, len,
+						   &trans_pcie->pnvm_dram);
+		if (ret < 0) {
+			IWL_DEBUG_FW(trans, "Failed to allocate PNVM DMA %d.\n",
+				     ret);
+			return ret;
+		}
+	}
+
+	prph_sc_ctrl->pnvm_cfg.pnvm_base_addr =
+		cpu_to_le64(trans_pcie->pnvm_dram.physical);
+	prph_sc_ctrl->pnvm_cfg.pnvm_size =
+		cpu_to_le32(trans_pcie->pnvm_dram.size);
+
+	return 0;
+}
---|