*/
u32 cik_pciep_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
WREG32(PCIE_INDEX, reg);
(void)RREG32(PCIE_INDEX);
r = RREG32(PCIE_DATA);
+ spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
return r;
}
void cik_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
WREG32(PCIE_INDEX, reg);
(void)RREG32(PCIE_INDEX);
WREG32(PCIE_DATA, v);
(void)RREG32(PCIE_DATA);
+ spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
}
static const u32 spectre_rlc_save_restore_register_list[] =
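Every accessor touched here follows the same indexed (index/data pair) shape: a write to an INDEX register selects a banked internal register, and the following DATA access reads or writes it. If two contexts interleave between the index write and the data access, one of them hits the wrong register; that interleaving is the race each new per-bank spinlock closes. A minimal sketch of the shared shape, assuming a generic index/data pair (the patch adds no such helper; each block keeps its own accessor, as below):

/* Illustration only -- not part of the patch. */
static u32 radeon_indexed_rreg(struct radeon_device *rdev, spinlock_t *lock,
                               u32 index_reg, u32 data_reg, u32 reg)
{
        unsigned long flags;
        u32 r;

        /* keep the index write and the data read atomic with respect to
         * other CPUs and to interrupt context on this CPU
         */
        spin_lock_irqsave(lock, flags);
        WREG32(index_reg, reg);         /* select the banked register */
        r = RREG32(data_reg);           /* read it while the selection still holds */
        spin_unlock_irqrestore(lock, flags);
        return r;
}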
static u32 dce6_endpoint_rreg(struct radeon_device *rdev,
u32 block_offset, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->end_idx_lock, flags);
WREG32(AZ_F0_CODEC_ENDPOINT_INDEX + block_offset, reg);
r = RREG32(AZ_F0_CODEC_ENDPOINT_DATA + block_offset);
+ spin_unlock_irqrestore(&rdev->end_idx_lock, flags);
+
return r;
}
static void dce6_endpoint_wreg(struct radeon_device *rdev,
u32 block_offset, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->end_idx_lock, flags);
if (ASIC_IS_DCE8(rdev))
WREG32(AZ_F0_CODEC_ENDPOINT_INDEX + block_offset, reg);
else
WREG32(AZ_F0_CODEC_ENDPOINT_INDEX + block_offset,
AZ_ENDPOINT_REG_WRITE_EN | AZ_ENDPOINT_REG_INDEX(reg));
WREG32(AZ_F0_CODEC_ENDPOINT_DATA + block_offset, v);
+ spin_unlock_irqrestore(&rdev->end_idx_lock, flags);
}
#define RREG32_ENDPOINT(block, reg) dce6_endpoint_rreg(rdev, (block), (reg))
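One detail worth noting in dce6_endpoint_wreg() above: on DCE8 the endpoint index register takes the raw offset, while older parts need AZ_ENDPOINT_REG_WRITE_EN or'ed into the index alongside AZ_ENDPOINT_REG_INDEX(reg). Both variants now sit inside the same end_idx_lock section, so the write-enable/index/data sequence can no longer be split by a concurrent audio register access.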
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg)
{
+ unsigned long flags;
uint32_t data;
+ spin_lock_irqsave(&rdev->pll_idx_lock, flags);
WREG8(RADEON_CLOCK_CNTL_INDEX, reg & 0x3f);
r100_pll_errata_after_index(rdev);
data = RREG32(RADEON_CLOCK_CNTL_DATA);
r100_pll_errata_after_data(rdev);
+ spin_unlock_irqrestore(&rdev->pll_idx_lock, flags);
return data;
}
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->pll_idx_lock, flags);
WREG8(RADEON_CLOCK_CNTL_INDEX, ((reg & 0x3f) | RADEON_PLL_WR_EN));
r100_pll_errata_after_index(rdev);
WREG32(RADEON_CLOCK_CNTL_DATA, v);
r100_pll_errata_after_data(rdev);
+ spin_unlock_irqrestore(&rdev->pll_idx_lock, flags);
}
static void r100_set_safe_registers(struct radeon_device *rdev)
u32 r420_mc_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(R_0001F8_MC_IND_INDEX, S_0001F8_MC_IND_ADDR(reg));
r = RREG32(R_0001FC_MC_IND_DATA);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
return r;
}
void r420_mc_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(R_0001F8_MC_IND_INDEX, S_0001F8_MC_IND_ADDR(reg) |
S_0001F8_MC_IND_WR_EN(1));
WREG32(R_0001FC_MC_IND_DATA, v);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}
static void r420_debugfs(struct radeon_device *rdev)
uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
+ unsigned long flags;
uint32_t r;
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg));
r = RREG32(R_0028FC_MC_DATA);
WREG32(R_0028F8_MC_INDEX, ~C_0028F8_MC_IND_ADDR);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
return r;
}
void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg) |
S_0028F8_MC_IND_WR_EN(1));
WREG32(R_0028FC_MC_DATA, v);
WREG32(R_0028F8_MC_INDEX, 0x7F);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}
static void r600_mc_program(struct radeon_device *rdev)
*/
u32 r600_pciep_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
(void)RREG32(PCIE_PORT_INDEX);
r = RREG32(PCIE_PORT_DATA);
+ spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
return r;
}
void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
(void)RREG32(PCIE_PORT_INDEX);
WREG32(PCIE_PORT_DATA, (v));
(void)RREG32(PCIE_PORT_DATA);
+ spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
}
/*
spinlock_t mmio_idx_lock;
/* protects concurrent SMC based register access */
spinlock_t smc_idx_lock;
+ /* protects concurrent PLL register access */
+ spinlock_t pll_idx_lock;
+ /* protects concurrent MC register access */
+ spinlock_t mc_idx_lock;
+ /* protects concurrent PCIE register access */
+ spinlock_t pcie_idx_lock;
+ /* protects concurrent PCIE_PORT register access */
+ spinlock_t pciep_idx_lock;
+ /* protects concurrent PIF register access */
+ spinlock_t pif_idx_lock;
+ /* protects concurrent CG register access */
+ spinlock_t cg_idx_lock;
+ /* protects concurrent UVD register access */
+ spinlock_t uvd_idx_lock;
+ /* protects concurrent RCU register access */
+ spinlock_t rcu_idx_lock;
+ /* protects concurrent DIDT register access */
+ spinlock_t didt_idx_lock;
+ /* protects concurrent ENDPOINT (audio) register access */
+ spinlock_t end_idx_lock;
void __iomem *rmmio;
radeon_rreg_t mc_rreg;
radeon_wreg_t mc_wreg;
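Callers reach these accessors through the existing wrapper macros (RREG32_PCIE, RREG32_PCIE_PORT, RREG32_MC via the mc_rreg/mc_wreg pointers above, the RREG32_ENDPOINT macro shown earlier, and so on), so the new locking is invisible at the call sites. Only the individual index/data access becomes atomic, though; a read-modify-write still spans two separately locked sections. A hedged caller-side sketch, with the register and bit chosen purely for illustration (modeled on existing users such as r600_pcie_gen2_enable()):

/* Illustration only -- not part of the patch. */
static void example_pcie_port_rmw(struct radeon_device *rdev)
{
        u32 tmp;

        tmp = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);   /* takes pciep_idx_lock internally */
        tmp |= LC_UPCONFIGURE_DIS;
        WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, tmp);     /* a second, separate locked section */
}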
*/
static inline uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg)
{
+ unsigned long flags;
uint32_t r;
+ spin_lock_irqsave(&rdev->pcie_idx_lock, flags);
WREG32(RADEON_PCIE_INDEX, ((reg) & rdev->pcie_reg_mask));
r = RREG32(RADEON_PCIE_DATA);
+ spin_unlock_irqrestore(&rdev->pcie_idx_lock, flags);
return r;
}
static inline void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->pcie_idx_lock, flags);
WREG32(RADEON_PCIE_INDEX, ((reg) & rdev->pcie_reg_mask));
WREG32(RADEON_PCIE_DATA, (v));
+ spin_unlock_irqrestore(&rdev->pcie_idx_lock, flags);
}
static inline u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg)
static inline u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
WREG32(R600_RCU_INDEX, ((reg) & 0x1fff));
r = RREG32(R600_RCU_DATA);
+ spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
return r;
}
static inline void r600_rcu_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
WREG32(R600_RCU_INDEX, ((reg) & 0x1fff));
WREG32(R600_RCU_DATA, (v));
+ spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
}
static inline u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->cg_idx_lock, flags);
WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
r = RREG32(EVERGREEN_CG_IND_DATA);
+ spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
return r;
}
static inline void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->cg_idx_lock, flags);
WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
WREG32(EVERGREEN_CG_IND_DATA, (v));
+ spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
}
static inline u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->pif_idx_lock, flags);
WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
r = RREG32(EVERGREEN_PIF_PHY0_DATA);
+ spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
return r;
}
static inline void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->pif_idx_lock, flags);
WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
+ spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}
static inline u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->pif_idx_lock, flags);
WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
r = RREG32(EVERGREEN_PIF_PHY1_DATA);
+ spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
return r;
}
static inline void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->pif_idx_lock, flags);
WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
+ spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}
static inline u32 r600_uvd_ctx_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
WREG32(R600_UVD_CTX_INDEX, ((reg) & 0x1ff));
r = RREG32(R600_UVD_CTX_DATA);
+ spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
return r;
}
static inline void r600_uvd_ctx_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
WREG32(R600_UVD_CTX_INDEX, ((reg) & 0x1ff));
WREG32(R600_UVD_CTX_DATA, (v));
+ spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
}
static inline u32 cik_didt_rreg(struct radeon_device *rdev, u32 reg)
{
+ unsigned long flags;
u32 r;
+ spin_lock_irqsave(&rdev->didt_idx_lock, flags);
WREG32(CIK_DIDT_IND_INDEX, (reg));
r = RREG32(CIK_DIDT_IND_DATA);
+ spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
return r;
}
static inline void cik_didt_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->didt_idx_lock, flags);
WREG32(CIK_DIDT_IND_INDEX, (reg));
WREG32(CIK_DIDT_IND_DATA, (v));
+ spin_unlock_irqrestore(&rdev->didt_idx_lock, flags);
}
void r100_pll_errata_after_index(struct radeon_device *rdev);
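All of the accessors take the _irqsave/_irqrestore variants rather than a plain spin_lock(): the protected window is only a handful of MMIO accesses, and masking local interrupts for that window keeps the helpers safe to call from any context. Sketch of the lockup a plain spin_lock() would permit if one of these paths were ever reached from an interrupt handler (hypothetical call chain, for illustration only):

/*
 *      process context                      IRQ on the same CPU
 *      spin_lock(&rdev->mc_idx_lock);
 *                                           handler ends up in an MC accessor
 *                                           spin_lock(&rdev->mc_idx_lock);  <-- spins forever
 *
 * spin_lock_irqsave() masks local interrupts for the short index/data
 * window, so the lock holder cannot be interrupted into such a path on
 * its own CPU.
 */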
/* TODO: block userspace mapping of io register */
spin_lock_init(&rdev->mmio_idx_lock);
spin_lock_init(&rdev->smc_idx_lock);
+ spin_lock_init(&rdev->pll_idx_lock);
+ spin_lock_init(&rdev->mc_idx_lock);
+ spin_lock_init(&rdev->pcie_idx_lock);
+ spin_lock_init(&rdev->pciep_idx_lock);
+ spin_lock_init(&rdev->pif_idx_lock);
+ spin_lock_init(&rdev->cg_idx_lock);
+ spin_lock_init(&rdev->uvd_idx_lock);
+ spin_lock_init(&rdev->rcu_idx_lock);
+ spin_lock_init(&rdev->didt_idx_lock);
+ spin_lock_init(&rdev->end_idx_lock);
if (rdev->family >= CHIP_BONAIRE) {
rdev->rmmio_base = pci_resource_start(rdev->pdev, 5);
rdev->rmmio_size = pci_resource_len(rdev->pdev, 5);
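All of the new locks are initialized here in radeon_device_init(), alongside the existing mmio_idx_lock and smc_idx_lock and before the MMIO BAR is even ioremapped, so every accessor above can assume its lock is valid by the time ASIC init starts issuing register reads and writes.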
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
+ unsigned long flags;
uint32_t r;
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(RS480_NB_MC_INDEX, reg & 0xff);
r = RREG32(RS480_NB_MC_DATA);
WREG32(RS480_NB_MC_INDEX, 0xff);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
return r;
}
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(RS480_NB_MC_INDEX, ((reg) & 0xff) | RS480_NB_MC_IND_WR_EN);
WREG32(RS480_NB_MC_DATA, (v));
WREG32(RS480_NB_MC_INDEX, 0xff);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}
#if defined(CONFIG_DEBUG_FS)
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
+ unsigned long flags;
+ u32 r;
+
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(R_000070_MC_IND_INDEX, S_000070_MC_IND_ADDR(reg) |
S_000070_MC_IND_CITF_ARB0(1));
- return RREG32(R_000074_MC_IND_DATA);
+ r = RREG32(R_000074_MC_IND_DATA);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
+ return r;
}
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(R_000070_MC_IND_INDEX, S_000070_MC_IND_ADDR(reg) |
S_000070_MC_IND_CITF_ARB0(1) | S_000070_MC_IND_WR_EN(1));
WREG32(R_000074_MC_IND_DATA, v);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}
static void rs600_debugfs(struct radeon_device *rdev)
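rs600_mc_rreg() above is the one accessor that needs restructuring rather than just bracketing: the direct return is replaced by a temporary so the data read completes before mc_idx_lock is dropped.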
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
+ unsigned long flags;
uint32_t r;
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(R_000078_MC_INDEX, S_000078_MC_IND_ADDR(reg));
r = RREG32(R_00007C_MC_DATA);
WREG32(R_000078_MC_INDEX, ~C_000078_MC_IND_ADDR);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
return r;
}
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(R_000078_MC_INDEX, S_000078_MC_IND_ADDR(reg) |
S_000078_MC_IND_WR_EN(1));
WREG32(R_00007C_MC_DATA, v);
WREG32(R_000078_MC_INDEX, 0x7F);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}
static void rs690_mc_program(struct radeon_device *rdev)
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
+ unsigned long flags;
uint32_t r;
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(MC_IND_INDEX, 0x7f0000 | (reg & 0xffff));
r = RREG32(MC_IND_DATA);
WREG32(MC_IND_INDEX, 0);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
+
return r;
}
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
+ unsigned long flags;
+
+ spin_lock_irqsave(&rdev->mc_idx_lock, flags);
WREG32(MC_IND_INDEX, 0xff0000 | ((reg) & 0xffff));
WREG32(MC_IND_DATA, (v));
WREG32(MC_IND_INDEX, 0);
+ spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}
#if defined(CONFIG_DEBUG_FS)