/drivers/net/ethernet/smsc/

smsc9420.c
     298  netif_info(pd, hw, pd->dev,  in smsc9420_eeprom_send_cmd()
     425  netif_dbg(pd, probe, pd->dev,  in smsc9420_check_mac_address()
     442  netif_dbg(pd, probe, pd->dev,  in smsc9420_check_mac_address()
     448  netif_dbg(pd, probe, pd->dev,  in smsc9420_check_mac_address()
     585  netif_warn(pd, ifdown, pd->dev,  in smsc9420_stop_rx()
     703  free_irq(pd->pdev->irq, pd);  in smsc9420_stop()
    1149  pd->mii_bus->priv = pd;  in smsc9420_mii_init()
    1274  netif_warn(pd, ifup, pd->dev,  in smsc9420_open()
    1368  netif_warn(pd, ifup, pd->dev,  in smsc9420_open()
    1376  netif_warn(pd, ifup, pd->dev,  in smsc9420_open()
    [all …]
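
The smsc9420 hits are dominated by the netif_info()/netif_dbg()/netif_warn() logging macros, which gate each message on a per-driver msg_enable bitmap. A minimal sketch of that pattern follows; struct demo_priv, the debug module parameter and demo_init_msglevel() are illustrative names, not code from smsc9420.c.

```c
#include <linux/module.h>
#include <linux/netdevice.h>

/* Hypothetical private data; smsc9420.c keeps its own layout. */
struct demo_priv {
	struct net_device *dev;
	u32 msg_enable;		/* bitmap consulted by the netif_*() macros */
};

static int debug = -1;		/* -1: fall back to the default bits below */
module_param(debug, int, 0);

static void demo_init_msglevel(struct demo_priv *pd)
{
	/* Turn the module parameter into NETIF_MSG_* bits. */
	pd->msg_enable = netif_msg_init(debug, NETIF_MSG_DRV |
					NETIF_MSG_PROBE | NETIF_MSG_LINK);

	/* Each call prints only if its NETIF_MSG_* bit is set in msg_enable. */
	netif_dbg(pd, probe, pd->dev, "message level 0x%x\n", pd->msg_enable);
	netif_warn(pd, ifup, pd->dev, "example warning during open\n");
}
```

The first macro argument must point at a structure whose msg_enable field holds the NETIF_MSG_* bits, which is why every hit above passes pd first.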
|
/drivers/i2c/busses/

i2c-sh_mobile.c
     245  pd->iccl, pd->icch);  in sh_mobile_i2c_check_timing()
     261  dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);  in sh_mobile_i2c_check_timing()
     322  iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);  in i2c_op()
     357  if (pd->pos == pd->msg->len) {  in sh_mobile_i2c_isr_tx()
     414  pd->pos, pd->msg->len);  in sh_mobile_i2c_isr()
     449  ? pd->dma_rx : pd->dma_tx;  in sh_mobile_i2c_cleanup_dma()
     456  pd->msg->len, pd->dma_direction);  in sh_mobile_i2c_cleanup_dma()
     466  pd->pos = pd->msg->len;  in sh_mobile_i2c_dma_callback()
     535  sg_dma_len(&pd->sg) = pd->msg->len;  in sh_mobile_i2c_xfer_dma()
     573  iic_wr(pd, ICCL, pd->iccl & 0xff);  in start_ch()
    [all …]
|
i2c-simtec.c
      68  if (pd == NULL)  in simtec_i2c_probe()
      99  pd->adap.algo_data = &pd->bit;  in simtec_i2c_probe()
     102  strscpy(pd->adap.name, "Simtec I2C", sizeof(pd->adap.name));  in simtec_i2c_probe()
     104  pd->bit.data = pd;  in simtec_i2c_probe()
     109  pd->bit.timeout = HZ;  in simtec_i2c_probe()
     110  pd->bit.udelay = 20;  in simtec_i2c_probe()
     119  iounmap(pd->reg);  in simtec_i2c_probe()
     125  kfree(pd);  in simtec_i2c_probe()
     135  iounmap(pd->reg);  in simtec_i2c_remove()
     136  release_mem_region(pd->ioarea->start, resource_size(pd->ioarea));  in simtec_i2c_remove()
    [all …]
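
i2c-simtec.c is a bit-banged bus built on i2c-algo-bit: the adapter's algo_data points at a struct i2c_algo_bit_data whose data pointer is handed back to the driver's line-toggling callbacks. A sketch of that wiring, with hypothetical demo_* callbacks standing in for the driver's register accessors:

```c
#include <linux/i2c.h>
#include <linux/i2c-algo-bit.h>
#include <linux/jiffies.h>
#include <linux/module.h>
#include <linux/string.h>

/* Hypothetical line-toggling callbacks; the real driver pokes its own MMIO register. */
static void demo_setscl(void *data, int state) { /* drive SCL */ }
static void demo_setsda(void *data, int state) { /* drive SDA */ }
static int demo_getscl(void *data) { return 1; }	/* sample SCL */
static int demo_getsda(void *data) { return 1; }	/* sample SDA */

struct demo_i2c {
	struct i2c_adapter adap;
	struct i2c_algo_bit_data bit;
};

static int demo_register_bitbang(struct demo_i2c *pd, struct device *parent)
{
	strscpy(pd->adap.name, "Demo bit-banged I2C", sizeof(pd->adap.name));
	pd->adap.owner = THIS_MODULE;
	pd->adap.algo_data = &pd->bit;	/* i2c-algo-bit finds its callbacks here */
	pd->adap.dev.parent = parent;

	pd->bit.data = pd;		/* handed back to the callbacks above */
	pd->bit.setscl = demo_setscl;
	pd->bit.setsda = demo_setsda;
	pd->bit.getscl = demo_getscl;
	pd->bit.getsda = demo_getsda;
	pd->bit.timeout = HZ;		/* clock-stretch timeout, as in the hits */
	pd->bit.udelay = 20;		/* 20 us half period, roughly 25 kHz SCL */

	return i2c_bit_add_bus(&pd->adap);
}
```

With udelay = 20 the half-period is 20 µs, i.e. roughly a 25 kHz SCL, matching the values visible in the probe hits.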
|
/drivers/pmdomain/mediatek/

mtk-pm-domains.c
     246  ret = clk_bulk_prepare_enable(pd->num_clks, pd->clks);  in scpsys_power_on()
     306  clk_bulk_disable_unprepare(pd->num_clks, pd->clks);  in scpsys_power_on()
     346  clk_bulk_disable_unprepare(pd->num_clks, pd->clks);  in scpsys_power_off()
     384  pd = devm_kzalloc(scpsys->dev, sizeof(*pd), GFP_KERNEL);  in scpsys_add_one_domain()
     385  if (!pd)  in scpsys_add_one_domain()
     438  pd->clks = devm_kcalloc(scpsys->dev, pd->num_clks, sizeof(*pd->clks), GFP_KERNEL);  in scpsys_add_one_domain()
     505  pd->genpd.name = pd->data->name;  in scpsys_add_one_domain()
     523  clk_bulk_put(pd->num_subsys_clks, pd->subsys_clks);  in scpsys_add_one_domain()
     525  clk_bulk_put(pd->num_clks, pd->clks);  in scpsys_add_one_domain()
     599  clk_bulk_put(pd->num_clks, pd->clks);  in scpsys_remove_one_domain()
    [all …]
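
Most of the mtk-pm-domains hits are the clk_bulk API used inside generic power-domain callbacks: every clock of a domain is enabled or disabled in a single call. A reduced sketch under the assumption of a wrapper like the one below (struct demo_domain and the demo_* callbacks are invented; the real scpsys_domain carries more state):

```c
#include <linux/clk.h>
#include <linux/container_of.h>
#include <linux/pm_domain.h>

/* Invented wrapper; the real scpsys_domain carries much more state. */
struct demo_domain {
	struct generic_pm_domain genpd;
	int num_clks;
	struct clk_bulk_data *clks;
};

#define to_demo_domain(gpd) container_of(gpd, struct demo_domain, genpd)

static int demo_power_on(struct generic_pm_domain *genpd)
{
	struct demo_domain *pd = to_demo_domain(genpd);

	/* Prepare and enable every clock of the domain in one call; the real
	 * driver programs the power controller afterwards and unwinds with
	 * clk_bulk_disable_unprepare() if that fails. */
	return clk_bulk_prepare_enable(pd->num_clks, pd->clks);
}

static int demo_power_off(struct generic_pm_domain *genpd)
{
	struct demo_domain *pd = to_demo_domain(genpd);

	clk_bulk_disable_unprepare(pd->num_clks, pd->clks);
	return 0;
}
```

clk_bulk_prepare_enable() returns 0 or the first error, so the power_on callback can forward its return value directly.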
|
/drivers/media/pci/dt3155/

dt3155.c
     129  unsigned size = pd->width * pd->height;  in dt3155_queue_setup()
     156  iowrite32(pd->width, pd->regs + EVEN_DMA_STRIDE);  in dt3155_start_streaming()
     157  iowrite32(pd->width, pd->regs + ODD_DMA_STRIDE);  in dt3155_start_streaming()
     165  write_i2c_reg(pd->regs, CONFIG, pd->config);  in dt3155_start_streaming()
     181  write_i2c_reg_nowait(pd->regs, CSR2, pd->csr2);  in dt3155_stop_streaming()
     495  if (!pd)  in dt3155_probe()
     502  pd->vdev.v4l2_dev = &pd->v4l2_dev;  in dt3155_probe()
     517  pd->vidq.drv_priv = pd;  in dt3155_probe()
     522  pd->vdev.queue = &pd->vidq;  in dt3155_probe()
     553  free_irq(pd->pdev->irq, pd);  in dt3155_probe()
    [all …]
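
The dt3155 probe hits show the usual videobuf2 wiring: the vb2 queue keeps a drv_priv back-pointer to the driver context, and the video_device is pointed at both the queue and the v4l2_device. A minimal sketch (struct demo_cap and demo_wire_v4l2() are made-up names):

```c
#include <media/v4l2-dev.h>
#include <media/v4l2-device.h>
#include <media/videobuf2-v4l2.h>

/* Hypothetical capture device; dt3155.c wires the same three pointers. */
struct demo_cap {
	struct v4l2_device v4l2_dev;
	struct video_device vdev;
	struct vb2_queue vidq;
};

static void demo_wire_v4l2(struct demo_cap *pd)
{
	/* Queue callbacks recover the driver context via vb2_get_drv_priv(). */
	pd->vidq.drv_priv = pd;

	/* The video node exposes the queue and hangs off the v4l2_device. */
	pd->vdev.queue = &pd->vidq;
	pd->vdev.v4l2_dev = &pd->v4l2_dev;
}
```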
|
/drivers/clk/sifive/

sifive-prci.c
     206  struct __prci_data *pd = pc->pd;  in sifive_prci_wrpll_set_rate()  local
     227  struct __prci_data *pd = pc->pd;  in sifive_clk_is_enabled()  local
     242  struct __prci_data *pd = pc->pd;  in sifive_prci_clock_enable()  local
     259  struct __prci_data *pd = pc->pd;  in sifive_prci_clock_disable()  local
     277  struct __prci_data *pd = pc->pd;  in sifive_prci_tlclksel_recalc_rate()  local
     294  struct __prci_data *pd = pc->pd;  in sifive_prci_hfpclkplldiv_recalc_rate()  local
     455  struct __prci_data *pd = pc->pd;  in sifive_prci_pcie_aux_clock_is_enabled()  local
     469  struct __prci_data *pd = pc->pd;  in sifive_prci_pcie_aux_clock_enable()  local
     484  struct __prci_data *pd = pc->pd;  in sifive_prci_pcie_aux_clock_disable()  local
     527  pic->pd = pd;  in __prci_register_clocks()
    [all …]
|
/drivers/gpu/drm/gma500/

mmu.c
     161  struct psb_mmu_pd *pd = kmalloc(sizeof(*pd), GFP_KERNEL);  in psb_mmu_alloc_pd()  local
     165  if (!pd)  in psb_mmu_alloc_pd()
     169  if (!pd->p)  in psb_mmu_alloc_pd()
     179  pd->invalid_pde = psb_mmu_mask_pte(page_to_pfn(pd->dummy_pt),  in psb_mmu_alloc_pd()
     211  return pd;  in psb_mmu_alloc_pd()
     220  kfree(pd);  in psb_mmu_alloc_pd()
     257  kfree(pd);  in psb_mmu_free_pagedir()
     289  if (pd->driver->has_clflush && pd->hw_context != -1) {  in psb_mmu_alloc_pt()
     301  pt->pd = pd;  in psb_mmu_alloc_pt()
     366  struct psb_mmu_pd *pd = pt->pd;  in psb_mmu_pt_unmap_unlock()  local
    [all …]
|
/drivers/pmdomain/ti/

ti_sci_pm_domains.c
      78  pd->idx, val);  in ti_sci_pd_set_lat_constraint()
     244  pd = devm_kzalloc(dev, sizeof(*pd), GFP_KERNEL);  in ti_sci_pm_domain_probe()
     245  if (!pd)  in ti_sci_pm_domain_probe()
     248  pd->pd.name = devm_kasprintf(dev, GFP_KERNEL,  in ti_sci_pm_domain_probe()
     251  if (!pd->pd.name)  in ti_sci_pm_domain_probe()
     254  pd->pd.power_off = ti_sci_pd_power_off;  in ti_sci_pm_domain_probe()
     255  pd->pd.power_on = ti_sci_pd_power_on;  in ti_sci_pm_domain_probe()
     256  pd->pd.flags |= GENPD_FLAG_ACTIVE_WAKEUP;  in ti_sci_pm_domain_probe()
     265  pd->pd.domain.ops.suspend = ti_sci_pd_suspend;  in ti_sci_pm_domain_probe()
     267  pm_genpd_init(&pd->pd, NULL, true);  in ti_sci_pm_domain_probe()
    [all …]
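
The ti_sci hits read pd->pd.power_on because the driver embeds a struct generic_pm_domain named pd inside its own per-domain structure and registers it with pm_genpd_init(). A hedged sketch of that shape (the demo_* names are hypothetical; the real callbacks talk to TI-SCI firmware):

```c
#include <linux/device.h>
#include <linux/pm_domain.h>
#include <linux/slab.h>

/* Hypothetical provider data; ti_sci_pm_domains.c embeds genpd the same way,
 * which is why the hits read pd->pd.power_on and pm_genpd_init(&pd->pd, ...). */
struct demo_pm_domain {
	struct generic_pm_domain pd;	/* embedded genpd, registered below */
	int idx;			/* hardware/firmware domain index */
};

static int demo_pd_power_on(struct generic_pm_domain *domain)  { return 0; }
static int demo_pd_power_off(struct generic_pm_domain *domain) { return 0; }

static int demo_pd_register(struct device *dev, int idx)
{
	struct demo_pm_domain *pd;

	pd = devm_kzalloc(dev, sizeof(*pd), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;

	pd->idx = idx;
	pd->pd.name = devm_kasprintf(dev, GFP_KERNEL, "pd:%d", idx);
	if (!pd->pd.name)
		return -ENOMEM;

	pd->pd.power_on = demo_pd_power_on;
	pd->pd.power_off = demo_pd_power_off;
	pd->pd.flags |= GENPD_FLAG_ACTIVE_WAKEUP;

	/* Third argument true: register the domain as initially powered off. */
	return pm_genpd_init(&pd->pd, NULL, true);
}
```

Passing true as the last pm_genpd_init() argument registers the domain as initially powered off, exactly as the hit at line 267 does.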
|
/drivers/pmdomain/qcom/

rpmpd.c
     968  pd->res_type, pd->res_id, &req, sizeof(req));  in rpmpd_send_enable()
     974  .key = pd->key,  in rpmpd_send_corner()
    1034  pd->enabled = true;  in rpmpd_power_on()
    1036  if (pd->corner)  in rpmpd_power_on()
    1071  if (!pd->enabled && pd->key != cpu_to_le32(KEY_FLOOR_CORNER) &&  in rpmpd_set_performance()
    1143  struct rpmpd *pd;  in rpmpd_sync_state()  local
    1151  pd = rpmpds[i];  in rpmpd_sync_state()
    1152  if (!pd)  in rpmpd_sync_state()
    1157  if (!pd->enabled)  in rpmpd_sync_state()
    1158  pd->corner = 0;  in rpmpd_sync_state()
    [all …]
|
rpmhpd.c
      93  .parent = &mx.pd,
     101  .parent = &mx_ao.pd,
     143  .parent = &cx.pd,
     151  .parent = &cx_ao.pd,
     806  .addr = pd->addr,  in rpmhpd_send_corner()
     897  corner = max(pd->corner, pd->enable_corner);  in rpmhpd_power_on()
     952  pd->corner = i;  in rpmhpd_set_performance_state()
    1079  pd = rpmhpds[i];  in rpmhpd_sync_state()
    1080  if (!pd)  in rpmhpd_sync_state()
    1084  if (pd->enabled)  in rpmhpd_sync_state()
    [all …]
|
/drivers/pmdomain/rockchip/

pm-domains.c
     283  ret = clk_bulk_enable(pd->num_clks, pd->clks);  in rockchip_pmu_block()
     300  clk_bulk_disable(pd->num_clks, pd->clks);  in rockchip_pmu_block()
     324  clk_bulk_disable(pd->num_clks, pd->clks);  in rockchip_pmu_unblock()
     625  ret = clk_bulk_enable(pd->num_clks, pd->clks);  in rockchip_pd_power()
     657  clk_bulk_disable(pd->num_clks, pd->clks);  in rockchip_pd_power()
     788  if (!pd)  in rockchip_pm_add_one_domain()
     861  pd->genpd.name = pd->info->name;  in rockchip_pm_add_one_domain()
     879  clk_bulk_unprepare(pd->num_clks, pd->clks);  in rockchip_pm_add_one_domain()
     881  clk_bulk_put(pd->num_clks, pd->clks);  in rockchip_pm_add_one_domain()
     898  clk_bulk_unprepare(pd->num_clks, pd->clks);  in rockchip_pm_remove_one_domain()
    [all …]
|
/drivers/pmdomain/samsung/

exynos-pm-domains.c
      43  pd = container_of(domain, struct exynos_pm_domain, pd);  in exynos_pd_power()
     114  pd = devm_kzalloc(dev, sizeof(*pd), GFP_KERNEL);  in exynos_pd_probe()
     115  if (!pd)  in exynos_pd_probe()
     118  pd->pd.name = exynos_get_domain_name(np);  in exynos_pd_probe()
     119  if (!pd->pd.name)  in exynos_pd_probe()
     124  kfree_const(pd->pd.name);  in exynos_pd_probe()
     128  pd->pd.power_off = exynos_pd_power_off;  in exynos_pd_probe()
     129  pd->pd.power_on = exynos_pd_power_on;  in exynos_pd_probe()
     132  on = readl_relaxed(pd->base + 0x4) & pd->local_pwr_cfg;  in exynos_pd_probe()
     134  pm_genpd_init(&pd->pd, NULL, !on);  in exynos_pd_probe()
    [all …]
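
The exynos probe hits illustrate seeding genpd with the hardware's current state: a status register is read, masked with the domain's power-configuration bits, and the inverted result is passed as the is_off argument of pm_genpd_init(). A sketch under the same assumed register layout (the structure and function names here are invented):

```c
#include <linux/io.h>
#include <linux/pm_domain.h>

/* Assumed layout, mirroring the hits: a status register 0x4 past the base,
 * sharing bits with the power-configuration mask. Names are illustrative. */
struct demo_exynos_pd {
	struct generic_pm_domain pd;
	void __iomem *base;
	u32 local_pwr_cfg;
};

static int demo_pd_init(struct demo_exynos_pd *pd)
{
	bool on;

	/* Ask the hardware whether the domain is currently powered... */
	on = readl_relaxed(pd->base + 0x4) & pd->local_pwr_cfg;

	/* ...and hand genpd a matching initial state (is_off = !on). */
	return pm_genpd_init(&pd->pd, NULL, !on);
}
```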
|
/drivers/pmdomain/xilinx/

zynqmp-pm-domains.c
     113  if (!pd->requested) {  in zynqmp_gpd_power_off()
     115  pd->node_id);  in zynqmp_gpd_power_off()
     170  pd->requested = true;  in zynqmp_gpd_attach_dev()
     196  pd->node_id, ret);  in zynqmp_gpd_detach_dev()
     200  pd->requested = false;  in zynqmp_gpd_detach_dev()
     220  if (pd[i].node_id == idx)  in zynqmp_gpd_xlate()
     230  pd[i].node_id = idx;  in zynqmp_gpd_xlate()
     250  pd = devm_kcalloc(dev, ZYNQMP_NUM_DOMAINS, sizeof(*pd), GFP_KERNEL);  in zynqmp_gpd_probe()
     251  if (!pd)  in zynqmp_gpd_probe()
     270  pd->node_id = 0;  in zynqmp_gpd_probe()
    [all …]
|
/drivers/media/tuners/

tda18271-maps.c
      13  u8 pd; /* post div */  member
      25  { .lomax = 32000, .pd = 0x5f, .d = 0xf0 },
      26  { .lomax = 35000, .pd = 0x5e, .d = 0xe0 },
      27  { .lomax = 37000, .pd = 0x5d, .d = 0xd0 },
      28  { .lomax = 41000, .pd = 0x5c, .d = 0xc0 },
      29  { .lomax = 44000, .pd = 0x5b, .d = 0xb0 },
      30  { .lomax = 49000, .pd = 0x5a, .d = 0xa0 },
      31  { .lomax = 54000, .pd = 0x59, .d = 0x90 },
      32  { .lomax = 61000, .pd = 0x58, .d = 0x80 },
      33  { .lomax = 65000, .pd = 0x4f, .d = 0x78 },
    [all …]
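
Here pd is not a power domain but the post-divider byte in the tuner's PLL maps: each entry pairs a maximum LO frequency (lomax, in kHz) with the divider bytes to program. Lookups walk the table until the requested frequency fits under lomax. A sketch in that spirit (the demo_* names are invented and only the first three table rows are copied from the listing):

```c
#include <linux/types.h>

/* Shape of the map shown above: entries sorted by ascending lomax (kHz). */
struct demo_pll_map {
	u32 lomax;	/* highest LO frequency this entry covers, in kHz */
	u8 pd;		/* post divider */
	u8 d;		/* divider byte programmed into the tuner */
};

static const struct demo_pll_map demo_main_pll[] = {
	{ .lomax = 32000, .pd = 0x5f, .d = 0xf0 },
	{ .lomax = 35000, .pd = 0x5e, .d = 0xe0 },
	{ .lomax = 37000, .pd = 0x5d, .d = 0xd0 },
	{ .lomax =     0, .pd = 0x00, .d = 0x00 },	/* sentinel */
};

/* A minimal lookup in the spirit of the driver: walk the table until the
 * requested LO frequency fits under lomax, then return that entry. */
static const struct demo_pll_map *demo_lookup_pll(u32 freq_khz)
{
	int i = 0;

	while (demo_main_pll[i].lomax && freq_khz > demo_main_pll[i].lomax)
		i++;

	return demo_main_pll[i].lomax ? &demo_main_pll[i] : NULL;
}
```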
|
/drivers/pmdomain/renesas/

rcar-sysc.c
      98  iowrite32(BIT(pd->chan_bit), rcar_sysc_base + pd->chan_offs + reg_offs);  in rcar_sysc_pwr_on_off()
     210  if (pd->flags & PD_CPU) {  in rcar_sysc_pd_setup()
     397  struct rcar_sysc_pd *pd;  in rcar_sysc_pd_init()  local
     401  pd = kzalloc(sizeof(*pd) + n, GFP_KERNEL);  in rcar_sysc_pd_init()
     402  if (!pd) {  in rcar_sysc_pd_init()
     408  pd->genpd.name = pd->name;  in rcar_sysc_pd_init()
     412  pd->flags = area->flags;  in rcar_sysc_pd_init()
     424  &pd->genpd);  in rcar_sysc_pd_init()
     460  struct rcar_sysc_pd *pd;  in rcar_sysc_power_cpu()  local
     471  pd = to_rcar_pd(genpd);  in rcar_sysc_power_cpu()
    [all …]
|
rmobile-sysc.c
     136  struct device_node *pd;  member
     156  if (!pd)  in add_special_pd()
     160  if (pd == special_pds[i].pd && type == special_pds[i].type) {  in add_special_pd()
     161  of_node_put(pd);  in add_special_pd()
     167  of_node_put(pd);  in add_special_pd()
     173  special_pds[num_special_pds].pd = pd;  in add_special_pd()
     209  if (pd == special_pds[i].pd)  in pd_type()
     279  pd = kzalloc(sizeof(*pd), GFP_KERNEL);  in rmobile_add_pm_domains()
     280  if (!pd)  in rmobile_add_pm_domains()
     284  pd->base = base;  in rmobile_add_pm_domains()
    [all …]
|
/drivers/gpu/drm/i915/gt/

intel_ppgtt.c
      39  pd = kzalloc(sizeof(*pd), I915_GFP_ALLOW_FAIL);  in __alloc_pd()
      40  if (unlikely(!pd))  in __alloc_pd()
      43  pd->entry = kcalloc(count, sizeof(*pd->entry), I915_GFP_ALLOW_FAIL);  in __alloc_pd()
      45  kfree(pd);  in __alloc_pd()
      50  return pd;  in __alloc_pd()
      58  if (unlikely(!pd))  in alloc_pd()
      63  kfree(pd->entry);  in alloc_pd()
      64  kfree(pd);  in alloc_pd()
      68  return pd;  in alloc_pd()
      78  kfree(pd->entry);  in free_px()
    [all …]
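
__alloc_pd() in intel_ppgtt.c is a two-step allocation: the descriptor itself, then its entry array, with the descriptor freed again if the second step fails. A simplified sketch (struct demo_pd is a stand-in and plain GFP_KERNEL replaces the driver's I915_GFP_ALLOW_FAIL):

```c
#include <linux/compiler.h>
#include <linux/slab.h>

/* Stand-in for the i915 page-directory object; only the entry array is kept. */
struct demo_pd {
	void **entry;	/* one slot per child page table */
};

static struct demo_pd *demo_alloc_pd(int count)
{
	struct demo_pd *pd;

	pd = kzalloc(sizeof(*pd), GFP_KERNEL);
	if (unlikely(!pd))
		return NULL;

	pd->entry = kcalloc(count, sizeof(*pd->entry), GFP_KERNEL);
	if (unlikely(!pd->entry)) {
		kfree(pd);	/* roll back the first allocation */
		return NULL;
	}

	return pd;
}

static void demo_free_pd(struct demo_pd *pd)
{
	kfree(pd->entry);
	kfree(pd);
}
```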
|
/drivers/dma/

pch_dma.c
     739  pd->regs.dma_ctl0 = dma_readl(pd, CTL0);  in pch_dma_save_regs()
     740  pd->regs.dma_ctl1 = dma_readl(pd, CTL1);  in pch_dma_save_regs()
     741  pd->regs.dma_ctl2 = dma_readl(pd, CTL2);  in pch_dma_save_regs()
     742  pd->regs.dma_ctl3 = dma_readl(pd, CTL3);  in pch_dma_save_regs()
     762  dma_writel(pd, CTL0, pd->regs.dma_ctl0);  in pch_dma_restore_regs()
     763  dma_writel(pd, CTL1, pd->regs.dma_ctl1);  in pch_dma_restore_regs()
     764  dma_writel(pd, CTL2, pd->regs.dma_ctl2);  in pch_dma_restore_regs()
     765  dma_writel(pd, CTL3, pd->regs.dma_ctl3);  in pch_dma_restore_regs()
     783  if (pd)  in pch_dma_suspend()
     793  if (pd)  in pch_dma_resume()
    [all …]
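
pch_dma.c snapshots its CTL0..CTL3 control registers on suspend and writes them back on resume. A generic sketch of that save/restore pattern (the register block layout and the demo_* names are assumptions, not the PCH DMA register map):

```c
#include <linux/io.h>
#include <linux/types.h>

/* Hypothetical MMIO block with four consecutive 32-bit control registers,
 * mirroring the CTL0..CTL3 save/restore pattern in pch_dma.c. */
struct demo_dma {
	void __iomem *membase;
	u32 saved_ctl[4];
};

static void demo_dma_save_regs(struct demo_dma *pd)
{
	int i;

	/* Snapshot the control registers before the device loses power. */
	for (i = 0; i < 4; i++)
		pd->saved_ctl[i] = ioread32(pd->membase + 4 * i);
}

static void demo_dma_restore_regs(struct demo_dma *pd)
{
	int i;

	/* Replay the snapshot once the device is powered again. */
	for (i = 0; i < 4; i++)
		iowrite32(pd->saved_ctl[i], pd->membase + 4 * i);
}
```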
|
/drivers/pmdomain/thead/

th1520-pm-domains.c
      55  return th1520_aon_power_update(pd->aon_chan, pd->rsrc, true);  in th1520_pd_power_on()
      62  return th1520_aon_power_update(pd->aon_chan, pd->rsrc, false);  in th1520_pd_power_off()
      80  domain = &pd->genpd;  in th1520_pd_xlate()
      94  pd = devm_kzalloc(dev, sizeof(*pd), GFP_KERNEL);  in th1520_add_pm_domain()
      95  if (!pd)  in th1520_add_pm_domain()
      98  pd->rsrc = pi->rsrc;  in th1520_add_pm_domain()
     107  return pd;  in th1520_add_pm_domain()
     124  ret = th1520_aon_power_update(pd->aon_chan, pd->rsrc, false);  in th1520_pd_init_all_off()
     128  pd->genpd.name);  in th1520_pd_init_all_off()
     209  if (IS_ERR(pd)) {  in th1520_pd_probe()
    [all …]
|
/drivers/infiniband/hw/usnic/

usnic_uiom.c
     223  spin_lock(&pd->lock);  in __usnic_uiom_reg_release()
     250  struct usnic_uiom_pd *pd = uiomr->pd;  in usnic_uiom_map_sorted_intervals()  local
     362  uiomr->pd = pd;  in usnic_uiom_reg_get()
     376  &pd->root,  in usnic_uiom_reg_get()
     442  pd = kzalloc(sizeof(*pd), GFP_KERNEL);  in usnic_uiom_alloc_pd()
     443  if (!pd)  in usnic_uiom_alloc_pd()
     449  kfree(pd);  in usnic_uiom_alloc_pd()
     458  return pd;  in usnic_uiom_alloc_pd()
     464  kfree(pd);  in usnic_uiom_dealloc_pd()
     490  pd->dev_cnt++;  in usnic_uiom_attach_dev_to_pd()
    [all …]
|
/drivers/pmdomain/sunxi/

sun50i-h6-prcm-ppu.c
      86  bool bit = readl(pd->reg) & pd->gate_mask;  in sun50i_h6_ppu_power_status()
      88  return bit ^ pd->negated;  in sun50i_h6_ppu_power_status()
      94  u32 reg = readl(pd->reg);  in sun50i_h6_ppu_pd_set_power()
      97  writel(reg | pd->gate_mask, pd->reg);  in sun50i_h6_ppu_pd_set_power()
      99  writel(reg & ~pd->gate_mask, pd->reg);  in sun50i_h6_ppu_pd_set_power()
     108  return sun50i_h6_ppu_pd_set_power(pd, !pd->negated);  in sun50i_h6_ppu_pd_power_on()
     115  return sun50i_h6_ppu_pd_set_power(pd, pd->negated);  in sun50i_h6_ppu_pd_power_off()
     155  pd->genpd.name = desc->name;  in sun50i_h6_ppu_probe()
     162  pd->gate_mask = desc->mask;  in sun50i_h6_ppu_probe()
     164  ret = pm_genpd_init(&pd->genpd, NULL,  in sun50i_h6_ppu_probe()
    [all …]
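
The sun50i-h6 PPU hits show a single gate bit per domain whose polarity can be inverted (pd->negated), so power_on drives the bit to whichever value means "on" for that domain. A sketch with invented demo_* names:

```c
#include <linux/io.h>
#include <linux/types.h>

/* Assumed single-register gate, as the hits suggest: one mask bit per domain,
 * with pd->negated flipping the meaning of that bit. Names are illustrative. */
struct demo_ppu_pd {
	void __iomem *reg;
	u32 gate_mask;
	bool negated;	/* true if a set bit means "powered down" */
};

static bool demo_ppu_power_status(const struct demo_ppu_pd *pd)
{
	bool bit = readl(pd->reg) & pd->gate_mask;

	return bit ^ pd->negated;	/* normalise polarity to "is powered" */
}

static void demo_ppu_set_power(const struct demo_ppu_pd *pd, bool set_bit)
{
	u32 reg = readl(pd->reg);

	if (set_bit)
		writel(reg | pd->gate_mask, pd->reg);
	else
		writel(reg & ~pd->gate_mask, pd->reg);
}

/* Power on means driving the bit to the "on" polarity for this domain. */
static void demo_ppu_power_on(const struct demo_ppu_pd *pd)
{
	demo_ppu_set_power(pd, !pd->negated);
}
```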
|
sun55i-pck600.c
      73  struct sunxi_pck600 *pck = pd->pck;  in sunxi_pck600_pd_set_power()
      80  reg = readl(pd->base + PPU_PWPR);  in sunxi_pck600_pd_set_power()
      82  writel(reg, pd->base + PPU_PWPR);  in sunxi_pck600_pd_set_power()
      85  reg = readl(pd->base + PPU_PWPR);  in sunxi_pck600_pd_set_power()
     101  return sunxi_pck600_pd_set_power(pd, true);  in sunxi_pck600_power_on()
     108  return sunxi_pck600_pd_set_power(pd, false);  in sunxi_pck600_power_off()
     163  struct sunxi_pck600_pd *pd = &pck->pds[i];  in sunxi_pck600_probe()  local
     165  pd->genpd.name = desc->pd_names[i];  in sunxi_pck600_probe()
     168  pd->base = base + PPU_REG_SIZE * i;  in sunxi_pck600_probe()
     170  sunxi_pck600_pd_setup(pd, desc);  in sunxi_pck600_probe()
    [all …]
|
/drivers/gpu/drm/xe/

xe_lmtt.c
     113  if (IS_ERR(pd))  in lmtt_init_pd()
     116  lmtt->pd = pd;  in lmtt_init_pd()
     122  struct xe_lmtt_pt *pd = lmtt->pd;  in lmtt_fini_pd()  local
     130  lmtt->pd = NULL;  in lmtt_fini_pd()
     131  lmtt_pt_free(pd);  in lmtt_fini_pd()
     140  if (!lmtt->pd)  in fini_lmtt()
     220  if (!lmtt->pd)  in xe_lmtt_init_hw()
     318  lmtt_pt_free(pd);  in lmtt_destroy_pt()
     323  struct xe_lmtt_pt *pd = lmtt->pd;  in lmtt_drop_pages()  local
     383  struct xe_lmtt_pt *pd = lmtt->pd;  in lmtt_alloc_range()  local
    [all …]
|
/drivers/pmdomain/actions/

owl-sps.c
      53  ack_mask = BIT(pd->info->ack_bit);  in owl_sps_set_power()
      54  pwr_mask = BIT(pd->info->pwr_bit);  in owl_sps_set_power()
      63  dev_dbg(pd->sps->dev, "%s power on", pd->info->name);  in owl_sps_power_on()
      65  return owl_sps_set_power(pd, true);  in owl_sps_power_on()
      72  dev_dbg(pd->sps->dev, "%s power off", pd->info->name);  in owl_sps_power_off()
      79  struct owl_sps_domain *pd;  in owl_sps_init_domain()  local
      81  pd = devm_kzalloc(sps->dev, sizeof(*pd), GFP_KERNEL);  in owl_sps_init_domain()
      82  if (!pd)  in owl_sps_init_domain()
      86  pd->sps = sps;  in owl_sps_init_domain()
      88  pd->genpd.name = pd->info->name;  in owl_sps_init_domain()
    [all …]
|
/drivers/cpuidle/

cpuidle-psci-domain.c
      34  struct genpd_power_state *state = &pd->states[pd->state_idx];  in psci_pd_power_off()
      42  psci_set_domain_state(pd, pd->state_idx, *pd_state);  in psci_pd_power_off()
      49  struct generic_pm_domain *pd;  in psci_pd_init()  local
      55  if (!pd)  in psci_pd_init()
      70  pd->power_off = psci_pd_power_off;  in psci_pd_init()
      71  pd->flags |= GENPD_FLAG_ACTIVE_WAKEUP;  in psci_pd_init()
      73  pd->flags |= GENPD_FLAG_RPM_ALWAYS_ON;  in psci_pd_init()
      75  pd->flags |= GENPD_FLAG_ALWAYS_ON;  in psci_pd_init()
      81  ret = pm_genpd_init(pd, pd_gov, false);  in psci_pd_init()
      96  pm_genpd_remove(pd);  in psci_pd_init()
    [all …]
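
cpuidle-psci-domain.c builds CPU power domains that only implement ->power_off and rely on genpd flags to control when they may actually be turned off. A reduced sketch (the demo_* names are invented, the governor argument is left NULL here although the real code may pass a CPU governor, and the flag choice is only noted in a comment because it depends on platform conditions elided from the listing):

```c
#include <linux/pm_domain.h>
#include <linux/slab.h>

static int demo_pd_power_off(struct generic_pm_domain *pd)
{
	/* The PSCI driver looks up &pd->states[pd->state_idx] here and hands
	 * the selected domain state to firmware; elided in this sketch. */
	return 0;
}

static int demo_cpuidle_pd_init(void)
{
	struct generic_pm_domain *pd;
	int ret;

	pd = kzalloc(sizeof(*pd), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;

	pd->name = "demo-cpu-pd";
	pd->power_off = demo_pd_power_off;
	pd->flags |= GENPD_FLAG_ACTIVE_WAKEUP;
	/* Depending on platform support the driver additionally sets
	 * GENPD_FLAG_RPM_ALWAYS_ON or GENPD_FLAG_ALWAYS_ON. */

	ret = pm_genpd_init(pd, NULL, false);	/* false: starts powered on */
	if (ret)
		kfree(pd);

	return ret;
}
```

On a later registration failure the domain is torn down again with pm_genpd_remove(), as the hit at line 96 shows.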
|