Searched refs:md (Results 1 – 25 of 162) sorted by relevance

/drivers/video/fbdev/omap/
lcd_mipid.c
153 set_data_lines(md, md->panel.data_lines); in send_init_string()
159 md->hw_guard_end = jiffies + md->hw_guard_wait; in hw_guard_start()
397 md->esd_check(md); in mipid_esd_work()
413 md->enabled = 1; in mipid_enable()
442 md->enabled = 0; in mipid_disable()
470 md->enabled = panel_enabled(md); in mipid_init()
545 md->panel.name, md->revision, md->panel.data_lines); in mipid_detect()
555 md = kzalloc(sizeof(*md), GFP_KERNEL); in mipid_spi_probe()
568 md->spi = spi; in mipid_spi_probe()
581 kfree(md); in mipid_spi_probe()
[all …]
/drivers/md/
dm.c
598 io->md = md; in alloc_io()
680 queue_work(md->wq, &md->work); in queue_io()
874 struct mapped_device *md = io->md; in dm_requeue_add_io() local
891 queue_work(md->wq, &md->work); in dm_kick_requeue()
904 struct mapped_device *md = io->md; in dm_handle_requeue() local
948 struct mapped_device *md = io->md; in __dm_io_complete() local
1092 struct mapped_device *md = io->md; in clone_endio() local
1401 struct mapped_device *md = io->md; in __map_bio() local
2313 md->queue = md->disk->queue; in alloc_dev()
2331 md->disk->private_data = md; in alloc_dev()
[all …]
dm-era-target.c
415 md->writeset_tree_info.tm = md->tm; in setup_writeset_tree_info()
451 &md->tm, &md->sm); in create_fresh_metadata()
595 &md->tm, &md->sm); in open_metadata()
821 md->current_writeset = &md->writesets[0]; in metadata_open()
899 r = dm_bitset_flush(&md->bitset_info, md->current_writeset->md.root, in metadata_era_archive()
900 &md->current_writeset->md.root); in metadata_era_archive()
906 ws_pack(&md->current_writeset->md, &value); in metadata_era_archive()
927 &md->writesets[1] : &md->writesets[0]; in next_writeset()
989 r = dm_bitset_flush(&md->bitset_info, md->current_writeset->md.root, in metadata_commit()
990 &md->current_writeset->md.root); in metadata_commit()
[all …]
dm-ima.c
120 dev_name, dev_uuid, md->disk->major, md->disk->first_minor, in dm_ima_alloc_and_copy_device_data()
171 memset(&(md->ima), 0, sizeof(md->ima)); in dm_ima_reset_data()
340 if (table->md->ima.active_table.hash != table->md->ima.inactive_table.hash) in dm_ima_measure_on_table_load()
392 if (md->ima.active_table.hash != md->ima.inactive_table.hash) in dm_ima_measure_on_device_resume()
407 md->ima.active_table.hash = md->ima.inactive_table.hash; in dm_ima_measure_on_device_resume()
408 md->ima.active_table.hash_len = md->ima.inactive_table.hash_len; in dm_ima_measure_on_device_resume()
418 md->ima.active_table.num_targets = md->ima.inactive_table.num_targets; in dm_ima_measure_on_device_resume()
591 if (md->ima.active_table.hash != md->ima.inactive_table.hash) in dm_ima_measure_on_device_remove()
661 md->ima.inactive_table.hash != md->ima.active_table.hash) in dm_ima_measure_on_table_clear()
676 md->ima.inactive_table.hash = md->ima.active_table.hash; in dm_ima_measure_on_table_clear()
[all …]
dm-rq.c
149 dm_put(md); in rq_completed()
160 struct mapped_device *md = tio->md; in dm_end_request() local
190 struct mapped_device *md = tio->md; in dm_requeue_original_request() local
258 struct mapped_device *md = tio->md; in dm_softirq_done() local
341 tio->md = md; in init_tio()
366 struct mapped_device *md = tio->md; in map_request() local
454 dm_get(md); in dm_start_request()
467 tio->md = md; in dm_mq_init_request()
482 struct mapped_device *md = tio->md; in dm_mq_queue_rq() local
549 md->tag_set->numa_node = md->numa_node_id; in dm_mq_init_request_queue()
[all …]
dm-ioctl.c
199 if (!md) in __get_dev_cell()
244 hc->md = md; in alloc_cell()
351 md = hc->md; in dm_hash_remove_all()
504 md = hc->md; in dm_hash_rename()
971 md = hc->md; in find_device()
993 md = hc->md; in dev_remove()
1084 if (!md) in dev_set_geometry()
1127 if (!md) in do_suspend()
1167 md = hc->md; in do_resume()
1268 if (!md) in dev_status()
[all …]
dm.h
79 void dm_lock_md_type(struct mapped_device *md);
80 void dm_unlock_md_type(struct mapped_device *md);
117 #define dm_has_zone_plugs(md) ((md)->disk->zone_wplugs_hash != NULL) argument
124 #define dm_has_zone_plugs(md) false argument
144 int dm_deleting_md(struct mapped_device *md);
149 int dm_suspended_md(struct mapped_device *md);
180 int dm_sysfs_init(struct mapped_device *md);
181 void dm_sysfs_exit(struct mapped_device *md);
204 void dm_destroy(struct mapped_device *md);
206 int dm_open_count(struct mapped_device *md);
[all …]
dm-sysfs.c
27 struct mapped_device *md; in dm_attr_show() local
34 md = dm_get_from_kobject(kobj); in dm_attr_show()
35 if (!md) in dm_attr_show()
38 ret = dm_attr->show(md, page); in dm_attr_show()
39 dm_put(md); in dm_attr_show()
52 struct mapped_device *md; in dm_attr_store() local
59 md = dm_get_from_kobject(kobj); in dm_attr_store()
60 if (!md) in dm_attr_store()
63 ret = dm_attr->store(md, page, count); in dm_attr_store()
64 dm_put(md); in dm_attr_store()
[all …]
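
The dm_attr_show()/dm_attr_store() matches all follow one shape: look up the mapped_device from the kobject, bail out if the lookup fails, do the work, then drop the reference with dm_put(). A toy userspace sketch of that get/operate/put discipline follows; the struct, the refcount handling and all names are illustrative, not device-mapper's own.

/* Toy model of the get/operate/put pattern seen in dm-sysfs.c;
 * not the device-mapper implementation. */
#include <stdio.h>

struct mapped_dev {
	int refcount;        /* stand-in for the kernel's refcount */
	const char *name;
};

/* Take a reference, refusing if the object is already going away. */
static struct mapped_dev *dev_get(struct mapped_dev *md)
{
	if (!md || md->refcount == 0)
		return NULL;
	md->refcount++;
	return md;
}

static void dev_put(struct mapped_dev *md)
{
	md->refcount--;
}

static int attr_show(struct mapped_dev *candidate, char *page, size_t len)
{
	struct mapped_dev *md = dev_get(candidate);

	if (!md)
		return -1;               /* device vanished under us */
	snprintf(page, len, "%s\n", md->name);
	dev_put(md);                     /* always balance the get */
	return 0;
}

int main(void)
{
	struct mapped_dev md = { .refcount = 1, .name = "dm-0" };
	char page[64];

	if (!attr_show(&md, page, sizeof(page)))
		fputs(page, stdout);
	return 0;
}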
dm-zone.c
23 struct gendisk *disk = md->disk; in dm_blk_do_report_zones()
69 if (dm_suspended_md(md)) in dm_blk_report_zones()
161 struct mapped_device *md = t->md; in dm_revalidate_zones() local
173 if (dm_has_zone_plugs(md)) in dm_revalidate_zones()
184 md->zone_revalidate_map = t; in dm_revalidate_zones()
341 struct mapped_device *md = t->md; in dm_set_zones_restrictions() local
436 struct mapped_device *md = t->md; in dm_finalize_zone_settings() local
445 md->nr_zones = 0; in dm_finalize_zone_settings()
446 md->disk->nr_zones = 0; in dm_finalize_zone_settings()
456 struct mapped_device *md = io->md; in dm_zone_endio() local
[all …]
/drivers/net/wwan/t7xx/
t7xx_modem_ops.c
83 struct t7xx_modem *md = t7xx_dev->md; in t7xx_pci_mhccif_isr() local
117 queue_work(md->handshake_wq, &md->handshake_work); in t7xx_pci_mhccif_isr()
623 queue_work(md->handshake_wq, &md->handshake_work); in t7xx_md_event_notify()
630 queue_work(md->handshake_wq, &md->ap_handshake_work); in t7xx_md_event_notify()
683 md = devm_kzalloc(dev, sizeof(*md), GFP_KERNEL); in t7xx_md_alloc()
684 if (!md) in t7xx_md_alloc()
688 t7xx_dev->md = md; in t7xx_md_alloc()
705 return md; in t7xx_md_alloc()
710 struct t7xx_modem *md = t7xx_dev->md; in t7xx_md_reset() local
739 if (!md) in t7xx_md_init()
[all …]
t7xx_state_monitor.c
104 fsm_state_notify(ctl->md, state); in t7xx_fsm_broadcast_state()
225 struct t7xx_modem *md = ctl->md; in t7xx_lk_stage_event_handling() local
231 dev = &md->t7xx_dev->pdev->dev; in t7xx_lk_stage_event_handling()
320 struct t7xx_modem *md = ctl->md; in fsm_routine_ready() local
330 struct t7xx_modem *md = ctl->md; in fsm_routine_starting() local
339 (md->core_md.ready && md->core_ap.ready) || in fsm_routine_starting()
346 if (!md->core_md.ready) { in fsm_routine_starting()
369 struct t7xx_modem *md = ctl->md; in fsm_routine_start() local
374 if (!md) in fsm_routine_start()
619 md->fsm_ctl = ctl; in t7xx_fsm_init()
[all …]
/drivers/net/mdio/
mdio-mux-bcm-iproc.c
69 if (md->core_clk) { in mdio_mux_iproc_config()
217 md = devm_kzalloc(&pdev->dev, sizeof(*md), GFP_KERNEL); in mdio_mux_iproc_probe()
218 if (!md) in mdio_mux_iproc_probe()
220 md->dev = &pdev->dev; in mdio_mux_iproc_probe()
223 if (IS_ERR(md->base)) in mdio_mux_iproc_probe()
235 if (!md->mii_bus) { in mdio_mux_iproc_probe()
243 md->core_clk = NULL; in mdio_mux_iproc_probe()
253 bus = md->mii_bus; in mdio_mux_iproc_probe()
254 bus->priv = md; in mdio_mux_iproc_probe()
273 rc = mdio_mux_init(md->dev, md->dev->of_node, mdio_mux_iproc_switch_fn, in mdio_mux_iproc_probe()
[all …]
mdio-mux-bcm6368.c
48 if (md->ext_phy) in bcm6368_mdiomux_read()
69 if (md->ext_phy) in bcm6368_mdiomux_write()
96 md = devm_kzalloc(&pdev->dev, sizeof(*md), GFP_KERNEL); in bcm6368_mdiomux_probe()
97 if (!md) in bcm6368_mdiomux_probe()
110 if (!md->base) { in bcm6368_mdiomux_probe()
116 if (!md->mii_bus) { in bcm6368_mdiomux_probe()
121 bus = md->mii_bus; in bcm6368_mdiomux_probe()
122 bus->priv = md; in bcm6368_mdiomux_probe()
139 rc = mdio_mux_init(md->dev, md->dev->of_node, in bcm6368_mdiomux_probe()
140 bcm6368_mdiomux_switch_fn, &md->mux_handle, md, in bcm6368_mdiomux_probe()
[all …]
/drivers/clk/qcom/
clk-regmap-mux-div.c
29 val = (div << md->hid_shift) | (src << md->src_shift); in mux_div_set_src_div()
30 mask = ((BIT(md->hid_width) - 1) << md->hid_shift) | in mux_div_set_src_div()
31 ((BIT(md->src_width) - 1) << md->src_shift); in mux_div_set_src_div()
33 ret = regmap_update_bits(md->clkr.regmap, CFG_RCGR + md->reg_offset, in mux_div_set_src_div()
38 ret = regmap_update_bits(md->clkr.regmap, CMD_RCGR + md->reg_offset, in mux_div_set_src_div()
45 ret = regmap_read(md->clkr.regmap, CMD_RCGR + md->reg_offset, in mux_div_set_src_div()
65 regmap_read(md->clkr.regmap, CMD_RCGR + md->reg_offset, &val); in mux_div_get_src_div()
72 regmap_read(md->clkr.regmap, CFG_RCGR + md->reg_offset, &val); in mux_div_get_src_div()
156 md->div = best_div; in __mux_div_set_rate_and_parent()
157 md->src = best_src; in __mux_div_set_rate_and_parent()
[all …]
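
The mux_div_set_src_div() matches show the divider and source fields being packed into a single register value plus a mask before regmap_update_bits(). A minimal self-contained sketch of that packing follows; the shift/width numbers are assumptions for illustration, not values from any real clock controller.

/* Sketch of the val/mask packing seen in clk-regmap-mux-div.c.
 * The field positions below are made-up example values. */
#include <stdint.h>
#include <stdio.h>

#define BIT(n) (1U << (n))

struct mux_div_fields {
	unsigned int hid_shift, hid_width;  /* divider field */
	unsigned int src_shift, src_width;  /* source (parent) field */
};

static void pack_src_div(const struct mux_div_fields *md,
			 uint32_t src, uint32_t div,
			 uint32_t *val, uint32_t *mask)
{
	*val  = (div << md->hid_shift) | (src << md->src_shift);
	*mask = ((BIT(md->hid_width) - 1) << md->hid_shift) |
		((BIT(md->src_width) - 1) << md->src_shift);
}

int main(void)
{
	struct mux_div_fields md = { .hid_shift = 0, .hid_width = 5,
				     .src_shift = 8, .src_width = 3 };
	uint32_t val, mask;

	pack_src_div(&md, 2, 9, &val, &mask);
	printf("val=0x%08x mask=0x%08x\n", val, mask);
	return 0;
}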
/drivers/soundwire/
master.c
65 md->bus->prop.clk_freq[i]); in clock_frequencies_show()
107 kfree(md); in sdw_master_device_release()
136 md = kzalloc(sizeof(*md), GFP_KERNEL); in sdw_master_device_add()
137 if (!md) in sdw_master_device_add()
140 md->dev.bus = &sdw_bus_type; in sdw_master_device_add()
142 md->dev.parent = parent; in sdw_master_device_add()
145 md->dev.fwnode = fwnode; in sdw_master_device_add()
157 put_device(&md->dev); in sdw_master_device_add()
162 md->bus = bus; in sdw_master_device_add()
163 bus->dev = &md->dev; in sdw_master_device_add()
[all …]
/drivers/rapidio/devices/
rio_mport_cdev.c
365 map->md = md; in rio_mport_create_outbound_mapping()
444 struct mport_dev *md = priv->md; in rio_mport_obw_free() local
479 struct mport_dev *md = priv->md; in maint_hdid_set() local
501 struct mport_dev *md = priv->md; in maint_comptag_set() local
796 struct mport_dev *md = priv->md; in rio_dma_transfer() local
1087 map->md = md; in rio_mport_create_dma_mapping()
1100 struct mport_dev *md = priv->md; in rio_mport_alloc_dma() local
1127 struct mport_dev *md = priv->md; in rio_mport_free_dma() local
1214 map->md = md; in rio_mport_create_inbound_mapping()
1946 md = priv->md; in mport_cdev_release_dma()
[all …]
/drivers/dma/
uniphier-mdmac.c
114 sg = &md->sgl[md->sg_cur]; in uniphier_mdmac_handle()
150 if (md) in uniphier_mdmac_start()
203 md = mc->md; in uniphier_mdmac_interrupt()
204 if (!md) in uniphier_mdmac_interrupt()
209 if (md->sg_cur >= md->sg_len) { in uniphier_mdmac_interrupt()
212 if (!md) in uniphier_mdmac_interrupt()
241 md = kzalloc(sizeof(*md), GFP_NOWAIT); in uniphier_mdmac_prep_slave_sg()
242 if (!md) in uniphier_mdmac_prep_slave_sg()
304 if (mc->md && mc->md->vd.tx.cookie == cookie) { in uniphier_mdmac_tx_status()
308 md = mc->md; in uniphier_mdmac_tx_status()
[all …]
milbeaut-hdmac.c
122 sg = &md->sgl[md->sg_cur]; in milbeaut_chan_start()
167 if (md) in milbeaut_hdmac_start()
187 md = mc->md; in milbeaut_hdmac_interrupt()
188 if (!md) in milbeaut_hdmac_interrupt()
193 if (md->sg_cur >= md->sg_len) { in milbeaut_hdmac_interrupt()
268 md = kzalloc(sizeof(*md), GFP_NOWAIT); in milbeaut_hdmac_prep_slave_sg()
269 if (!md) in milbeaut_hdmac_prep_slave_sg()
345 if (mc->md && mc->md->vd.tx.cookie == cookie) { in milbeaut_hdmac_tx_status()
349 md = mc->md; in milbeaut_hdmac_tx_status()
350 sg = &md->sgl[md->sg_cur]; in milbeaut_hdmac_tx_status()
[all …]
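
The uniphier-mdmac.c and milbeaut-hdmac.c matches share the same descriptor layout: a scatterlist (sgl), a cursor (sg_cur) and a count (sg_len), with the interrupt path advancing the cursor until it reaches sg_len. A rough userspace model of that cursor follows; the types and segment sizes are invented, not the drivers' own.

/* Illustrative model of the sg_cur/sg_len cursor seen in
 * uniphier-mdmac.c and milbeaut-hdmac.c; not the driver code. */
#include <stddef.h>
#include <stdio.h>

struct segment { size_t len; };

struct xfer_desc {
	struct segment sgl[4];   /* scatterlist segments */
	unsigned int sg_len;     /* number of valid segments */
	unsigned int sg_cur;     /* next segment to program */
};

/* Return the next segment to start, or NULL once the list is done. */
static struct segment *next_segment(struct xfer_desc *md)
{
	if (md->sg_cur >= md->sg_len)
		return NULL;
	return &md->sgl[md->sg_cur++];
}

int main(void)
{
	struct xfer_desc md = { .sgl = { {512}, {1024} }, .sg_len = 2 };
	struct segment *sg;

	while ((sg = next_segment(&md)))
		printf("program segment of %zu bytes\n", sg->len);
	return 0;
}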
milbeaut-xdmac.c
100 mc->md = NULL; in milbeaut_xdmac_next_desc()
108 return mc->md; in milbeaut_xdmac_next_desc()
121 val = md->src; in milbeaut_chan_start()
124 val = md->dst; in milbeaut_chan_start()
155 if (md) in milbeaut_xdmac_start()
171 md = mc->md; in milbeaut_xdmac_interrupt()
172 if (!md) in milbeaut_xdmac_interrupt()
195 md = kzalloc(sizeof(*md), GFP_NOWAIT); in milbeaut_xdmac_prep_memcpy()
196 if (!md) in milbeaut_xdmac_prep_memcpy()
199 md->len = len; in milbeaut_xdmac_prep_memcpy()
[all …]
/drivers/firmware/efi/
efi-init.c
41 efi_memory_desc_t *md; in efi_to_phys() local
43 for_each_efi_memory_desc(md) { in efi_to_phys()
46 if (md->virt_addr == 0) in efi_to_phys()
49 if (md->virt_addr <= addr && in efi_to_phys()
50 (addr - md->virt_addr) < (md->num_pages << EFI_PAGE_SHIFT)) in efi_to_phys()
51 return md->phys_addr + addr - md->virt_addr; in efi_to_phys()
138 switch (md->type) { in is_usable_memory()
160 efi_memory_desc_t *md; in reserve_regions() local
175 paddr = md->phys_addr; in reserve_regions()
176 npages = md->num_pages; in reserve_regions()
[all …]
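
The efi_to_phys() matches show the usual descriptor walk: skip entries with no virtual mapping, then translate an address that falls inside a descriptor by its offset from virt_addr. A self-contained sketch of that walk follows; the descriptor table is made-up sample data, and EFI_PAGE_SHIFT is 12 because EFI memory map pages are 4 KiB.

/* Sketch of the virt->phys walk seen in efi_to_phys(); the
 * descriptors below are invented sample data. */
#include <stdint.h>
#include <stdio.h>

#define EFI_PAGE_SHIFT 12  /* EFI memory map pages are 4 KiB */

struct efi_md {
	uint64_t phys_addr;
	uint64_t virt_addr;
	uint64_t num_pages;
};

static uint64_t efi_to_phys(const struct efi_md *map, int n, uint64_t addr)
{
	for (int i = 0; i < n; i++) {
		const struct efi_md *md = &map[i];

		if (md->virt_addr == 0)      /* no virtual mapping installed */
			continue;
		if (md->virt_addr <= addr &&
		    (addr - md->virt_addr) < (md->num_pages << EFI_PAGE_SHIFT))
			return md->phys_addr + addr - md->virt_addr;
	}
	return addr;  /* this sketch falls back to identity if nothing matches */
}

int main(void)
{
	struct efi_md map[] = {
		{ .phys_addr = 0x80000000, .virt_addr = 0xffff0000, .num_pages = 16 },
	};

	printf("0x%llx\n",
	       (unsigned long long)efi_to_phys(map, 1, 0xffff1000));
	return 0;
}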
memattr.c
76 efi_memory_desc_t *md; in entry_is_valid() local
100 for_each_efi_memory_desc(md) { in entry_is_valid()
101 u64 md_paddr = md->phys_addr; in entry_is_valid()
106 if (md->virt_addr == 0 && md->phys_addr != 0) { in entry_is_valid()
123 if (md->type != in->type) { in entry_is_valid()
178 efi_memory_desc_t md; in efi_memattr_apply_permissions() local
184 &md); in efi_memattr_apply_permissions()
185 size = md.num_pages << EFI_PAGE_SHIFT; in efi_memattr_apply_permissions()
188 valid ? "" : "!", md.phys_addr, in efi_memattr_apply_permissions()
189 md.phys_addr + size - 1, in efi_memattr_apply_permissions()
[all …]
/drivers/mmc/core/
block.c
237 if (md && !kref_get_unless_zero(&md->kref)) in mmc_blk_get()
241 return md; in mmc_blk_get()
416 if (md) { in mmc_blk_open()
896 if (!md) in mmc_blk_alternative_gpt_sector()
2607 md = kzalloc(sizeof(*md), GFP_KERNEL); in mmc_blk_alloc_req()
2648 md->queue.blkdata = md; in mmc_blk_alloc_req()
2655 md->disk->private_data = md; in mmc_blk_alloc_req()
2884 struct mmc_blk_data *md = rpmb->md; in mmc_route_rpmb_frames() local
3017 rpmb->md = md; in mmc_blk_alloc_rpmb_part()
3378 if (md->part_curr != md->part_type) { in mmc_blk_remove()
[all …]
/drivers/video/fbdev/matrox/
matroxfb_maven.c
490 g = maven_compute_gamma(md); in maven_init_TVdata()
770 maven_init_TVdata(md, m); in maven_compute_timming()
1127 maven_set_reg(md->client, 0x93, maven_compute_deflicker(md)); in maven_set_control()
1165 return maven_resync(md); in maven_out_start()
1207 md->primary_head = minfo; in maven_init_client()
1208 md->client = clnt; in maven_init_client()
1212 minfo->outputs[1].data = md; in maven_init_client()
1216 md->version = MGATVO_B; in maven_init_client()
1219 md->version = MGATVO_C; in maven_init_client()
1238 if (md->primary_head) { in maven_shutdown_client()
[all …]
/drivers/firmware/efi/libstub/
randomalloc.c
25 if (md->type != EFI_CONVENTIONAL_MEMORY) in get_entry_num_slots()
28 if (md->attribute & EFI_MEMORY_HOT_PLUGGABLE) in get_entry_num_slots()
32 (md->attribute & EFI_MEMORY_SP)) in get_entry_num_slots()
35 region_end = min(md->phys_addr + md->num_pages * EFI_PAGE_SIZE - 1, in get_entry_num_slots()
55 #define MD_NUM_SLOTS(md) ((md)->virt_addr) argument
86 efi_memory_desc_t *md = (void *)map->map + map_offset; in efi_random_alloc() local
91 MD_NUM_SLOTS(md) = slots; in efi_random_alloc()
93 if (md->attribute & EFI_MEMORY_MORE_RELIABLE) in efi_random_alloc()
122 !(md->attribute & EFI_MEMORY_MORE_RELIABLE)) in efi_random_alloc()
125 if (target_slot >= MD_NUM_SLOTS(md)) { in efi_random_alloc()
[all …]
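
get_entry_num_slots() restricts itself to EFI_CONVENTIONAL_MEMORY, skips hot-pluggable and specific-purpose regions, and the per-descriptor slot count is stashed in the otherwise unused virt_addr field via MD_NUM_SLOTS(). The slot arithmetic itself is truncated out of these results; the sketch below counts aligned positions in a region in one plausible way, purely as an assumption.

/* Hypothetical slot counting for a random-placement allocator.
 * The formula is an assumption for illustration; the real
 * computation is not visible in the results above. */
#include <stdint.h>
#include <stdio.h>

/* align must be a power of two */
static uint64_t round_up(uint64_t x, uint64_t align)
{
	return (x + align - 1) & ~(align - 1);
}

/* Number of distinct aligned positions where an allocation of
 * 'size' bytes fits entirely inside [start, end]. */
static uint64_t num_slots(uint64_t start, uint64_t end,
			  uint64_t size, uint64_t align)
{
	uint64_t first = round_up(start, align);

	if (first > end || end - first + 1 < size)
		return 0;
	return (end - first + 1 - size) / align + 1;
}

int main(void)
{
	/* 1 MiB region, 64 KiB allocation, 4 KiB alignment */
	printf("%llu slots\n",
	       (unsigned long long)num_slots(0x100000, 0x1fffff,
					     0x10000, 0x1000));
	return 0;
}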
/drivers/w1/
w1.c
72 kfree(md); in w1_master_release()
209 mutex_lock(&md->mutex); in w1_master_attribute_show_name()
228 mutex_lock(&md->mutex); in w1_master_attribute_store_search()
229 md->search_count = tmp; in w1_master_attribute_store_search()
245 mutex_lock(&md->mutex); in w1_master_attribute_show_search()
264 mutex_lock(&md->mutex); in w1_master_attribute_store_pullup()
278 mutex_lock(&md->mutex); in w1_master_attribute_show_pullup()
290 mutex_lock(&md->mutex); in w1_master_attribute_show_pointer()
316 mutex_lock(&md->mutex); in w1_master_attribute_store_max_slave_count()
330 mutex_lock(&md->mutex); in w1_master_attribute_show_max_slave_count()
[all …]
