Lines matching refs: context, in arch/sparc/mm/tsb.c (sparc64 TSB management)

124 spin_lock_irqsave(&mm->context.lock, flags); in flush_tsb_user()
127 base = (unsigned long) mm->context.tsb_block[MM_TSB_BASE].tsb; in flush_tsb_user()
128 nentries = mm->context.tsb_block[MM_TSB_BASE].tsb_nentries; in flush_tsb_user()
140 else if (mm->context.tsb_block[MM_TSB_HUGE].tsb) { in flush_tsb_user()
141 base = (unsigned long) mm->context.tsb_block[MM_TSB_HUGE].tsb; in flush_tsb_user()
142 nentries = mm->context.tsb_block[MM_TSB_HUGE].tsb_nentries; in flush_tsb_user()
149 spin_unlock_irqrestore(&mm->context.lock, flags); in flush_tsb_user()
157 spin_lock_irqsave(&mm->context.lock, flags); in flush_tsb_user_page()
160 base = (unsigned long) mm->context.tsb_block[MM_TSB_BASE].tsb; in flush_tsb_user_page()
161 nentries = mm->context.tsb_block[MM_TSB_BASE].tsb_nentries; in flush_tsb_user_page()
174 else if (mm->context.tsb_block[MM_TSB_HUGE].tsb) { in flush_tsb_user_page()
175 base = (unsigned long) mm->context.tsb_block[MM_TSB_HUGE].tsb; in flush_tsb_user_page()
176 nentries = mm->context.tsb_block[MM_TSB_HUGE].tsb_nentries; in flush_tsb_user_page()
183 spin_unlock_irqrestore(&mm->context.lock, flags); in flush_tsb_user_page()
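
The matches above all come from the two user TSB flush paths in the Linux kernel's sparc64 TSB code (arch/sparc/mm/tsb.c): flush_tsb_user() handles a batch of pending addresses and flush_tsb_user_page() a single address. Both follow the same pattern: take mm->context.lock with interrupts disabled, flush the base (8K page) TSB, flush the huge-page TSB only if one has ever been allocated, then drop the lock. A minimal sketch of that pattern follows; flush_entries() is a hypothetical stand-in for the kernel's real flush helpers, and the sketch assumes the sparc64 in-kernel build environment.

/* Minimal sketch of the locking pattern shared by flush_tsb_user()
 * and flush_tsb_user_page(); not the kernel's actual code.
 */
#include <linux/mm_types.h>
#include <linux/spinlock.h>

/* Hypothetical stand-in for the kernel's flush helpers. */
static void flush_entries(unsigned long base, unsigned long nentries)
{
        /* ... hash the pending vaddr(s) and clear matching TSB tags ... */
}

static void flush_tsb_user_sketch(struct mm_struct *mm)
{
        unsigned long base, nentries, flags;

        spin_lock_irqsave(&mm->context.lock, flags);

        /* The base (8K page) TSB always exists. */
        base = (unsigned long) mm->context.tsb_block[MM_TSB_BASE].tsb;
        nentries = mm->context.tsb_block[MM_TSB_BASE].tsb_nentries;
        flush_entries(base, nentries);

#if defined(CONFIG_HUGETLB_PAGE) || defined(CONFIG_TRANSPARENT_HUGEPAGE)
        /* The huge-page TSB is flushed only if it was ever allocated. */
        if (mm->context.tsb_block[MM_TSB_HUGE].tsb) {
                base = (unsigned long) mm->context.tsb_block[MM_TSB_HUGE].tsb;
                nentries = mm->context.tsb_block[MM_TSB_HUGE].tsb_nentries;
                flush_entries(base, nentries);
        }
#endif

        spin_unlock_irqrestore(&mm->context.lock, flags);
}
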
199 mm->context.tsb_block[tsb_idx].tsb_nentries = in setup_tsb_params()
216 tsb_paddr = __pa(mm->context.tsb_block[tsb_idx].tsb); in setup_tsb_params()
277 mm->context.tsb_block[tsb_idx].tsb_reg_val = tsb_reg; in setup_tsb_params()
278 mm->context.tsb_block[tsb_idx].tsb_map_vaddr = 0; in setup_tsb_params()
279 mm->context.tsb_block[tsb_idx].tsb_map_pte = 0; in setup_tsb_params()
285 mm->context.tsb_block[tsb_idx].tsb_reg_val = tsb_reg; in setup_tsb_params()
286 mm->context.tsb_block[tsb_idx].tsb_map_vaddr = base; in setup_tsb_params()
287 mm->context.tsb_block[tsb_idx].tsb_map_pte = tte; in setup_tsb_params()
292 struct hv_tsb_descr *hp = &mm->context.tsb_descr[tsb_idx]; in setup_tsb_params()
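
setup_tsb_params() records the geometry of a newly installed TSB in mm->context: the entry count, the value later written to the TSB configuration register, and either no kernel mapping at all (chips that can reference the TSB by physical address) or the virtual base and TTE used to map it; on sun4v it also fills in the hypervisor TSB descriptor. The sketch below condenses that bookkeeping; the derivation of tsb_reg, base and tte from the TSB size is elided and the register-value composition is simplified.

/* Condensed sketch of the mm->context bookkeeping in setup_tsb_params();
 * the size/register derivation is elided and the assignments simplified.
 */
#include <linux/mm_types.h>
#include <asm/page.h>           /* __pa() */
#include <asm/spitfire.h>       /* tlb_type */
#include <asm/tsb.h>            /* struct tsb */

static void setup_tsb_params_sketch(struct mm_struct *mm,
                                    unsigned long tsb_idx,
                                    unsigned long tsb_bytes)
{
        unsigned long tsb_reg = 0, base = 0, tte = 0;   /* computed in the real code */
        unsigned long tsb_paddr;

        mm->context.tsb_block[tsb_idx].tsb_nentries =
                tsb_bytes / sizeof(struct tsb);

        tsb_paddr = __pa(mm->context.tsb_block[tsb_idx].tsb);

        /* ... pick page size, TSB register bits and locked-TTE contents ... */

        if (tlb_type == cheetah_plus || tlb_type == hypervisor) {
                /* TSB is referenced by physical address: no mapping needed. */
                mm->context.tsb_block[tsb_idx].tsb_reg_val = tsb_reg | tsb_paddr;
                mm->context.tsb_block[tsb_idx].tsb_map_vaddr = 0;
                mm->context.tsb_block[tsb_idx].tsb_map_pte = 0;
        } else {
                /* Older chips map the TSB through a locked TLB entry. */
                mm->context.tsb_block[tsb_idx].tsb_reg_val = tsb_reg | base;
                mm->context.tsb_block[tsb_idx].tsb_map_vaddr = base;
                mm->context.tsb_block[tsb_idx].tsb_map_pte = tte;
        }

        if (tlb_type == hypervisor) {
                struct hv_tsb_descr *hp = &mm->context.tsb_descr[tsb_idx];

                hp->tsb_base = tsb_paddr;
                /* ... pgsz_idx, assoc, num_ttes, ctx_idx, pgsz_mask ... */
        }
}
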
432 if (mm->context.tsb_block[tsb_index].tsb == NULL && in tsb_grow()
443 if (mm->context.tsb_block[tsb_index].tsb != NULL) in tsb_grow()
444 mm->context.tsb_block[tsb_index].tsb_rss_limit = ~0UL; in tsb_grow()
473 spin_lock_irqsave(&mm->context.lock, flags); in tsb_grow()
475 old_tsb = mm->context.tsb_block[tsb_index].tsb; in tsb_grow()
477 (mm->context.tsb_block[tsb_index].tsb_reg_val & 0x7UL); in tsb_grow()
478 old_size = (mm->context.tsb_block[tsb_index].tsb_nentries * in tsb_grow()
487 (rss < mm->context.tsb_block[tsb_index].tsb_rss_limit))) { in tsb_grow()
488 spin_unlock_irqrestore(&mm->context.lock, flags); in tsb_grow()
494 mm->context.tsb_block[tsb_index].tsb_rss_limit = new_rss_limit; in tsb_grow()
514 mm->context.tsb_block[tsb_index].tsb = new_tsb; in tsb_grow()
517 spin_unlock_irqrestore(&mm->context.lock, flags); in tsb_grow()
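
tsb_grow() allocates the larger TSB outside the lock and only then takes mm->context.lock to swap it in; the matches at source lines 432-444 are the allocation-failure path, which pins tsb_rss_limit to ~0UL so that no further growth is attempted for an mm that already has a TSB. The sketch below covers only the locked swap, including the re-check for a concurrent resize; entry copying, TSB register reprogramming and freeing of the old table are elided.

/* Skeleton of the locked swap in tsb_grow(); allocation, the copy of
 * old entries, register reprogramming and freeing of the old table
 * are all elided.
 */
#include <linux/mm_types.h>
#include <linux/spinlock.h>
#include <asm/tsb.h>

static void tsb_swap_sketch(struct mm_struct *mm, unsigned long tsb_index,
                            unsigned long rss, struct tsb *new_tsb,
                            unsigned long new_rss_limit)
{
        unsigned long flags;
        struct tsb *old_tsb;

        spin_lock_irqsave(&mm->context.lock, flags);

        old_tsb = mm->context.tsb_block[tsb_index].tsb;
        /* The real code also reads the old cache index
         * (tsb_reg_val & 0x7UL) and the old size (tsb_nentries *
         * sizeof(struct tsb)) here to drive the entry copy.
         */

        /* Someone else may have grown the TSB, or the RSS may have
         * dropped back under the limit, while new_tsb was being
         * allocated; if so, back out (the real code frees the unused
         * new_tsb at this point).
         */
        if (old_tsb &&
            rss < mm->context.tsb_block[tsb_index].tsb_rss_limit) {
                spin_unlock_irqrestore(&mm->context.lock, flags);
                return;
        }

        mm->context.tsb_block[tsb_index].tsb_rss_limit = new_rss_limit;

        /* ... copy valid entries from old_tsb into new_tsb ... */

        mm->context.tsb_block[tsb_index].tsb = new_tsb;

        spin_unlock_irqrestore(&mm->context.lock, flags);
}
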
545 spin_lock_init(&mm->context.lock); in init_new_context()
547 mm->context.sparc64_ctx_val = 0UL; in init_new_context()
549 mm->context.tag_store = NULL; in init_new_context()
550 spin_lock_init(&mm->context.tag_lock); in init_new_context()
557 saved_hugetlb_pte_count = mm->context.hugetlb_pte_count; in init_new_context()
558 saved_thp_pte_count = mm->context.thp_pte_count; in init_new_context()
559 mm->context.hugetlb_pte_count = 0; in init_new_context()
560 mm->context.thp_pte_count = 0; in init_new_context()
570 mm->context.tsb_block[i].tsb = NULL; in init_new_context()
584 if (unlikely(!mm->context.tsb_block[MM_TSB_BASE].tsb)) in init_new_context()
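
init_new_context() runs for every new mm, including fork(), where copy_mm() has already duplicated the parent's mm_context. It therefore re-initializes the locks, clears the context value, the ADI tag store pointer and the huge-page counters, and zeroes the TSB pointers before a fresh base TSB is grown. A sketch of that initialization follows; the kernel first saves the copied huge-page counters and later subtracts them from get_mm_rss() when sizing the base TSB via tsb_grow(), which is reduced to a comment here.

/* Sketch of the mm->context initialization in init_new_context();
 * the initial tsb_grow(mm, MM_TSB_BASE, ...) call is elided.
 */
#include <linux/errno.h>
#include <linux/mm_types.h>
#include <linux/spinlock.h>

static int init_new_context_sketch(struct mm_struct *mm)
{
        unsigned long i;

        spin_lock_init(&mm->context.lock);
        mm->context.sparc64_ctx_val = 0UL;      /* no hardware context yet */

        /* ADI tag storage is allocated lazily on first use. */
        mm->context.tag_store = NULL;
        spin_lock_init(&mm->context.tag_lock);

#if defined(CONFIG_HUGETLB_PAGE) || defined(CONFIG_TRANSPARENT_HUGEPAGE)
        /* fork() copied the parent's counters; the kernel saves them
         * (to exclude huge mappings from the RSS used for TSB sizing)
         * and resets them, since page copying re-increments them.
         */
        mm->context.hugetlb_pte_count = 0;
        mm->context.thp_pte_count = 0;
#endif

        /* copy_mm() also copied the parent's TSB pointers; zero them so
         * tsb_grow() does not try to free a table this mm never owned.
         */
        for (i = 0; i < MM_NUM_TSBS; i++)
                mm->context.tsb_block[i].tsb = NULL;

        /* ... tsb_grow(mm, MM_TSB_BASE, ...) allocates the base TSB ... */

        if (unlikely(!mm->context.tsb_block[MM_TSB_BASE].tsb))
                return -ENOMEM;

        return 0;
}
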
607 tsb_destroy_one(&mm->context.tsb_block[i]); in destroy_context()
611 if (CTX_VALID(mm->context)) { in destroy_context()
612 unsigned long nr = CTX_NRBITS(mm->context); in destroy_context()
619 if (mm->context.tag_store) { in destroy_context()
624 tag_desc = mm->context.tag_store; in destroy_context()
632 kfree(mm->context.tag_store); in destroy_context()
633 mm->context.tag_store = NULL; in destroy_context()
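
destroy_context() is the inverse: it destroys each TSB via tsb_destroy_one(), returns the mm's hardware context number to the global allocator when CTX_VALID() says one was assigned, and finally frees the ADI tag store. The teardown sketch below compresses the per-descriptor tag freeing into a comment; tsb_destroy_one(), ctx_alloc_lock and mmu_context_bmap are the kernel's own file-local and global symbols, so the sketch only builds in that context.

/* Teardown sketch matching the destroy_context() references above;
 * the loop that kfree()s each tag descriptor's buffer is elided.
 */
#include <linux/mm_types.h>
#include <linux/slab.h>
#include <asm/mmu_context.h>    /* context allocator declarations */

static void destroy_context_sketch(struct mm_struct *mm)
{
        unsigned long flags, i;

        /* Free both the base and (if allocated) huge-page TSBs. */
        for (i = 0; i < MM_NUM_TSBS; i++)
                tsb_destroy_one(&mm->context.tsb_block[i]);

        /* Give the hardware context number back to the allocator. */
        spin_lock_irqsave(&ctx_alloc_lock, flags);
        if (CTX_VALID(mm->context)) {
                unsigned long nr = CTX_NRBITS(mm->context);

                mmu_context_bmap[nr >> 6] &= ~(1UL << (nr & 63));
        }
        spin_unlock_irqrestore(&ctx_alloc_lock, flags);

        /* ADI tag storage, if it was ever allocated, goes last. */
        if (mm->context.tag_store) {
                /* ... kfree() each descriptor's tag buffer ... */
                kfree(mm->context.tag_store);
                mm->context.tag_store = NULL;
        }
}
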