9
/* Put the processor into a state where MTRRs can be safely set */
10
/*
 * NOTE(review): incomplete extraction — the bare numbers interleaved below
 * are original-file line numbers, and the gaps between them show that
 * several source lines are missing from this view (function braces, the
 * declaration of 'cr0', and the write_cr0()/wbinvd() sequence that
 * consumes it). Code is left byte-identical; only comments are added.
 */
void set_mtrr_prepare_save(struct set_mtrr_context *ctxt)
14
/* Disable interrupts locally */
15
/* Interrupt state is saved into the context and restored by set_mtrr_done() */
local_irq_save(ctxt->flags);
17
/* Only Intel-style MTRRs and Cyrix ARRs need the CR4/cache sequence below */
if (use_intel() || is_cpu(CYRIX)) {
19
/* Save value of CR4 and clear Page Global Enable (bit 7) */
21
ctxt->cr4val = read_cr4();
22
/*
 * NOTE(review): the (unsigned char) cast truncates the mask to 0x7f, so
 * this clears EVERY CR4 bit above bit 6 — not just PGE (bit 7). Later
 * kernels use ~X86_CR4_PGE here; looks like a latent bug — confirm
 * against the original source before relying on this.
 */
write_cr4(ctxt->cr4val & (unsigned char) ~(1 << 7));
25
/* Disable and flush caches. Note that wbinvd flushes the TLBs as
   a side effect (presumably — the tail of this comment and the
   wbinvd() call itself are missing from this extraction; verify) */
27
/* Set CD (bit 30) in the CR0 image to disable caching; the write_cr0()
 * that applies this value is among the lines missing from this view. */
cr0 = read_cr0() | 0x40000000;
34
/* Save the current MTRR default-type MSR so set_mtrr_done() can restore it */
rdmsr(MTRRdefType_MSR, ctxt->deftype_lo, ctxt->deftype_hi);
36
/* Cyrix ARRs - everything else were excluded at the top */
37
/* Save CCR3 so set_mtrr_done() can restore it */
ctxt->ccr3 = getCx86(CX86_CCR3);
41
/*
 * NOTE(review): incomplete extraction — the function braces, the
 * if (use_intel()) branch head, and the continuation argument of the
 * mtrr_wrmsr() call below are missing from this view. Code is left
 * byte-identical; only comments are added.
 */
void set_mtrr_cache_disable(struct set_mtrr_context *ctxt)
44
/* Disable MTRRs, and set the default type to uncached */
45
/* Masking deftype_lo with 0xf300 clears the low default-type byte and
 * the MTRR enable bits (presumably bits 10-11 — verify bit layout
 * against the Intel SDM); the trailing argument of this call is on a
 * source line missing from this extraction. */
mtrr_wrmsr(MTRRdefType_MSR, ctxt->deftype_lo & 0xf300UL,
47
else if (is_cpu(CYRIX))
48
/* Cyrix ARRs - everything else were excluded at the top */
49
/* Keep the low nibble of the saved CCR3 and set bit 4 (0x10) — this
 * looks like the Cyrix MAPEN enable; confirm against Cyrix docs */
setCx86(CX86_CCR3, (ctxt->ccr3 & 0x0f) | 0x10);
52
/* Restore the processor after a set_mtrr_prepare */
53
/*
 * NOTE(review): incomplete extraction — function braces, the cache/TLB
 * flush call referenced by the comment below, and the if (use_intel())
 * guard around the mtrr_wrmsr() are missing from this view. Code is
 * left byte-identical; only comments are added.
 * Undoes set_mtrr_prepare_save(): restores MTRR/ARR state, re-enables
 * caching, restores CR4, and restores the saved interrupt state.
 */
void set_mtrr_done(struct set_mtrr_context *ctxt)
55
if (use_intel() || is_cpu(CYRIX)) {
57
/* Flush caches and TLBs */
60
/* Restore MTRRdefType */
62
/* Intel (P6) standard MTRRs */
63
/* Write back the default-type MSR exactly as saved by set_mtrr_prepare_save() */
mtrr_wrmsr(MTRRdefType_MSR, ctxt->deftype_lo, ctxt->deftype_hi);
65
/* Cyrix ARRs - everything else was excluded at the top */
66
/* Restore the CCR3 value saved by set_mtrr_prepare_save() */
setCx86(CX86_CCR3, ctxt->ccr3);
69
/* Clear CD (bit 30) in CR0 to re-enable caching (0xbfffffff == ~(1<<30)) */
write_cr0(read_cr0() & 0xbfffffff);
71
/* Restore value of CR4 */
73
write_cr4(ctxt->cr4val);
75
/* Re-enable interrupts locally (if enabled previously) */
76
local_irq_restore(ctxt->flags);