~fluidity-core/fluidity/adjoint

Viewing changes to tests/burgers_mms_steady_adjoint_gradient_src_revolve/burgers_mms_steady_adjoint_gradient_src_revolve.xml

Merge the fluidity_revolve branch.

<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE testproblem SYSTEM "regressiontest.dtd">
<testproblem>
  <name>Burgers Equation MMS</name>
  <owner userid="pef"/>
  <tags>burgers adjoint revolve</tags>
  <problem_definition length="medium" nprocs="1">
    <command_line>../../bin/optimality op_A.oml; ../../bin/optimality op_B.oml; ../../bin/optimality op_C.oml; burgers_equation mms_A.bml; burgers_equation mms_B.bml; burgers_equation mms_C.bml; burgers_equation mms_D.bml; burgers_equation mms_E.bml</command_line>
  </problem_definition>
  <variables>
    <variable name="gradient_conv" language="python">
from fluidity_tools import stat_parser
import glob
gradient_conv = [stat_parser(x)["time_integral_ad_gradient_error"]["convergence"][-1] for x in sorted(glob.glob("op_?.stat"))]
    </variable>
    <variable name="functional_value_conv" language="python">
from fluidity_tools import stat_parser
import glob
import math

functional_errors = [abs(stat_parser(x)["time_integral_ad"]["value"][-1] - 10.0) for x in sorted(glob.glob("mms_adjoint_?.stat"))]
functional_value_conv = [math.log(functional_errors[i]/functional_errors[i+1], 2) for i in range(0, len(functional_errors)-1)]
    </variable>
  </variables>
  <pass_tests>
    <test name="functional_convergence" language="python">
assert min(functional_value_conv) &gt; 1.9
    </test>
    <test name="gradient_convergence" language="python">
assert min(gradient_conv) &gt; 1.8 # more tolerant because it's stochastic
    </test>
  </pass_tests>
  <warn_tests>
  </warn_tests>
</testproblem>
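
For readers unfamiliar with the convention, the functional_value_conv variable above estimates an observed convergence order from the functional errors of successively refined runs: order_i = log2(error_i / error_{i+1}). Here is a minimal standalone sketch of the same arithmetic in Python, using made-up error values in place of the ones read from the mms_adjoint_?.stat files:

import math

# Made-up functional errors |J - 10.0| for runs A..E, each twice as resolved
# as the previous one (illustrative values only, not taken from the test).
functional_errors = [4.0e-2, 1.02e-2, 2.6e-3, 6.6e-4, 1.7e-4]

# Observed order between consecutive refinements; a second-order accurate
# functional should give values close to 2.
functional_value_conv = [math.log(functional_errors[i] / functional_errors[i + 1], 2)
                         for i in range(len(functional_errors) - 1)]

print(functional_value_conv)             # approximately [1.97, 1.97, 1.98, 1.96]
assert min(functional_value_conv) > 1.9  # the same threshold as the pass test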
 
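The gradient_conv variable reads the convergence column of time_integral_ad_gradient_error from the op_?.stat files produced by the optimality runs. For a correct adjoint gradient this order should approach 2 (the threshold is relaxed to 1.8 because the perturbation directions are random). Presumably the quantity behind it is the standard Taylor-remainder test: |J(m + h dm) - J(m) - h dJ(m)·dm| shrinks like h^2 when dJ is the exact gradient, but only like h when it is not. A deterministic toy version of that test, entirely separate from the optimality tool:

import math

# Toy Taylor-remainder test for a scalar functional J with known derivative dJ.
# If dJ is the exact gradient, the remainder |J(m + h*dm) - J(m) - h*dJ(m)*dm|
# is O(h^2), so halving h should roughly quarter it (observed order ~ 2).
def J(m):
    return math.sin(m) + 0.5 * m ** 2

def dJ(m):
    return math.cos(m) + m

m, dm = 0.7, 1.0
steps = [0.1 / 2 ** k for k in range(5)]
remainders = [abs(J(m + h * dm) - J(m) - h * dJ(m) * dm) for h in steps]

orders = [math.log(remainders[i] / remainders[i + 1], 2)
          for i in range(len(remainders) - 1)]

print(orders)            # approximately [1.95, 1.97, 1.99, 1.99]
assert min(orders) > 1.8

If dJ were wrong, the remainders would only drop at first order and the assertion would fail, which is the failure mode the gradient_convergence test guards against.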