Data Analysis Mathematics Linear Algebra Statistics
New Version 8.1!

Supports .NET 6.0. Try it for free with our fully functional 60-day trial version. QuickStart Samples

# Nonlinear Systems QuickStart Sample (IronPython)

Illustrates the use of the NewtonRaphsonSystemSolver and DoglegSystemSolver classes for solving systems of nonlinear equations in IronPython.

```python
import numerics

from math import exp, sin, cos

from System import Array, Func

# The equation solver classes reside in the
# Extreme.Mathematics.EquationSolvers namespace.
from Extreme.Mathematics.EquationSolvers import *
# Function delegates reside in the Extreme.Mathematics
# namespace.
from Extreme.Mathematics import *
# Vectors reside in the Extreme.Mathematics.LinearAlgebra
# namespace.
from Extreme.Mathematics.LinearAlgebra import *

#/ Illustrates solving systems of non-linear equations using
#/ classes in the Extreme.Mathematics.EquationSolvers namespace
#/ of the Extreme Optimization Numerical Libraries for .NET.

#
# Target function
#

# The function we are trying to solve can be provided
# in one of two ways. The first is as an array of
# Func<Vector, double> delegates. Each delegate receives the
# vector of unknowns and returns the value of one equation
# at that point. The system solved here (with x = x[0], y = x[1]) is:
#   exp(x)*cos(y) - x^2 + y^2 = 0
#   exp(x)*sin(y) - 2xy = 0
f = Array[Func[Vector, float]]([ \
    lambda x: exp(x[0])*cos(x[1]) - x[0]*x[0] + x[1]*x[1], \
    lambda x: exp(x[0])*sin(x[1]) - 2*x[0]*x[1] ])
# We can also supply the Jacobian, which is the matrix of partial
# derivatives. We do so by providing the gradient of each target
# function as a delegate that takes a second argument:
# the vector that is to hold the return value. This avoids unnecessary
# creation of new Vector instances.
def df1(x, y):
    # Gradient of the first equation, f1(x,y) = exp(x)*cos(y) - x^2 + y^2,
    # evaluated at the point (x[0], x[1]). The partials are stored in the
    # supplied output vector y, which is also returned.
    # d f1 / dx = exp(x)*cos(y) - 2x
    y[0] = exp(x[0])*cos(x[1]) - 2*x[0]
    # d f1 / dy = -exp(x)*sin(y) + 2y
    y[1] = -exp(x[0])*sin(x[1]) + 2*x[1]
    return y
def df2(x, y):
    # Gradient of the second equation, f2(x,y) = exp(x)*sin(y) - 2xy,
    # evaluated at the point (x[0], x[1]). The partials are stored in the
    # supplied output vector y, which is also returned.
    # d f2 / dx = exp(x)*sin(y) - 2y
    y[0] = exp(x[0])*sin(x[1]) - 2*x[1]
    # d f2 / dy = exp(x)*cos(y) - 2x
    y[1] = exp(x[0])*cos(x[1]) - 2*x[0]
    return y

# Wrap the two gradient delegates in a .NET array. The order must match
# the order of the target functions in f above.
df = Array[Func[Vector,Vector,Vector]]([ df1, df2 ])
# The initial values are supplied as a vector:
initialGuess = Vector([0.5, 0.5])

#
# Newton-Raphson Method
#

# The Newton-Raphson method is implemented by
# the NewtonRaphsonSystemSolver class.
# It is constructed from the target functions, the Jacobian,
# and the starting point defined above.
solver = NewtonRaphsonSystemSolver(f, df, initialGuess)

# and call the Solve method to obtain the solution:
solution = solver.Solve()

print "N-dimensional Newton-Raphson Solver:"
print "exp(x)*cos(y) - x^2 + y^2 = 0"
print "exp(x)*sin(y) - 2xy = 0"
# NOTE(review): the .NET-style "{0:F2}" placeholder is not expanded by
# the print statement; the guess is printed after the literal text.
print "  Initial guess: {0:F2}", initialGuess
# The Status property indicates
# the result of running the algorithm.
print "  Status:", solver.Status
# The result is also available through the
# Result property.
print "  Solution:", solver.Result
# ValueTest.Error is the error measure of the function values
# at the solution (see the ValueTest settings further below).
print "  Function value:", solver.ValueTest.Error
# You can find out the estimated error of the result
# through the EstimatedError property:
print "  Estimated error:", solver.EstimatedError
print "  # iterations:", solver.IterationsNeeded
print "  # evaluations:", solver.EvaluationsNeeded

### Symbolic derivatives are currently not supported ###
## In .NET 4.0, you can use Automatic Differentiation to compute the Jacobian.
## To do so, set the target functions using the SetSymbolicTargetFunctions
## method:
#solver = NewtonRaphsonSystemSolver()
#solver.InitialGuess = initialGuess
#
#solver.SetSymbolicTargetFunctions( \
#    lambda x: exp(x) * cos(x) - x * x + x * x, \
#    lambda x: exp(x) * sin(x) - 2 * x * x)
#
#solution = solver.Solve()
#print "Using Automatic Differentiation:"
#print "  Solution:", solver.Result
#print "  Function value:", solver.ValueTest.Error
#print "  # iterations:", solver.IterationsNeeded

# When you don't have the Jacobian for the target functions
# and you don't use Automatic Differentiation, the equation solver
# will use a numerical approximation.

#
# Controlling the process
#
print "Same with modified parameters:"
# You can set the maximum # of iterations:
# If the solution cannot be found in time, the
# Status will return a value of
# IterationStatus.IterationLimitExceeded
solver.MaxIterations = 10

# The ValueTest property returns the convergence
# test based on the function value. We can set
# its tolerance property:
solver.ValueTest.Tolerance = 1e-10
# Its Norm property determines how the error
# is calculated. Here, we choose the maximum
# of the function values:
solver.ValueTest.Norm = Algorithms.VectorConvergenceNorm.Maximum

# The SolutionTest property returns the test
# on the change in location of the solution.
solver.SolutionTest.Tolerance = 1e-8
# You can specify how convergence is to be tested
# through the ConvergenceCriterion property:
solver.SolutionTest.ConvergenceCriterion = ConvergenceCriterion.WithinRelativeTolerance

# Reset the starting point: Solve() was already run once above,
# so the solver would otherwise continue from its last result.
solver.InitialGuess = initialGuess
solution = solver.Solve()
print "  Status:", solver.Status
print "  Solution:", solver.Result
# The estimated error will be less than 5e-14
print "  Estimated error:", solver.SolutionTest.Error
print "  # iterations:", solver.IterationsNeeded
print "  # evaluations:", solver.EvaluationsNeeded

#
# Powell's dogleg method
#

# The dogleg method is more robust than Newton's method.
# It will converge often when Newton's method fails.
# It takes the same constructor arguments: target functions,
# Jacobian, and initial guess.
dogleg = DoglegSystemSolver(f, df, initialGuess)

# Unique to the dogleg method is the TrustRegionRadius property.
# Any step of the algorithm is not larger than this value.
# It is adjusted at each iteration.

# Call the Solve method to obtain the solution:
solution = dogleg.Solve()

print "Powell's Dogleg Solver:"
# NOTE(review): as above, the "{0:F2}" placeholder is printed literally,
# followed by the guess.
print "  Initial guess: {0:F2}", initialGuess
print "  Status:", dogleg.Status
print "  Solution:", dogleg.Result
print "  Estimated error:", dogleg.EstimatedError
print "  # iterations:", dogleg.IterationsNeeded
print "  # evaluations:", dogleg.EvaluationsNeeded

# The dogleg method can work without derivatives. Care is taken
# to keep the number of evaluations down to a minimum.
# Clearing JacobianFunction makes the solver fall back to a
# numerical approximation of the Jacobian.
dogleg.JacobianFunction = None
# Call the Solve method to obtain the solution:
solution = dogleg.Solve()

print "Powell's Dogleg Solver (no derivatives):"
print "  Initial guess: {0:F2}", initialGuess
print "  Status:", dogleg.Status
print "  Solution:", dogleg.Result
print "  Estimated error:", dogleg.EstimatedError
print "  # iterations:", dogleg.IterationsNeeded
print "  # evaluations:", dogleg.EvaluationsNeeded
print "  # evaluations:", dogleg.EvaluationsNeeded
```