Nonlinear Systems QuickStart Sample (Visual Basic)

Illustrates the use of the NewtonRaphsonSystemSolver and DoglegSystemSolver classes for solving systems of nonlinear equations in Visual Basic.


Option Infer On

' The equation solver classes reside in the
' Extreme.Mathematics.EquationSolvers namespace.
Imports Extreme.Mathematics.EquationSolvers
' Vectors and Function delegates reside in the Extreme.Mathematics
' namespace.
Imports Extreme.Mathematics
Imports Extreme.Mathematics.Algorithms

Namespace Extreme.Numerics.QuickStart.VB
    ' Illustrates solving systems of non-linear equations using 
    ' classes in the Extreme.Mathematics.EquationSolvers namespace 
    ' of the Extreme Optimization Numerical Libraries for .NET.
    Module NonlinearSystems

        Sub Main()
            '
            ' Target function
            '

            ' The functions we are trying to solve can be provided
            ' in one of two ways. The first is as an array of
            ' Func(Of Vector(Of Double), Double) delegates. See the end of this
            ' sample for definitions of the methods that are referenced here.
            Dim f As Func(Of Vector(Of Double), Double)() = {AddressOf f1, AddressOf f2}
            ' We can also supply the Jacobian, which is the matrix of partial
            ' derivatives. We do so by providing the gradient of each target
            ' function as a Func(Of Vector(Of Double), Vector(Of Double), Vector(Of Double)) delegate.
            '
            ' Each of these delegates takes a second argument:
            ' the vector that is to hold the return value. This avoids the
            ' unnecessary creation of new Vector instances.
            Dim df As Func(Of Vector(Of Double), Vector(Of Double), Vector(Of Double))() =
                {AddressOf df1, AddressOf df2}

            ' The initial values are supplied as a vector:
            Dim initialGuess = Vector.Create(0.5, 0.5)

            '
            ' Newton-Raphson Method
            '

            ' The Newton-Raphson method is implemented by
            ' the NewtonRaphsonSystemSolver class.
            Dim solver As New NewtonRaphsonSystemSolver(f, df, initialGuess)

            ' and call the Solve method to obtain the solution:
            Dim solution = solver.Solve()

            Console.WriteLine("N-dimensional Newton-Raphson Solver:")
            Console.WriteLine("exp(x)*cos(y) - x^2 + y^2 = 0")
            Console.WriteLine("exp(x)*sin(y) - 2xy = 0")
            Console.WriteLine("  Initial guess: {0:F2}", initialGuess)
            ' The Status property indicates
            ' the result of running the algorithm.
            Console.WriteLine("  Status: {0}", solver.Status)
            ' The result is also available through the
            ' Result property.
            Console.WriteLine("  Solution: {0}", solver.Result)
            Console.WriteLine("  Function value: {0}", solver.ValueTest.Error)
            ' You can find out the estimated error of the result
            ' through the EstimatedError property:
            Console.WriteLine("  Estimated error: {0}", solver.EstimatedError)
            Console.WriteLine("  # iterations: {0}", solver.IterationsNeeded)
            Console.WriteLine("  # evaluations: {0}", solver.EvaluationsNeeded)

            ' When you don't have the Jacobian for the target functions,
            ' the equation solver will use a numerical approximation.
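            ' A minimal sketch, not part of the original sample: it assumes
            ' NewtonRaphsonSystemSolver exposes the same JacobianFunction
            ' property that the dogleg solver uses below. Clearing it makes
            ' the solver approximate the Jacobian numerically.
            Dim numericalSolver As New NewtonRaphsonSystemSolver(f, df, initialGuess)
            numericalSolver.JacobianFunction = Nothing
            solution = numericalSolver.Solve()
            Console.WriteLine("Newton-Raphson with numerical Jacobian (sketch):")
            Console.WriteLine("  Solution: {0}", numericalSolver.Result)
            Console.WriteLine("  # evaluations: {0}", numericalSolver.EvaluationsNeeded)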

            '
            ' Controlling the process
            '
            Console.WriteLine("Same with modified parameters:")
            ' You can set the maximum # of iterations:
            ' If the solution cannot be found within this limit,
            ' the Status property will return a value of
            ' IterationStatus.IterationLimitExceeded
            solver.MaxIterations = 10

            ' The ValueTest property returns the convergence
            ' test based on the function value. We can set
            ' its Tolerance property:
            solver.ValueTest.Tolerance = 0.0000000001
            ' Its Norm property determines how the error
            ' is calculated. Here, we choose the maximum
            ' of the function values:
            solver.ValueTest.Norm = VectorConvergenceNorm.Maximum

            ' The SolutionTest property returns the test
            ' on the change in location of the solution.
            solver.SolutionTest.Tolerance = 0.0000000001
            ' You can specify how convergence is to be tested
            ' through the ConvergenceCriterion property:
            solver.SolutionTest.ConvergenceCriterion = ConvergenceCriterion.WithinRelativeTolerance

            solver.InitialGuess = initialGuess
            solution = solver.Solve()
            Console.WriteLine("  Status: {0}", solver.Status)
            Console.WriteLine("  Solution: {0}", solver.Result)
            ' The estimated error will be less than 5e-14
            Console.WriteLine("  Estimated error: {0}", solver.SolutionTest.Error)
            Console.WriteLine("  # iterations: {0}", solver.IterationsNeeded)
            Console.WriteLine("  # evaluations: {0}", solver.EvaluationsNeeded)

            '
            ' Powell's dogleg method
            '

            ' The dogleg method is more robust than Newton's method.
            ' It will often converge when Newton's method fails.
            Dim dogleg As New DoglegSystemSolver(f, df, initialGuess)

            ' Unique to the dogleg method is the TrustRegionRadius property.
            ' No step of the algorithm will be larger than this value.
            ' The radius is adjusted at each iteration.
            dogleg.TrustRegionRadius = 0.5

            ' Call the Solve method to obtain the solution:
            solution = dogleg.Solve()

            Console.WriteLine("Powell's Dogleg Solver:")
            Console.WriteLine("  Initial guess: {0:F2}", initialGuess)
            Console.WriteLine("  Status: {0}", dogleg.Status)
            Console.WriteLine("  Solution: {0}", dogleg.Result)
            Console.WriteLine("  Estimated error: {0}", dogleg.EstimatedError)
            Console.WriteLine("  # iterations: {0}", dogleg.IterationsNeeded)
            Console.WriteLine("  # evaluations: {0}", dogleg.EvaluationsNeeded)

            ' The dogleg method can work without derivatives. Care is taken
            ' to keep the number of evaluations down to a minimum.
            dogleg.JacobianFunction = Nothing
            ' Call the Solve method to obtain the solution:
            solution = dogleg.Solve()

            Console.WriteLine("Powell's Dogleg Solver (no derivatives):")
            Console.WriteLine("  Initial guess: {0:F2}", initialGuess)
            Console.WriteLine("  Status: {0}", dogleg.Status)
            Console.WriteLine("  Solution: {0}", dogleg.Result)
            Console.WriteLine("  Estimated error: {0}", dogleg.EstimatedError)
            Console.WriteLine("  # iterations: {0}", dogleg.IterationsNeeded)
            Console.WriteLine("  # evaluations: {0}", dogleg.EvaluationsNeeded)

            Console.Write("Press Enter key to exit...")
            Console.ReadLine()
        End Sub

        ' The target functions of the system.
        Function f1(ByVal x As Vector(Of Double)) As Double
            Return Math.Exp(x(0)) * Math.Cos(x(1)) - x(0) * x(0) + x(1) * x(1)
        End Function

        Function f2(ByVal x As Vector(Of Double)) As Double
            Return Math.Exp(x(0)) * Math.Sin(x(1)) - 2 * x(0) * x(1)
        End Function

        ' Gradients of the target functions.
        Function df1(ByVal x As Vector(Of Double), ByVal df As Vector(Of Double)) As Vector(Of Double)
            df(0) = Math.Exp(x(0)) * Math.Cos(x(1)) - 2 * x(0)
            df(1) = -Math.Exp(x(0)) * Math.Sin(x(1)) + 2 * x(1)
            Return df
        End Function

        Function df2(ByVal x As Vector(Of Double), ByVal df As Vector(Of Double)) As Vector(Of Double)
            df(0) = Math.Exp(x(0)) * Math.Sin(x(1)) - 2 * x(1)
            df(1) = Math.Exp(x(0)) * Math.Cos(x(1)) - 2 * x(0)
            Return df
        End Function

    End Module

End Namespace