"""
|
|
Author: P Shreyas Shetty
|
|
Implementation of Newton-Raphson method for solving equations of kind
|
|
f(x) = 0. It is an iterative method where solution is found by the expression
|
|
x[n+1] = x[n] + f(x[n])/f'(x[n])
|
|
If no solution exists, then either the solution will not be found when iteration
|
|
limit is reached or the gradient f'(x[n]) approaches zero. In both cases, exception
|
|
is raised. If iteration limit is reached, try increasing maxiter.
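
For illustration, one hand-worked step with f(x) = x**2 - 2 and x[0] = 1.5
(both chosen here only as an example):

    x[1] = 1.5 - (1.5**2 - 2) / (2 * 1.5) = 1.5 - 0.25 / 3.0 ≈ 1.41667

which is already close to sqrt(2) ≈ 1.41421.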
"""

import math as m


def calc_derivative(f, a, h=0.001):
    """
    Calculates the derivative at point a for function f using the central
    finite difference method
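
    A small illustrative doctest (the test function x**2 and the rounding are
    example choices):

    >>> round(calc_derivative(lambda x: x**2, 2), 5)
    4.0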
    """
    return (f(a + h) - f(a - h)) / (2 * h)


def newton_raphson(f, x0=0, maxiter=100, step=0.0001, maxerror=1e-6, logsteps=False):
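    """
    Finds a root of f(x) = 0 by Newton-Raphson iteration, starting from the
    initial guess x0 and using calc_derivative to approximate f'(x).

    A small illustrative doctest (the equation x**2 - 2, the starting guess and
    the 1e-5 tolerance are example choices):

    >>> root, error = newton_raphson(lambda x: x**2 - 2, x0=1.5)
    >>> abs(root - 2**0.5) < 1e-5
    True
    """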
    a = x0  # set the initial guess
    steps = [a]
    error = abs(f(a))
    f1 = lambda x: calc_derivative(f, x, h=step)  # noqa: E731  Derivative of f(x)
    for _ in range(maxiter):
        if f1(a) == 0:
            raise ValueError("No converging solution found")
        a = a - f(a) / f1(a)  # Calculate the next estimate
        if logsteps:
            steps.append(a)
        error = abs(f(a))  # Re-evaluate the error at the new estimate
        if error < maxerror:
            break
    else:
        raise ValueError("Iteration limit reached, no converging solution found")
    if logsteps:
        # If logsteps is true, also return the intermediate estimates
        return a, error, steps
    return a, error


if __name__ == "__main__":
    from matplotlib import pyplot as plt

    # Find a root of f(x) = tanh(x)**2 - exp(3*x) and plot |f(x)| at each step
    f = lambda x: m.tanh(x) ** 2 - m.exp(3 * x)  # noqa: E731
    solution, error, steps = newton_raphson(
        f, x0=10, maxiter=1000, step=1e-6, logsteps=True
    )
    plt.plot([abs(f(x)) for x in steps])
    plt.xlabel("step")
    plt.ylabel("error")
    plt.show()
    print(f"solution = {{{solution:f}}}, error = {{{error:f}}}")