Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
I
is2a4-ano-2024
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Package registry
Model registry
Operate
Environments
Terraform modules
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
Theo Danjoux
is2a4-ano-2024
Commits
8b24cd84
Commit
8b24cd84
authored
6 months ago
by
Theo Danjoux
Browse files
Options
Downloads
Patches
Plain Diff
hey
parent
1b438722
Branches
Branches containing commit
No related tags found
No related merge requests found
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
tp2/graphe_R2_dans_R2.py
+61
-4
61 additions, 4 deletions
tp2/graphe_R2_dans_R2.py
tp2/tp2.py
+108
-0
108 additions, 0 deletions
tp2/tp2.py
with
169 additions
and
4 deletions
tp2/graphe_R2_dans_R2.py
+
61
−
4
View file @
8b24cd84
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 27 10:21:53 2024
@author: pmontala
"""
#**************************************
# TP2 RESEAU DE NEURONES
# 27/11/2024
#**************************************
import
autograd
as
ag
import
autograd.numpy
as
np
import
numpy
as
np
import
scipy.linalg
as
nla
import
matplotlib.pyplot
as
plt
#!/bin/python3
from
mpl_toolkits.mplot3d
import
axes3d
...
...
@@ -6,10 +25,10 @@ import autograd as ag
import
autograd.numpy
as
np
def f(a, b):
    """First equation of the system: f(a, b) = a^3*b - 3*a^2*(b - 1) + b^2 - 1.

    Fix: the block contained two consecutive `return` statements (the second
    was unreachable dead code). The definition kept here is the one whose
    partial derivatives match the analytic Jacobian Jac_F defined below
    (df/da = 3a^2*b - 6ab + 6a, df/db = a^3 - 3a^2 + 2b).
    """
    return a**3 * b - 3 * (a**2) * (b - 1) + b**2 - 1
def g(a, b):
    """Second equation of the system: g(a, b) = a^2 * b^2 - 2.

    Fix: the block contained two consecutive `return` statements (the second
    was unreachable dead code). The definition kept here matches the second
    row of the analytic Jacobian Jac_F below (dg/da = 2ab^2, dg/db = 2a^2*b).
    """
    return a**2 * b**2 - 2
# Large square figure with a single 3-D axes for the surface/contour plots.
fig = plt.figure(figsize=(20, 20))
ax = fig.add_subplot(111, projection='3d')
...
...
@@ -18,8 +37,14 @@ ax.set_xlabel('$a$', labelpad=20)
ax.set_ylabel('$b$', labelpad=20)
ax.set_zlabel('$f(a,b)$ et $g(a,b)$', labelpad=20)
# Initial plotting window for (a, b).
aplot = np.arange(-2, 1.7, 0.1)
bplot = np.arange(-1.5, 0.8, 0.1)
# Enlarge the window to locate the intersection points
# between the blue and the red curves
# (look at the graph from directly above).
# NOTE: these overwrite the two ranges above; only the wider window is used.
aplot = np.arange(-4, 0, 0.1)
bplot = np.arange(-1, 3, 0.1)
# Roots read off the graph:  a : -0.7, -2.9
#                            b :  1.9,  0.5
##### Graph of f #####
A, B = np.meshgrid(aplot, bplot)
...
...
@@ -39,4 +64,36 @@ ax.contour(A, B, Z, 0, colors="red", levels=np.array([0], dtype=np.float64), li
plt.show()
# In the terminal: python3 tp2.py
# Coordinates of the two roots observed graphically
#        a     b
u = np.array ([-0.7, 1.9])
# NOTE(review): the assignment below immediately overwrites the one above;
# only the second starting point (-2.9, 0.5) is actually kept.
u = np.array ([-2.9, 0.5])
"""
# F
def F(u) :
    a, b = u
    return np.array ( [ f(a,b) , g(a,b) ])
"""
# NOTE(review): autograd's `jacobian` expects a *function*, not an array --
# this was presumably meant to be ag.jacobian(F) with F (above) uncommented.
# As written, J_F is never used; the hand-written Jac_F below is used instead.
J_F = ag.jacobian (u)
# Analytic Jacobian of the system (f, g).
def Jac_F(u):
    """Return the 2x2 Jacobian matrix of (f, g) evaluated at u = (a, b).

    Row 0 is the gradient of f, row 1 the gradient of g; columns are the
    derivatives with respect to a and b.
    """
    a, b = u
    df_da = 3 * a**2 * b - 6 * a * b + 6 * a
    df_db = a**3 - 3 * a**2 + 2 * b
    dg_da = 2 * a * b**2
    dg_db = a**2 * 2 * b
    return np.array([[df_da, df_db], [dg_da, dg_db]])
#print("Jacobienne : \n", Jac_F (1))
#print('\n')
"""
#u = np.array ([-2.9, 0.5]) pn fait l
'
un puis l
'
autre
u = np.array ([-0.7, 1.9])
for n in range (6) :
print(
'
u[%d] =
'
%n, u)
F_u = F(u)
J_u = Jac_F(u)
h = np.linalg.solve (-J_u, F_u)
u = u + h
"""
#F_u s'annule en u
\ No newline at end of file
This diff is collapsed.
Click to expand it.
tp2/tp2.py
0 → 100644
+
108
−
0
View file @
8b24cd84
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 27 10:21:52 2024
@author: tdanjoux
"""
import
numpy
as
np
import
matplotlib.pyplot
as
plt
import
autograd.numpy
as
np
from
autograd
import
jacobian
# Define the two functions whose zero level-sets we intersect.
def f(a, b):
    """Evaluate f(a, b) = a**3 * b - 3 * a**2 * (b - 1) + b**2 - 1."""
    leading = a**3 * b
    correction = 3 * a**2 * (b - 1)
    return leading - correction + b**2 - 1
def g(a, b):
    """Evaluate g(a, b) = a**2 * b**2 - 2."""
    squared_product = a**2 * b**2
    return squared_product - 2
# Build a grid over a and b.
a = np.linspace(-2, 2, 400)
b = np.linspace(-2, 2, 400)
A, B = np.meshgrid(a, b)

# Evaluate f and g on the grid.
F = f(A, B)
G = g(A, B)

# Plot the zero level-sets of f (blue) and g (red); their intersections
# are the solutions of the system f = 0, g = 0.
plt.figure(figsize=(8, 6))
plt.contour(A, B, F, levels=[0], colors='blue')
plt.contour(A, B, G, levels=[0], colors='red')
# Fix: `contour` does not accept a `label=` kwarg (it was silently ignored
# with a warning), so draw empty proxy lines to give the legend proper
# handles for the two level-sets.
plt.plot([], [], color='blue', label='f(a, b) = 0')
plt.plot([], [], color='red', label='g(a, b) = 0')
plt.title("Solutions des équations f(a, b) = 0 et g(a, b) = 0")
plt.xlabel("a")
plt.ylabel("b")
plt.grid()
plt.legend()
plt.show()
def Jac_F(a, b):
    """Analytic 2x2 Jacobian of the system (f, g) at (a, b).

    Row 0 is the gradient of f, row 1 the gradient of g; columns are the
    derivatives with respect to a and b.

    Fix: the (0, 1) entry was coded as `a**3 + 2*b`, missing the `-3*a**2`
    term: for f = a^3*b - 3*a^2*(b-1) + b^2 - 1 we have
    df/db = a^3 - 3*a^2 + 2*b (this also matches the hand-written Jac_F in
    tp2/graphe_R2_dans_R2.py).
    """
    J = np.array([
        # df/da = 3 a^2 b - 6 a (b - 1);  df/db = a^3 - 3 a^2 + 2 b
        [3 * a**2 * b - 6 * a * (b - 1), a**3 - 3 * a**2 + 2 * b],
        # dg/da = 2 a b^2;                dg/db = 2 a^2 b
        [2 * a * b**2, 2 * a**2 * b]
    ])
    return J
def newton_method(F, Jac_F, a0, b0, tol=1e-6, max_iter=100):
    """Solve the 2x2 system F(a, b) = 0 with Newton's method.

    F and Jac_F are called with the two scalar coordinates; the iteration
    starts at (a0, b0). Returns (x, i): the approximate root and the index
    of the converging iteration. Raises ValueError if the step norm never
    drops below `tol` within `max_iter` iterations.
    """
    x = np.array([a0, b0])
    for iteration in range(max_iter):
        jac_val = Jac_F(x[0], x[1])
        residual = F(x[0], x[1])
        # Newton step: solve J * delta = -F(x)
        delta = np.linalg.solve(jac_val, -residual)
        x = x + delta
        if np.linalg.norm(delta) < tol:
            return x, iteration
    raise ValueError("Méthode de Newton n'a pas convergé")
# Vector-valued wrapper bundling f and g into a single residual function.
def F_vectorized(a, b):
    """Return np.array([f(a, b), g(a, b)])."""
    components = (f(a, b), g(a, b))
    return np.array(components)
# Run Newton's method with the hand-written Jacobian, starting from (1, 1).
solution, iterations = newton_method(F_vectorized, Jac_F, a0=1.0, b0=1.0)
print("Solution trouvée : ", solution)
print("Nombre d'itérations : ", iterations)
# Redefine F to accept a single array argument (the form autograd requires).
def F_autograd(u):
    """Residual vector [f(a, b), g(a, b)] for u = (a, b)."""
    a, b = u
    f_val = a**3 * b - 3 * a**2 * (b - 1) + b**2 - 1
    g_val = a**2 * b**2 - 2
    return np.array([f_val, g_val])
# Automatic computation of the Jacobian via autograd:
# jacobian(F_autograd) returns a function u -> 2x2 Jacobian matrix of F_autograd.
Jac_F_autograd = jacobian(F_autograd)
# Newton's method reusing an autograd-generated Jacobian function.
def newton_autograd(F, Jac_F, u0, tol=1e-6, max_iter=100):
    """Solve F(u) = 0 by Newton iteration starting from u0.

    F and Jac_F take a single array argument u. Returns (u, i): the
    approximate root and the index of the converging iteration. Raises
    ValueError when no step smaller than `tol` occurs within `max_iter`
    iterations.
    """
    u = np.array(u0)
    for step_count in range(max_iter):
        jac_val = Jac_F(u)
        residual = F(u)
        update = np.linalg.solve(jac_val, -residual)
        u = u + update
        if np.linalg.norm(update) < tol:
            return u, step_count
    raise ValueError("Méthode de Newton avec autograd n'a pas convergé")
# Initialize and solve the system with the autograd-based Jacobian.
u0 = [1.0, 1.0]
solution_autograd, iterations_autograd = newton_autograd(F_autograd, Jac_F_autograd, u0)
print("Solution avec autograd : ", solution_autograd)
print("Nombre d'itérations : ", iterations_autograd)
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment