calibrated
Before Width: | Height: | Size: 2.3 MiB |
@@ -1,58 +0,0 @@
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
from scipy.ndimage import map_coordinates

# Path to your image
image_path = "cameratransform/google.png"


def simulate_top_view(image_path, inclination=0, rotation=0):
    # Load the image
    img = mpimg.imread(image_path)

    # Image dimensions
    height, width, _ = img.shape

    # Build an index grid over the image pixels
    y, x = np.indices((height, width))

    # Convert the x, y coordinates to polar coordinates
    r = np.sqrt((x - width / 2) ** 2 + (y - height / 2) ** 2)
    theta = np.arctan2(y - height / 2, x - width / 2)

    # Apply the inclination and rotation
    r_adjusted = r * np.cos(np.deg2rad(inclination))
    theta_adjusted = theta + np.deg2rad(rotation)

    # Convert the adjusted polar coordinates back to Cartesian coordinates
    x_adjusted = width / 2 + r_adjusted * np.cos(theta_adjusted)
    y_adjusted = height / 2 + r_adjusted * np.sin(theta_adjusted)

    # Bilinear interpolation to sample the new pixel values
    coordinates = np.vstack((y_adjusted.flatten(), x_adjusted.flatten()))
    simulated_img = np.zeros_like(img)
    for c in range(3):  # colour channels (R, G, B)
        simulated_img[:, :, c] = map_coordinates(img[:, :, c], coordinates, order=1).reshape(img.shape[:2])

    return simulated_img


# Simulation parameters (inclination and rotation in degrees)
inclination_degrees = 30
rotation_degrees = 45

# Simulate the top view with the given parameters
simulated_image = simulate_top_view(image_path, inclination=inclination_degrees, rotation=rotation_degrees)

# Show the original image and the simulated view side by side
fig, axes = plt.subplots(1, 2, figsize=(10, 5))
axes[0].imshow(mpimg.imread(image_path))
axes[0].set_title("Original image")
axes[0].axis("off")

axes[1].imshow(simulated_image)
axes[1].set_title("Simulated top view")
axes[1].axis("off")

plt.show()
cams/new/extract + calibrated.py  (new file, 110 lines)
@@ -0,0 +1,110 @@
import pandas as pd
import numpy as np
import cv2
import matplotlib.pyplot as plt
from scipy.optimize import least_squares
from imageio import imread
import cameratransform as ct

cap = cv2.VideoCapture('cams/new/cut2.mp4')
folder_path = "track/expgood/labels/"
name = 'cut2'
fps = 780  # number of annotated frames (one label file per frame)

allfiles = []
for i in range(1, fps + 1):
    allfiles.append(folder_path + name + '_' + str(i) + '.txt')

# Desired dimensions for displaying the video
display_width = 1280
display_height = 720

display_width = 1920
display_height = 1080

width = 1920
height = 1080

frame_nb = 0

bleu = (255, 0, 0)
vert = (0, 255, 0)

# # Cam part
# img = cv2.imread("track/Sylvain/stage_Noham/stage_Noham/image_vide_pts.png")
# nh, nw, _ = img.shape

# Calibrated camera parameters: [focal_mm, sensor_x_mm, sensor_y_mm, elevation_m, tilt_deg, heading_deg, roll_deg]
res = np.array([3.99594676, 3.53413555, 4.55, 16.41739973, 74.96395791, 49.11271189, 2.79384615])
image_size = (width, height)
cam = ct.Camera(ct.RectilinearProjection(focallength_mm=res[0], sensor=(res[1], res[2]), image=image_size),
                ct.SpatialOrientation(elevation_m=res[3], tilt_deg=res[4], heading_deg=res[5], roll_deg=res[6]))

if not cap.isOpened():
    print("Error opening video stream or file")

while cap.isOpened():
    ret, frame = cap.read()
    if ret:
        df = pd.read_csv(allfiles[frame_nb], header=None, sep=' ')
        ind_px_ground_pts = []
        for index, row in df.iterrows():
            class_id, center_x, center_y, bbox_width, bbox_height, object_id = row

            center_x = int(center_x * width)
            center_y = int(center_y * height)
            bbox_width = int(bbox_width * width)
            bbox_height = int(bbox_height * height)

            top_left_x = int(center_x - bbox_width / 2)
            top_left_y = int(center_y - bbox_height / 2)
            bottom_right_x = int(center_x + bbox_width / 2)
            bottom_right_y = int(center_y + bbox_height / 2)

            # Keep only boxes between the two segments (19;112)-(636;714) and (86;86)-(1087;715)
            if (((112-714)/(19-636)) * top_left_x + 112 - ((112-714)/(19-636)) * 19 > top_left_y) and (((86-715)/(86-1097)) * bottom_right_x + 112 - ((86-715)/(86-1097)) * 86 < bottom_right_y):

                label = f'Class: {int(class_id)}, Object ID: {int(object_id)}'
                cv2.putText(frame, label, (top_left_x, top_left_y - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, vert, 1)

                # Get the centre of the rectangle
                center_x = (top_left_x + bottom_right_x) // 2
                center_y = (top_left_y + bottom_right_y) // 2
                cv2.circle(frame, (center_x, center_y), 5, vert, -1)

                ind_px_ground_pts += [center_x, center_y]

            else:
                pass
        # Reshape the flat [x, y, x, y, ...] list into N x 2 (x, y) pixel points
        ind_px_ground_pts = np.array(ind_px_ground_pts)
        px_ground_pts = ind_px_ground_pts.reshape(-1, 2)
        print(px_ground_pts)

        space_pts = []
        for pt in px_ground_pts:
            space_pts.append(cam.spaceFromImage(pt))
        space_pts = np.array(space_pts)

        # resized_frame = cv2.resize(frame, (display_width, display_height))
        # cv2.imshow('Frame', resized_frame)
        # plt.figure()

        #######################
        plt.scatter(space_pts[:, 0], space_pts[:, 1], color="red", s=2)
        plt.plot([28.569, 51.681], [26.665, 89.904], color='blue', linestyle='-', linewidth=1)
        # plt.axis("equal")
        plt.xlim([0, 100])
        plt.ylim([0, 150])
        plt.draw()
        plt.pause(0.0000000000001)
        plt.clf()
        ######################

        if cv2.waitKey(25) & 0xFF == ord('q'):
            break
        frame_nb = frame_nb + 1
    else:
        break

cap.release()
cv2.destroyAllWindows()
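Note: the two-line gate above is easier to audit if the point-versus-line test is factored out. A minimal sketch under the same segment endpoints quoted in the comment (the helper name is illustrative, not part of the committed script):

def above_line(x, y, p1, p2):
    # True when (x, y) lies above the line through p1 and p2 (image y axis points down)
    (x1, y1), (x2, y2) = p1, p2
    slope = (y2 - y1) / (x2 - x1)
    return y < slope * (x - x1) + y1

# equivalent gate, written against the endpoints from the comment:
# keep = (above_line(top_left_x, top_left_y, (19, 112), (636, 714))
#         and not above_line(bottom_right_x, bottom_right_y, (86, 86), (1087, 715)))

Writing it this way also makes slips such as the reused 112 intercept in the second inline term easy to spot.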
BIN  track/Sylvain/ImmersionTemplate.zip  (new file)
BIN  track/Sylvain/ImmersionTemplate/ImmersionTemplate/logo-ups.png  (new file, 21 KiB)
BIN  track/Sylvain/ImmersionTemplate/ImmersionTemplate/rapport.pdf  (new file)
track/Sylvain/ImmersionTemplate/ImmersionTemplate/rapport.tex  (new file, 182 lines)
@@ -0,0 +1,182 @@
|
||||
\documentclass[a4paper]{article}
|
||||
\usepackage[margin=25mm]{geometry}
|
||||
\usepackage{amsmath}
|
||||
\usepackage{amsfonts}
|
||||
\usepackage{amssymb}
|
||||
\usepackage{graphicx}
|
||||
\pagenumbering{gobble}
|
||||
\usepackage{verbatim}
|
||||
\usepackage[utf8]{inputenc}
|
||||
\usepackage[french,english]{babel}
|
||||
\usepackage{tikz}
|
||||
\usepackage{xcolor}
|
||||
|
||||
\newtheorem{theorem}{Th\'eor\`eme}[subsection]
|
||||
\newtheorem{proposition}{Proposition}[subsection]
|
||||
\newtheorem{definition}{D\'efinition}[subsection]
|
||||
|
||||
\newtheorem{lemma}{Lemme}[subsection]
|
||||
\newtheorem{model}{Mod\`ele}[subsection]
|
||||
\newtheorem{algorithm}{Algorithme}[subsection]
|
||||
\newtheorem{problem}{Probl\`eme}[subsection]
|
||||
\newtheorem{remark}{Remarque}[subsection]
|
||||
|
||||
%\newcommand{\Id}{\mathbf{Id}}
|
||||
%\newcommand{\ie}{$i. e.\ $}
|
||||
%\newcommand{\eg}{$e. g.\ $}
|
||||
%\newcommand{\st}{ such that }
|
||||
%\newcommand{\Div}{\mbox{div }}
|
||||
%\newcommand{\Curl}{\mbox{curl }}
|
||||
|
||||
% Keywords command
|
||||
\providecommand{\keywords}[1]
|
||||
{
|
||||
\small
|
||||
\textbf{\textit{Keywords---}} #1
|
||||
}
|
||||
|
||||
\title{Titre du rapport}
|
||||
|
||||
\author{Premier Auteur$^{1}$, Second Auteur$^{2}$ \\
|
||||
\small $^{1}$L3 LDD Informatique, Mathématiques, Université Paris-Saclay, 91405 Orsay, France \\
|
||||
\small $^{2}$L3 LDD Mathématiques, Physique, Université Paris-Saclay, 91405 Orsay, France \\
|
||||
}
|
||||
|
||||
\date{} % Comment this line to show today's date
|
||||
|
||||
|
||||
\makeindex
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
\begin{document}
|
||||
\selectlanguage{french}
|
||||
|
||||
\maketitle
|
||||
|
||||
\begin{tikzpicture}[overlay,yshift=5cm, xshift=13.4cm]
|
||||
\pgftext{\includegraphics[width=90pt]{logo-ups.png}}
|
||||
\end{tikzpicture}
|
||||
|
||||
\begin{abstract}
|
||||
{\color{blue}Résumé en français...}
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum pretium libero non odio tincidunt semper. Vivamus sollicitudin egestas mattis. Sed vitae risus vel ex tincidunt molestie nec vel leo. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Maecenas quis massa tincidunt, faucibus magna non, fringilla sapien. In ullamcorper justo a scelerisque egestas. Ut maximus, elit a rutrum viverra, lectus sapien varius est, vel tempor neque mi et augue. Fusce ornare venenatis nunc nec feugiat. Proin a enim mauris. Mauris dignissim vulputate erat, vitae cursus risus elementum at. Cras luctus pharetra congue. Aliquam id est dictum, finibus ligula sed, tempus arcu.
|
||||
\end{abstract}
|
||||
\hspace{10pt}
|
||||
|
||||
\selectlanguage{english}
|
||||
\begin{abstract}
|
||||
{\color{blue}Abstract in English... }
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum pretium libero non odio tincidunt semper. Vivamus sollicitudin egestas mattis. Sed vitae risus vel ex tincidunt molestie nec vel leo. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Maecenas quis massa tincidunt, faucibus magna non, fringilla sapien. In ullamcorper justo a scelerisque egestas. Ut maximus, elit a rutrum viverra, lectus sapien varius est, vel tempor neque mi et augue. Fusce ornare venenatis nunc nec feugiat. Proin a enim mauris. Mauris dignissim vulputate erat, vitae cursus risus elementum at. Cras luctus pharetra congue. Aliquam id est dictum, finibus ligula sed, tempus arcu.
|
||||
\end{abstract}
|
||||
\selectlanguage{french}
|
||||
|
||||
|
||||
%TC:ignore
|
||||
\keywords{mot clé; mot clé; mot clé}
|
||||
|
||||
\clearpage
|
||||
|
||||
\section{Introduction}
|
||||
|
||||
Aenean tellus orci, accumsan $i$ nec neque at, vestibulum eleifend elit \cite{helbing09,SchadCA09} ({\color{blue}bien cité dans le texte de l'article toute référence présente dans la bibliographie}) Sed luctus enim dui, in fermentum $j$ dui pharetra at. Fusce vel nisl et diam feugiat porttitor et at libero. Maecenas scelerisque varius mauris non euismod. Nulla eget cursus leo. Integer interdum lacus vel ligula maximus, at feugiat orci porttitor. Suspendisse egestas, lorem a \index{elementum} lobortis, tellus mauris hendrerit nunc, sed vestibulum mi velit quis risus. Mauris gravida mi et ullamcorper blandit. Aenean lacinia, quam id tempus interdum, massa orci rhoncus turpis, eu finibus nisi lectus id sem. Vivamus ut mauris sed diam porta viverra sit amet quis risus (\cite{Zuriguel09}).
|
||||
|
||||
Nam id ornare dolor. Nulla metus enim, venenatis vel dui ac, accumsan vehicula est. Suspendisse luctus eros et velit eleifend, nec finibus ante rutrum. Interdum et malesuada fames ac ante ipsum primis {\em systemic} in faucibus. Vivamus tempor lorem turpis, nec venenatis turpis venenatis nec. Integer hendrerit at mi nec aliquet. Vestibulum auctor arcu scelerisque lacus rhoncus ornare. Vivamus convallis libero nulla, vitae ullamcorper mauris venenatis nec. Donec elementum ligula non tortor \index{pellentesque} finibus.
|
||||
|
||||
Vestibulum mauris odio, scelerisque ut nisi ut, tincidunt maximus eros. Fusce tempor ex non mi commodo consectetur. Sed sit amet massa id elit commodo bibendum. Nunc id neque tempus erat tempus dictum. Fusce mi leo, hendrerit in egestas sed, faucibus vel ex. In hac habitasse platea dictumst. Vivamus eget odio arcu. Ut finibus et lacus ac interdum. Donec consectetur dolor neque, vel condimentum nunc varius nec. Mauris sapien dolor, aliquam nec vulputate at, fermentum vel nulla. Orci varius natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nam posuere vulputate vestibulum.
|
||||
|
||||
\section{Première section}
|
||||
|
||||
Integer iaculis vitae nisi mollis congue. Cras sed facilisis tortor. Aliquam quis neque ipsum. Proin et accumsan arcu. Donec sit amet nibh lacus. Vestibulum mattis arcu sed ante \index{vestibulum} condimentum. Nunc auctor ligula vel velit finibus imperdiet. Cras consequat ipsum quis rhoncus consequat. Etiam luctus purus turpis, quis tempor massa posuere non. Donec vitae $\Phi$ ex in ligula ultricies feugiat. Sed urna sem, rutrum at tempus vel, mollis vel magna. Etiam ex est, pulvinar et risus at, facilisis efficitur turpis. Etiam egestas est a erat elementum, vitae porta lectus finibus. Donec ac consequat sapien. Aenean sed eros a est blandit dictum.\\
|
||||
{\color{blue}Equation numérotée pouvant être citée (\ref{eq:eq1}) : }
|
||||
\begin{equation}
|
||||
\label{eq:eq1}
|
||||
( a + b )^2 = a^2 + b^2 + 2 a b.
|
||||
\end{equation}
|
||||
|
||||
\noindent{\color{blue}Système d'équations : }
|
||||
\begin{eqnarray}
|
||||
\label{eq:eq2}
|
||||
( a + b )^2 &=& a^2 + b^2 + 2 a b,\\
|
||||
( a - b )^2 &=& a^2 + b^2 - 2 a b.
|
||||
\end{eqnarray}
|
||||
|
||||
|
||||
|
||||
Quisque in dui porttitor, finibus lacus quis, pretium dui. Nullam vitae augue ligula. Nulla vel nisl tincidunt, ullamcorper enim nec, sollicitudin justo. Praesent vitae ex elit. Sed placerat velit a lectus fringilla, in tempor lorem efficitur. Maecenas mattis $n = 1,\dots,m_i$, tellus ipsum, a laoreet quam aliquam eu. Donec eu interdum lectus. Morbi suscipit nibh (\ref{eq:eq1})sed enim interdum, eget aliquam odio ullamcorper. Sed at mauris maximus, mollis mi ut, dapibus mauris. Morbi efficitur ultricies massa, et vulputate est pellentesque nec $\alpha_i^n$. Curabitur rutrum ullamcorper efficitur. Curabitur vestibulum consequat orci quis dapibus. Ut a ullamcorper tellus. Proin fermentum malesuada dui ac mollis. Mauris volutpat finibus lacus et placerat. \\
|
||||
{\color{blue}Equation non numérotée : }
|
||||
\begin{equation*}
|
||||
( a - b )^2 = a^2 + b^2 - 2 a b.
|
||||
\end{equation*}
|
||||
|
||||
\section{Seconde section}
|
||||
|
||||
\subsection{Première sous-section}
|
||||
|
||||
Curabitur nulla libero, viverra at tempus vitae, ornare ac metus. Nullam sed imperdiet erat, a vestibulum arcu. Sed non nisi cursus, sagittis libero in, pellentesque est. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Interdum et malesuada fames ac ante ipsum primis in faucibus. Sed congue turpis ligula, et tristique neque scelerisque sit amet. Vivamus neque est, pharetra eu libero at, tincidunt feugiat augue.
|
||||
|
||||
\begin{definition}
|
||||
On appelle...
|
||||
\end{definition}
|
||||
|
||||
|
||||
\subsection{Seconde sous-section}
|
||||
|
||||
\begin{theorem}
|
||||
Soit une fonction $\Phi$...
|
||||
\end{theorem}
|
||||
|
||||
\begin{lemma}
|
||||
Soit $x \in \mathbf{R}$,...
|
||||
\end{lemma}
|
||||
|
||||
\begin{remark}
|
||||
On remarque que...
|
||||
\end{remark}
|
||||
|
||||
Morbi mollis sapien nisi, non fringilla felis placerat vitae. Donec ac enim justo. Cras placerat purus vel ex volutpat, eget placerat lorem fermentum. Duis quam risus, eleifend quis iaculis eu, efficitur at nisl. Pellentesque pharetra dui nisi, sit amet sodales mi hendrerit nec. Nullam et gravida lorem, ut faucibus dolor. Mauris bibendum pulvinar tortor, eget consequat nulla luctus eget.
|
||||
|
||||
\begin{figure}[t]
|
||||
\begin{center}
|
||||
\includegraphics[width=0.295\linewidth]{terre.png}
|
||||
|
||||
\end{center}
|
||||
\caption{La Terre}
|
||||
\label{fig:fig1}
|
||||
\end{figure}
|
||||
|
||||
{\color{blue}Bien penser à citer et à commenter toutes les figures du texte : (Figure \ref{fig:fig1})}
|
||||
Quisque in dui porttitor, finibus lacus quis, pretium dui. Nullam vitae augue ligula. Nulla vel nisl tincidunt, ullamcorper enim nec, sollicitudin justo. Praesent vitae ex elit. Sed placerat velit a lectus fringilla, in tempor lorem efficitur. Maecenas mattis tellus ipsum, a laoreet quam aliquam eu. Donec eu interdum lectus. Morbi suscipit nibh sed enim interdum, eget aliquam odio ullamcorper. Sed at
|
||||
|
||||
\section{Conclusion}
|
||||
|
||||
Curabitur nulla libero, viverra at tempus vitae, ornare ac metus. Nullam sed imperdiet erat, a vestibulum arcu. Sed non nisi cursus, sagittis libero in, pellentesque est. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Interdum et malesuada fames ac ante ipsum primis in faucibus. Sed congue turpis ligula, et tristique neque scelerisque sit amet. Vivamus neque est, pharetra eu libero at, tincidunt feugiat augue.
|
||||
|
||||
\section*{Remerciements}
|
||||
|
||||
Les auteurs de ce document remercient vivement...
|
||||
|
||||
|
||||
\begin{thebibliography}{99}
|
||||
|
||||
\bibitem{helbing09}
|
||||
D. Helbing,
|
||||
A. Johansson,
|
||||
Pedestrian, Crowd and Evacuation Dynamics,
|
||||
\emph{Encyclopedia of Complexity and Systems Science},
|
||||
pp. 6476--6495, Springer New York.
|
||||
|
||||
\bibitem{SchadCA09}
|
||||
A. Schadschneider, A. Seyfried, Empirical results for pedestrian dynamics and their implications for cellular automata models,
|
||||
in``Pedestrian Behavior'', Ed.: H. Timmermans, Emerald, p. 27 (2009).
|
||||
|
||||
\bibitem{Zuriguel09}
|
||||
I. Zuriguel, J. Olivares, J.M. Pastor, C. Mart\'in-G\'omez, L.M. Ferrer, J.J. Ramos, A. Garcimart\'in,
|
||||
Effect of obstacle position in the flow of sheep through a narrow door,
|
||||
\emph{Phys. Rev. E}, 94.
|
||||
|
||||
\end{thebibliography}
|
||||
|
||||
|
||||
|
||||
|
||||
\end{document}
|
BIN
track/Sylvain/ImmersionTemplate/ImmersionTemplate/terre.png
Normal file
After Width: | Height: | Size: 886 KiB |
BIN
track/Sylvain/stage_Noham.zip
Normal file
11
track/Sylvain/stage_Noham/stage_Noham/HomographyCalibration2/HomoGraphie/.idea/HomoGraphie.iml
generated
Normal file
@ -0,0 +1,11 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="PYTHON_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$" />
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
<component name="TestRunnerService">
|
||||
<option name="PROJECT_TEST_RUNNER" value="Unittests" />
|
||||
</component>
|
||||
</module>
|
4
track/Sylvain/stage_Noham/stage_Noham/HomographyCalibration2/HomoGraphie/.idea/misc.xml
generated
Normal file
@ -0,0 +1,4 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.6" project-jdk-type="Python SDK" />
|
||||
</project>
|
8
track/Sylvain/stage_Noham/stage_Noham/HomographyCalibration2/HomoGraphie/.idea/modules.xml
generated
Normal file
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/HomoGraphie.iml" filepath="$PROJECT_DIR$/.idea/HomoGraphie.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
289
track/Sylvain/stage_Noham/stage_Noham/HomographyCalibration2/HomoGraphie/.idea/workspace.xml
generated
Normal file
@ -0,0 +1,289 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ChangeListManager">
|
||||
<list default="true" id="a37ae7f5-41bc-4c1b-82b9-2ebdaf094ce1" name="Default Changelist" comment="" />
|
||||
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
|
||||
<option name="SHOW_DIALOG" value="false" />
|
||||
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
||||
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
|
||||
<option name="LAST_RESOLUTION" value="IGNORE" />
|
||||
</component>
|
||||
<component name="FileEditorManager">
|
||||
<leaf>
|
||||
<file pinned="false" current-in-tab="true">
|
||||
<entry file="file://$PROJECT_DIR$/test_homography.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="435">
|
||||
<caret line="179" column="5" selection-start-line="179" selection-start-column="5" selection-end-line="179" selection-end-column="5" />
|
||||
<folding>
|
||||
<element signature="e#0#42#0" expanded="true" />
|
||||
</folding>
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
</file>
|
||||
<file pinned="false" current-in-tab="false">
|
||||
<entry file="file://$USER_HOME$/.local/lib/python3.6/site-packages/numpy/core/fromnumeric.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="126">
|
||||
<caret line="65" selection-start-line="65" selection-end-line="65" />
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
</file>
|
||||
<file pinned="false" current-in-tab="false">
|
||||
<entry file="file://$USER_HOME$/.PyCharmCE2019.1/system/python_stubs/-1247971763/cv2/cv2/__init__.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="126">
|
||||
<caret line="8772" column="4" selection-start-line="8772" selection-start-column="4" selection-end-line="8772" selection-end-column="4" />
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
</file>
|
||||
<file pinned="false" current-in-tab="false">
|
||||
<entry file="file://$PROJECT_DIR$/getPointsOnImage.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="323">
|
||||
<caret line="19" column="31" selection-start-line="19" selection-start-column="31" selection-end-line="19" selection-end-column="31" />
|
||||
<folding>
|
||||
<element signature="e#0#42#0" expanded="true" />
|
||||
</folding>
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
</file>
|
||||
</leaf>
|
||||
</component>
|
||||
<component name="FileTemplateManagerImpl">
|
||||
<option name="RECENT_TEMPLATES">
|
||||
<list>
|
||||
<option value="Python Script" />
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="FindInProjectRecents">
|
||||
<findStrings>
|
||||
<find>pts_dst</find>
|
||||
</findStrings>
|
||||
</component>
|
||||
<component name="IdeDocumentHistory">
|
||||
<option name="CHANGED_PATHS">
|
||||
<list>
|
||||
<option value="$PROJECT_DIR$/getPointsOnImage.py" />
|
||||
<option value="$PROJECT_DIR$/test_homography.py" />
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="ProjectFrameBounds" extendedState="6">
|
||||
<option name="x" value="3667" />
|
||||
<option name="y" value="51" />
|
||||
<option name="width" value="1213" />
|
||||
<option name="height" value="999" />
|
||||
</component>
|
||||
<component name="ProjectView">
|
||||
<navigator proportions="" version="1">
|
||||
<foldersAlwaysOnTop value="true" />
|
||||
</navigator>
|
||||
<panes>
|
||||
<pane id="Scope" />
|
||||
<pane id="ProjectPane">
|
||||
<subPane>
|
||||
<expand>
|
||||
<path>
|
||||
<item name="HomoGraphie" type="b2602c69:ProjectViewProjectNode" />
|
||||
<item name="HomoGraphie" type="462c0819:PsiDirectoryNode" />
|
||||
</path>
|
||||
</expand>
|
||||
<select />
|
||||
</subPane>
|
||||
</pane>
|
||||
</panes>
|
||||
</component>
|
||||
<component name="PropertiesComponent">
|
||||
<property name="last_opened_file_path" value="$PROJECT_DIR$/../CSRNet-pytorch" />
|
||||
</component>
|
||||
<component name="RunDashboard">
|
||||
<option name="ruleStates">
|
||||
<list>
|
||||
<RuleState>
|
||||
<option name="name" value="ConfigurationTypeDashboardGroupingRule" />
|
||||
</RuleState>
|
||||
<RuleState>
|
||||
<option name="name" value="StatusDashboardGroupingRule" />
|
||||
</RuleState>
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="RunManager" selected="Python.test_homography">
|
||||
<configuration name="getPointsOnImage" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
|
||||
<module name="HomoGraphie" />
|
||||
<option name="INTERPRETER_OPTIONS" value="" />
|
||||
<option name="PARENT_ENVS" value="true" />
|
||||
<envs>
|
||||
<env name="PYTHONUNBUFFERED" value="1" />
|
||||
</envs>
|
||||
<option name="SDK_HOME" value="/usr/bin/python3.6" />
|
||||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
|
||||
<option name="IS_MODULE_SDK" value="false" />
|
||||
<option name="ADD_CONTENT_ROOTS" value="true" />
|
||||
<option name="ADD_SOURCE_ROOTS" value="true" />
|
||||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/getPointsOnImage.py" />
|
||||
<option name="PARAMETERS" value="" />
|
||||
<option name="SHOW_COMMAND_LINE" value="false" />
|
||||
<option name="EMULATE_TERMINAL" value="false" />
|
||||
<option name="MODULE_MODE" value="false" />
|
||||
<option name="REDIRECT_INPUT" value="false" />
|
||||
<option name="INPUT_FILE" value="" />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
<configuration name="test_homography" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
|
||||
<module name="HomoGraphie" />
|
||||
<option name="INTERPRETER_OPTIONS" value="" />
|
||||
<option name="PARENT_ENVS" value="true" />
|
||||
<envs>
|
||||
<env name="PYTHONUNBUFFERED" value="1" />
|
||||
</envs>
|
||||
<option name="SDK_HOME" value="/usr/bin/python3.6" />
|
||||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
|
||||
<option name="IS_MODULE_SDK" value="false" />
|
||||
<option name="ADD_CONTENT_ROOTS" value="true" />
|
||||
<option name="ADD_SOURCE_ROOTS" value="true" />
|
||||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test_homography.py" />
|
||||
<option name="PARAMETERS" value="" />
|
||||
<option name="SHOW_COMMAND_LINE" value="false" />
|
||||
<option name="EMULATE_TERMINAL" value="false" />
|
||||
<option name="MODULE_MODE" value="false" />
|
||||
<option name="REDIRECT_INPUT" value="false" />
|
||||
<option name="INPUT_FILE" value="" />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
<list>
|
||||
<item itemvalue="Python.getPointsOnImage" />
|
||||
<item itemvalue="Python.test_homography" />
|
||||
</list>
|
||||
</component>
|
||||
<component name="SvnConfiguration">
|
||||
<configuration />
|
||||
</component>
|
||||
<component name="TaskManager">
|
||||
<task active="true" id="Default" summary="Default task">
|
||||
<changelist id="a37ae7f5-41bc-4c1b-82b9-2ebdaf094ce1" name="Default Changelist" comment="" />
|
||||
<created>1563368550822</created>
|
||||
<option name="number" value="Default" />
|
||||
<option name="presentableId" value="Default" />
|
||||
<updated>1563368550822</updated>
|
||||
</task>
|
||||
<servers />
|
||||
</component>
|
||||
<component name="ToolWindowManager">
|
||||
<frame x="3667" y="51" width="1213" height="999" extended-state="6" />
|
||||
<editor active="true" />
|
||||
<layout>
|
||||
<window_info id="Favorites" side_tool="true" />
|
||||
<window_info content_ui="combo" id="Project" order="0" visible="true" weight="0.08571429" />
|
||||
<window_info id="Structure" order="1" side_tool="true" weight="0.25" />
|
||||
<window_info anchor="bottom" id="Version Control" />
|
||||
<window_info anchor="bottom" id="Python Console" weight="0.32912844" />
|
||||
<window_info anchor="bottom" id="Terminal" weight="0.32912844" />
|
||||
<window_info anchor="bottom" id="Event Log" side_tool="true" />
|
||||
<window_info anchor="bottom" id="Message" order="0" />
|
||||
<window_info anchor="bottom" id="Find" order="1" />
|
||||
<window_info active="true" anchor="bottom" id="Run" order="2" visible="true" weight="0.32912844" />
|
||||
<window_info anchor="bottom" id="Debug" order="3" weight="0.43348625" />
|
||||
<window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
|
||||
<window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
|
||||
<window_info anchor="bottom" id="TODO" order="6" />
|
||||
<window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
|
||||
<window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
|
||||
<window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
|
||||
</layout>
|
||||
</component>
|
||||
<component name="XDebuggerManager">
|
||||
<breakpoint-manager>
|
||||
<breakpoints>
|
||||
<line-breakpoint enabled="true" suspend="THREAD" type="python-line">
|
||||
<url>file://$PROJECT_DIR$/test_homography.py</url>
|
||||
<line>170</line>
|
||||
<option name="timeStamp" value="8" />
|
||||
</line-breakpoint>
|
||||
</breakpoints>
|
||||
</breakpoint-manager>
|
||||
<watches-manager>
|
||||
<configuration name="PythonConfigurationType">
|
||||
<watch expression="hm.values()" language="Python" />
|
||||
<watch expression="hm." language="Python" />
|
||||
<watch expression="p[0][:]" language="Python" />
|
||||
</configuration>
|
||||
</watches-manager>
|
||||
</component>
|
||||
<component name="debuggerHistoryManager">
|
||||
<expressions id="watch">
|
||||
<expression>
|
||||
<expression-string>p[0][:]</expression-string>
|
||||
<language-id>Python</language-id>
|
||||
<evaluation-mode>EXPRESSION</evaluation-mode>
|
||||
</expression>
|
||||
<expression>
|
||||
<expression-string>hm.</expression-string>
|
||||
<language-id>Python</language-id>
|
||||
<evaluation-mode>EXPRESSION</evaluation-mode>
|
||||
</expression>
|
||||
<expression>
|
||||
<expression-string>hm['test']</expression-string>
|
||||
<language-id>Python</language-id>
|
||||
<evaluation-mode>EXPRESSION</evaluation-mode>
|
||||
</expression>
|
||||
<expression>
|
||||
<expression-string>hm.values('test')</expression-string>
|
||||
<language-id>Python</language-id>
|
||||
<evaluation-mode>EXPRESSION</evaluation-mode>
|
||||
</expression>
|
||||
<expression>
|
||||
<expression-string>hm.data</expression-string>
|
||||
<language-id>Python</language-id>
|
||||
<evaluation-mode>EXPRESSION</evaluation-mode>
|
||||
</expression>
|
||||
<expression>
|
||||
<expression-string>hm.values()</expression-string>
|
||||
<language-id>Python</language-id>
|
||||
<evaluation-mode>EXPRESSION</evaluation-mode>
|
||||
</expression>
|
||||
</expressions>
|
||||
</component>
|
||||
<component name="editorHistoryManager">
|
||||
<entry file="file://$PROJECT_DIR$/getPointsOnImage.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="323">
|
||||
<caret line="19" column="31" selection-start-line="19" selection-start-column="31" selection-end-line="19" selection-end-column="31" />
|
||||
<folding>
|
||||
<element signature="e#0#42#0" expanded="true" />
|
||||
</folding>
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
<entry file="file://$USER_HOME$/.local/lib/python3.6/site-packages/numpy/core/fromnumeric.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="126">
|
||||
<caret line="65" selection-start-line="65" selection-end-line="65" />
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
<entry file="file://$USER_HOME$/.PyCharmCE2019.1/system/python_stubs/-1247971763/cv2/cv2/__init__.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="126">
|
||||
<caret line="8772" column="4" selection-start-line="8772" selection-start-column="4" selection-end-line="8772" selection-end-column="4" />
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
<entry file="file://$PROJECT_DIR$/test_homography.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="435">
|
||||
<caret line="179" column="5" selection-start-line="179" selection-start-column="5" selection-end-line="179" selection-end-column="5" />
|
||||
<folding>
|
||||
<element signature="e#0#42#0" expanded="true" />
|
||||
</folding>
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
</component>
|
||||
</project>
|
After Width: | Height: | Size: 2.9 MiB |
@@ -0,0 +1,27 @@
import cv2  # import the OpenCV library
import numpy as np  # import the numpy library
import pyproj
import smopy

global pts_pix, img1
pts_pix = []


# Mouse callback: record each left-click position and mark it on the image
def draw_circle(event, x, y, flags, param):
    global pts_pix, img1

    if event == cv2.EVENT_LBUTTONDOWN:
        cv2.circle(img1, (x, y), 2, (255, 0, 0), -1)
        cv2.imshow("img", img1)
        pts_pix.append([x, y])


img1 = cv2.imread('calibresult2.png', 1)

cv2.namedWindow('img')
cv2.setMouseCallback('img', draw_circle)
cv2.imshow('img', img1)
cv2.waitKey(0)

print(pts_pix)
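If the clicked points need to be reused by the homography script, a one-line addition (the file name is illustrative) would persist them:

# save the clicked pixel coordinates for later use
np.savetxt("pts_pix.txt", np.array(pts_pix, dtype=int), fmt="%d")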
After Width: | Height: | Size: 3.5 MiB |
After Width: | Height: | Size: 336 KiB |
@@ -0,0 +1,241 @@
import cv2  # import the OpenCV library
import numpy as np  # import the numpy library
import pyproj
import smopy

# https://www.engineeringtoolbox.com/utm-latitude-longitude-d_1370.html
# http://boulter.com/gps/#48.856676%202.35166

print("Fetching the map... network access needed!")
# map = Map((lat_min, lon_min, lat_max, lon_max), z=z, tileserver="")
# where the first argument is a box in geographical coordinates, and z
# is the zoom level (from minimum zoom 1 to maximum zoom 19).
map = smopy.Map((48.856421, 2.351325, 48.857103, 2.352135), z=18)
map.save_png("map.png")
print("map saved to map.png")

WGS84 = pyproj.Proj("+init=EPSG:4326")  # usual GPS (lat/lon) coordinate system
UTM31N = pyproj.Proj("+init=EPSG:32631")

# provide points from camera image to map
pts_src = np.array([[1028, 739], [184, 377], [232, 620], [767, 212]])
pts_dst = np.array([[452419, 5411751], [452478, 5411773], [452441, 5411773], [452475, 5411732]])


pts_pix = np.array([
    [278, 561],
    [306, 497],
    [373, 617],
    [395, 543],
    [414, 480],
    [431, 430],
    [509, 598],
    [520, 525],
    [531, 463],
    [540, 412],
    [659, 674],
    [656, 582],
    [652, 506],
    [652, 445],
    [652, 399],
    [856, 760],
    [827, 655],
    [807, 563],
    [788, 491],
    [773, 429],
    [764, 383],
    [1028, 738],
    [983, 627],
    [946, 544],
    [917, 476],
    [890, 418],
    [1211, 687],
    [1141, 616],
    [1089, 528],
    [1049, 461],
    [1012, 405],
    [1181, 606],
    [1282, 591],
    [1220, 511],
    [1168, 449],
    [1125, 395],
    [1408, 570],
    [1346, 497],
    [1283, 436],
    [1544, 550],
    [1479, 483],
    [1415, 429],
    [1357, 378],
    [1617, 466],
    [1553, 416],
    [1494, 371],
    [1706, 461],
    [1726, 459],
    [1663, 412],
    [1603, 370],
    [1548, 334],
    [1663, 370],
    [1608, 336]
])


# refined pixel points (these overwrite the array above)
pts_pix = np.array([
[330, 573], [355, 515], [420, 623], [443, 555], [461, 500], [477, 457], [549, 606], [559, 543], [566, 488], [574, 443], [684, 670], [678, 591], [676, 531], [673, 472], [674, 430], [848, 751], [826, 653], [810, 576], [793, 510], [780, 457], [771, 421], [998, 728], [962, 632], [930, 559], [906, 501], [883, 448], [1160, 684], [1097, 618], [1053, 547], [1017, 489], [987, 440], [1130, 611], [1221, 598], [1168, 530], [1120, 478], [1084, 430], [1329, 582], [1274, 519], [1223, 468], [1466, 566], [1398, 506], [1341, 455], [1289, 414], [1540, 491], [1479, 444], [1420, 404],[1630, 470], [1649, 479], [1585, 436], [1524, 400], [1474, 369], [1570, 399], [1516, 368]
])
pts_latlon = np.array([
    [48.857062, 2.351683],
    [48.857069, 2.351758],
    [48.857007, 2.351577],
    [48.857035, 2.351653],
    [48.857019, 2.351728],
    [48.856999, 2.351804],
    [48.857004, 2.351549],
    [48.856987, 2.351624],
    [48.856971, 2.351700],
    [48.856950, 2.351774],
    [48.856973, 2.351448],
    [48.856957, 2.351526],
    [48.856936, 2.351600],
    [48.856918, 2.351673],
    [48.856903, 2.351749],
    [48.856940, 2.351351],
    [48.856924, 2.351421],
    [48.856906, 2.351499],
    [48.856887, 2.351579],
    [48.856869, 2.351648],
    [48.856855, 2.351725],
    [48.856895, 2.351325],
    [48.856877, 2.351399],
    [48.856860, 2.351473],
    [48.856843, 2.351548],
    [48.856825, 2.351625],
    [48.856838, 2.351303],
    [48.856830, 2.351374],
    [48.856812, 2.351445],
    [48.856793, 2.351521],
    [48.856775, 2.351597],
    [48.856814, 2.351365],
    [48.856778, 2.351346],
    [48.856762, 2.351420],
    [48.856745, 2.351497],
    [48.856727, 2.351570],
    [48.856732, 2.351316],
    [48.856713, 2.351392],
    [48.856698, 2.351466],
    [48.856667, 2.351285],
    [48.856651, 2.351357],
    [48.856631, 2.351434],
    [48.856614, 2.351505],
    [48.856552, 2.351303],
    [48.856547, 2.351386],
    [48.856531, 2.351463],
    [48.856506, 2.351277],
    [48.856491, 2.351276],
    [48.856472, 2.351342],
    [48.856455, 2.351420],
    [48.856438, 2.351491],
    [48.856405, 2.351392],
    [48.856388, 2.351467]
])

pts_coord = []
for ll in pts_latlon:
    pts_coord.append(pyproj.transform(WGS84, UTM31N, ll[1], ll[0]))
pts_coord = np.array(pts_coord)


# calculate matrix H
h, status = cv2.findHomography(pts_pix, pts_coord)
# h, status = cv2.findHomography(pts_src, pts_dst)  # not enough points to be accurate
print(h)
# sys.exit()


hm = np.load("/home/mpiidf/TraitementImage/CSRNet-pytorch/heatmap.npz")
hm = hm["hm"]
p = np.where(hm > 0.5)

# a=np.concatenate((p[0].reshape((-1,1)),p[1].reshape((-1,1))),axis=1).astype("float32")
# a=np.array([a])


img2 = cv2.imread('map.png', 1)
i = 0

for ll in pts_latlon:
    xn, yn = map.to_pixels(ll[0], ll[1])
    cv2.circle(img2, (int(np.round(xn)), int(np.round(yn))), 2, (0, 255, 0), -1)

for j in range(0, len(p[0])):
    temp = np.array([[p[0][j], p[1][j]]], dtype='float32')
    temp = np.array([temp])
    b = cv2.perspectiveTransform(temp, h)
    i = i + 1
    lonlat = np.array(pyproj.transform(UTM31N, WGS84, b[0][0][0], b[0][0][1]))
    xn, yn = map.to_pixels(lonlat[1], lonlat[0])
    cv2.circle(img2, (int(np.round(xn)), int(np.round(yn))), 2, (0, 0, 255), -1)
    # if i>1000:
    #     break
    print(i, xn, yn, lonlat)

cv2.imshow('image1', img2)
cv2.waitKey(0)


# mouse callback function
def get_position(event, x, y, flags, param):
    if event == cv2.EVENT_MOUSEMOVE:  # EVENT_MBUTTONDBLCLK: #EVENT_LBUTTONDBLCLK:
        cv2.circle(img1, (x, y), 2, (255, 0, 0), -1)
        a = np.array([[x, y]], dtype='float32')
        a = np.array([a])

        pt = cv2.perspectiveTransform(a, h)

        # #pt = cam.project_pixel_to_camera_frame(np.array([[x,y]]),distance=-19)
        # #pt3d = cam.project_camera_frame_to_3d(pt)
        # pt3d = cam.project_pixel_to_3d_ray(np.array([[x,y]]), distorted=True, distance=-20.0 )
        lonlat = np.array(pyproj.transform(UTM31N, WGS84, pt[0][0][0], pt[0][0][1]))
        # #test = cam.project_camera_frame_to_3d(np.array([[x,y,0]]))
        # #print("mouse position = (",x,",",y,")"," pt=",pt," pt3d=",pt3d," lonlat=",lonlat )
        # print("mouse position = (",x,",",y,")"," pt3d=",pt3d," lonlat=",lonlat )
        # #print("mouse position = (",x,",",y,")"," test=",test)
        xn, yn = map.to_pixels(lonlat[1], lonlat[0])
        print("mouse position = (", x, ",", y, ")", " pt = ", pt[0][0], " lonlat=", lonlat)
        # # #print(xn,yn,int(np.round(xn)),int(np.round(yn)))
        cv2.circle(img2, (int(np.round(xn)), int(np.round(yn))), 2, (255, 0, 0), -1)


# Create a black image, a window and bind the function to window
# img = np.zeros((512,512,3), np.uint8)

img1 = cv2.imread('calibresult2.png', 1)
img2 = cv2.imread('map.png', 1)


#while(1):
# cv2.imshow('image2',img2)
# for p in pts_src:
# cv2.circle(img1,(p[0],p[1]),5,(0,0,255),-1)
# cv2.imshow('image1',img1)
# for p in pts_pix:
# cv2.circle(img1,(p[0],p[1]),5,(0,255,0),-1)
# for p in pts_dst:
# lonlat = np.array(pyproj.transform(UTM31N, WGS84, p[0], p[1]))
# xn, yn = map.to_pixels(lonlat[1], lonlat[0])
# cv2.circle(img2,(int(np.round(xn)),int(np.round(yn))),2,(0,0,255),-1)
# for ll in pts_latlon:
# xn, yn = map.to_pixels(ll[0], ll[1])
# cv2.circle(img2,(int(np.round(xn)),int(np.round(yn))),2,(0,255,0),-1)

# cv2.imshow('image1',img1)
# cv2.setMouseCallback('image1',get_position)
# if cv2.waitKey(20) & 0xFF == 27:
# break
#cv2.destroyAllWindows()
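The pixel-to-map chain above (homography, then UTM 31N back to WGS84) appears twice in this script; a minimal helper that captures it, written against the same h, UTM31N, WGS84 and map objects already defined above (the helper name is illustrative):

def pixel_to_lonlat(x, y):
    # image pixel -> ground coordinates via the fitted homography, then UTM 31N -> WGS84 (lon, lat)
    p = cv2.perspectiveTransform(np.array([[[x, y]]], dtype='float32'), h)
    lon, lat = pyproj.transform(UTM31N, WGS84, p[0, 0, 0], p[0, 0, 1])
    return lon, lat

# example: place one converted detection on the map image
# lon, lat = pixel_to_lonlat(1028, 739)
# xn, yn = map.to_pixels(lat, lon)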
4
track/Sylvain/stage_Noham/stage_Noham/HomographyCalibration2/testCalibration/.idea/misc.xml
generated
Normal file
@ -0,0 +1,4 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7" project-jdk-type="Python SDK" />
|
||||
</project>
|
8
track/Sylvain/stage_Noham/stage_Noham/HomographyCalibration2/testCalibration/.idea/modules.xml
generated
Normal file
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/testCalibration.iml" filepath="$PROJECT_DIR$/.idea/testCalibration.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
@ -0,0 +1,11 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="PYTHON_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$" />
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
<component name="TestRunnerService">
|
||||
<option name="PROJECT_TEST_RUNNER" value="Unittests" />
|
||||
</component>
|
||||
</module>
|
152
track/Sylvain/stage_Noham/stage_Noham/HomographyCalibration2/testCalibration/.idea/workspace.xml
generated
Normal file
@ -0,0 +1,152 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ChangeListManager">
|
||||
<list default="true" id="321c7d5d-35f2-484d-886e-b87b35fcb56c" name="Default Changelist" comment="" />
|
||||
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
|
||||
<option name="SHOW_DIALOG" value="false" />
|
||||
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
||||
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
|
||||
<option name="LAST_RESOLUTION" value="IGNORE" />
|
||||
</component>
|
||||
<component name="FileEditorManager">
|
||||
<leaf>
|
||||
<file pinned="false" current-in-tab="true">
|
||||
<entry file="file://$PROJECT_DIR$/calibration.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="145">
|
||||
<caret line="17" column="17" selection-start-line="17" selection-start-column="17" selection-end-line="17" selection-end-column="17" />
|
||||
<folding>
|
||||
<element signature="e#0#18#0" expanded="true" />
|
||||
</folding>
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
</file>
|
||||
</leaf>
|
||||
</component>
|
||||
<component name="FileTemplateManagerImpl">
|
||||
<option name="RECENT_TEMPLATES">
|
||||
<list>
|
||||
<option value="Python Script" />
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="IdeDocumentHistory">
|
||||
<option name="CHANGED_PATHS">
|
||||
<list>
|
||||
<option value="$PROJECT_DIR$/calibration.py" />
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="ProjectFrameBounds" extendedState="6">
|
||||
<option name="x" value="218" />
|
||||
<option name="y" value="151" />
|
||||
<option name="width" value="911" />
|
||||
<option name="height" value="749" />
|
||||
</component>
|
||||
<component name="ProjectView">
|
||||
<navigator proportions="" version="1">
|
||||
<foldersAlwaysOnTop value="true" />
|
||||
</navigator>
|
||||
<panes>
|
||||
<pane id="Scope" />
|
||||
<pane id="ProjectPane">
|
||||
<subPane>
|
||||
<expand>
|
||||
<path>
|
||||
<item name="testCalibration" type="b2602c69:ProjectViewProjectNode" />
|
||||
<item name="testCalibration" type="462c0819:PsiDirectoryNode" />
|
||||
</path>
|
||||
</expand>
|
||||
<select />
|
||||
</subPane>
|
||||
</pane>
|
||||
</panes>
|
||||
</component>
|
||||
<component name="PropertiesComponent">
|
||||
<property name="last_opened_file_path" value="$PROJECT_DIR$/calibration.py" />
|
||||
</component>
|
||||
<component name="RunDashboard">
|
||||
<option name="ruleStates">
|
||||
<list>
|
||||
<RuleState>
|
||||
<option name="name" value="ConfigurationTypeDashboardGroupingRule" />
|
||||
</RuleState>
|
||||
<RuleState>
|
||||
<option name="name" value="StatusDashboardGroupingRule" />
|
||||
</RuleState>
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="RunManager">
|
||||
<configuration name="calibration" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
|
||||
<module name="testCalibration" />
|
||||
<option name="INTERPRETER_OPTIONS" value="" />
|
||||
<option name="PARENT_ENVS" value="true" />
|
||||
<envs>
|
||||
<env name="PYTHONUNBUFFERED" value="1" />
|
||||
</envs>
|
||||
<option name="SDK_HOME" value="/usr/bin/python3.6" />
|
||||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
|
||||
<option name="IS_MODULE_SDK" value="false" />
|
||||
<option name="ADD_CONTENT_ROOTS" value="true" />
|
||||
<option name="ADD_SOURCE_ROOTS" value="true" />
|
||||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/calibration.py" />
|
||||
<option name="PARAMETERS" value="" />
|
||||
<option name="SHOW_COMMAND_LINE" value="false" />
|
||||
<option name="EMULATE_TERMINAL" value="false" />
|
||||
<option name="MODULE_MODE" value="false" />
|
||||
<option name="REDIRECT_INPUT" value="false" />
|
||||
<option name="INPUT_FILE" value="" />
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
||||
<component name="SvnConfiguration">
|
||||
<configuration />
|
||||
</component>
|
||||
<component name="TaskManager">
|
||||
<task active="true" id="Default" summary="Default task">
|
||||
<changelist id="321c7d5d-35f2-484d-886e-b87b35fcb56c" name="Default Changelist" comment="" />
|
||||
<created>1563365442088</created>
|
||||
<option name="number" value="Default" />
|
||||
<option name="presentableId" value="Default" />
|
||||
<updated>1563365442088</updated>
|
||||
</task>
|
||||
<servers />
|
||||
</component>
|
||||
<component name="ToolWindowManager">
|
||||
<frame x="67" y="25" width="1213" height="999" extended-state="6" />
|
||||
<editor active="true" />
|
||||
<layout>
|
||||
<window_info id="Favorites" side_tool="true" />
|
||||
<window_info content_ui="combo" id="Project" order="0" visible="true" weight="0.24957983" />
|
||||
<window_info id="Structure" order="1" side_tool="true" weight="0.25" />
|
||||
<window_info anchor="bottom" id="Version Control" />
|
||||
<window_info anchor="bottom" id="Python Console" />
|
||||
<window_info anchor="bottom" id="Terminal" />
|
||||
<window_info anchor="bottom" id="Event Log" side_tool="true" />
|
||||
<window_info anchor="bottom" id="Message" order="0" />
|
||||
<window_info anchor="bottom" id="Find" order="1" />
|
||||
<window_info active="true" anchor="bottom" id="Run" order="2" visible="true" weight="0.32912844" />
|
||||
<window_info anchor="bottom" id="Debug" order="3" weight="0.39908257" />
|
||||
<window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
|
||||
<window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
|
||||
<window_info anchor="bottom" id="TODO" order="6" />
|
||||
<window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
|
||||
<window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
|
||||
<window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
|
||||
</layout>
|
||||
</component>
|
||||
<component name="editorHistoryManager">
|
||||
<entry file="file://$PROJECT_DIR$/calibration.py">
|
||||
<provider selected="true" editor-type-id="text-editor">
|
||||
<state relative-caret-position="145">
|
||||
<caret line="17" column="17" selection-start-line="17" selection-start-column="17" selection-end-line="17" selection-end-column="17" />
|
||||
<folding>
|
||||
<element signature="e#0#18#0" expanded="true" />
|
||||
</folding>
|
||||
</state>
|
||||
</provider>
|
||||
</entry>
|
||||
</component>
|
||||
</project>
|
@@ -0,0 +1,49 @@
import numpy as np
import cv2
import glob

# termination criteria
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)

# prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
objp = np.zeros((6*9, 3), np.float32)
objp[:, :2] = np.mgrid[0:9, 0:6].T.reshape(-1, 2)

# Arrays to store object points and image points from all the images.
objpoints = []  # 3d points in real world space
imgpoints = []  # 2d points in image plane

images = glob.glob('*.png')

fname = "img_001659.png"
img = cv2.imread(fname)
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)

# Find the chess board corners
ret, corners = cv2.findChessboardCorners(gray, (9, 6), None)

# If found, add object points, image points (after refining them)
if ret == True:
    objpoints.append(objp)
    corners2 = cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria)
    imgpoints.append(corners2)

    # Draw and display the corners
    img = cv2.drawChessboardCorners(img, (9, 6), corners2, ret)
    cv2.imshow('img', img)
    cv2.waitKey(500)

print(objpoints)
print(imgpoints)
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, gray.shape[::-1], None, None)

img = cv2.imread('img_000259.png')
h, w = img.shape[:2]
newcameramtx, roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (w, h), 1, (w, h))
# undistort
dst = cv2.undistort(img, mtx, dist, None, newcameramtx)

# crop the image
x, y, w, h = roi
dst = dst[y:y+h, x:x+w]
cv2.imwrite('calibresult.png', dst)
cv2.destroyAllWindows()
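A sanity check that is commonly added right after cv2.calibrateCamera, sketched here with the objpoints, imgpoints, rvecs, tvecs, mtx and dist computed above: the mean reprojection error, typically a fraction of a pixel for a good calibration.

# mean reprojection error over the calibration points
tot_error = 0
for i in range(len(objpoints)):
    imgpoints2, _ = cv2.projectPoints(objpoints[i], rvecs[i], tvecs[i], mtx, dist)
    tot_error += cv2.norm(imgpoints[i], imgpoints2, cv2.NORM_L2) / len(imgpoints2)
print("mean reprojection error:", tot_error / len(objpoints))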
After Width: | Height: | Size: 3.4 MiB |
After Width: | Height: | Size: 2.9 MiB |
After Width: | Height: | Size: 1.3 MiB |
After Width: | Height: | Size: 1.2 MiB |
After Width: | Height: | Size: 1.1 MiB |
After Width: | Height: | Size: 1.6 MiB |
After Width: | Height: | Size: 1.3 MiB |
After Width: | Height: | Size: 3.5 MiB |
BIN  track/Sylvain/stage_Noham/stage_Noham/frame.png  (new file, 3.5 MiB)
BIN  track/Sylvain/stage_Noham/stage_Noham/frame_ground_pts.png  (new file, 2.9 MiB)
BIN  track/Sylvain/stage_Noham/stage_Noham/gmap.png  (new file, 1.9 MiB)
BIN  track/Sylvain/stage_Noham/stage_Noham/image_vide_pts.png  (new file, 3.4 MiB)
BIN  track/Sylvain/stage_Noham/stage_Noham/image_vide_pts_labels.png  (new file, 3.7 MiB)
track/Sylvain/stage_Noham/stage_Noham/projection_sol.py  (new file, 203 lines)
@@ -0,0 +1,203 @@
|
||||
import cv2
|
||||
import numpy as np
|
||||
import argparse
|
||||
import time
|
||||
import os
|
||||
import matplotlib
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib.patches import Ellipse, Circle, Rectangle, Polygon, Arrow
|
||||
from matplotlib.lines import Line2D
|
||||
from matplotlib.collections import EllipseCollection, LineCollection
|
||||
import sys
|
||||
from scipy.optimize import least_squares
|
||||
from scipy.spatial import cKDTree
|
||||
from imageio import imread
|
||||
from matplotlib.offsetbox import TextArea, DrawingArea, OffsetImage, AnnotationBbox
|
||||
from shapely.geometry import Point
|
||||
import geopandas as gpd
|
||||
import cartopy
|
||||
import cartopy.crs as ccrs
|
||||
import cameratransform as ct
|
||||
import geodatasets
|
||||
|
||||
img = cv2.imread("frame_ground_pts.png")
|
||||
nh,nw,_ = img.shape
|
||||
## img : b g r
|
||||
|
||||
#mask = (img[:,:,0]==0)*(img[:,:,1]==0)*(img[:,:,2]==255)
|
||||
#ind_px_ground_pts = np.where(mask)
|
||||
#px_ground_pts = np.vstack([ind_px_ground_pts[1],ind_px_ground_pts[0]]).T
|
||||
|
||||
tab = np.array([
|
||||
[215, 257, 41.94076496048223, -85.00154950929712],
|
||||
[286, 310, 41.94073282540695, -85.00133550964574],
|
||||
[532, 496, 41.94066182925292, -85.00090550095656],
|
||||
[359, 462, 41.94064090405561, -85.00098487171928],
|
||||
[391, 489, 41.94063193611181, -85.00093564175253],
|
||||
[471, 428, 41.940688733067965, -85.00101802659485],
|
||||
[536, 433, 41.94069994298754, -85.00099391395807],
|
||||
[242, 528, 41.94058410705547, -85.00092057135889],
|
||||
[636, 486, 41.9406894803946, -85.00089243994931],
|
||||
[243, 552, 41.940578130109905, -85.00088875077661],
|
||||
[279, 548, 41.9405866094118, -85.00088875077661],
|
||||
[316, 543, 41.940595088712534, -85.00088539801524],
|
||||
[347, 540, 41.94060157288293, -85.0008820452539],
|
||||
[382, 536, 41.94061055096393, -85.00088070414937],
|
||||
[414, 532, 41.94061803269714, -85.00087869249255],
|
||||
[447, 526, 41.940624516865206, -85.00087668083574],
|
||||
[479, 526, 41.94063449250709, -85.00087198696986],
|
||||
[1131, 495, 41.94081056233172, -85.00078883849507],
|
||||
[1286, 561, 41.94079011232066, -85.00068557344535],
|
||||
[1429, 652, 41.94075918790118, -85.00059236667968],
|
||||
[1410, 702, 41.94072826346671, -85.00056554458887],
|
||||
[1389, 734, 41.940709309773645, -85.00054811022981],
|
||||
[1233, 754, 41.940672399934165, -85.00055816851388],
|
||||
[1078, 778, 41.940637485201485, -85.0005668856934],
|
||||
[945, 803, 41.94060755827253, -85.00057359121611],
|
||||
[1164, 695, 41.94068486947693, -85.00060242496377],
|
||||
[892, 556, 41.94070432195875, -85.00075866364288],
|
||||
[964, 521, 41.940746219591766, -85.00078816794279],
|
||||
[1637, 895, 41.9406788840967, -85.00044752738918],
|
||||
[1354, 998, 41.940611049748284, -85.00044819794145],
|
||||
[1206, 976, 41.94059758262643, -85.00047233782321],
|
||||
[846, 906, 41.94056915202646, -85.00054341636391],
|
||||
[944, 999, 41.94056216907017, -85.00049848936177],
|
||||
[486, 925, 41.94051977253203, -85.00057962618882],
|
||||
[339, 838, 41.94052027131499, -85.00064332865455],
|
||||
[98, 826, 41.94048934676464, -85.00068356179082],
|
||||
[34, 672, 41.94050630539088, -85.00079621457232],
|
||||
[164, 538, 41.94056466298198, -85.00091758453335],
|
||||
[131, 593, 41.94054570924032, -85.00086528145621],
|
||||
[150, 619, 41.94053872628142, -85.00083108329041],
|
||||
[172, 660, 41.94053074575605, -85.00078883849733],
|
||||
[1341, 694, 41.940719285401634, -85.00057560287796],
|
||||
[1313, 701, 41.94071230246175, -85.00057627343024],
|
||||
[1281, 706, 41.940704321958115, -85.00057828508704],
|
||||
[1249, 708, 41.94069534389031, -85.00058230840068],
|
||||
[265, 379, 41.94066890846167, -85.00116770052296],
|
||||
[287, 399, 41.94066092795256, -85.00111807965492]])
|
||||
px_ground_pts = tab[:,:2].astype(int)
|
||||
|
||||
# lat long
|
||||
# [216, 258] [144,172] [41.94076551439789, -85.00155042979091] 65663.889 45130.928 MGRS / UTMREF (WGS84)
|
||||
# [1287, 561] [857,372] [41.9407914510061, -85.00068541739631]
|
||||
# [1354, 999] [905,668] [41.94061008025459, -85.00044835086464]
|
||||
# [35, 673] [25,449] [41.94050633514897, -85.0007970380291]
|
||||
#ind_pts = [0, 35, 46, 91]
|
||||
#px_ground_pts = px_ground_pts[ind_pts,:]
|
||||
|
||||
#real_ground_pts = np.array([[41.94076551439789, -85.00155042979091], [41.9407914510061, -85.00068541739631], [41.94061008025459, -85.00044835086464], [41.94050633514897, -85.0007970380291] ])
|
||||
#real_ground_pts = tab[:,2:]
|
||||
|
||||
usa = gpd.read_file(geodatasets.get_path('geoda.natregimes'))
print("usa.crs =", usa.crs)

ax = usa.plot()
ax.set_title("WGS84 (lat/lon)")
# Reproject to Albers contiguous USA
usa = usa.to_crs("ESRI:102003")
ax = usa.plot()
ax.set_title("NAD 1983 Albers contiguous USA")

geometry = gpd.points_from_xy(tab[:,3], tab[:,2])  # long, lat
gts = gpd.GeoDataFrame({"lat": tab[:,2], "lon": tab[:,3]}, geometry=geometry, crs="EPSG:4326")
gts = gts.to_crs("ESRI:102003")
X = gts["geometry"].x
Y = gts["geometry"].y
gts["X"] = X
gts["Y"] = Y
print("gts =", gts.head(10))

real_ground_pts = gts[["X","Y"]].values

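# Optional sanity check (added sketch, not in the original script): "ESRI:102003"
# is a metric CRS, so the projected spacing between two reference points should
# roughly match their geodesic distance on the WGS84 ellipsoid. pyproj is assumed
# to be available since it ships as a geopandas dependency.
from pyproj import Geod
geod = Geod(ellps="WGS84")
_, _, d_geodesic = geod.inv(tab[0, 3], tab[0, 2], tab[1, 3], tab[1, 2])  # lon, lat order
d_projected = np.linalg.norm(real_ground_pts[0] - real_ground_pts[1])
print("geodesic vs projected distance (m):", d_geodesic, d_projected)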
fig, ax = plt.subplots()
usa.plot(ax=ax)
ax.scatter(X, Y, color="red")
ax.set_title("pts coord")
ax.set_xlim([903530.0, 903660.0])
ax.set_ylim([549816.0, 549865.0])

img_pts = img.copy()
for i, pt in enumerate(px_ground_pts):
    img_pts = cv2.circle(img_pts, pt, 1, (0, 0, 255), 1)
    txt = str(i) + ": " + str(pt)
    img_pts = cv2.putText(img_pts, txt, pt, cv2.FONT_HERSHEY_SIMPLEX,
                          0.3, (0, 255, 0), 1, cv2.LINE_AA)

## camera parameters used to initialise the minimisation of the cost function
f = 3.2  # in mm
sensor_size = (6.17, 4.55)  # in mm
image_size = (nw, nh)  # in px
elevation = 10  # in m
angle = 45  # camera tilt (0°: camera pointing straight down, 90°: camera parallel to the ground, 180°: camera pointing up)
heading_deg = 45  # direction the camera is facing (0°: north, 90°: east, 180°: south, 270°: west)
roll_deg = 0  # image rotation (0°: landscape, not rotated; 90°: portrait; 180°: upside-down landscape)

#px_ground_pts = [ [], [] ]
#ground_pts = [ [], [] ]

## Find camera parameters: [focal, sensor_x, sensor_y, elevation, tilt, heading, roll]
def fct_cost(param):
    #print("cost param : ",param)
    f, sx, sy, e, a, b, c = param
    camloc = ct.Camera(
        ct.RectilinearProjection(
            focallength_mm=f,
            sensor=(sx, sy),
            image=image_size
        ),
        ct.SpatialOrientation(
            elevation_m=e,
            tilt_deg=a,
            heading_deg=b,
            roll_deg=c
        )
    )
    pts = []
    for pt in px_ground_pts:
        gpt = camloc.spaceFromImage(pt)
        pts.append(gpt)
    pts = np.array(pts)
    #print(pts)
    #print(np.linalg.norm( real_ground_pts-pts[:,:2], axis=1 )**2)
    return np.linalg.norm(real_ground_pts - pts[:,:2], axis=1)

param = [f, sensor_size[0], sensor_size[1], elevation, angle, heading_deg, roll_deg]
#cost = fct_cost(param)
#print("cost =",cost)

res = least_squares(fct_cost, param)
print(res)

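# Quick fit-quality check (added sketch): least_squares stores the final residual
# vector in res.fun; each entry here is a ground-point error in metres, so the RMS
# value summarises how well the fitted camera reproduces the GPS references.
rms_m = np.sqrt(np.mean(res.fun ** 2))
print("RMS ground-point error (m):", rms_m)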
# initialize the camera
cam = ct.Camera(ct.RectilinearProjection(focallength_mm=res.x[0],
                                         sensor=(res.x[1], res.x[2]),
                                         image=image_size),
                ct.SpatialOrientation(elevation_m=res.x[3],
                                      tilt_deg=res.x[4],
                                      heading_deg=res.x[5],
                                      roll_deg=res.x[6]))

test_ground_pts = []
for pt in px_ground_pts:
    gpt = cam.spaceFromImage(pt)
    test_ground_pts.append(gpt)
test_ground_pts = np.array(test_ground_pts)
print("test_ground_pts =", test_ground_pts)

plt.figure()
plt.plot(test_ground_pts[:,0], test_ground_pts[:,1], linewidth=0, marker="o")

cv2.imshow("pts", img_pts)
cv2.waitKey(0)
cv2.destroyAllWindows()

plt.show()
sys.exit()
156
track/Sylvain/stage_Noham/stage_Noham/projection_sol2.py
Normal file
@ -0,0 +1,156 @@
import cv2
import numpy as np
import argparse
import time
import os
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse, Circle, Rectangle, Polygon, Arrow
from matplotlib.lines import Line2D
from matplotlib.collections import EllipseCollection, LineCollection
import sys
from scipy.optimize import least_squares
from scipy.spatial import cKDTree
from imageio import imread
from matplotlib.offsetbox import TextArea, DrawingArea, OffsetImage, AnnotationBbox
from shapely.geometry import Point
import geopandas as gpd
import cartopy
import cartopy.crs as ccrs
import cameratransform as ct
import geodatasets

img = cv2.imread("track/Sylvain/stage_Noham/stage_Noham/image_vide_pts.png")
nh, nw, _ = img.shape
## img : b g r

mask = (img[:,:,0]==0)*(img[:,:,1]==0)*(img[:,:,2]==255)
ind_px_ground_pts = np.where(mask)
px_ground_pts = np.vstack([ind_px_ground_pts[1], ind_px_ground_pts[0]]).T

mask2 = (img[:,:,0]==255)*(img[:,:,1]==0)*(img[:,:,2]==0)
ind_px_ground_pts2 = np.where(mask2)
px_ground_pts2 = np.vstack([ind_px_ground_pts2[1], ind_px_ground_pts2[0]]).T

img_pts = img.copy()
for i, pt in enumerate(px_ground_pts):
    img_pts = cv2.circle(img_pts, pt, 1, (0, 0, 255), 1)
    txt = str(i) + ": " + str(pt)
    img_pts = cv2.putText(img_pts, txt, pt, cv2.FONT_HERSHEY_SIMPLEX, 0.4, (0, 255, 0), 1, cv2.LINE_AA)

distances = np.array([
    [ 0, 8, 37.1],
    [ 1, 7, 10.0],
    [ 2, 4, 6.8],
    [ 2, 5, 28.3],
    [ 2, 10, 17.7],
    [ 2, 12, 19.5],
    [ 3, 11, 20.4],
    [ 4, 7, 3.8],
    [ 5, 9, 9.1],
    [ 5, 13, 12.7],
    [ 6, 11, 11.9],
    [ 9, 10, 7.0],
    [ 9, 13, 9.2],
    [ 9, 15, 16.3],
    [10, 12, 5.3],
    [11, 16, 13.6],
    [14, 20, 16.1],
    [16, 20, 9.7],
    [17, 23, 18.4],
    [17, 25, 16.0],
    [18, 19, 11.6],
    [19, 20, 16.0],
    [19, 24, 8.6],
    [22, 23, 6.0],
    [22, 25, 3.8],
    [23, 24, 12.2]
])
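# Small consistency check (added sketch): every point index referenced in
# `distances` must exist in px_ground_pts, otherwise the cost function below
# raises an IndexError.
assert distances[:, :2].max() < len(px_ground_pts), "distance pair refers to a missing point"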
for i, dd in enumerate(distances):
    pt1 = px_ground_pts[int(dd[0]), :]
    pt2 = px_ground_pts[int(dd[1]), :]
    img_pts = cv2.line(img_pts, pt1, pt2, (255, 255, 0), 2)

# cv2.imwrite("image_vide_pts_labels.png",img_pts)
# cv2.imshow("pts", img_pts)
# cv2.waitKey(0)
# cv2.destroyAllWindows()

## camera parameters used to initialise the minimisation of the cost function
f = 3.2  # in mm
sensor_size = (6.17, 4.55)  # in mm
image_size = (nw, nh)  # in px
elevation = 10  # in m
angle = 45  # camera tilt (0°: camera pointing straight down, 90°: camera parallel to the ground, 180°: camera pointing up)
heading_deg = 45  # direction the camera is facing (0°: north, 90°: east, 180°: south, 270°: west)
roll_deg = 0  # image rotation (0°: landscape, not rotated; 90°: portrait; 180°: upside-down landscape)

## Find camera parameters: [focal, sensor_x, sensor_y, elevation, tilt, heading, roll]
def fct_cost(param):
    #print("cost param : ",param)
    f, sx, sy, e, a, b, c = param
    camloc = ct.Camera(
        ct.RectilinearProjection(
            focallength_mm=f,
            sensor=(sx, sy),
            image=image_size
        ),
        ct.SpatialOrientation(
            elevation_m=e,
            tilt_deg=a,
            heading_deg=b,
            roll_deg=c
        )
    )
    pts = []
    for pt in px_ground_pts:
        gpt = camloc.spaceFromImage(pt)
        pts.append(gpt)
    pts = np.array(pts)
    cost = []
    for dd in distances:
        cost.append(np.linalg.norm(pts[int(dd[0]),:] - pts[int(dd[1]),:]) - dd[2])

    return np.array(cost)

param = [f, sensor_size[0], sensor_size[1], elevation, angle, heading_deg, roll_deg]
#cost = fct_cost(param)
#print("cost =",cost)

res = least_squares(fct_cost, param)
print(res)

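# Added sketch: res.x holds the fitted parameters in the same order as `param`
# ([f, sensor_x, sensor_y, elevation, tilt, heading, roll]) and res.fun holds the
# per-pair distance errors in metres, which gives a quick measure of the fit.
print("worst distance error (m):", np.max(np.abs(res.fun)))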
# initialize the camera
cam = ct.Camera(ct.RectilinearProjection(focallength_mm=res.x[0],
                                         sensor=(res.x[1], res.x[2]),
                                         image=image_size),
                ct.SpatialOrientation(elevation_m=res.x[3],
                                      tilt_deg=res.x[4],
                                      heading_deg=res.x[5],
                                      roll_deg=res.x[6]))

space_pts = []
for pt in px_ground_pts:
    space_pts.append(cam.spaceFromImage(pt))
space_pts = np.array(space_pts)

space_pts2 = []
for pt in px_ground_pts2:
    space_pts2.append(cam.spaceFromImage(pt))
space_pts2 = np.array(space_pts2)
#print("space_pts2 =", space_pts2)

plt.figure()
plt.scatter(space_pts[:,0], space_pts[:,1], color="red", s=2)
# plt.scatter(space_pts2[:,0], space_pts2[:,1], color="blue", s=1)
plt.plot([28.569, 51.681], [26.665, 89.904], color='blue', linestyle='-', linewidth=1)
for dd in distances:
    plt.plot([space_pts[int(dd[0]),0], space_pts[int(dd[1]),0]], [space_pts[int(dd[0]),1], space_pts[int(dd[1]),1]], color="green")
plt.axis("equal")

plt.show()
24
track/Sylvain/stage_Noham/stage_Noham/test_camera.py
Normal file
@ -0,0 +1,24 @@
import cameratransform as ct
import matplotlib.pyplot as plt

im = plt.imread("gmap.png")
nh, nw, _ = im.shape

# intrinsic camera parameters
f = 6.2  # in mm
sensor_size = (6.17, 4.55)  # in mm
image_size = (nw, nh)  # in px

# initialize the camera
cam = ct.Camera(ct.RectilinearProjection(focallength_mm=f,
                                         sensor=sensor_size,
                                         image=image_size),
                ct.SpatialOrientation(elevation_m=10,
                                      tilt_deg=45))

# display a top view of the image
top_im = cam.getTopViewOfImage(im, [-150, 150, 50, 300], scaling=0.5, do_plot=True)
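# (Comment added) The second argument of getTopViewOfImage is the ground-plane
# extent [x_min, x_max, y_min, y_max] in metres and `scaling` is the size in
# metres of one output pixel, following the cameratransform documentation.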
plt.xlabel("x position in m")
plt.ylabel("y position in m")

plt.show()
36
track/calibrated.py
Normal file
@ -0,0 +1,36 @@
import cv2
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import least_squares
from imageio import imread
import cameratransform as ct

img = cv2.imread("track/Sylvain/stage_Noham/stage_Noham/image_vide_pts.png")
nh, nw, _ = img.shape

res = np.array([3.99594676, 3.53413555, 4.55, 16.41739973, 74.96395791, 49.11271189, 2.79384615])
image_size = (nw, nh)
cam = ct.Camera(ct.RectilinearProjection(focallength_mm=res[0], sensor=(res[1], res[2]), image=image_size),
                ct.SpatialOrientation(elevation_m=res[3], tilt_deg=res[4], heading_deg=res[5], roll_deg=res[6]))

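# Round-trip sanity check (added sketch, not part of the original script):
# project the image centre onto the ground plane and back; with a consistent
# calibration the recovered pixel should land close to the starting one.
centre_px = np.array([nw / 2, nh / 2])
ground_pt = cam.spaceFromImage(centre_px)   # 3D point on the Z=0 plane
back_px = cam.imageFromSpace(ground_pt)     # re-projected pixel coordinates
print("round-trip pixel error (px):", np.linalg.norm(back_px - centre_px))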
mask = (img[:,:,0]==0)*(img[:,:,1]==0)*(img[:,:,2]==255)
ind_px_ground_pts = np.where(mask)
print('ind_px_ground_pts: ', ind_px_ground_pts)
px_ground_pts = np.vstack([ind_px_ground_pts[1], ind_px_ground_pts[0]]).T
print(px_ground_pts)
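# (Comment added) np.where returns (row, col) index arrays, so stacking
# [cols, rows] and transposing yields pixel coordinates in (x, y) order,
# which is what spaceFromImage expects.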

space_pts = []
for pt in px_ground_pts:
    space_pts.append(cam.spaceFromImage(pt))
space_pts = np.array(space_pts)

plt.figure()
plt.scatter(space_pts[:,0], space_pts[:,1], color="red", s=2)
plt.axis("equal")

plt.draw()
plt.pause(1)
110
track/extract + calibrated.py
Normal file
@ -0,0 +1,110 @@
import pandas as pd
import numpy as np
import cv2
import matplotlib.pyplot as plt
from scipy.optimize import least_squares
from imageio import imread
import cameratransform as ct

cap = cv2.VideoCapture('cams/new/cut2.mp4')
folder_path = "track/expgood/labels/"
name = 'cut2'
fps = 780

allfiles = []
for i in range(1, fps+1):
    allfiles.append(folder_path + name + '_' + str(i) + '.txt')

# Set the desired dimensions for displaying the video
display_width = 1280
display_height = 720

display_width = 1920
display_height = 1080

width = 1920
height = 1080

frame_nb = 0

bleu = (255, 0, 0)
vert = (0, 255, 0)

# # Cam part
# img = cv2.imread("track/Sylvain/stage_Noham/stage_Noham/image_vide_pts.png")
# nh,nw,_ = img.shape

res = np.array([3.99594676, 3.53413555, 4.55, 16.41739973, 74.96395791, 49.11271189, 2.79384615])
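# (Comment added) These seven hard-coded values are consumed just below as
# [focallength_mm, sensor_width_mm, sensor_height_mm, elevation_m, tilt_deg,
# heading_deg, roll_deg]; they presumably come from the least-squares calibration
# in the projection scripts.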
image_size = (width, height)
cam = ct.Camera(ct.RectilinearProjection(focallength_mm=res[0], sensor=(res[1], res[2]), image=image_size),
                ct.SpatialOrientation(elevation_m=res[3], tilt_deg=res[4], heading_deg=res[5], roll_deg=res[6]))

if not cap.isOpened():
    print("Error opening video stream or file")

while cap.isOpened():
    ret, frame = cap.read()
    if ret:
        df = pd.read_csv(allfiles[frame_nb], header=None, sep=' ')
        ind_px_ground_pts = []
        for index, row in df.iterrows():
            class_id, center_x, center_y, bbox_width, bbox_height, object_id = row

            center_x = int(center_x * width)
            center_y = int(center_y * height)
            bbox_width = int(bbox_width * width)
            bbox_height = int(bbox_height * height)

            top_left_x = int(center_x - bbox_width / 2)
            top_left_y = int(center_y - bbox_height / 2)
            bottom_right_x = int(center_x + bbox_width / 2)
            bottom_right_y = int(center_y + bbox_height / 2)

            # lines from (19;112) to (636;714) and from (86;86) to (1087;715)
            if (((112-714)/(19-636)) * top_left_x + 112 - ((112-714)/(19-636)) *19 > top_left_y) and (((86-715)/(86-1097)) * bottom_right_x + 112 - ((86-715)/(86-1097)) *86 < bottom_right_y):

                label = f'Class: {int(class_id)}, Object ID: {int(object_id)}'
                cv2.putText(frame, label, (top_left_x, top_left_y - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, vert, 1)

                # get the center of the bounding box
                center_x = (top_left_x + bottom_right_x) // 2
                center_y = (top_left_y + bottom_right_y) // 2
                cv2.circle(frame, (center_x, center_y), 5, vert, -1)

                ind_px_ground_pts += [center_x, center_y]

            else:
                pass
        ind_px_ground_pts = np.array(ind_px_ground_pts)
        print('ind_px_ground_pts: ', len(ind_px_ground_pts))

        # reshape the flat [x, y, x, y, ...] list into (x, y) pixel pairs
        # (the original np.vstack of two scalar entries kept only a single point)
        px_ground_pts = ind_px_ground_pts.reshape(-1, 2)

        space_pts = []
        for pt in px_ground_pts:
            space_pts.append(cam.spaceFromImage(pt))
        space_pts = np.array(space_pts)

        # resized_frame = cv2.resize(frame, (display_width, display_height))
        # cv2.imshow('Frame', resized_frame)
        # plt.figure()

        #######################
        plt.scatter(space_pts[:,0], space_pts[:,1], color="red", s=2)
        plt.plot([28.569, 51.681], [26.665, 89.904], color='blue', linestyle='-', linewidth=1)
        # plt.axis("equal")
        plt.xlim([0, 100])
        plt.ylim([0, 150])
        plt.draw()
        plt.pause(0.0000000000001)
        plt.clf()
        ######################

        if cv2.waitKey(25) & 0xFF == ord('q'): break
        frame_nb = frame_nb + 1
    else:
        break

cap.release()
cv2.destroyAllWindows()
@ -1,8 +1,6 @@
import os
import pandas as pd
import numpy as np
import cv2
import time

cap = cv2.VideoCapture('cams/new/cut2.mp4')
folder_path = "track/expgood/labels/"
@ -39,8 +37,6 @@ while cap.isOpened():
for index, row in df.iterrows():
class_id, center_x, center_y, bbox_width, bbox_height, object_id = row

center_x = int(center_x * width)
center_y = int(center_y * height)
bbox_width = int(bbox_width * width)
@ -53,15 +49,25 @@ while cap.isOpened():

# lines from (19;112) to (636;714) and from (86;86) to (1087;715)
if (((112-714)/(19-636)) * top_left_x + 112 - ((112-714)/(19-636)) *19 > top_left_y ) and (((86-715)/(86-1097)) * bottom_right_x + 112 - ((86-715)/(86-1097)) *86 < bottom_right_y ):
cv2.rectangle(frame, (top_left_x, top_left_y), (bottom_right_x, bottom_right_y), vert, 2)

# cv2.rectangle(frame, (top_left_x, top_left_y), (bottom_right_x, bottom_right_y), vert, 2)

label = f'Class: {int(class_id)}, Object ID: {int(object_id)}'
cv2.putText(frame, label, (top_left_x, top_left_y - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, vert, 1)
else :
cv2.rectangle(frame, (top_left_x, top_left_y), (bottom_right_x, bottom_right_y), bleu, 2)

label = f'Class: {int(class_id)}, Object ID: {int(object_id)}'
cv2.putText(frame, label, (top_left_x, top_left_y - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, bleu, 1)
# get the center of the bounding box
center_x = (top_left_x + bottom_right_x) // 2
center_y = (top_left_y + bottom_right_y) // 2
cv2.circle(frame, (center_x, center_y), 5, vert, -1)

else :
pass
# cv2.rectangle(frame, (top_left_x, top_left_y), (bottom_right_x, bottom_right_y), bleu, 2)

# label = f'Class: {int(class_id)}, Object ID: {int(object_id)}'
# cv2.putText(frame, label, (top_left_x, top_left_y - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, bleu, 1)

resized_frame = cv2.resize(frame, (display_width, display_height))

155
track/extract_V2.py
Normal file
@ -0,0 +1,155 @@
import cv2
import numpy as np
import argparse
import time
import os
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse, Circle, Rectangle, Polygon, Arrow
from matplotlib.lines import Line2D
from matplotlib.collections import EllipseCollection, LineCollection
import sys
from scipy.optimize import least_squares
from scipy.spatial import cKDTree
from imageio import imread
from matplotlib.offsetbox import TextArea, DrawingArea, OffsetImage, AnnotationBbox
from shapely.geometry import Point
import geopandas as gpd
import cartopy
import cartopy.crs as ccrs
import cameratransform as ct
import geodatasets

img = cv2.imread("track/Sylvain/stage_Noham/stage_Noham/image_vide_pts.png")
nh, nw, _ = img.shape
## img : b g r

mask = (img[:,:,0]==0)*(img[:,:,1]==0)*(img[:,:,2]==255)
ind_px_ground_pts = np.where(mask)
px_ground_pts = np.vstack([ind_px_ground_pts[1], ind_px_ground_pts[0]]).T

mask2 = (img[:,:,0]==255)*(img[:,:,1]==0)*(img[:,:,2]==0)
ind_px_ground_pts2 = np.where(mask2)
px_ground_pts2 = np.vstack([ind_px_ground_pts2[1], ind_px_ground_pts2[0]]).T

img_pts = img.copy()
for i, pt in enumerate(px_ground_pts):
    img_pts = cv2.circle(img_pts, pt, 1, (0, 0, 255), 1)
    txt = str(i) + ": " + str(pt)
    img_pts = cv2.putText(img_pts, txt, pt, cv2.FONT_HERSHEY_SIMPLEX, 0.4, (0, 255, 0), 1, cv2.LINE_AA)

distances = np.array([
    [ 0, 8, 37.1],
    [ 1, 7, 10.0],
    [ 2, 4, 6.8],
    [ 2, 5, 28.3],
    [ 2, 10, 17.7],
    [ 2, 12, 19.5],
    [ 3, 11, 20.4],
    [ 4, 7, 3.8],
    [ 5, 9, 9.1],
    [ 5, 13, 12.7],
    [ 6, 11, 11.9],
    [ 9, 10, 7.0],
    [ 9, 13, 9.2],
    [ 9, 15, 16.3],
    [10, 12, 5.3],
    [11, 16, 13.6],
    [14, 20, 16.1],
    [16, 20, 9.7],
    [17, 23, 18.4],
    [17, 25, 16.0],
    [18, 19, 11.6],
    [19, 20, 16.0],
    [19, 24, 8.6],
    [22, 23, 6.0],
    [22, 25, 3.8],
    [23, 24, 12.2]
])
for i, dd in enumerate(distances):
    pt1 = px_ground_pts[int(dd[0]), :]
    pt2 = px_ground_pts[int(dd[1]), :]
    img_pts = cv2.line(img_pts, pt1, pt2, (255, 255, 0), 2)

# cv2.imwrite("image_vide_pts_labels.png",img_pts)
cv2.imshow("pts", img_pts)
cv2.waitKey(0)
cv2.destroyAllWindows()

## camera parameters used to initialise the minimisation of the cost function
f = 3.2  # in mm
sensor_size = (6.17, 4.55)  # in mm
image_size = (nw, nh)  # in px
elevation = 10  # in m
angle = 45  # camera tilt (0°: camera pointing straight down, 90°: camera parallel to the ground, 180°: camera pointing up)
heading_deg = 45  # direction the camera is facing (0°: north, 90°: east, 180°: south, 270°: west)
roll_deg = 0  # image rotation (0°: landscape, not rotated; 90°: portrait; 180°: upside-down landscape)

## Find camera parameters: [focal, sensor_x, sensor_y, elevation, tilt, heading, roll]
def fct_cost(param):
    #print("cost param : ",param)
    f, sx, sy, e, a, b, c = param
    camloc = ct.Camera(
        ct.RectilinearProjection(
            focallength_mm=f,
            sensor=(sx, sy),
            image=image_size
        ),
        ct.SpatialOrientation(
            elevation_m=e,
            tilt_deg=a,
            heading_deg=b,
            roll_deg=c
        )
    )
    pts = []
    for pt in px_ground_pts:
        gpt = camloc.spaceFromImage(pt)
        pts.append(gpt)
    pts = np.array(pts)
    cost = []
    for dd in distances:
        cost.append(np.linalg.norm(pts[int(dd[0]),:] - pts[int(dd[1]),:]) - dd[2])

    return np.array(cost)

param = [f, sensor_size[0], sensor_size[1], elevation, angle, heading_deg, roll_deg]
#cost = fct_cost(param)
#print("cost =",cost)

res = least_squares(fct_cost, param)
print(res)

# initialize the camera
cam = ct.Camera(ct.RectilinearProjection(focallength_mm=res.x[0],
                                         sensor=(res.x[1], res.x[2]),
                                         image=image_size),
                ct.SpatialOrientation(elevation_m=res.x[3],
                                      tilt_deg=res.x[4],
                                      heading_deg=res.x[5],
                                      roll_deg=res.x[6]))

space_pts = []
for pt in px_ground_pts:
    space_pts.append(cam.spaceFromImage(pt))
space_pts = np.array(space_pts)

space_pts2 = []
for pt in px_ground_pts2:
    space_pts2.append(cam.spaceFromImage(pt))
space_pts2 = np.array(space_pts2)
#print("space_pts2 =", space_pts2)

plt.figure()
plt.scatter(space_pts[:,0], space_pts[:,1], color="red", s=2)
plt.scatter(space_pts2[:,0], space_pts2[:,1], color="blue", s=1)
for dd in distances:
    plt.plot([space_pts[int(dd[0]),0], space_pts[int(dd[1]),0]], [space_pts[int(dd[0]),1], space_pts[int(dd[1]),1]], color="green")
plt.axis("equal")

plt.show()