Commit 12fdbbcf authored by Andy Regensky

Initial commit

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
from PySide2.QtCore import QObject, Signal, Slot
import numpy as np
from fishui import projections
from fishui import coordinate_conversion
class MainViewModel(QObject):
    """View model driving the fisheye distortion visualization: recomputes all
    derived block points whenever one of the parameters changes and notifies
    observers via the didChange signal."""

    didChange = Signal()

    def __init__(self, parent=None):
        super().__init__(parent)
        self.sensor_size_mm = (5.2, 5.2)
        self.sensor_size_px = (1088, 1088)
        self.focal_length_mm = 1.8
        self.origin = (256, 256)
        self.blocksize = (64, 64)
        self.affineModel = False
        self.motion_vector_cp0 = (0, 0)
        self.motion_vector_cp1 = (0, 0)
        self.perspective_scale = 2
        # Values used for drawing
        self.samples_per_edge = 100
        self.projection = None
        self.perspective = None
        self.block_xf = None
        self.block_yf = None
        self.block_xp = None
        self.block_yp = None
        self.block_xpm = None
        self.block_ypm = None
        self.block_xfm = None
        self.block_yfm = None
        self.block_ctr_px = None
        self.block_ctr_py = None
        self.block_ctr_pxm = None
        self.block_ctr_pym = None
        self.uwc_sign = None
        self.updateProjections()

    def setSensorSize(self, sensor_size):
        self.sensor_size_mm = sensor_size
        self.updateProjections()

    def setSensorResolution(self, resolution):
        self.sensor_size_px = resolution
        self.updateProjections()

    def setFocalLength(self, focal_length_mm):
        self.focal_length_mm = focal_length_mm
        self.updateProjections()

    def setOrigin(self, origin):
        self.origin = origin
        self.updateProjections()

    def setBlocksize(self, blocksize):
        self.blocksize = blocksize
        self.updateProjections()

    def setAffineModel(self, enabled):
        if not self.affineModel and enabled:
            self.motion_vector_cp1 = self.motion_vector_cp0
        self.affineModel = enabled
        self.updateProjections()

    def setMotionVectorCP0(self, motion_vector):
        self.motion_vector_cp0 = motion_vector
        self.updateProjections()

    def setMotionVectorCP1(self, motion_vector):
        self.motion_vector_cp1 = motion_vector
        self.updateProjections()

    def setPerspectiveScale(self, scale):
        self.perspective_scale = scale
        self.updateProjections()

    def updateProjections(self):
        self.projection = projections.EquisolidProjection.init_with(
            self.sensor_size_mm, self.sensor_size_px, self.focal_length_mm, fov=185)
        self.perspective = projections.PerspectiveProjection(self.projection.focal_length_px, fov=185)
        self.blockSignals(True)
        self.updateFisheyeBlockpoints()
        self.updatePerspectiveBlockpoints()
        self.updatePerspectiveMovedBlockpoints()
        self.updateReprojectedBlockpoints()
        self.blockSignals(False)
        self.didChange.emit()

    def updateFisheyeBlockpoints(self):
        block_x = np.zeros(4 * self.samples_per_edge)
        block_y = np.zeros(4 * self.samples_per_edge)
        # Top edge.
        block_x[0:self.samples_per_edge] = np.linspace(self.origin[0], self.origin[0] + self.blocksize[0], self.samples_per_edge)
        block_y[0:self.samples_per_edge] = self.origin[1]
        # Right edge.
        block_x[self.samples_per_edge:2 * self.samples_per_edge] = self.origin[0] + self.blocksize[0]
        block_y[self.samples_per_edge:2 * self.samples_per_edge] = np.linspace(self.origin[1], self.origin[1] + self.blocksize[1], self.samples_per_edge)
        # Bottom edge.
        block_x[2 * self.samples_per_edge:3 * self.samples_per_edge] = np.linspace(self.origin[0] + self.blocksize[0], self.origin[0], self.samples_per_edge)
        block_y[2 * self.samples_per_edge:3 * self.samples_per_edge] = self.origin[1] + self.blocksize[1]
        # Left edge.
        block_x[3 * self.samples_per_edge:4 * self.samples_per_edge] = self.origin[0]
        block_y[3 * self.samples_per_edge:4 * self.samples_per_edge] = np.linspace(self.origin[1] + self.blocksize[1], self.origin[1], self.samples_per_edge)
        self.block_xf = block_x
        self.block_yf = block_y
        self.didChange.emit()

    def updatePerspectiveBlockpoints(self):
        center = np.asarray(self.sensor_size_px) / 2
        block_yfc = self.block_yf - center[0]
        block_xfc = self.block_xf - center[1]
        r_f, phi = coordinate_conversion.cartesian_to_polar(block_yfc, block_xfc)
        r_p = self.projection.transform_to(self.perspective, r_f)
        self.uwc_sign = np.sign(r_p)
        block_ypc, block_xpc = coordinate_conversion.polar_to_cartesian(r_p, phi)
        self.block_yp = block_ypc
        self.block_xp = block_xpc
        self.didChange.emit()

    def updatePerspectiveMovedBlockpoints(self):
        if self.affineModel:
            self.updateControlBlockpoints()
            self.updatePerspectiveMovedBlockpointsAffine()
        else:
            self.block_ypm = self.block_yp + self.motion_vector_cp0[1] * self.uwc_sign
            self.block_xpm = self.block_xp + self.motion_vector_cp0[0] * self.uwc_sign
        self.didChange.emit()

    def updateReprojectedBlockpoints(self):
        r_pm, phi_m = coordinate_conversion.cartesian_to_polar(self.block_ypm, self.block_xpm)
        phi_m = phi_m - (self.uwc_sign < 0) * np.pi
        r_fm = self.perspective.transform_to(self.projection, r_pm)
        r_fm = r_fm + (self.uwc_sign < 0) * 2 * (self.projection.radius(np.deg2rad(90)) - r_fm)
        block_yfm, block_xfm = coordinate_conversion.polar_to_cartesian(r_fm, phi_m)
        center = np.asarray(self.sensor_size_px) / 2
        self.block_yfm = block_yfm + center[0]
        self.block_xfm = block_xfm + center[1]
        self.didChange.emit()

    def updateControlBlockpoints(self):
        center = np.asarray(self.sensor_size_px) / 2
        block_center = np.asarray(self.origin) + (np.asarray(self.blocksize) - 1) / 2
        r_f, phi = coordinate_conversion.cartesian_to_polar(block_center[1] - center[0], block_center[0] - center[1])
        r_p = self.projection.transform_to(self.perspective, r_f)
        cp0_y, cp0_x = coordinate_conversion.polar_to_cartesian(r_p, phi)
        cp0_y = cp0_y - self.blocksize[1] / 2
        cp0_x = cp0_x - self.blocksize[0] / 2
        self.block_ctr_px = np.array([cp0_x, cp0_x + self.blocksize[0], cp0_x + self.blocksize[0], cp0_x])
        self.block_ctr_py = np.array([cp0_y, cp0_y, cp0_y + self.blocksize[1], cp0_y + self.blocksize[1]])
        self.didChange.emit()

    def updatePerspectiveMovedBlockpointsAffine(self):
        cp0_position = (self.block_ctr_py[0], self.block_ctr_px[0])
        cps_motion_vector = np.array([self.motion_vector_cp0[0], self.motion_vector_cp1[0],
                                      self.motion_vector_cp0[1], self.motion_vector_cp1[1]])
        block_yp_cp0 = self.block_yp - cp0_position[0]
        block_xp_cp0 = self.block_xp - cp0_position[1]
        block_ypm = np.empty_like(self.block_yp)
        block_xpm = np.empty_like(self.block_xp)
        for i in range(len(self.block_xp)):
            m = block_yp_cp0[i] / (self.blocksize[0] - 1)
            n = block_xp_cp0[i] / (self.blocksize[1] - 1)
            affine_matrix = np.array([[-m, m, 1 - n, n],
                                      [1 - n, n, m, -m]])
            mv = affine_matrix.dot(cps_motion_vector)
            block_ypm[i] = self.block_yp[i] + mv[0]
            block_xpm[i] = self.block_xp[i] + mv[1]
        block_ctr_pxm = np.empty(4)
        block_ctr_pym = np.empty(4)
        for i in range(4):
            m = (self.block_ctr_py[i] - cp0_position[0]) / (self.blocksize[0] - 1)
            n = (self.block_ctr_px[i] - cp0_position[1]) / (self.blocksize[1] - 1)
            affine_matrix = np.array([[-m, m, 1 - n, n],
                                      [1 - n, n, m, -m]])
            mv = affine_matrix.dot(cps_motion_vector)
            block_ctr_pym[i] = self.block_ctr_py[i] + mv[0]
            block_ctr_pxm[i] = self.block_ctr_px[i] + mv[1]
        self.block_ypm = block_ypm
        self.block_xpm = block_xpm
        self.block_ctr_pym = block_ctr_pym
        self.block_ctr_pxm = block_ctr_pxm
        self.didChange.emit()


# Preview
if __name__ == '__main__':
    @Slot()
    def didChange():
        print("Did change")

    viewModel = MainViewModel()
    viewModel.didChange.connect(didChange)
    viewModel.setBlocksize((64, 64))
    viewModel.setOrigin((256, 256))
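    # Illustrative extension of the preview (not part of the original commit):
    # exercise the affine motion model path as well, using only the setters
    # defined above; the motion vector values are arbitrary examples.
    viewModel.setMotionVectorCP0((4, 2))
    viewModel.setAffineModel(True)
    viewModel.setMotionVectorCP1((-4, 2))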
from PySide2.QtWidgets import *
import fishui.widgets as widgets
from fishui.MainViewModel import MainViewModel
class MainWindow(QMainWindow):
    """Main application window: properties panel and distortion visualization
    side by side in a splitter."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.viewModel = MainViewModel(self)
        self.propertiesWidget = widgets.PropertiesWidget(self, self.viewModel)
        self.distortionWidget = widgets.DistortionVisualizationWidget(self, self.viewModel)
        # layout
        splitter = QSplitter()
        splitter.addWidget(self.propertiesWidget)
        splitter.addWidget(self.distortionWidget)
        splitter.setHandleWidth(2)
        splitter.setChildrenCollapsible(False)
        splitter.setStyleSheet("QSplitter::handle { background-color: gray; }")
        # widget = QWidget()
        # widget.setLayout(layout)
        self.setCentralWidget(splitter)
        self.setFocus()
        self.show()
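

# Minimal launch sketch (not part of the original commit): assumes the
# fishui.widgets module imported above provides PropertiesWidget and
# DistortionVisualizationWidget as constructed in MainWindow.
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    window = MainWindow()
    sys.exit(app.exec_())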
import numpy as np
from typing import Union, Tuple
def cartesian_to_polar(y: Union[float, np.ndarray], x: Union[float, np.ndarray]) -> Tuple[Union[float, np.ndarray], Union[float, np.ndarray]]:
    """Convert cartesian coordinates [y, x] to polar coordinates [r, phi]."""
    r = np.sqrt(np.square(y) + np.square(x))
    phi = np.arctan2(y, x)
    return r, phi


def polar_to_cartesian(r: Union[float, np.ndarray], phi: Union[float, np.ndarray]) -> Tuple[Union[float, np.ndarray], Union[float, np.ndarray]]:
    """Convert polar coordinates [r, phi] to cartesian coordinates [y, x]."""
    y = r * np.sin(phi)
    x = r * np.cos(phi)
    return y, x
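

# Preview (illustrative sketch, not part of the original commit): the two
# conversions are inverse to each other for r >= 0.
if __name__ == '__main__':
    r, phi = cartesian_to_polar(np.array([0.0, 1.0, 1.0]), np.array([1.0, 0.0, 1.0]))
    y, x = polar_to_cartesian(r, phi)
    print(r, phi)  # -> [1. 1. 1.4142...], [0. 1.5708... 0.7854...]
    print(y, x)    # recovers the original [y, x] up to floating point error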
from typing import Union, Tuple
import warnings
import numpy as np
from .Projection import Projection
class CalibratedProjection(Projection):
"""
Defines a calibrated projection model.
"""
def __init__(self,
polynom_coeffs: Union[np.ndarray, Tuple],
sensor_size_mm: Tuple[float, float],
sensor_size_px: Tuple[int, int],
focal_length_mm: float,
fov: float):
"""
Initialize a new calibrated projection.
WE OVERRIDE INIT DESPITE ADVISING AGAINST IT IN Projection.py
:param polynom_coeffs: coefficients in decreasing powers of the
polynomial relating incident angle to sensor radius in mm
:param sensor_size_mm: sensor size in mm
:param sensor_size_px: sensor size in pixels
:param focal_length_mm: focal length in mm
:param fov: field of view in degrees
"""
if not np.isclose(sensor_size_px[0]/sensor_size_mm[0], sensor_size_px[1]/sensor_size_mm[1]):
raise Exception("Pixel shape must be square.")
self._px_per_mm = sensor_size_px[0]/sensor_size_mm[0]
focal_length_px = self._px_per_mm * focal_length_mm
super().__init__(focal_length_px, fov)
# Get forward polynomial directly from calibrated coefficients.
self._forward_poly = np.poly1d(polynom_coeffs)
# Get backward polynomial by matching a polynomial inversely to the
# results of the forward polynomial.
theta_help = np.linspace(0, np.deg2rad(fov/2), 10000)
radius_help = self._forward_poly(theta_help)
warnings.filterwarnings("ignore", category=np.RankWarning) # Filter rank warnings.
backward_coeffs = np.polyfit(radius_help, theta_help, (len(polynom_coeffs) - 1) * 2)
del warnings.filters[0] # Remove rank warnings filter.
self._backward_poly = np.poly1d(backward_coeffs)
def radius(self, theta: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
return self._px_per_mm * self._forward_poly(theta)
def theta(self, radius: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
return self._backward_poly(radius / self._px_per_mm)
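

# Preview (illustrative sketch, not part of the original commit): the polynomial
# coefficients below are made-up placeholders, not real calibration data; sensor
# parameters mirror those used in MainViewModel.
if __name__ == '__main__':
    proj = CalibratedProjection(polynom_coeffs=(0.1, 1.8, 0.0),  # hypothetical r(theta) in mm
                                sensor_size_mm=(5.2, 5.2),
                                sensor_size_px=(1088, 1088),
                                focal_length_mm=1.8,
                                fov=185)
    theta = np.deg2rad(30)
    print(proj.radius(theta))                          # forward polynomial, scaled to pixels
    print(np.rad2deg(proj.theta(proj.radius(theta))))  # round-trips to ~30 degrees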
from .Projection import Projection
from typing import Union, Tuple
import numpy as np
class EquidistantProjection(Projection):
"""
Defines the equidistant projection model.
"""
@classmethod
def init_with(cls, sensor_size_mm: Tuple[float, float], sensor_size_px: Tuple[int, int], focal_length_mm: float, fov: float):
"""
Specify new equidistant projection model with
:param sensor_size_mm: sensor size in mm [height, width]
:param sensor_size_px: sensor size in pixels [height, width]
:param focal_length_mm: focal length in mm
:param fov: field of view in degrees
"""
if sensor_size_px[0]/sensor_size_mm[0] != sensor_size_px[1]/sensor_size_mm[1]:
raise Exception("Pixel shape must be square.")
focal_length_px = sensor_size_px[0]/sensor_size_mm[0] * focal_length_mm
return cls(focal_length_px, fov)
def radius(self, theta: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
return self.focal_length_px * theta
def theta(self, radius: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
return radius / self.focal_length_px
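

# Preview (illustrative sketch, not part of the original commit): for the
# equidistant model the sensor radius grows linearly with the incident angle;
# sensor parameters mirror those used in MainViewModel.
if __name__ == '__main__':
    proj = EquidistantProjection.init_with((5.2, 5.2), (1088, 1088), focal_length_mm=1.8, fov=185)
    theta = np.deg2rad(45)
    print(proj.radius(theta))                          # focal_length_px * theta
    print(np.rad2deg(proj.theta(proj.radius(theta))))  # -> 45.0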
from .Projection import Projection
from typing import Union, Tuple
import numpy as np
class EquisolidProjection(Projection):
"""
Defines the equisolid projection model.
"""
@classmethod
def init_with(cls, sensor_size_mm: Tuple[float, float], sensor_size_px: Tuple[int, int], focal_length_mm: float, fov: float):
"""
Specify new equisolid projection model with
:param sensor_size_mm: sensor size in mm [height, width]
:param sensor_size_px: sensor size in pixels [height, width]
:param focal_length_mm: focal length in mm
:param fov: field of view in degrees
"""
if sensor_size_px[0]/sensor_size_mm[0] != sensor_size_px[1]/sensor_size_mm[1]:
raise Exception("Pixel shape must be square.")
focal_length_px = sensor_size_px[0]/sensor_size_mm[0] * focal_length_mm
return cls(focal_length_px, fov)
def radius(self, theta: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
return 2 * self.focal_length_px * np.sin(theta / 2)
def theta(self, radius: Union[float, np.ndarray]) -> Union[float, np.ndarray]:
return 2 * np.arcsin(radius / (2 * self.focal_length_px))
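

# Preview (illustrative sketch, not part of the original commit): same sensor
# parameters as in MainViewModel, with a theta -> radius -> theta round trip.
if __name__ == '__main__':
    proj = EquisolidProjection.init_with((5.2, 5.2), (1088, 1088), focal_length_mm=1.8, fov=185)
    theta = np.deg2rad(45)
    print(proj.radius(theta))                          # 2 * focal_length_px * sin(theta / 2)
    print(np.rad2deg(proj.theta(proj.radius(theta))))  # -> 45.0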