// Copyright (c) 2012-2017 VideoStitch SAS
// Copyright (c) 2018 stitchEm
#pragma once
#include "core/rect.hpp"
#include "core/pyramid.hpp"
#include "gpu/buffer.hpp"
#include "gpu/stream.hpp"
#include "gpu/memcpy.hpp"
#include "gpu/uniqueBuffer.hpp"
#include "libvideostitch/status.hpp"
#include "libvideostitch/ptv.hpp"
#include "libvideostitch/matrix.hpp"
#include <vector>
namespace VideoStitch {
namespace Core {
class PanoDefinition;
class ImageMapping;
class StereoRigDefinition;
template <class T>
class LaplacianPyramid;
/**
* This class is used to pre-compute coordinate mappings.
* Mapping is constructed in a multi-resolution manner.
* It deals with 3 different coordinate spaces:
* 1) Pano space or Output space: the output space of the stitched panorama.
* 2) Intermediate space: the space used to calculate image flow.
* Intermediate space is constructed by rotating the original pole @oldCoord to the new pole @newCoord.
* It makes sure that the two input buffers are mapped to the equator to minimize distortion.
* 3) Input space: the space used to capture the input videos.
*/
class MergerPair {
public:
/**
* This constructor builds the coordinate mapping between an input image pair of a given size.
* It is only used for debugging purposes, when the input pair will NOT be transformed into the intermediate space.
* @param boundingFirstLevelSize Bounding size of the first (largest) level.
* @param boundingLastLevelSize Bounding size of the last (smallest) level.
* @param width0 Width of the first image.
* @param height0 Height of the first image.
* @param offset0X Left coordinate of the first image.
* @param offset0Y Top coordinate of the first image.
* @param width1 Width of the second image.
* @param height1 Height of the second image.
* @param offset1X Left coordinate of the second image.
* @param offset1Y Top coordinate of the second image.
* @param buffer0 GPU buffer holding the first image.
* @param buffer1 GPU buffer holding the second image.
* @param stream Where to do the computations.
* Note: useInterToPano is turned off in this debug mode, where no actual space transformation is done.
*/
MergerPair(const int boundingFirstLevelSize, const int boundingLastLevelSize, const int width0, const int height0,
const int offset0X, const int offset0Y, const GPU::Buffer<const uint32_t>& buffer0, const int width1,
const int height1, const int offset1X, const int offset1Y, const GPU::Buffer<const uint32_t>& buffer1,
GPU::Stream stream);
/**
* Create a coordinate mapping between a first set of image ids (@id0s) and a second image id (@id1) from panoDef.
* @param panoDef The pano definition.
* @param rigDef The rig definition.
* @param boundingFirstLevelSize Bounding size of the first (largest) level.
* @param boundingLastLevelSize Bounding size of the last (smallest) level.
* @param id0s Ids of the first set of images.
* @param id1 Second image id.
* @param inBoundingPanoRect0 Bounding rect of the first set of images in pano space. Taken from the ImageMapping of @id0s.
* @param inBoundingPanoRect1 Bounding rect of the second image in pano space. Taken from the ImageMapping of @id1.
* @param stream Where to do the computations.
*/
static Potential<MergerPair> create(const PanoDefinition& panoDef, const StereoRigDefinition* rigDef,
const int boundingFirstLevelSize, const int boundingLastLevelSize,
const std::vector<videoreaderid_t>& id0s, const videoreaderid_t id1,
const Rect& inBoundingPanoRect0, const Rect& inBoundingPanoRect1,
GPU::Stream stream);
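// Illustrative usage sketch of the create() factory above (not taken from the
// implementation: the pano/rig objects, image ids, bounding rects, level sizes
// and the exact Potential<> accessors are assumptions made for the example):
//
//   Potential<MergerPair> pair = MergerPair::create(panoDef, rigDef,
//       /*boundingFirstLevelSize=*/256, /*boundingLastLevelSize=*/32,
//       {firstInputId}, secondInputId, boundingPanoRect0, boundingPanoRect1, stream);
//   if (pair.ok()) {
//     // Inter-to-input coordinate mapping pyramid of the first input
//     // (index 0 is an assumption matching the ...Pyramid0 members below).
//     const LaplacianPyramid<float2>* interToInput =
//         pair.object()->getInterToInputSpaceCoordMappingLaplacianPyramid(0);
//     const Rect interRect = pair.object()->getBoundingInterRect(0, 0);
//   }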
private:
MergerPair(const int boundingFirstLevelSize, const int boundingLastLevelSize,
const std::vector<videoreaderid_t>& id0s, const videoreaderid_t id1);
public:
const Rect getBoundingInterRect(const int index, const int level) const;
const LaplacianPyramid<float2>* getInterToInputSpaceCoordMappingLaplacianPyramid(const int index) const;
int getWrapWidth() const;
int getWrapHeight() const;
bool doesOverlap() const;
std::vector<Rect> getBoundingInterRect1s() const;
GPU::Buffer<float2> getInterToLookupSpaceCoordMappingBufferLevel(const int index, const int level) const;
const Rect getBoundingPanoRect(const int index) const;
int2 getInput1Size() const;
GPU::Buffer<float2> getPanoToInputSpaceCoordMapping(const int index) const;
GPU::Buffer<float2> getPanoToInterSpaceCoordMapping(const int index) const;
Rect getBoundingPanosIRect() const;
#ifndef VS_OPENCL
/**
* Get average spherical coordinate of the input image pair.
* @param panoDef The pano definition.
* @param id0 First image id.
* @param id1 Second image id.
*/
static Vector3<double> getAverageSphericalCoord(const PanoDefinition& panoDef, const videoreaderid_t id0,
const videoreaderid_t id1);
static Vector3<double> getAverageSphericalCoord(
const PanoDefinition& panoDef, const std::vector<videoreaderid_t>& id0s, const std::vector<videoreaderid_t>& id1s,
const Rect& boundingPanoRect0, const GPU::Buffer<const float2>& panoToInputSpaceCoordMapping0,
const GPU::Buffer<const uint32_t>& maskBuffer0, const Rect& boundingPanoRect1,
const GPU::Buffer<const float2>& panoToInputSpaceCoordMapping1, const GPU::Buffer<const uint32_t>& maskBuffer1);
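// Hedged sketch of one common way to average directions on the unit sphere
// (sum the unit vectors, then renormalize); whether getAverageSphericalCoord()
// uses exactly this scheme is an assumption, and the sample container is hypothetical:
//
//   double sx = 0.0, sy = 0.0, sz = 0.0;
//   for (const auto& dir : sampleDirections) {  // hypothetical list of unit direction triples
//     sx += dir.x; sy += dir.y; sz += dir.z;
//   }
//   const double n = std::sqrt(sx * sx + sy * sy + sz * sz);
//   const Vector3<double> average(sx / n, sy / n, sz / n);  // assumes the (x, y, z) constructor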
/**
* Set up a mask in the output space where (pixel value at x & (1 << id)) > 0
* indicates that image id is projected to x in the output space
*/
Status setupPairMappingMask(GPU::Buffer<uint32_t> devMask, GPU::Stream gpuStream) const;
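// Illustrative sketch of how a mask filled by setupPairMappingMask() could be
// checked on the host after copying it back (the readback step and the mask
// dimensions are assumptions, not prescribed by this header):
//
//   std::vector<uint32_t> hostMask(maskWidth * maskHeight);
//   // ... copy devMask back to hostMask, e.g. with the helpers from gpu/memcpy.hpp ...
//   const uint32_t v = hostMask[y * maskWidth + x];
//   const bool idProjectsHere = (v & (1u << id)) > 0;  // per the bit layout described above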
Status debugMergerPair(const int2 panoSize, const GPU::Buffer<const uint32_t> panoBuffer, const int2 bufferSize0,
const GPU::Buffer<const uint32_t> buffer0, const int2 bufferSize1,
const GPU::Buffer<const uint32_t> buffer1, GPU::Stream gpuStream) const;
/**
* Find coordinate mapping from one space (either pano or intermediate) to input space.
* @param panoDef The pano definition.
* @param rigDef The rig definition.
* @param ids Buffer ids.
* @param oldCoord The original pole.
* @param newCoord The new pole.
* @param toInputMapping The output coordinate mapping buffer.
* @param weight The output weight buffer, set to 1 only at valid pixels.
* @param boundingRect The output bounding rect of @weight (and @toInputMapping).
* Use the input rect if @usePassedBoundingRect is true.
* @param stream CUDA stream for the operation.
* @param usePassedBoundingRect Specify whether @boundingRect is used as input or output.
*/
static Status findMappingToInputSpace(const PanoDefinition& panoDef, const StereoRigDefinition* rigDef,
const std::vector<videoreaderid_t>& ids, const Vector3<double>& oldCoord,
const Vector3<double>& newCoord, GPU::UniqueBuffer<float2>& toInputMapping,
GPU::UniqueBuffer<uint32_t>& weight, Rect& boundingRect, GPU::Stream stream,
const bool usePassedBoundingRect = false);
std::string getImIdString(const int index) const;
bool UseInterToPano() const;
private:
Status init(const PanoDefinition& panoDef, const StereoRigDefinition* rigDef, const Rect& inBoundingPanoRect0,
const Rect& inBoundingPanoRect1, GPU::Stream stream);
/**
* Find coordinate mapping from pano space to intermediate space.
* Intermediate space is constructed by rotating the original pole @oldCoord to the new pole @newCoord.
* @param panoDef The pano definition.
* @param downRatio The level of down sampling.
* @param ids Buffer ids.
* @param oldCoord The original pole.
* @param newCoord The new pole.
* @param panoToInputSpaceCoordMapping Buffer that contains the coordinate mapping from pano to input space.
* @param panoToInputSpaceMask Mask associated with @panoToInputSpaceCoordMapping.
* @param boundingPanoRect The bounding rect of @panoToInterSpaceCoordMapping in pano space.
* @param panoToInterSpaceCoordMapping The output mapping from pano to intermediate space.
* @param stream CUDA stream for the operation.
*/
static Status findMappingFromPanoToInterSpace(const PanoDefinition& panoDef, const float downRatio,
const std::vector<videoreaderid_t>& ids,
const Vector3<double>& oldCoord, const Vector3<double>& newCoord,
const GPU::Buffer<const float2>& panoToInputSpaceCoordMapping,
const GPU::Buffer<const uint32_t>& panoToInputSpaceMask,
const Rect& boundingPanoRect,
GPU::UniqueBuffer<float2>& panoToInterSpaceCoordMapping,
GPU::Stream stream);
static Status findMappingFromInterToPanoSpace(const PanoDefinition& panoDef, const std::vector<videoreaderid_t>& ids,
const GPU::Buffer<const float2>& interToInputSpaceCoordMapping,
const GPU::Buffer<const uint32_t>& interToInputSpaceMask,
const Rect& boundingInterRect,
GPU::UniqueBuffer<float2>& interToPanoSpaceCoordMapping,
GPU::Stream stream);
/**
* Given the input buffer pair, calculate pyramid info,
* so that the first and last levels respect the bounding sizes @boundingFirstLevelSize and @boundingLastLevelSize.
* @param downRatio The level of down sampling.
* @param coord0Mapping Coordinate mapping of the first buffer. Also stores the down-sampled result.
* @param weight0 Weight of the first buffer. Also stores the down-sampled result.
* @param boundingRect0 The output bounding box of the first buffer.
* @param coord1Mapping Coordinate mapping of the second buffer. Also stores the down-sampled result.
* @param weight1 Weight of the second buffer. Also stores the down-sampled result.
* @param boundingRect1 The output bounding box of the second buffer.
* @param stream CUDA stream for the operation.
*/
Status calculateLaplacianPyramidsInfo(float& downRatio, GPU::UniqueBuffer<float2>& coord0Mapping,
GPU::UniqueBuffer<uint32_t>& weight0, Rect& boundingRect0,
GPU::UniqueBuffer<float2>& coord1Mapping, GPU::UniqueBuffer<uint32_t>& weight1,
Rect& boundingRect1, GPU::Stream stream);
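// Hedged sketch of the level-count arithmetic implied by the bounding sizes in
// the comment above (assuming each pyramid level halves the resolution until the
// coarsest level fits boundingLastLevelSize; not taken from the implementation):
//
//   int numLevels = 1;
//   int size = boundingFirstLevelSize;            // e.g. 256
//   while (size / 2 >= boundingLastLevelSize) {   // e.g. 32
//     size /= 2;
//     ++numLevels;
//   }
//   // boundingFirstLevelSize = 256, boundingLastLevelSize = 32  ->  numLevels = 4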
/**
* Build the pyramids that map intermediate space to input space.
* @param panoDef The pano definition.
* @param downRatio The level of down sampling.
* @param coord0Mapping Coordinate mapping of the first buffer. Also stores the down-sampled result.
* @param weight0 Weight of the first buffer. Also stores the down-sampled result.
* @param boundingRect0 The output bounding box of the first buffer.
* @param coord1Mapping Coordinate mapping of the second buffer. Also stores the down-sampled result.
* @param weight1 Weight of the second buffer. Also stores the down-sampled result.
* @param boundingRect1 The output bounding box of the second buffer.
* @param stream CUDA stream for the operation.
*/
Status buildLaplacianPyramids(const PanoDefinition& panoDef, float& downRatio,
GPU::UniqueBuffer<float2>& coord0Mapping, GPU::UniqueBuffer<uint32_t>& weight0,
Rect& boundingRect0, GPU::UniqueBuffer<float2>& coord1Mapping,
GPU::UniqueBuffer<uint32_t>& weight1, Rect& boundingRect1, GPU::Stream stream);
static Status packCoordBuffer(const int warpWidth, const Core::Rect& inputRect,
const GPU::Buffer<const float2>& inputBuffer,
const GPU::Buffer<const uint32_t>& inputWeight, const Core::Rect& outputRect,
GPU::Buffer<float2> outputBuffer, GPU::Buffer<uint32_t> outputWeight,
GPU::Stream gpuStream);
#endif
private:
/**
* Mapping from the intermediate space to the pano space,
* needed to read the result of the previous warp step.
*/
std::unique_ptr<LaplacianPyramid<float2>> interToPanoSpaceCoordMappingLaplacianPyramid0;
std::unique_ptr<LaplacianPyramid<uint32_t>> interToPanoSpaceWeightLaplacianPyramid0;
std::vector<Rect> boundingInterToPanoRect0s;
/**
* Intermediate to input space pyramid
*/
std::unique_ptr<LaplacianPyramid<float2>> interToInputSpaceCoordMappingLaplacianPyramid0;
std::unique_ptr<LaplacianPyramid<uint32_t>> interToInputSpaceWeightLaplacianPyramid0;
std::vector<Rect> boundingInterRect0s;
std::unique_ptr<LaplacianPyramid<float2>> interToInputSpaceCoordMappingLaplacianPyramid1;
std::unique_ptr<LaplacianPyramid<uint32_t>> interToInputSpaceWeightLaplacianPyramid1;
std::vector<Rect> boundingInterRect1s;
const std::vector<videoreaderid_t> id0s;
const videoreaderid_t id1;
const float extendedRatio;
const bool overlappedAreaOnly;
const int boundingFirstLevelSize;
const int boundingLastLevelSize;
uint64_t wrapWidth;
uint64_t wrapHeight;
int2 input1Size;
GPU::UniqueBuffer<float2> panoToInputSpaceCoordMapping0;
GPU::UniqueBuffer<float2> panoToInterSpaceCoordMapping0;
Rect boundingPanoRect0;
GPU::UniqueBuffer<float2> panoToInputSpaceCoordMapping1;
GPU::UniqueBuffer<float2> panoToInterSpaceCoordMapping1;
GPU::UniqueBuffer<float2> interToInputCoord;
Rect boundingPanoRect1;
GPU::UniqueBuffer<float2> inputToPanoCoordMapping0;
GPU::UniqueBuffer<float2> panoToInputCoordMapping0;
const bool useInterToPano;
};
} // namespace Core
} // namespace VideoStitch