CARLA
 
DVSCamera.cpp
Go to the documentation of this file.
// Copyright (c) 2020 Robotics and Perception Group (RPG)
// University of Zurich and ETH Zurich
//
// This work is licensed under the terms of the MIT license.
// For a copy, see <https://opensource.org/licenses/MIT>.

#include <random>
#include <cmath>
#include <algorithm>

#include "Carla.h"
#include "Carla/Sensor/DVSCamera.h"
#include "Carla/Actor/ActorBlueprintFunctionLibrary.h"

#include "carla/ros2/ROS2.h"
#include <carla/Buffer.h>
#include <carla/BufferView.h>
// Grayscale value of an RGB image: I = 0.2989*R + 0.5870*G + 0.1140*B
static float FColorToGrayScaleFloat(FColor Color)
{
  return 0.2989 * Color.R + 0.587 * Color.G + 0.114 * Color.B;
}

ADVSCamera::ADVSCamera(const FObjectInitializer &ObjectInitializer)
  : Super(ObjectInitializer)
{
  EnablePostProcessingEffects(true);
  AddPostProcessingMaterial(
      TEXT("Material'/Carla/PostProcessingMaterials/PhysicLensDistortion.PhysicLensDistortion'"));

  RandomEngine = CreateDefaultSubobject<URandomEngine>(TEXT("RandomEngine"));
}

// Get the definition of the dynamic vision sensor (DVS).
// Documentation: https://openhutb.github.io/carla_doc/ref_sensors/#dvs-camera
FActorDefinition ADVSCamera::GetSensorDefinition()
{
  constexpr bool bEnableModifyingPostProcessEffects = true;
  auto Definition = UActorBlueprintFunctionLibrary::MakeCameraDefinition(TEXT("dvs"), bEnableModifyingPostProcessEffects);

  FActorVariation Cp;
  Cp.Id = TEXT("positive_threshold");       // Positive threshold C associated with an increase in brightness, in the range (0-1).
  Cp.Type = EActorAttributeType::Float;     // Type of the value
  Cp.RecommendedValues = { TEXT("0.3") };   // Default value
  Cp.bRestrictToRecommended = false;

  FActorVariation Cm;
  Cm.Id = TEXT("negative_threshold");       // Negative threshold C associated with a decrease in brightness, in the range (0-1).
  Cm.Type = EActorAttributeType::Float;
  Cm.RecommendedValues = { TEXT("0.3") };
  Cm.bRestrictToRecommended = false;

  FActorVariation Sigma_Cp;
  Sigma_Cp.Id = TEXT("sigma_positive_threshold");  // White-noise standard deviation for positive events, in the range (0-1).
  Sigma_Cp.Type = EActorAttributeType::Float;
  Sigma_Cp.RecommendedValues = { TEXT("0.0") };
  Sigma_Cp.bRestrictToRecommended = false;

  FActorVariation Sigma_Cm;
  Sigma_Cm.Id = TEXT("sigma_negative_threshold");  // White-noise standard deviation for negative events, in the range (0-1).
  Sigma_Cm.Type = EActorAttributeType::Float;
  Sigma_Cm.RecommendedValues = { TEXT("0.0") };
  Sigma_Cm.bRestrictToRecommended = false;

  FActorVariation Refractory_Period;
  Refractory_Period.Id = TEXT("refractory_period_ns");  // Refractory period (time during which a pixel cannot fire another event after triggering one), in nanoseconds. It limits the highest frequency at which events can be triggered.
  Refractory_Period.Type = EActorAttributeType::Int;
  Refractory_Period.RecommendedValues = { TEXT("0") };
  Refractory_Period.bRestrictToRecommended = false;

  FActorVariation Use_Log;
  Use_Log.Id = TEXT("use_log");             // Whether to work in a logarithmic intensity scale.
  Use_Log.Type = EActorAttributeType::Bool;
  Use_Log.RecommendedValues = { TEXT("True") };
  Use_Log.bRestrictToRecommended = false;

  FActorVariation Log_EPS;
  Log_EPS.Id = TEXT("log_eps");             // Epsilon value used to convert the image to log scale: L = log(eps + I / 255.0)
  Log_EPS.Type = EActorAttributeType::Float;
  Log_EPS.RecommendedValues = { TEXT("0.001") };
  Log_EPS.bRestrictToRecommended = false;

  Definition.Variations.Append({ Cp, Cm, Sigma_Cp, Sigma_Cm, Refractory_Period, Use_Log, Log_EPS });

  return Definition;
}

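The variations declared above become blueprint attributes of the `sensor.camera.dvs` actor. As a minimal sketch (not part of DVSCamera.cpp; the host, port and spawn transform are placeholder assumptions), they could be set from the LibCarla C++ client before spawning the sensor:

// --- illustrative client-side sketch, not part of DVSCamera.cpp ---
#include <carla/client/ActorBlueprint.h>
#include <carla/client/BlueprintLibrary.h>
#include <carla/client/Client.h>
#include <carla/geom/Transform.h>

int main() {
  // Placeholder assumption: a CARLA server reachable on localhost:2000.
  carla::client::Client client("localhost", 2000u);
  auto world = client.GetWorld();

  // "sensor.camera.dvs" is the blueprint produced by MakeCameraDefinition(TEXT("dvs"), ...).
  auto blueprint_library = world.GetBlueprintLibrary();
  auto dvs_blueprint = *blueprint_library->Find("sensor.camera.dvs");  // a real program should null-check Find()

  // These ids match the FActorVariation entries declared in GetSensorDefinition().
  dvs_blueprint.SetAttribute("positive_threshold", "0.3");
  dvs_blueprint.SetAttribute("negative_threshold", "0.3");
  dvs_blueprint.SetAttribute("refractory_period_ns", "1000");
  dvs_blueprint.SetAttribute("use_log", "True");

  // Spawn at an arbitrary placeholder transform; attaching the sensor to a vehicle is omitted here.
  auto dvs_sensor = world.SpawnActor(dvs_blueprint, carla::geom::Transform{});
  (void) dvs_sensor;
  return 0;
}
// --- end of sketch ---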
void ADVSCamera::Set(const FActorDescription &Description)
{
  Super::Set(Description);

  this->config.Cp = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
      "positive_threshold",
      Description.Variations,
      0.5f);

  this->config.Cm = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
      "negative_threshold",
      Description.Variations,
      0.5f);

  this->config.sigma_Cp = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
      "sigma_positive_threshold",
      Description.Variations,
      0.0f);

  this->config.sigma_Cm = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
      "sigma_negative_threshold",
      Description.Variations,
      0.0f);

  this->config.refractory_period_ns = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToInt(
      "refractory_period_ns",
      Description.Variations,
      0.0);

  this->config.use_log = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToBool(
      "use_log",
      Description.Variations,
      true);

  this->config.log_eps = UActorBlueprintFunctionLibrary::RetrieveActorAttributeToFloat(
      "log_eps",
      Description.Variations,
      1e-03);
}

// Post-processing after the physics tick.
void ADVSCamera::PostPhysTick(UWorld *World, ELevelTick TickType, float DeltaTime)
{
  TRACE_CPUPROFILER_EVENT_SCOPE(ADVSCamera::PostPhysTick);
  check(CaptureRenderTarget != nullptr);
  if (!HasActorBegunPlay() || IsPendingKill())
  {
    return;
  }

  /// Immediately enqueue the render commands of the scene at the current time.
  EnqueueRenderSceneImmediate();
  WaitForRenderThreadToFinish();

  // Super (ASceneCaptureSensor) captures the scene into a (UTextureRenderTarget2D) CaptureRenderTarget from the CaptureComponent2D.
  /** Read the image **/
  TArray<FColor> RawImage;
  this->ReadPixels(RawImage);

  /** Convert the image to grayscale **/
  if (this->config.use_log)
  {
    this->ImageToLogGray(RawImage);
  }
  else
  {
    this->ImageToGray(RawImage);
  }

  /** Dynamic vision sensor (DVS) simulator **/
  ADVSCamera::DVSEventArray events = this->Simulation(DeltaTime);

  auto Stream = GetDataStream(*this);     // Get the data stream
  auto Buff = Stream.PopBufferFromPool(); // Get a buffer from the memory pool to hold the data

  // Serialize the data: move the events produced by the DVS simulator into the buffer
  // (std::move itself does not move anything; it only casts its argument to an rvalue reference).
  carla::Buffer BufferReady(carla::sensor::SensorRegistry::Serialize(*this, events, std::move(Buff)));
  carla::SharedBufferView BufView = carla::BufferView::CreateFrom(std::move(BufferReady));

  // ROS2
  #if defined(WITH_ROS2)
  auto ROS2 = carla::ros2::ROS2::GetInstance();
  if (ROS2->IsEnabled())
  {
    TRACE_CPUPROFILER_EVENT_SCOPE_STR("ROS2 Send");
    auto StreamId = carla::streaming::detail::token_type(GetToken()).get_stream_id(); // stream id from this sensor's token
    {
      // Get the camera resolution
      int W = -1, H = -1;
      float Fov = -1.0f;
      auto WidthOpt = GetAttribute("image_size_x");
      if (WidthOpt.has_value())
        W = FCString::Atoi(*WidthOpt->Value);
      auto HeightOpt = GetAttribute("image_size_y");
      if (HeightOpt.has_value())
        H = FCString::Atoi(*HeightOpt->Value);
      auto FovOpt = GetAttribute("fov");
      if (FovOpt.has_value())
        Fov = FCString::Atof(*FovOpt->Value);
      AActor* ParentActor = GetAttachParentActor();
      if (ParentActor)
      {
        FTransform LocalTransformRelativeToParent = GetActorTransform().GetRelativeTransform(ParentActor->GetActorTransform());
        ROS2->ProcessDataFromDVS(Stream.GetSensorType(), StreamId, LocalTransformRelativeToParent, BufView, W, H, Fov, this);
      }
      else
      {
        ROS2->ProcessDataFromDVS(Stream.GetSensorType(), StreamId, Stream.GetSensorTransform(), BufView, W, H, Fov, this);
      }
    }
  }
  #endif
  if (events.size() > 0)
  {
    TRACE_CPUPROFILER_EVENT_SCOPE_STR("ADVSCamera Stream Send");
    /** Send the events **/
    Stream.Send(*this, BufView);
  }
}

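On the client side, the buffer sent through `Stream.Send` is deserialized into a `carla::sensor::data::DVSEventArray` measurement. A minimal sketch of a listen callback (illustrative only, not part of DVSCamera.cpp; it assumes `dvs_sensor` was spawned from `sensor.camera.dvs` as in the earlier sketch):

// --- illustrative client-side sketch, not part of DVSCamera.cpp ---
#include <carla/Memory.h>
#include <carla/client/Sensor.h>
#include <carla/sensor/data/DVSEventArray.h>
#include <boost/shared_ptr.hpp>
#include <iostream>

void ListenToDVS(carla::SharedPtr<carla::client::Sensor> dvs_sensor) {
  dvs_sensor->Listen([](auto data) {
    // The buffer serialized and sent above arrives here as a DVSEventArray measurement.
    auto events = boost::static_pointer_cast<carla::sensor::data::DVSEventArray>(data);
    for (const auto &event : *events) {
      // Each event carries pixel coordinates, a timestamp in nanoseconds and a polarity flag.
      std::cout << event.x << " " << event.y << " " << event.t << " " << event.pol << "\n";
    }
  });
}
// --- end of sketch ---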
// Convert the image to grayscale.
void ADVSCamera::ImageToGray(const TArray<FColor> &image)
{
  /** Sanity check **/
  if (image.Num() != (this->GetImageHeight() * this->GetImageWidth()))
    return;

  /** Reserve HxW elements **/
  last_image.SetNumUninitialized(image.Num());

  /** Convert the image to raw grayscale values **/
  for (size_t i = 0; i < image.Num(); ++i)
  {
    last_image[i] = FColorToGrayScaleFloat(image[i]);
  }
}

// Convert the image to log-grayscale.
void ADVSCamera::ImageToLogGray(const TArray<FColor> &image)
{
  /** Sanity check **/
  if (image.Num() != (this->GetImageHeight() * this->GetImageWidth()))
    return;

  /** Reserve HxW elements **/
  last_image.SetNumUninitialized(image.Num()); // latest (current) image vs. previous image: the DVS computes the change between the two

  /** Convert the image to raw grayscale values **/
  for (size_t i = 0; i < image.Num(); ++i)
  {
    // L = log(eps + I / 255.0)
    // log_eps = 1e-03
    last_image[i] = std::log(this->config.log_eps + (FColorToGrayScaleFloat(image[i]) / 255.0));
  }
}

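Working in the log-intensity scale makes the contrast threshold act on relative brightness changes: since d/dI log(eps + I/255) = 1/(I + 255*eps), a step from I1 to I2 changes the log signal by ΔL = log((eps + I2/255) / (eps + I1/255)) ≈ (I2 - I1) / (I1 + 255*eps). As an illustrative worked example (not from the source), with the defaults eps = 0.001 and C = 0.3, a pixel going from I = 100 to I = 136 gives ΔL = log(0.53433 / 0.39316) ≈ 0.307, which crosses the threshold and fires one positive event, while going from I = 100 to I = 135 gives ΔL ≈ 0.299 and fires none.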
// Run the DVS simulation.
ADVSCamera::DVSEventArray ADVSCamera::Simulation (float DeltaTime)
{
  /** Array of events **/
  ADVSCamera::DVSEventArray events;

  /** Sanity check **/
  if (this->last_image.Num() != (this->GetImageHeight() * this->GetImageWidth()))
    return events;

  /** Check initialization **/
  if(this->prev_image.Num() == 0)
  {
    /** Set the first rendered image **/
    this->ref_values = this->last_image; // reference values <- latest image
    this->prev_image = this->last_image; // previous image  <- latest image

    /** Resize the array to the given number of elements. New elements are zeroed. **/
    this->last_event_timestamp.SetNumZeroed(this->last_image.Num());

    /** Reset the current time **/
    this->current_time = dvs::secToNanosec(this->GetEpisode().GetElapsedGameTime());

    return events;
  }

  static constexpr float tolerance = 1e-6; // threshold for deciding whether a pixel's brightness changed between frames

  /** Time delta in nanoseconds **/
  const std::uint64_t delta_t_ns = dvs::secToNanosec(
      this->GetEpisode().GetElapsedGameTime()) - this->current_time;

  /** Loop over the image **/
  for (uint32 y = 0; y < this->GetImageHeight(); ++y)
  {
    for (uint32 x = 0; x < this->GetImageWidth(); ++x)
    {
      const uint32 i = (this->GetImageWidth() * y) + x; // convert the 2D index into a 1D index
      const float itdt = this->last_image[i]; // pixel value at index i of the image after the time delta dt (i.e. the latest image)
      const float it = this->prev_image[i];   // pixel value at index i of the previous image
      const float prev_cross = this->ref_values[i];

      if (std::fabs (it - itdt) > tolerance) // if the brightness change between frames exceeds the tolerance
      {
        // Determine the event polarity from the sign of the brightness change:
        // `+1` when brightness increases, `-1` when it decreases.
        const float pol = (itdt >= it) ? +1.0 : -1.0;
        float C = (pol > 0) ? this->config.Cp : this->config.Cm; // contrast threshold C: Cp for positive events, Cm for negative events
        const float sigma_C = (pol > 0) ? this->config.sigma_Cp : this->config.sigma_Cm;

        if(sigma_C > 0)
        {
          C += RandomEngine->GetNormalDistribution(0, sigma_C);
          constexpr float minimum_contrast_threshold = 0.01;
          C = std::max(minimum_contrast_threshold, C); // returns the larger of the two values
        }
        float curr_cross = prev_cross;
        bool all_crossings = false;

        do
        {
          curr_cross += pol * C;

          if ((pol > 0 && curr_cross > it && curr_cross <= itdt)
              || (pol < 0 && curr_cross < it && curr_cross >= itdt))
          {
            const std::uint64_t edt = (curr_cross - it) * delta_t_ns / (itdt - it);
            const std::int64_t t = this->current_time + edt;

            // Check that the pixel (x,y) is not currently in a "refractory" state,
            // i.e. |t - last_timestamp(x,y)| >= refractory_period
            const std::int64_t last_stamp_at_xy = dvs::secToNanosec(this->last_event_timestamp[i]);
            if (t >= last_stamp_at_xy)
            {
              const std::uint64_t dt = t - last_stamp_at_xy;
              if(this->last_event_timestamp[i] == 0 || dt >= this->config.refractory_period_ns)
              {
                events.push_back(::carla::sensor::data::DVSEvent(x, y, t, pol > 0));
                this->last_event_timestamp[i] = dvs::nanosecToSecTrunc(t);
              }
              else
              {
                /** Drop the event because the time since the last event is smaller than refractory_period_ns **/
              }
              this->ref_values[i] = curr_cross;
            }
          }
          else
          {
            all_crossings = true;
          }
        } while (!all_crossings);
      } // end tolerance
    } // end for each pixel
  }

  /** Update the current time **/
  this->current_time = dvs::secToNanosec(this->GetEpisode().GetElapsedGameTime());

  this->prev_image = this->last_image;

  // Sort the events by increasing timestamp, as expected by most event-processing algorithms
  std::sort(events.begin(), events.end(), [](const ::carla::sensor::data::DVSEvent& it1, const ::carla::sensor::data::DVSEvent& it2){return it1.t < it2.t;});

  return events;
}
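The loop above generates, for every pixel, one event per crossing of the contrast threshold C between the previous and the current brightness, and places each event inside the frame interval by linear interpolation. The following self-contained sketch (illustrative only, with made-up values; not part of DVSCamera.cpp) isolates that per-pixel logic without the refractory-period and noise handling:

// --- illustrative sketch of the per-pixel threshold-crossing logic, not part of DVSCamera.cpp ---
#include <cmath>
#include <cstdint>
#include <iostream>
#include <vector>

struct SketchEvent {
  std::int64_t t;  // timestamp in nanoseconds
  bool polarity;   // true = brightness increased, false = decreased
};

// Emit one event per crossing of the contrast threshold C between brightness `it`
// (previous frame) and `itdt` (current frame), starting from the reference value `ref`,
// and linearly interpolate each timestamp inside the frame interval [t0, t0 + delta_t_ns].
std::vector<SketchEvent> PixelEvents(float it, float itdt, float ref, float C,
                                     std::int64_t t0, std::int64_t delta_t_ns) {
  std::vector<SketchEvent> events;
  if (std::fabs(itdt - it) <= 1e-6f) return events; // no measurable change

  const float pol = (itdt >= it) ? +1.0f : -1.0f;
  float cross = ref;
  while (true) {
    cross += pol * C;
    const bool crossed = (pol > 0) ? (cross > it && cross <= itdt)
                                   : (cross < it && cross >= itdt);
    if (!crossed) break;
    // Linear interpolation of the crossing time inside the frame interval.
    const std::int64_t t = t0 + static_cast<std::int64_t>(
        (cross - it) * delta_t_ns / (itdt - it));
    events.push_back({t, pol > 0});
  }
  return events;
}

int main() {
  // Brightness rises from 0.20 to 0.95 within a 10 ms frame; with C = 0.3 this yields two positive events.
  for (const auto &e : PixelEvents(0.20f, 0.95f, 0.20f, 0.3f, 0, 10'000'000)) {
    std::cout << "t=" << e.t << " ns, polarity=" << (e.polarity ? "+" : "-") << "\n";
  }
  return 0;
}
// --- end of sketch ---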