#include <assert.h>
#include <math.h>
#include <stdlib.h>

#include "nav2_amcl/pf/pf.hpp"
#include "nav2_amcl/pf/pf_pdf.hpp"
#include "nav2_amcl/pf/pf_kdtree.hpp"

#include "nav2_amcl/portable_utils.hpp"
// Compute the required number of samples, given that there are k bins
// with samples in them.
static int pf_resample_limit(pf_t * pf, int k);
// Create a new filter
pf_t * pf_alloc(
  int min_samples, int max_samples,
  double alpha_slow, double alpha_fast,
  pf_init_model_fn_t random_pose_fn)
{
  int i, j;
  pf_t * pf;
  pf_sample_set_t * set;
  pf_sample_t * sample;
  pf = calloc(1, sizeof(pf_t));
  pf->random_pose_fn = random_pose_fn;

  pf->min_samples = min_samples;
  pf->max_samples = max_samples;

  // Distance threshold (in x and y) used by the convergence check.
  pf->dist_threshold = 0.5;
  // Initialize the two sample sets (the filter alternates between them).
  for (j = 0; j < 2; j++) {
    set = pf->sets + j;

    set->sample_count = max_samples;
    set->samples = calloc(max_samples, sizeof(pf_sample_t));

    for (i = 0; i < set->sample_count; i++) {
      sample = set->samples + i;
      sample->pose.v[0] = 0.0;
      sample->pose.v[1] = 0.0;
      sample->pose.v[2] = 0.0;
      sample->weight = 1.0 / max_samples;
    }
    // kd-tree used as an adaptive-resolution histogram of the samples.
    set->kdtree = pf_kdtree_alloc(3 * max_samples);

    set->cluster_count = 0;
    set->cluster_max_count = max_samples;
    set->clusters = calloc(set->cluster_max_count, sizeof(pf_cluster_t));

    set->mean = pf_vector_zero();
    set->cov = pf_matrix_zero();
  }
  pf->w_slow = 0.0;
  pf->w_fast = 0.0;

  // Decay rates for the running weight averages used by augmented MCL.
  pf->alpha_slow = alpha_slow;
  pf->alpha_fast = alpha_fast;

  // Start in the "not converged" state.
  pf_init_converged(pf);

  return pf;
}
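// Illustrative usage sketch, not part of the original AMCL code path: the
// typical lifecycle of a filter as driven by a caller such as AmclNode.  The
// PF_USAGE_EXAMPLES guard, the helper name and the parameter values are
// hypothetical choices for this sketch, not recommended defaults.
#ifdef PF_USAGE_EXAMPLES
static void example_filter_lifecycle(pf_init_model_fn_t random_pose_fn)
{
  // 500..2000 particles; slow/fast averaging rates for augmented MCL.
  pf_t * pf = pf_alloc(500, 2000, 0.001, 0.1, random_pose_fn);

  // ... pf_init() / pf_update_sensor() / pf_update_resample() cycles go here ...

  pf_free(pf);
}
#endif  // PF_USAGE_EXAMPLES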
// Free an existing filter
void pf_free(pf_t * pf)
{
  int i;
  for (i = 0; i < 2; i++) {
    free(pf->sets[i].clusters);
    pf_kdtree_free(pf->sets[i].kdtree);
    free(pf->sets[i].samples);
  }
  free(pf);
}
// Initialize the filter using a Gaussian pose distribution.
void pf_init(pf_t * pf, pf_vector_t mean, pf_matrix_t cov)
{
  int i;
  pf_sample_set_t * set;
  pf_sample_t * sample;
  pf_pdf_gaussian_t * pdf;

  set = pf->sets + pf->current_set;

  // Re-create the kd-tree histogram used for adaptive sampling.
  pf_kdtree_clear(set->kdtree);
  set->sample_count = pf->max_samples;

  pdf = pf_pdf_gaussian_alloc(mean, cov);

  // Draw the new sample poses and add them to the histogram.
  for (i = 0; i < set->sample_count; i++) {
    sample = set->samples + i;
    sample->weight = 1.0 / pf->max_samples;
    sample->pose = pf_pdf_gaussian_sample(pdf);
    pf_kdtree_insert(set->kdtree, sample->pose, sample->weight);
  }

  pf->w_slow = pf->w_fast = 0.0;
  pf_pdf_gaussian_free(pdf);

  // Re-compute cluster statistics and reset the convergence flag.
  pf_cluster_stats(pf, set);
  pf_init_converged(pf);
}
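// Illustrative sketch, not part of the original code: initializing the filter
// around a known pose with a diagonal covariance, roughly what a caller does
// when it receives an initial-pose estimate.  The helper name and the variance
// values are hypothetical.
#ifdef PF_USAGE_EXAMPLES
static void example_init_from_pose(pf_t * pf, double x, double y, double yaw)
{
  pf_vector_t mean = pf_vector_zero();
  mean.v[0] = x;
  mean.v[1] = y;
  mean.v[2] = yaw;

  pf_matrix_t cov = pf_matrix_zero();
  cov.m[0][0] = 0.25;                           // 0.25 m^2 in x
  cov.m[1][1] = 0.25;                           // 0.25 m^2 in y
  cov.m[2][2] = (M_PI / 12.0) * (M_PI / 12.0);  // ~15 deg std dev in yaw

  pf_init(pf, mean, cov);
}
#endif  // PF_USAGE_EXAMPLES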
// Initialize the filter using some pose-generating model.
void pf_init_model(pf_t * pf, pf_init_model_fn_t init_fn, void * init_data)
{
  int i;
  pf_sample_set_t * set;
  pf_sample_t * sample;

  set = pf->sets + pf->current_set;

  // Re-create the kd-tree histogram used for adaptive sampling.
  pf_kdtree_clear(set->kdtree);
  set->sample_count = pf->max_samples;

  // Compute the new sample poses from the supplied model.
  for (i = 0; i < set->sample_count; i++) {
    sample = set->samples + i;
    sample->weight = 1.0 / pf->max_samples;
    sample->pose = (*init_fn)(init_data);
    pf_kdtree_insert(set->kdtree, sample->pose, sample->weight);
  }

  pf->w_slow = pf->w_fast = 0.0;

  // Re-compute cluster statistics and reset the convergence flag.
  pf_cluster_stats(pf, set);
  pf_init_converged(pf);
}
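// Illustrative sketch, not part of the original code: a pose-generating
// callback with the shape expected by (*init_fn)(init_data) above.  The
// production callback in AMCL draws uniformly over free map cells; this
// hypothetical one draws uniformly over a rectangular region described by
// init_data.  Type and function names are invented for the sketch.
#ifdef PF_USAGE_EXAMPLES
typedef struct
{
  double min_x, max_x;
  double min_y, max_y;
} example_area_t;

static pf_vector_t example_uniform_pose(void * init_data)
{
  example_area_t * area = (example_area_t *)init_data;
  pf_vector_t p;
  p.v[0] = area->min_x + drand48() * (area->max_x - area->min_x);
  p.v[1] = area->min_y + drand48() * (area->max_y - area->min_y);
  p.v[2] = -M_PI + drand48() * 2 * M_PI;
  return p;
}
// Would be used as: pf_init_model(pf, example_uniform_pose, &area);
#endif  // PF_USAGE_EXAMPLES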
// Mark the filter (and its current sample set) as not converged.
void pf_init_converged(pf_t * pf)
{
  pf_sample_set_t * set;
  set = pf->sets + pf->current_set;
  set->converged = 0;
  pf->converged = 0;
}
// Check whether the sample set has converged: every sample must lie
// within dist_threshold of the mean in both x and y.
int pf_update_converged(pf_t * pf)
{
  int i;
  pf_sample_set_t * set;
  pf_sample_t * sample;

  set = pf->sets + pf->current_set;
  double mean_x = 0, mean_y = 0;

  for (i = 0; i < set->sample_count; i++) {
    sample = set->samples + i;
    mean_x += sample->pose.v[0];
    mean_y += sample->pose.v[1];
  }
  mean_x /= set->sample_count;
  mean_y /= set->sample_count;

  for (i = 0; i < set->sample_count; i++) {
    sample = set->samples + i;
    if (fabs(sample->pose.v[0] - mean_x) > pf->dist_threshold ||
      fabs(sample->pose.v[1] - mean_y) > pf->dist_threshold)
    {
      set->converged = 0;
      pf->converged = 0;
      return 0;
    }
  }
  set->converged = 1;
  pf->converged = 1;
  return 1;
}
// Update the filter with a new sensor observation.
void pf_update_sensor(pf_t * pf, pf_sensor_model_fn_t sensor_fn, void * sensor_data)
{
  int i;
  double total;
  pf_sample_set_t * set;
  pf_sample_t * sample;

  set = pf->sets + pf->current_set;

  // Compute the sample weights; the model returns the total weight.
  total = (*sensor_fn)(sensor_data, set);

  if (total > 0.0) {
    // Normalize the weights.
    double w_avg = 0.0;
    for (i = 0; i < set->sample_count; i++) {
      sample = set->samples + i;
      w_avg += sample->weight;
      sample->weight /= total;
    }

    // Update the slow and fast running averages of the mean likelihood
    // (used by augmented MCL to decide how many random poses to inject).
    w_avg /= set->sample_count;
    if (pf->w_slow == 0.0) {
      pf->w_slow = w_avg;
    } else {
      pf->w_slow += pf->alpha_slow * (w_avg - pf->w_slow);
    }
    if (pf->w_fast == 0.0) {
      pf->w_fast = w_avg;
    } else {
      pf->w_fast += pf->alpha_fast * (w_avg - pf->w_fast);
    }
  } else {
    // Degenerate case: zero total weight, fall back to uniform weights.
    for (i = 0; i < set->sample_count; i++) {
      sample = set->samples + i;
      sample->weight = 1.0 / set->sample_count;
    }
  }
}
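// Illustrative sketch, not part of the original code: the shape of a sensor
// model callback as invoked through (*sensor_fn)(sensor_data, set) above.  The
// real models live in nav2_amcl's laser sensor classes; this hypothetical one
// scores each sample with a placeholder likelihood and returns the total
// weight, which is the contract the normalization above relies on.
#ifdef PF_USAGE_EXAMPLES
static double example_likelihood(void * sensor_data, pf_vector_t pose)
{
  (void)sensor_data;
  (void)pose;
  return 1.0;  // placeholder; a real model scores the observation given this pose
}

static double example_sensor_model(void * sensor_data, pf_sample_set_t * set)
{
  double total = 0.0;
  for (int i = 0; i < set->sample_count; i++) {
    pf_sample_t * sample = set->samples + i;
    // Multiply the prior weight by the measurement likelihood for this pose.
    sample->weight *= example_likelihood(sensor_data, sample->pose);
    total += sample->weight;
  }
  return total;
}
#endif  // PF_USAGE_EXAMPLES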
// Resample the distribution: draw a new sample set from the current one,
// mixing in random poses according to the augmented-MCL heuristic.
void pf_update_resample(pf_t * pf, void * random_pose_data)
{
  int i;
  double total;
  pf_sample_set_t * set_a, * set_b;
  pf_sample_t * sample_a, * sample_b;
  double r, w_diff;
  double * c;

  set_a = pf->sets + pf->current_set;
  set_b = pf->sets + (pf->current_set + 1) % 2;

  // Build up the cumulative probability table for resampling.
  c = (double *)malloc(sizeof(double) * (set_a->sample_count + 1));
  c[0] = 0.0;
  for (i = 0; i < set_a->sample_count; i++) {
    c[i + 1] = c[i] + set_a->samples[i].weight;
  }

  // Create the kd tree for adaptive sampling.
  pf_kdtree_clear(set_b->kdtree);

  // Draw samples from set a to create set b.
  total = 0;
  set_b->sample_count = 0;

  // Probability of injecting a random pose (augmented MCL).
  w_diff = 1.0 - pf->w_fast / pf->w_slow;
  if (w_diff < 0.0) {
    w_diff = 0.0;
  }

  while (set_b->sample_count < pf->max_samples) {
    sample_b = set_b->samples + set_b->sample_count++;

    if (drand48() < w_diff) {
      // Inject a random pose to help recover from localization failure.
      sample_b->pose = (pf->random_pose_fn)(random_pose_data);
    } else {
      // Naive discrete event sampler over the cumulative weights.
      r = drand48();
      for (i = 0; i < set_a->sample_count; i++) {
        if ((c[i] <= r) && (r < c[i + 1])) {
          break;
        }
      }
      assert(i < set_a->sample_count);

      sample_a = set_a->samples + i;
      assert(sample_a->weight > 0);

      sample_b->pose = sample_a->pose;
    }

    sample_b->weight = 1.0;
    total += sample_b->weight;

    // Add sample to the histogram.
    pf_kdtree_insert(set_b->kdtree, sample_b->pose, sample_b->weight);

    // Stop once the KLD-sampling bound says we have enough samples.
    if (set_b->sample_count > pf_resample_limit(pf, set_b->kdtree->leaf_count)) {
      break;
    }
  }

  // Reset the running averages, to avoid spiraling off into complete randomness.
  pf->w_slow = pf->w_fast = 0.0;

  // Normalize the weights.
  for (i = 0; i < set_b->sample_count; i++) {
    sample_b = set_b->samples + i;
    sample_b->weight /= total;
  }

  // Re-compute cluster statistics.
  pf_cluster_stats(pf, set_b);

  // Use the newly created sample set.
  pf->current_set = (pf->current_set + 1) % 2;

  pf_update_converged(pf);

  free(c);
}
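// Illustrative sketch, not part of the original code: one measurement-update
// step as a caller might drive it.  The function and parameter names are
// hypothetical; nav2's AmclNode applies the motion model separately beforehand
// and, like this sketch, only resamples every few sensor updates.
#ifdef PF_USAGE_EXAMPLES
static void example_measurement_update(
  pf_t * pf, pf_sensor_model_fn_t sensor_model, void * sensor_data,
  void * random_pose_data, int * update_count, int resample_interval)
{
  // Reweight the current sample set with the latest observation.
  pf_update_sensor(pf, sensor_model, sensor_data);

  // Resample (and possibly inject random poses) only every N-th update.
  if (++(*update_count) % resample_interval == 0) {
    pf_update_resample(pf, random_pose_data);
  }
}
#endif  // PF_USAGE_EXAMPLES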
// Compute the required number of samples, given that there are k bins
// with samples in them (KLD-sampling bound).
int pf_resample_limit(pf_t * pf, int k)
{
  double a, b, c, x;
  int n;

  if (k <= 1) {
    return pf->max_samples;
  }

  a = 1;
  b = 2 / (9 * ((double) k - 1));
  c = sqrt(2 / (9 * ((double) k - 1))) * pf->pop_z;
  x = a - b + c;

  n = (int) ceil((k - 1) / (2 * pf->pop_err) * x * x * x);

  if (n < pf->min_samples) {
    return pf->min_samples;
  }
  if (n > pf->max_samples) {
    return pf->max_samples;
  }
  return n;
}
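// The bound above is the KLD-sampling sample size (Fox, "KLD-Sampling:
// Adaptive Particle Filters", NIPS 2001):
//
//   n = ((k - 1) / (2 * pop_err)) *
//       (1 - 2 / (9 * (k - 1)) + sqrt(2 / (9 * (k - 1))) * pop_z)^3
//
// i.e. enough samples that, with the confidence implied by the normal quantile
// pop_z, the KL divergence between the sampled and the true distribution over
// the k occupied histogram bins stays below pop_err.  Worked example with
// illustrative values (not configured defaults) pop_err = 0.01, pop_z = 3,
// k = 50:
//   b = 2 / (9 * 49)           ~= 0.0045
//   c = sqrt(b) * 3            ~= 0.2020
//   x = 1 - b + c              ~= 1.1975
//   n = ceil(49 / 0.02 * x^3)  ~= ceil(2450 * 1.7172) = 4208
// which is then clamped to the [min_samples, max_samples] range.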
// Re-compute the cluster statistics for a sample set.
void pf_cluster_stats(pf_t * pf, pf_sample_set_t * set)
{
  int i, j, k, cidx;
  pf_sample_t * sample;
  pf_cluster_t * cluster;
  double weight;

  // Workspace for the overall (filter-wide) statistics.
  double m[4], c[2][2];

  // Cluster the samples via the kd-tree.
  pf_kdtree_cluster(set->kdtree);

  // Initialize cluster stats.
  set->cluster_count = 0;
  for (i = 0; i < set->cluster_max_count; i++) {
    cluster = set->clusters + i;
    cluster->weight = 0;
    cluster->mean = pf_vector_zero();
    cluster->cov = pf_matrix_zero();

    for (j = 0; j < 4; j++) {
      cluster->m[j] = 0.0;
    }
    for (j = 0; j < 2; j++) {
      for (k = 0; k < 2; k++) {
        cluster->c[j][k] = 0.0;
      }
    }
  }

  // Initialize overall filter stats.
  weight = 0.0;
  set->mean = pf_vector_zero();
  set->cov = pf_matrix_zero();
  for (j = 0; j < 4; j++) {
    m[j] = 0.0;
  }
  for (j = 0; j < 2; j++) {
    for (k = 0; k < 2; k++) {
      c[j][k] = 0.0;
    }
  }
  // Accumulate the per-cluster and overall statistics.
  for (i = 0; i < set->sample_count; i++) {
    sample = set->samples + i;

    // Get the cluster label for this sample.
    cidx = pf_kdtree_get_cluster(set->kdtree, sample->pose);
    if (cidx >= set->cluster_max_count) {
      continue;
    }
    if (cidx + 1 > set->cluster_count) {
      set->cluster_count = cidx + 1;
    }

    cluster = set->clusters + cidx;

    cluster->weight += sample->weight;
    weight += sample->weight;

    // Accumulate the mean; the heading is accumulated as (cos, sin)
    // so it can be averaged on the circle.
    cluster->m[0] += sample->weight * sample->pose.v[0];
    cluster->m[1] += sample->weight * sample->pose.v[1];
    cluster->m[2] += sample->weight * cos(sample->pose.v[2]);
    cluster->m[3] += sample->weight * sin(sample->pose.v[2]);

    m[0] += sample->weight * sample->pose.v[0];
    m[1] += sample->weight * sample->pose.v[1];
    m[2] += sample->weight * cos(sample->pose.v[2]);
    m[3] += sample->weight * sin(sample->pose.v[2]);

    // Accumulate the covariance in the linear components.
    for (j = 0; j < 2; j++) {
      for (k = 0; k < 2; k++) {
        cluster->c[j][k] += sample->weight * sample->pose.v[j] * sample->pose.v[k];
        c[j][k] += sample->weight * sample->pose.v[j] * sample->pose.v[k];
      }
    }
  }
  // Normalize the per-cluster statistics.
  for (i = 0; i < set->cluster_count; i++) {
    cluster = set->clusters + i;

    cluster->mean.v[0] = cluster->m[0] / cluster->weight;
    cluster->mean.v[1] = cluster->m[1] / cluster->weight;
    cluster->mean.v[2] = atan2(cluster->m[3], cluster->m[2]);

    cluster->cov = pf_matrix_zero();

    // Covariance in the linear components.
    for (j = 0; j < 2; j++) {
      for (k = 0; k < 2; k++) {
        cluster->cov.m[j][k] = cluster->c[j][k] / cluster->weight -
          cluster->mean.v[j] * cluster->mean.v[k];
      }
    }

    // Circular variance of the heading.
    cluster->cov.m[2][2] = -2 * log(
      sqrt(
        cluster->m[2] * cluster->m[2] +
        cluster->m[3] * cluster->m[3]));
  }
  // Compute the overall filter statistics.
  set->mean.v[0] = m[0] / weight;
  set->mean.v[1] = m[1] / weight;
  set->mean.v[2] = atan2(m[3], m[2]);

  // Covariance in the linear components.
  for (j = 0; j < 2; j++) {
    for (k = 0; k < 2; k++) {
      set->cov.m[j][k] = c[j][k] / weight - set->mean.v[j] * set->mean.v[k];
    }
  }

  // Circular variance of the heading.
  set->cov.m[2][2] = -2 * log(sqrt(m[2] * m[2] + m[3] * m[3]));
}
// Get the statistics for a particular cluster.  Returns 0 if there is
// no such cluster.
int pf_get_cluster_stats(
  pf_t * pf, int clabel, double * weight,
  pf_vector_t * mean)
{
  pf_sample_set_t * set;
  pf_cluster_t * cluster;

  set = pf->sets + pf->current_set;

  if (clabel >= set->cluster_count) {
    return 0;
  }
  cluster = set->clusters + clabel;

  *weight = cluster->weight;
  *mean = cluster->mean;

  return 1;
}
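// Illustrative sketch, not part of the original code: how a caller such as
// AmclNode typically consumes the cluster statistics, taking the
// highest-weight cluster as the pose hypothesis.  The helper name is
// hypothetical and out_pose is assumed non-NULL.
#ifdef PF_USAGE_EXAMPLES
static int example_best_cluster(pf_t * pf, pf_vector_t * out_pose)
{
  double best_weight = -1.0;
  int found = 0;

  for (int i = 0; i < pf->sets[pf->current_set].cluster_count; i++) {
    double weight;
    pf_vector_t mean;
    if (!pf_get_cluster_stats(pf, i, &weight, &mean)) {
      break;
    }
    if (weight > best_weight) {
      best_weight = weight;
      *out_pose = mean;
      found = 1;
    }
  }
  return found;
}
#endif  // PF_USAGE_EXAMPLES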