Eliminate insertion sort
This saves a few bytes of static storage, and fewer instructions are executed when looking for the best offset.

Signed-off-by: Georg Sauthoff <mail@gms.tf>
parent bf0a644b99
commit b6fa222201

 sysoff.c | 55 +++++++++++++++++++++++++++----------------------------
 1 file changed, 27 insertions(+), 28 deletions(-)
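For context, the selection that remains after this change is a plain running minimum over the measured samples. Each sample is a (t1, tp, t2) triple: two system-clock readings t1 and t2 taken around one PHC reading tp (gathered via the PTP_SYS_OFFSET family of ioctls). The sample with the shortest interval t2 - t1 is treated as the best one; its midpoint (t1 + t2) / 2 becomes the reported timestamp, and the offset is that midpoint minus tp. Below is a minimal, self-contained sketch of that selection rule only; struct reading, the hard-coded measurements, and the printed output are invented for illustration and are not part of sysoff.c.

/*
 * Sketch only: the sample values and the "struct reading" type are
 * invented; only the selection rule mirrors the patch below.
 */
#include <inttypes.h>
#include <stdio.h>

struct reading {            /* hypothetical (t1, tp, t2) sample */
	int64_t t1;         /* system time before reading the PHC, ns */
	int64_t tp;         /* PHC time, ns */
	int64_t t2;         /* system time after reading the PHC, ns */
};

int main(void)
{
	struct reading s[] = {            /* made-up measurements */
		{ 1000, 5040, 1100 },     /* interval 100 ns */
		{ 2000, 6010, 2030 },     /* interval  30 ns -> best */
		{ 3000, 7080, 3090 },     /* interval  90 ns */
	};
	int n = sizeof(s) / sizeof(s[0]);

	/* Seed the running minimum with sample 0 ... */
	int64_t shortest_interval = s[0].t2 - s[0].t1;
	int64_t best_timestamp = (s[0].t2 + s[0].t1) / 2;
	int64_t best_offset = best_timestamp - s[0].tp;

	/* ... then replace it whenever a tighter sample shows up. */
	for (int i = 1; i < n; i++) {
		int64_t interval = s[i].t2 - s[i].t1;
		int64_t timestamp = (s[i].t2 + s[i].t1) / 2;
		int64_t offset = timestamp - s[i].tp;

		if (interval < shortest_interval) {
			shortest_interval = interval;
			best_timestamp = timestamp;
			best_offset = offset;
		}
	}

	printf("delay %" PRId64 " ns, offset %" PRId64 " ns\n",
	       shortest_interval, best_offset);
	return 0;
}

The old code fed every sample through insertion_sort() to keep the whole static samples[PTP_MAX_SAMPLES] array ordered, yet only samples[0] (the shortest-interval entry) was ever read back. Tracking the minimum on the fly yields the same result, which is where the saved static storage and the saved instructions mentioned in the commit message come from.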
diff --git a/sysoff.c b/sysoff.c
--- a/sysoff.c
+++ b/sysoff.c
@@ -34,12 +34,6 @@ static int64_t pctns(struct ptp_clock_time *t)
 	return t->sec * NS_PER_SEC + t->nsec;
 }
 
-static struct {
-	int64_t interval;
-	int64_t offset;
-	uint64_t timestamp;
-} samples[PTP_MAX_SAMPLES];
-
 static int sysoff_precise(int fd, int64_t *result, uint64_t *ts)
 {
 #ifdef PTP_SYS_OFFSET_PRECISE
@@ -57,28 +51,28 @@ static int sysoff_precise(int fd, int64_t *result, uint64_t *ts)
 #endif
 }
 
-static void insertion_sort(int length, int64_t interval, int64_t offset, uint64_t ts)
-{
-	int i = length - 1;
-	while (i >= 0) {
-		if (samples[i].interval < interval)
-			break;
-		samples[i+1] = samples[i];
-		i--;
-	}
-	samples[i+1].interval = interval;
-	samples[i+1].offset = offset;
-	samples[i+1].timestamp = ts;
-}
-
 static int64_t sysoff_estimate(struct ptp_clock_time *pct, int extended,
 			       int n_samples, uint64_t *ts, int64_t *delay)
 {
 	int64_t t1, t2, tp;
-	int64_t interval, offset;
-	int i;
+	int64_t interval, timestamp, offset;
+	int64_t shortest_interval, best_timestamp, best_offset;
+	int i = 0;
 
-	for (i = 0; i < n_samples; i++) {
+	if (extended) {
+		t1 = pctns(&pct[3*i]);
+		tp = pctns(&pct[3*i+1]);
+		t2 = pctns(&pct[3*i+2]);
+	} else {
+		t1 = pctns(&pct[2*i]);
+		tp = pctns(&pct[2*i+1]);
+		t2 = pctns(&pct[2*i+2]);
+	}
+	shortest_interval = t2 - t1;
+	best_timestamp = (t2 + t1) / 2;
+	best_offset = best_timestamp - tp;
+
+	for (i = 1; i < n_samples; i++) {
 		if (extended) {
 			t1 = pctns(&pct[3*i]);
 			tp = pctns(&pct[3*i+1]);
@@ -89,12 +83,17 @@ static int64_t sysoff_estimate(struct ptp_clock_time *pct, int extended,
 			t2 = pctns(&pct[2*i+2]);
 		}
 		interval = t2 - t1;
-		offset = (t2 + t1) / 2 - tp;
-		insertion_sort(i, interval, offset, (t2 + t1) / 2);
+		timestamp = (t2 + t1) / 2;
+		offset = timestamp - tp;
+		if (interval < shortest_interval) {
+			shortest_interval = interval;
+			best_timestamp = timestamp;
+			best_offset = offset;
+		}
 	}
-	*ts = samples[0].timestamp;
-	*delay = samples[0].interval;
-	return samples[0].offset;
+	*ts = best_timestamp;
+	*delay = shortest_interval;
+	return best_offset;
 }
 
 static int sysoff_extended(int fd, int n_samples,