Smooth RTC calibration issue in STM32l431
I cannot get smooth calibration to work in STM32l431. This is the code:
void HW_RTC_Init(void)
{
LL_RTC_TimeTypeDef time;
LL_RTC_DateTypeDef date;
LL_RCC_LSI_Enable();
while(LL_RCC_LSI_IsReady() == 0)
{
}
LL_RCC_SetRTCClockSource(LL_RCC_RTC_CLKSOURCE_LSI);
__HAL_RCC_RTC_ENABLE();
LL_RTC_WaitForSynchro(RTC);
//Set the default date to 2000-01-01 00:00:00:000
date.Year = 0;
date.Day = 1;
date.Month = 1;
date.WeekDay = 6;
time.Hours = 0;
time.Minutes = 0;
time.Seconds = 0;
/* Configure RTC */
RTCHandle.Instance = RTC;
/* Set the RTC time base to 1s */
/* Configure RTC prescaler and RTC data registers as follow:
- Hour Format = Format 24
- Asynch Prediv = Value according to source clock
- Synch Prediv = Value according to source clock
- OutPut = Output Disable
- OutPutPolarity = High Polarity
- OutPutType = Open Drain */
RTCHandle.Init.HourFormat = RTC_HOURFORMAT_24;
RTCHandle.Init.AsynchPrediv = 127;
RTCHandle.Init.SynchPrediv = 255;
RTCHandle.Init.OutPut = RTC_OUTPUT_DISABLE;
RTCHandle.Init.OutPutPolarity = RTC_OUTPUT_POLARITY_HIGH;
RTCHandle.Init.OutPutType = RTC_OUTPUT_TYPE_OPENDRAIN;
HAL_RTC_Init(&RTCHandle);
LL_RTC_TIME_SetFormat(RTC, LL_RTC_HOURFORMAT_24HOUR);
LL_RTC_TIME_Init(RTC, LL_RTC_FORMAT_BIN, &time);
LL_RTC_DATE_Init(RTC, LL_RTC_FORMAT_BIN, &date);
rtcSet = false;
}
void GPS_CalibrateRTCClock()
{
int32_t frequency;
// GPS clock
printf("GPS %u %u => RTC %u %u\n\r", gpsCalibStart, gpsCalibEnd, rtcCalibStart, rtcCalibEnd);
int32_t gpsDiff = (int32_t)(gpsCalibEnd - gpsCalibStart);
printf("Diff GPS: %d seconds\n\r", gpsDiff);
// RTC
int32_t rtcDiff = (int32_t)(rtcCalibEnd - rtcCalibStart);
printf("Diff STM32: %d seconds\n\r", rtcDiff);
if(gpsDiff < 300 || rtcDiff < 200 || rtcCalibrationDone == true)
{
return;
}
rtcCalibrationDone = true;
int diff = gpsDiff - rtcDiff;
int percent = 100 * 100 * diff / rtcDiff; // procent med 2 decimaler
printf("=> %d seconds = %d %% \n\r", diff, percent);
int originalFrequency = 32000; // LSI frequency på RTC
int preferedFrequency = originalFrequency + (originalFrequency * percent) / 10000;
//int preferedFrequency = originalFrequency - (originalFrequency * percent) / 10000;
printf("Prefered frequency %d\n\r", preferedFrequency);
rtcCalibrationDone = HW_RTC_CalculateCalibrationParameters(originalFrequency, preferedFrequency);
}
When I run the code I get the following output:
GPS 1759492828 1759493307 => RTC 1759500022 1759500494
Diff GPS: 479 seconds
Diff STM32: 472 seconds
=> 7 seconds = 148 % // this is 1.48%
Prefered frequency 32473
Smooth Pos 30 Neg 55 => 32473 Hz // 1.48% faster than 32000 Hz (LSI clock)
When apply this values I do not see any change in RTC clock compared to GPS clock
In desperation I tested to set an insane value
Setting Pos 500 Neg 55 => 42332 Hz
to verify if I can get RTC clock to go faster then GPS reference clock but result is same:
GPS: 2025-10-03 12:50:50
RTC: 2025-10-03 14:50:43
Timediff 7 second
GPS: 2025-10-03 13:03:26
RTC: 2025-10-03 13:03:07
Timediff 19 second
RTC clock has drifted slower then GPS clock for 12 seconds in 13 minutes.
What am I doing wrong here?
