Skip to content

ntptime: Year 2036 fix. #830

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 8 commits into from
3 changes: 3 additions & 0 deletions micropython/minitz/manifest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# micropython-lib package manifest.  metadata() and module() are supplied
# by the manifest build environment, not by imports.
metadata(description="MiniTZ timezone support.", version="0.1.0")

# NOTE(review): the package source appears to live in a minitz/ directory
# (minitz/__init__.py etc.), but this manifest references "minitz.py" --
# confirm whether package() should be used instead.
# opt=3 -- presumably maximum bytecode optimization; verify against the
# micropython-lib manifest documentation.
module("minitz.py", opt=3)
43 changes: 43 additions & 0 deletions micropython/minitz/minitz/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
from _minitz import Database
import datetime as _datetime


# Wraps a _minitz.Zone, and implements tzinfo
# Wraps a _minitz.Zone, and implements the datetime.tzinfo interface.
class tzwrap(_datetime.tzinfo):
    """Adapter exposing a _minitz.Zone object as a datetime.tzinfo."""

    def __init__(self, mtz_zone):
        # The underlying _minitz.Zone providing lookup_local()/lookup_utc().
        self._mtz_zone = mtz_zone

    def __str__(self):
        # NOTE(review): this passes dt=None to tzname(), but _lookup_local()
        # dereferences dt.tzinfo, so str() will raise AttributeError -- confirm
        # whether a static designator was intended here.
        return self.tzname(None)

    # Returns (offset: int, designator: str, is_dst: int)
    def _lookup_local(self, dt):
        """Look up zone data for a local (wall-clock) datetime.

        dt must be an aware datetime whose tzinfo is this object.
        """
        if dt.tzinfo is not self:
            raise ValueError  # RSE102: no parentheses on argument-less raise
        # Interpret the wall-clock fields as if they were UTC to get a
        # numeric timestamp, then let the zone resolve the real offset
        # (dt.fold disambiguates repeated local times).
        t = dt.replace(tzinfo=_datetime.timezone.utc, fold=0).timestamp()
        return self._mtz_zone.lookup_local(t, dt.fold)

    def utcoffset(self, dt):
        """Return the UTC offset for dt as a timedelta."""
        return _datetime.timedelta(seconds=self._lookup_local(dt)[0])

    def is_dst(self, dt):
        # Nonstandard extension. Returns bool.
        return bool(self._lookup_local(dt)[2])

    def dst(self, dt):
        """Return the DST adjustment for dt."""
        is_dst = self._lookup_local(dt)[2]
        # TODO in the case of is_dst=1, this is returning
        # a made-up value that may be wrong.
        return _datetime.timedelta(hours=is_dst)

    def tzname(self, dt):
        """Return the zone designator (e.g. 'CET') in effect at dt."""
        return self._lookup_local(dt)[1]

    def fromutc(self, dt):
        """Convert dt (UTC fields, tzinfo=self) to local time."""
        if dt.fold != 0:
            raise ValueError  # fold is meaningless for UTC input
        t = dt.replace(tzinfo=_datetime.timezone.utc).timestamp()
        offset = self._mtz_zone.lookup_utc(t)[0]
        # BUG FIX: the original discarded the constructed timedelta and then
        # returned `dt + self._offset`, referencing an attribute that is
        # never assigned anywhere in this class (AttributeError at runtime).
        return dt + _datetime.timedelta(seconds=offset)
43 changes: 43 additions & 0 deletions micropython/minitz/minitz/fetch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import requests
import email.utils

# Configurable settings
server_url = 'http://tzdata.net/api/1/'  # base URL of the tzdata.net API (plain HTTP)
dataset = '1-15'  # dataset identifier inserted into data URLs -- presumably a tzdata version range; verify against tzdata.net API docs


def fetch(url, last_modified, timeout=None):
    """HTTP GET with conditional-request support.

    url: resource to fetch.
    last_modified: Unix time of the cached copy, or falsy to fetch
        unconditionally (sent as an If-Modified-Since header).
    timeout: passed through to requests.

    Returns (last_modified: int, content: bytes) on HTTP 200, or None
    when the server answers 304 Not Modified.  Raises Exception for any
    other status code.

    NOTE(review): parse_headers= and get_date_as_int() are extensions of
    this project's MicroPython requests module -- confirm availability.
    """
    headers = {}
    if last_modified:
        headers['if-modified-since'] = email.utils.formatdate(last_modified, False, True)

    resp = requests.request('GET', url, headers=headers, timeout=timeout, parse_headers=True)

    if resp.status_code == 304:
        # Not modified -- caller keeps its cached copy.
        resp.close()
        return None
    if resp.status_code != 200:
        resp.close()
        # FIX: include the status code so failures are diagnosable
        # (was a bare `raise Exception()`, flagged by ruff RSE102).
        raise Exception("HTTP status %d" % resp.status_code)

    content = resp.content

    # 0 means "no usable Last-Modified header".
    last_modified = resp.get_date_as_int('last-modified') or 0

    return (last_modified, content)


def fetch_zone(zone_name, last_modified, timeout=None):
    # Download the compact TZif data for one named zone.
    return fetch(server_url + dataset + '/zones-minitzif/' + zone_name,
                 last_modified, timeout)


def fetch_all(last_modified, timeout=None):
    # Download the database file covering every zone.
    return fetch(server_url + dataset + '/minitzdb', last_modified, timeout)


def fetch_names(last_modified, timeout=None):
    # Download the list of available zone names.
    # (Note: unlike the other endpoints, this URL has no dataset segment.)
    return fetch(server_url + 'zone-names-mini', last_modified, timeout)
231 changes: 231 additions & 0 deletions micropython/minitz/minitz/persist.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,231 @@
import datetime
import os
import struct
import time
from . import Database, tzwrap
from .fetch import fetch_zone, fetch_all

# Name of the currently-selected local zone ('UTC' until configured).
_local_zone_name = 'UTC'
# True when the persisted data covers the whole zone database,
# False when it holds only the single local zone.
_whole_db = False

# Parsed Database object, or None until data is loaded.
_db = None
# Unix time from the server's Last-Modified header, or None if unknown.
_last_modified = None
# Low-level zone object for the current zone, or None until loaded.
_local_zone = None
# tzinfo for the current zone; defaults to UTC.
_local_tzinfo = datetime.timezone.utc

# time.ticks_ms() value of the last update check, or None for "never".
_last_check = None

# Filesystem paths of the persisted metadata and database files.
path_for_meta_file = 'tzmeta'
path_for_db_file = 'tzdata'

# Initialise by reading from persistent storage.
def init(want_zone_name=None, want_whole_db=None):
    """Load timezone state from the persisted tzmeta/tzdata files.

    want_zone_name: zone the caller wants active, or None to keep the
        stored zone.
    want_whole_db: True to require a whole-DB file, False to require a
        single-zone file, None to accept either.

    Returns True on success.  On any failure the module state falls back
    to a safe default (UTC tzinfo when the wanted zone is UTC, otherwise
    no tzinfo) and False is returned.
    """
    # FIX: pre-bind whole_db.  The original only assigned it midway
    # through the try block, so an early failure (e.g. missing tzmeta
    # file) made the handler's `whole_db or False` raise NameError.
    whole_db = want_whole_db or False
    try:
        with open(path_for_meta_file, 'rb') as fp:
            last_modified, data_crc = struct.unpack('<QI', fp.read(12))
            local_zone_name = fp.read().decode()

        if not local_zone_name:
            # Corrupt file: the zone name is mandatory.
            raise ValueError
        if last_modified == 0:
            # 0 is the persisted marker for "unknown".
            last_modified = None

        with open(path_for_db_file, 'rb') as fp:
            data = fp.read()

        db = Database(data)
        if db.kind != 1 and db.kind != 2:
            # Kind must be 1 (single zone) or 2 (whole DB).
            raise ValueError
        if db.crc != data_crc:
            # The tzdata and tzmeta files do not match.
            raise ValueError

        whole_db = (db.kind == 2)
        if want_whole_db is not None and want_whole_db != whole_db:
            # Want to download one zone file only, have whole DB
            # OR want to download whole DB, only have one zone.
            raise ValueError

        if want_zone_name is not None and want_zone_name != local_zone_name:
            if not whole_db:
                # Need to download the correct zone file.
                raise ValueError
            local_zone_name = want_zone_name

        # For a TZIF file, the string passed to get_zone_by_name() is ignored.
        local_zone = db.get_zone_by_name(local_zone_name)
        local_tzinfo = tzwrap(local_zone)

        # Success.
        success = True
    except Exception:
        # Failed -- fall back to defaults.  (FIX: was a bare `except:`,
        # which also swallowed KeyboardInterrupt/SystemExit.)
        success = False
        db = None
        last_modified = None
        local_zone_name = want_zone_name or 'UTC'
        local_zone = None
        local_tzinfo = datetime.timezone.utc if local_zone_name == 'UTC' else None

    # Save state.
    global _local_zone_name, _whole_db, _db, _last_modified, _local_zone, _local_tzinfo, _last_check
    _local_zone_name = local_zone_name
    _whole_db = whole_db
    _db = db
    _last_modified = last_modified
    _local_zone = local_zone
    _local_tzinfo = local_tzinfo
    _last_check = None
    if success:
        # Pretend last check was 23.5 hours ago.
        # That way the next check will be in 30 minutes.
        # This means that if there are many reboots in a row, we don't flood
        # the server with requests.
        # 23.5 * 3600 * 1000 = 84_600_000
        #
        # (It would be better if we could use real UTC time to track when the
        # last check was, and store the last update time in persistent memory.
        # But we don't necessarily know the real UTC time at init time, and may
        # not want a Flash write on every update check).
        _last_check = time.ticks_add(time.ticks_ms(), -84_600_000)

    return success


def _force_update_from_internet(zone_name=None, whole_db=None, timeout=None):
last_modified = _last_modified
if whole_db is None:
whole_db = _whole_db
elif whole_db != _whole_db:
# Force fresh download as it's a different file
last_modified = None
if zone_name is None:
zone_name = _local_zone_name
elif zone_name != _local_zone_name and not whole_db:
# Force fresh download as it's a different file
last_modified = None
if not zone_name:
# Empty string is not a valid zone name.
raise ValueError()

# We update _last_check even if the HTTP request fails.
# This is to comply with the fair usage policy of tzdata.net.
global _last_check
_last_check = time.ticks_ms()

if whole_db:
last_modified, data = fetch_zone(zone_name, last_modified, timeout)
else:
last_modified, data = fetch_all(last_modified, timeout)

if data is None:
# Not changed
return

db = Database(data)
if db.kind != (2 if whole_db else 1):
# Not the kind of file that was expected
raise ValueError()

# For a TZIF file, the string passed to get_zone_by_name() is ignored.
local_zone = db.get_zone_by_name(zone_name)
local_tzinfo = tzwrap(local_zone)

# Download success!

# Save state.
global _local_zone_name, _whole_db, _db, _last_modified, _local_zone, _local_tzinfo
_local_zone_name = zone_name
_whole_db = whole_db
_db = db
_last_modified = last_modified
_local_zone = local_zone
_local_tzinfo = local_tzinfo

# Save the data to persistent storage.

# Maybe this may make flash wear-levelling easier?
# We give the filesystem as much free space as possible
# before we start writing to it.
os.unlink(path_for_db_file)

with open(path_for_meta_file, 'wb') as fp:
fp.write(struct.pack('<QI', last_modified or 0, db.crc))
fp.write(zone_name.encode())

with open(path_for_db_file, 'wb') as fp:
fp.write(data)


# Initialise from persistent storage; on failure, attempt a first-time
# download of timezone data from the Internet.
def init_with_download_if_needed(zone_name=None, whole_db=None, timeout=None):
    loaded = init(zone_name, whole_db)
    if loaded:
        return
    # No download needed for plain single-zone UTC: init() already fell
    # back to datetime.timezone.utc in that case.
    if whole_db or zone_name != 'UTC':
        _force_update_from_internet(zone_name, whole_db, timeout)


def set_zone(zone_name, can_download, timeout=None):
if _local_zone_name == zone_name:
# Nothing to do!
pass
elif _whole_db:
local_zone = _db.get_zone_by_name(zone_name)
local_tzinfo = tzwrap(local_zone)

global _local_zone_name, _local_zone, _local_tzinfo
_local_zone_name = zone_name
_local_zone = local_zone
_local_tzinfo = local_tzinfo
elif not can_download:
raise ValueError("Changing zone without whole DB or Internet")
else:
_force_update_from_internet(zone_name, _whole_db, timeout)


def update_from_internet_if_needed(timeout=None):
    # Call this regularly. Ideally at least once an hour, but it's fine
    # to call it much more frequently, even multiple times per second.
    # This function will do nothing if an update is not needed.
    #
    # We attempt an Internet update at most once per day.
    # This is to comply with the fair usage policy of tzdata.net.
    day_ms = 24 * 3600 * 1000
    if _last_check is not None:
        elapsed = time.ticks_diff(time.ticks_ms(), _last_check)
        if elapsed < day_ms:
            # Too soon.
            return
    _force_update_from_internet(timeout=timeout)


def has_tzinfo():
    """Return True if get_tzinfo() would succeed."""
    available = _local_tzinfo is not None
    return available

def get_tzinfo():
    """Return the active tzinfo; raise ValueError if none is set."""
    tzinfo = _local_tzinfo
    if tzinfo is None:
        raise ValueError
    return tzinfo

def have_db():
    """Return True if a timezone database is currently loaded."""
    loaded = _db is not None
    return loaded

def get_raw_zone():
    """Return the low-level zone object; raise ValueError if none."""
    zone = _local_zone
    if zone is None:
        raise ValueError
    return zone

def get_db():
    """Return the loaded Database; raise ValueError if none."""
    db = _db
    if db is None:
        raise ValueError
    return db

def get_zone_name():
    """Return the name of the currently-selected local zone."""
    name = _local_zone_name
    return name

def get_last_modified():
    """Return the data's last-modified time as an aware UTC datetime.

    Returns None when no Last-Modified timestamp has been recorded.
    (FIX: previously _last_modified=None was passed straight to
    fromtimestamp(), raising TypeError.)
    """
    if _last_modified is None:
        return None
    return datetime.datetime.fromtimestamp(_last_modified, datetime.timezone.utc)
27 changes: 26 additions & 1 deletion micropython/net/ntptime/ntptime.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,37 @@ def time():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.settimeout(timeout)
res = s.sendto(NTP_QUERY, addr)
s.sendto(NTP_QUERY, addr)
msg = s.recv(48)
finally:
s.close()
val = struct.unpack("!I", msg[40:44])[0]

# 2024-01-01 00:00:00 converted to an NTP timestamp
MIN_NTP_TIMESTAMP = 3913056000

# Y2036 fix
#
# The NTP timestamp has a 32-bit count of seconds, which will wrap back
# to zero on 7 Feb 2036 at 06:28:16.
#
# We know that this software was written during 2024 (or later).
# So we know that timestamps less than MIN_NTP_TIMESTAMP are impossible.
# So if the timestamp is less than MIN_NTP_TIMESTAMP, that probably means
# that the NTP time wrapped at 2^32 seconds. (Or someone set the wrong
# time on their NTP server, but we can't really do anything about that).
#
# So in that case, we need to add in those extra 2^32 seconds, to get the
# correct timestamp.
#
# This means that this code will work until the year 2160. More precisely,
# this code will not work after 7th Feb 2160 at 06:28:15.
#
if val < MIN_NTP_TIMESTAMP:
val += 0x100000000

# Convert timestamp from NTP format to our internal format

EPOCH_YEAR = utime.gmtime(0)[0]
if EPOCH_YEAR == 2000:
# (date(2000, 1, 1) - date(1900, 1, 1)).days * 24*60*60
Expand Down
Loading
Loading