Merge pull request #48 from jepler/leap-second-1972
Fix 1972 leap seconds, test with leapseconddata
jepler authored Oct 8, 2022
2 parents 48026fa + df39d0e commit ed907d5
Showing 13 changed files with 139 additions and 115 deletions.
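The new test (src/wwvb/testls.py below) cross-checks the library's leap-second handling against the leapseconddata package instead of a hard-coded list of dates. As a minimal sketch of the calls that test relies on (not part of this commit; the window start is illustrative), the known leap seconds in the covered range can be listed like this, skipping the first table entry just as testls.py does:

import datetime

import leapseconddata

ls = leapseconddata.LeapSecondData.from_standard_source()
assert ls.valid_until is not None  # the source table carries an expiry date

window_start = datetime.datetime(1972, 1, 1, tzinfo=datetime.timezone.utc)
for entry in ls.leap_seconds[1:]:  # skip the first entry, as testls.py does
    if window_start <= entry.start < ls.valid_until:
        print(entry.start.strftime("%Y-%m-%d %H:%M:%S"))
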
11 changes: 6 additions & 5 deletions .gitignore
@@ -2,13 +2,14 @@
#
# SPDX-License-Identifier: CC0-1.0

/build
*,cover
*.egg-info
/.coverage*
/.reuse
/build
/coverage.xml
/dist
*.egg-info
/finals2000A.all.csv
/htmlcov
__pycache__
*,cover
/.reuse
/src/wwvb/__version__.py
__pycache__
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -26,7 +26,7 @@ repos:
entry: pylint
language: python
types: [python]
additional_dependencies: [beautifulsoup4, "pylint==2.13.5", requests, adafruit-circuitpython-datetime, click, python-dateutil]
additional_dependencies: [beautifulsoup4, "pylint==2.13.5", requests, adafruit-circuitpython-datetime, click, python-dateutil, leapseconddata]
- repo: https://github.com/pycqa/isort
rev: 5.10.1
hooks:
1 change: 1 addition & 0 deletions pyproject.toml
@@ -9,6 +9,7 @@ requires = [
"requests",
"platformdirs",
"python-dateutil",
"leapseconddata",
"setuptools>=45",
"setuptools_scm[toml]>=6.0",
"tzdata",
1 change: 1 addition & 0 deletions requirements-dev.txt
@@ -7,6 +7,7 @@ build
click
coverage
mypy; implementation_name=="cpython"
leapseconddata
platformdirs
pre-commit
python-dateutil
3 changes: 2 additions & 1 deletion setup.cfg
@@ -31,9 +31,10 @@ install_requires =
adafruit-circuitpython-datetime
beautifulsoup4
click
leapseconddata
platformdirs
requests
python-dateutil
requests

[options.entry_points]
console_scripts =
2 changes: 1 addition & 1 deletion src/wwvb/__init__.py
@@ -836,7 +836,7 @@ def to_both_string(self, charset: List[str]) -> str:
"default": ["0", "1", "2"],
"duration": ["2", "5", "8"],
"cradek": ["0", "1", "-"],
"bar": ["▟█", "▄█", "▄▟"],
"bar": ["🬍🬎", "🬋🬎", "🬋🬍"],
"sextant": ["🬍🬎", "🬋🬎", "🬋🬍", "🬩🬹", "🬋🬹", "🬋🬩"],
}

3 changes: 2 additions & 1 deletion src/wwvb/dut1table.py
@@ -19,8 +19,9 @@ def main() -> None:
for key, it in groupby(DUT1_OFFSETS):
dut1_ms = (ord(key) - ord("k")) / 10.0
count = len(list(it))
end = date + timedelta(days=count - 1)
dut1_next = wwvb.get_dut1(date + timedelta(days=count), warn_outdated=False)
ls = " LS" if dut1_ms * dut1_next < 0 else ""
ls = f" LS on {end:%F} 23:59:60 UTC" if dut1_ms * dut1_next < 0 else ""
print(f"{date:%F} {dut1_ms: 3.1f} {count:4d}{ls}")
date += timedelta(days=count)
print(date)
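For reference, the letter string that dut1table.py walks encodes one character per day, with "k" standing for DUT1 = 0.0 s and each step of one letter worth 0.1 s. A short sketch of the same decoding loop (not part of the commit; variable names here are illustrative, and only names exported by wwvb.iersdata are assumed):

import datetime
from itertools import groupby

from wwvb.iersdata import DUT1_DATA_START, DUT1_OFFSETS

date = DUT1_DATA_START
for key, run in groupby(DUT1_OFFSETS):
    dut1 = (ord(key) - ord("k")) / 10.0  # 'k' encodes 0.0 s; one letter step = 0.1 s
    count = len(list(run))               # consecutive days holding this DUT1 value
    print(f"{date:%Y-%m-%d} DUT1 {dut1:+.1f} s for {count:4d} days")
    date += datetime.timedelta(days=count)
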
9 changes: 8 additions & 1 deletion src/wwvb/iersdata.py
@@ -6,11 +6,12 @@
#
# SPDX-License-Identifier: GPL-3.0-only

import datetime
import os

import platformdirs

__all__ = ["DUT1_DATA_START", "DUT1_OFFSETS"]
__all__ = ["DUT1_DATA_START", "DUT1_OFFSETS", "start", "span", "end"]
from .iersdata_dist import DUT1_DATA_START, DUT1_OFFSETS

for location in [
@@ -22,3 +23,9 @@
with open(filename, encoding="utf-8") as f:
exec(f.read(), globals(), globals()) # pylint: disable=exec-used
break

start = datetime.datetime.combine(DUT1_DATA_START, datetime.time()).replace(
tzinfo=datetime.timezone.utc
)
span = datetime.timedelta(days=len(DUT1_OFFSETS))
end = start + span
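The three new module-level values express the bundled DUT1 table's coverage as timezone-aware datetimes, which is what the rewritten test below clips its comparison window to. A small sketch of how they might be inspected (assumes only the names added here):

from wwvb import iersdata

print("DUT1 data from", iersdata.start, "to", iersdata.end)
print("covering", iersdata.span.days, "days")
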
55 changes: 28 additions & 27 deletions src/wwvb/iersdata_dist.py
@@ -7,32 +7,33 @@
# pylint: disable=invalid-name
import datetime
__all__ = ['DUT1_DATA_START', 'DUT1_OFFSETS']
DUT1_DATA_START = datetime.date(1972, 6, 1)
DUT1_DATA_START = datetime.date(1972, 1, 1)
d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s = tuple('defghijklmnopqrs')
DUT1_OFFSETS = str( # 19720601
i*30+s*203+r*31+q*29+p*28+o*30+n*36+m*40+l*39+k*33+j*31+i*31 # 19731231
+h*18+r*19+q*38+p*32+o*31+n*33+m*48+l*45+k*37+j*33+i*34+h*15 # 19750122
+r*22+q*34+p*33+o*34+n*37+m*49+l*45+k*36+j*32+i*36+h*7+r*28 # 19760301
+q*33+p*32+o*30+n*33+m*42+l*42+k*34+j*29+i*33+h*30+r*6+q*36 # 19770317
+p*34+o*31+n*32+m*42+l*51+k*37+j*32+i*33+h*31+q*32+p*29+o*29 # 19780430
+n*30+m*32+l*47+k*47+j*36+i*33+h*32+g*18+q*16+p*35+o*33+n*32 # 19790531
+m*35+l*45+k*51+j*39+i*39+h*38+g*2+q*40+p*39+o*38+n*43+m*57 # 19800923
+l*50+k*39+j*42+i*41+h*43+g*37+f*39+e*39+o*19+n*62+m*43+l*45 # 19820202
+k*48+j*44+i*39+h*44+g*21+q*44+p*48+o*43+n*41+m*36+l*34+k*34 # 19830514
+j*38+i*47+s+r*64+q*50+p*42+o*56+n*57+m*52+l*100+k*61+j*62 # 19850302
+i*66+h*52+g*67+f+p*103+o*56+n*68+m*69+l*107+k*82+j*72+i*67 # 19870518
+h*63+g*113+f*63+e*51+o*11+n*60+m*59+l*121+k*71+j*71+i*67 # 19890531
+h*57+g*93+f*61+e*48+d*12+n*41+m*44+l*46+k*61+j*66+i*47+h*45 # 19901231
+g*15+q*32+p*44+o*41+n*48+m*74+l*49+k*45+j*44+i*40+h*37+g*38 # 19920625
+f*50+e*5+o*60+n*49+m*40+l*40+k*38+j*38+i*36+h*39+g*25+q*31 # 19930919
+p*50+o*41+n*41+m*43+l*41+k*39+j*40+i*39+s*24+r*57+q*43+p*41 # 19950120
+o*39+n*38+m*35+l*37+k*43+j*69+i*44+h*42+g*37+q*4+p*51+o*45 # 19960523
+n*44+m*69+l*70+k*50+j*54+i*53+h*40+g*49+f*18+p*59+o*53+n*52 # 19980206
+m*57+l*48+k*53+j*127+i*70+h*30+r*62+q*79+p*152+o*82+n*106 # 20001026
+m*184+l*125+k*217+j*133+i*252+h*161+g*392+f*322+e*290+n*116 # 20060927
+m*154+l*85+k*83+j*91+i*168+h*105+g*147+f*105+e*42+o*70+n*91 # 20091111
+m*154+l*119+k*84+j*217+i*126+h*176+g*97+f*91+e*52+o*116 # 20130130
+n*98+m*70+l*133+k*91+j*91+i*77+h*140+g*91+f*84+e*70+d*34 # 20150910
+n*72+m*76+l*66+k*53+j*56+i*105+h*77+g*45+q*25+p*63+o*91 # 20171129
+n*154+m*105+l*190+k*118+j*105+i*807+j*376+k*351+l*86 # 20231007
DUT1_OFFSETS = str( # 19720101
i*182+s*123+k*30+i*31+s*19+r*31+q*29+p*28+o*30+n*36+m*40 # 19730909
+l*39+k*33+j*31+i*31+h*18+r*19+q*38+p*32+o*31+n*33+m*48+l*45 # 19741010
+k*37+j*33+i*34+h*15+r*22+q*34+p*33+o*34+n*37+m*49+l*45+k*36 # 19751118
+j*32+i*36+h*7+r*28+q*33+p*32+o*30+n*33+m*42+l*42+k*34+j*29 # 19761201
+i*33+h*30+r*6+q*36+p*34+o*31+n*32+m*42+l*51+k*37+j*32+i*33 # 19771231
+h*31+q*32+p*29+o*29+n*30+m*32+l*47+k*47+j*36+i*33+h*32+g*18 # 19790116
+q*16+p*35+o*33+n*32+m*35+l*45+k*51+j*39+i*39+h*38+g*2+q*40 # 19800319
+p*39+o*38+n*43+m*57+l*50+k*39+j*42+i*41+h*43+g*37+f*39+e*39 # 19810719
+o*19+n*62+m*43+l*45+k*48+j*44+i*39+h*44+g*21+q*44+p*48+o*43 # 19821223
+n*41+m*36+l*34+k*34+j*38+i*47+s+r*64+q*50+p*42+o*56+n*57 # 19840517
+m*52+l*100+k*61+j*62+i*66+h*52+g*67+f+p*103+o*56+n*68+m*69 # 19860807
+l*107+k*82+j*72+i*67+h*63+g*113+f*63+e*51+o*11+n*60+m*59 # 19880907
+l*121+k*71+j*71+i*67+h*57+g*93+f*61+e*48+d*12+n*41+m*44 # 19900511
+l*46+k*61+j*66+i*47+h*45+g*15+q*32+p*44+o*41+n*48+m*74+l*49 # 19911129
+k*45+j*44+i*40+h*37+g*38+f*50+e*5+o*60+n*49+m*40+l*40+k*38 # 19930322
+j*38+i*36+h*39+g*25+q*31+p*50+o*41+n*41+m*43+l*41+k*39+j*40 # 19940630
+i*39+s*24+r*57+q*43+p*41+o*39+n*38+m*35+l*37+k*43+j*69+i*44 # 19951124
+h*42+g*37+q*4+p*51+o*45+n*44+m*69+l*70+k*50+j*54+i*53+h*40 # 19970612
+g*49+f*18+p*59+o*53+n*52+m*57+l*48+k*53+j*127+i*70+h*30 # 19990303
+r*62+q*79+p*152+o*82+n*106+m*184+l*125+k*217+j*133+i*252 # 20030402
+h*161+g*392+f*322+e*290+n*116+m*154+l*85+k*83+j*91+i*168 # 20080312
+h*105+g*147+f*105+e*42+o*70+n*91+m*154+l*119+k*84+j*217 # 20110511
+i*126+h*176+g*97+f*91+e*52+o*116+n*98+m*70+l*133+k*91+j*91 # 20140507
+i*77+h*140+g*91+f*84+e*70+d*34+n*72+m*76+l*66+k*53+j*56 # 20160831
+i*105+h*77+g*45+q*25+p*63+o*91+n*154+m*105+l*190+k*118 # 20190501
+j*105+i*807+j*376+k*346+l*98 # 20231014
)
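The regenerated table now starts at 1972-01-01 rather than 1972-06-01, so it covers both 1972 leap seconds; the trailing per-line comments are cumulative end dates. A sanity-check sketch (an assumption, not part of the commit: the one-day-per-character coverage should run out at or next to the final date comment, 2023-10-14):

import datetime

from wwvb.iersdata_dist import DUT1_DATA_START, DUT1_OFFSETS

coverage_end = DUT1_DATA_START + datetime.timedelta(days=len(DUT1_OFFSETS))
print(coverage_end)  # expected to land at or next to the table's final date comment
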
11 changes: 6 additions & 5 deletions src/wwvb/testcli.py
@@ -28,7 +28,7 @@ def assertProgramOutput(self, expected: str, *args: str) -> None:
actual = subprocess.check_output(
args, stdin=subprocess.DEVNULL, encoding="utf-8", env=env
)
self.assertMultiLineEqual(expected, actual, "args={args}")
self.assertMultiLineEqual(expected, actual, f"args={args}")

def assertProgramOutputStarts(self, expected: str, *args: str) -> None:
"""Check the output from invoking a program matches the expected"""
@@ -37,7 +37,7 @@ def assertProgramOutputStarts(self, expected: str, *args: str) -> None:
actual = subprocess.check_output(
args, stdin=subprocess.DEVNULL, encoding="utf-8", env=env
)
self.assertMultiLineEqual(expected, actual[: len(expected)], "args={args}")
self.assertMultiLineEqual(expected, actual[: len(expected)], f"args={args}")

def assertModuleOutput(self, expected: str, *args: str) -> None:
"""Check the output from invoking a `python -m modulename` program matches the expected"""
@@ -143,9 +143,10 @@ def test_dut1table(self) -> None:
"""Test the dut1table program"""
self.assertModuleOutputStarts(
"""\
1972-06-01 -0.2 30 LS
1972-07-01 0.8 203
1973-01-20 0.7 31
1972-01-01 -0.2 182 LS on 1972-06-30 23:59:60 UTC
1972-07-01 0.8 123
1972-11-01 0.0 30
1972-12-01 -0.2 31 LS on 1972-12-31 23:59:60 UTC
""",
"wwvb.dut1table",
)
71 changes: 28 additions & 43 deletions src/wwvb/testls.py
@@ -8,72 +8,57 @@
import datetime
import unittest

import leapseconddata

import wwvb

from . import iersdata

ONE_DAY = datetime.timedelta(days=1)


def end_of_month(d: datetime.date) -> datetime.date:
"""Return the end of the month containing the day 'd'"""
def next_month(d: datetime.date) -> datetime.date:
"""Return the start of the next month after the day 'd'"""
d = d.replace(day=28)
while True:
d0 = d
d = d + ONE_DAY
if d.month != d0.month:
return d0
return d


class TestLeapSecond(unittest.TestCase):
"""Leap second tests"""

maxDiff = 9999

def test_leap(self) -> None:
"""Tests that the expected leap seconds all occur."""
d = iersdata.DUT1_DATA_START
e = datetime.date(2022, 1, 1)
ls = leapseconddata.LeapSecondData.from_standard_source()
assert ls.valid_until is not None

d = iersdata.start
e = min(iersdata.end, ls.valid_until)
bench = [ts.start for ts in ls.leap_seconds[1:]]
bench = [ts for ts in bench if d <= ts < e]
leap = []
while d < e:
eom = end_of_month(d)
nm = eom + ONE_DAY
if wwvb.isls(d):
month_ends_dut1 = wwvb.get_dut1(eom)
month_starts_dut1 = wwvb.get_dut1(nm)
nm = next_month(d)
eom = nm - ONE_DAY
month_ends_dut1 = wwvb.get_dut1(eom)
month_starts_dut1 = wwvb.get_dut1(nm)
our_is_ls = month_ends_dut1 * month_starts_dut1 < 0
if wwvb.isls(eom):
assert our_is_ls
self.assertLess(month_ends_dut1, 0)
self.assertGreater(month_starts_dut1, 0)
leap.append(d.strftime("%b %Y"))
d = nm
self.assertEqual(
leap,
[
"Jun 1972",
"Dec 1973",
"Dec 1974",
"Dec 1975",
"Dec 1976",
"Dec 1977",
"Dec 1978",
"Dec 1979",
"Jun 1981",
"Jun 1982",
"Jun 1983",
"Jun 1985",
"Dec 1987",
"Dec 1989",
"Dec 1990",
"Jun 1992",
"Jun 1993",
"Jun 1994",
"Dec 1995",
"Jun 1997",
"Dec 1998",
"Dec 2005",
"Dec 2008",
"Jun 2012",
"Jun 2015",
"Dec 2016",
],
)
leap.append(nm)
else:
assert not our_is_ls
d = datetime.datetime.combine(nm, datetime.time()).replace(
tzinfo=datetime.timezone.utc
)
self.assertEqual(leap, bench)


if __name__ == "__main__": # pragma: no cover
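The rewritten test leans on one property of the encoded DUT1 data: a positive leap second at a month boundary shows up as DUT1 ending the month negative and starting the next month positive. A minimal sketch of that check for one known leap second (December 2016), assuming the installed DUT1 data covers that range:

import datetime

import wwvb

eom = datetime.datetime(2016, 12, 31, tzinfo=datetime.timezone.utc)  # known leap-second day
nm = datetime.datetime(2017, 1, 1, tzinfo=datetime.timezone.utc)
assert wwvb.isls(eom)                              # the library flags the leap-second day
assert wwvb.get_dut1(eom) < 0 < wwvb.get_dut1(nm)  # DUT1 jumps across zero at the boundary
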
63 changes: 44 additions & 19 deletions src/wwvb/updateiers.py
@@ -33,34 +33,59 @@
except (ImportError, NameError) as e:
pass
IERS_URL = "https://datacenter.iers.org/data/csv/finals2000A.all.csv"
if os.path.exists("finals2000A.all.csv"):
IERS_URL = "finals2000A.all.csv"
print("using local", IERS_URL)
NIST_URL = "https://www.nist.gov/pml/time-and-frequency-division/atomic-standards/leap-second-and-ut1-utc-information"


def _get_text(url: str) -> str:
"""Get a local file or a http/https URL"""
if url.startswith("http"):
with requests.get(url) as response:
return response.text
else:
return open(url, encoding="utf-8").read()


def update_iersdata( # pylint: disable=too-many-locals, too-many-branches, too-many-statements
target_file: str,
) -> None:
"""Update iersdata.py"""

offsets: List[int] = []
with requests.get(IERS_URL) as iers_data:
for r in csv.DictReader(io.StringIO(iers_data.text), delimiter=";"):
jd = float(r["MJD"])
offs_str = r["UT1-UTC"]
if not offs_str:
break
offs = int(round(float(offs_str) * 10))
if not offsets:
table_start = datetime.date(1858, 11, 17) + datetime.timedelta(jd)
if table_start > datetime.date(1972, 6, 1):
when = datetime.date(1972, 6, 1)
while when < datetime.date(1972, 7, 1):
offsets.append(-2)
when = when + datetime.timedelta(days=1)
while when < table_start:
offsets.append(8)
when = when + datetime.timedelta(days=1)
table_start = datetime.date(1972, 6, 1)
offsets.append(offs)
iersdata_text = _get_text(IERS_URL)
for r in csv.DictReader(io.StringIO(iersdata_text), delimiter=";"):
jd = float(r["MJD"])
offs_str = r["UT1-UTC"]
if not offs_str:
break
offs = int(round(float(offs_str) * 10))
if not offsets:
table_start = datetime.date(1858, 11, 17) + datetime.timedelta(jd)

when = min(datetime.date(1972, 1, 1), table_start)
# iers bulletin A doesn't cover 1972, so fake data for those
# leap seconds
while when < datetime.date(1972, 7, 1):
offsets.append(-2)
when = when + datetime.timedelta(days=1)
while when < datetime.date(1972, 11, 1):
offsets.append(8)
when = when + datetime.timedelta(days=1)
while when < datetime.date(1972, 12, 1):
offsets.append(0)
when = when + datetime.timedelta(days=1)
while when < datetime.date(1973, 1, 1):
offsets.append(-2)
when = when + datetime.timedelta(days=1)
while when < table_start:
offsets.append(8)
when = when + datetime.timedelta(days=1)

table_start = min(datetime.date(1972, 1, 1), table_start)

offsets.append(offs)

wwvb_text = requests.get(NIST_URL).text
wwvb_data = bs4.BeautifulSoup(wwvb_text, features="html.parser")
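updateiers.py stores DUT1 in tenths of a second, so the 1972 values synthesized above work out to -2 for January through June and December, +8 for July through October and after the year-end leap second, and 0 for November. A hypothetical check of that encoding (the encode helper is illustrative, mirroring the round-to-tenths line in the code above):

def encode(dut1_seconds: float) -> int:
    """Encode DUT1 the way updateiers.py does: tenths of a second, rounded."""
    return int(round(dut1_seconds * 10))

assert encode(-0.2) == -2  # January-June and December 1972
assert encode(0.8) == 8    # July-October 1972 and after the 1972-12-31 leap second
assert encode(0.0) == 0    # November 1972
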
(diff for 1 remaining changed file not shown)
