3 changes: 3 additions & 0 deletions .gitignore
@@ -0,0 +1,3 @@

/src/dabmsc.egg-info
/build
18 changes: 9 additions & 9 deletions bin/decode
@@ -20,7 +20,7 @@ parser.add_argument('-f', dest='output', help='outfile file directory')

args = parser.parse_args()
if args.filename:
print 'decoding from', args.filename
print(('decoding from', args.filename))
f = open(args.filename, 'rb')
else:
f = sys.stdin
@@ -54,17 +54,17 @@ logger.debug("decoding function: %s", f);

for o in f:
if isinstance(o, Packet):
print 'packet:', o
print('packet:', o)
elif isinstance(o, Datagroup):
print 'dataroup:', o
print('dataroup:', o)
elif isinstance(o, MotObject):
print "=" * 48
print '{name} {type} ({size} bytes)'.format(name=o.get_name(), type=o.get_type(), size=len(o.get_body()))
print "=" * 48
print 'parameters:'
print("=" * 48)
print('{name} {type} ({size} bytes)'.format(name=o.get_name(), type=o.get_type(), size=len(o.get_body())))
print("=" * 48)
print('parameters:')
for p in o.get_parameters():
print '\t', repr(p)
print
print('\t', repr(p))
print()
if args.output:
import base64
file_output = open(os.path.join(args.output, base64.urlsafe_b64encode(o.get_name())), 'wb')
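Reviewer note on the print() conversion above: wrapping the old comma-separated arguments in an extra pair of parentheses makes Python 3 print a tuple rather than the space-joined text Python 2 produced, so the first converted line changes the tool's output format (the other prints in this file pass separate arguments and are fine). A minimal illustration of the difference, with a hypothetical filename value; separately, if o.get_name() returns a str under Python 3, the base64.urlsafe_b64encode call further down will raise TypeError because it expects bytes, so that line may need an .encode() (noted as an observation only):

# Hypothetical demonstration, not part of the diff:
filename = 'example.dat'

print(('decoding from', filename))    # ('decoding from', 'example.dat')  <- tuple repr
print('decoding from', filename)      # decoding from example.dat         <- matches the Python 2 output
print(f'decoding from {filename}')    # f-string alternative (Python 3.6+)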
5 changes: 3 additions & 2 deletions setup.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python

from distutils.core import setup
from setuptools import setup

setup(name='dabmsc',
version='1.0.1',
@@ -11,5 +11,6 @@
download_url='https://github.com/GlobalRadio/python-dabmsc/tarball/1.0.1',
packages=['msc', 'msc.datagroups', 'msc.packets'],
package_dir = {'' : 'src'},
keywords = ['dab', 'msc', 'radio']
keywords = ['dab', 'msc', 'radio'],
install_requires = ['bitarray']
)
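The switch from distutils.core to setuptools is what makes the new install_requires line effective: distutils ignores install_requires (it only warns about the unknown option), while setuptools records it so that pip pulls in bitarray automatically. A minimal sketch of the resulting setup.py, showing only the fields visible in these hunks and assuming the folded lines stay unchanged:

from setuptools import setup

setup(name='dabmsc',
      version='1.0.1',
      # ... fields hidden by the diff fold ...
      download_url='https://github.com/GlobalRadio/python-dabmsc/tarball/1.0.1',
      packages=['msc', 'msc.datagroups', 'msc.packets'],
      package_dir={'': 'src'},
      keywords=['dab', 'msc', 'radio'],
      install_requires=['bitarray'])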
8 changes: 3 additions & 5 deletions src/msc/__init__.py
@@ -4,9 +4,7 @@

logger = logging.getLogger('msc')

# See Annex E in EN 300 401
crc16_11021 = crcmod.mkCrcFun(0x11021, 0x0, False, 0xFFFF)
crcfun = crcmod.predefined.mkPredefinedCrcFun('x25')
crcfun = crcmod.mkCrcFun(0x11021, 0x0, False, 0xFFFF)
def calculate_crc(data):
return crcfun(data)

@@ -17,7 +15,7 @@ def hex_to_bitarray(hex):
return b

def int_to_bitarray(i, n):
return bitarray(('{0:0%db}' % n).format(i))
return bitarray(('{0:0%db}' % n).format(int(i)))

def bitarray_to_int(bits):
return int(bits.to01(), 2)
@@ -63,7 +61,7 @@ def __init__(self):

def next(self, name=None):
# first check the cache
if name is not None and self.cache.has_key(name):
if name is not None and name in self.cache:
return self.cache.get(name)

# if we've run out then start recycling from the head
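Reverting to crcmod.mkCrcFun(0x11021, 0x0, False, 0xFFFF) restores the CRC described in the comment that was dropped here (EN 300 401 Annex E: generator x^16 + x^12 + x^5 + 1, shift register preset to all ones, result complemented); the predefined 'x25' function is a reflected variant and does not produce the same values. A sketch that cross-checks the crcmod call against a bit-by-bit reference implementation, assuming crcmod's documented convention that initCrc is supplied already XORed with xorOut (which is why init 0x0 with xorOut 0xFFFF corresponds to a register preset of all ones):

import crcmod

crcfun = crcmod.mkCrcFun(0x11021, 0x0, False, 0xFFFF)

def crc_en300401(data):
    """Bit-by-bit reference: register preset to all ones, result complemented."""
    reg = 0xFFFF
    for byte in data:
        for bit in range(7, -1, -1):
            inbit = (byte >> bit) & 1
            msb = (reg >> 15) & 1
            reg = (reg << 1) & 0xFFFF
            if msb ^ inbit:
                reg ^= 0x1021          # x^12 + x^5 + 1 (the x^16 term is the shifted-out bit)
    return reg ^ 0xFFFF                # ones complement of the register contents

payload = b'123456789'
assert crcfun(payload) == crc_en300401(payload)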
142 changes: 64 additions & 78 deletions src/msc/datagroups/__init__.py
@@ -1,4 +1,4 @@
from msc import bitarray_to_hex, int_to_bitarray, crc16_11021, InvalidCrcError, generate_transport_id
from msc import bitarray_to_hex, int_to_bitarray, calculate_crc, InvalidCrcError, generate_transport_id
from mot import DirectoryEncoder, SortedHeaderInformation
from bitarray import bitarray
import logging
@@ -94,7 +94,7 @@ def _segment(data, strategy):
bits += int_to_bitarray(0, 3) # (0-2): Repetition Count remaining (0 = only broadcast)
bits += int_to_bitarray(len(segment_data), 13) # (3-16): SegmentSize

segments.append(bits.tobytes() + segment_data)
segments.append((bits.tobytes()) + segment_data)

i += segment_size

@@ -127,7 +127,7 @@ def encode_headermode(objects, segmenting_strategy=None):
# insert the core parameters into the header
bits = bitarray()
bits += int_to_bitarray(len(body_data) if body_data else 0, 28) # (0-27): BodySize in bytes
bits += int_to_bitarray(extension_bits.length() / 8 + 7, 13) # (28-40): HeaderSize in bytes (core=7 + extension)
bits += int_to_bitarray(len(extension_bits) / 8 + 7, 13) # (28-40): HeaderSize in bytes (core=7 + extension)
bits += int_to_bitarray(object.get_type().type, 6) # (41-46): ContentType
bits += int_to_bitarray(object.get_type().subtype, 9) # (47-55): ContentSubType
bits += extension_bits # (56-n): Header extension data
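The HeaderSize expressions in this file still use true division, so under Python 3 len(extension_bits) / 8 + 7 evaluates to a float; the int(i) coercion added to int_to_bitarray in src/msc/__init__.py is what keeps the '{0:0%db}' format string from raising "Unknown format code 'b' for object of type 'float'". A small sketch of the pitfall (the 56-bit extension is illustrative); floor division would avoid the coercion entirely:

from bitarray import bitarray

extension_bits = bitarray(56)               # e.g. a 7-byte header extension

header_size = len(extension_bits) / 8 + 7   # Python 3 true division -> 14.0 (float)
assert header_size == 14.0

header_size = len(extension_bits) // 8 + 7  # floor division -> 14 (int), no int() needed
assert header_size == 14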
@@ -168,7 +168,7 @@ def encode_directorymode(objects, directory_parameters=None, segmenting_strategy

# add the core parameters into the header
entries += int_to_bitarray(len(object.get_body()), 28) # (0-27): BodySize in bytes
entries += int_to_bitarray(extension_bits.length() / 8 + 7, 13) # (28-40): HeaderSize in bytes (core=7 + extension)
entries += int_to_bitarray(len(extension_bits) / 8 + 7, 13) # (28-40): HeaderSize in bytes (core=7 + extension)
entries += int_to_bitarray(object.get_type().type, 6) # (41-46): ContentType
entries += int_to_bitarray(object.get_type().subtype, 9) # (47-55): ContentSubType
entries += extension_bits # (56-n): Header extension data
@@ -183,7 +183,7 @@ def encode_directorymode(objects, directory_parameters=None, segmenting_strategy
bits = bitarray()
bits += bitarray('0') # (0): CompressionFlag: This bit shall be set to 0
bits += bitarray('0') # (1): RFU
bits += int_to_bitarray(len(entries.tobytes()), 30) # (2-31): DirectorySize: total size of the MOT directory in bytes
bits += int_to_bitarray(len(entries.tobytes()) + 13 + len(directory_params.tobytes()), 30) # (2-31): DirectorySize: total size of the MOT directory in bytes, including the 13 header bytes and length of the directory parameter bytes
bits += int_to_bitarray(len(objects), 16) # (32-47): NumberOfObjects: Total number of objects described by the directory
bits += int_to_bitarray(0, 24) # (48-71): DataCarouselPeriod: Max time in tenths of seconds for the data carousel to complete a cycle. Value of zero for undefined
bits += bitarray('000') # (72-74): RFU
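The corrected DirectorySize now counts the fixed directory header and the directory extension as well as the entries, matching the MOT definition of the field as the total directory size in bytes. The 13 in the new expression is the size of the fixed header fields; a worked check, assuming the folded lines carry the remaining SegmentSize (13 bits) and DirectoryExtensionLength (16 bits) fields of the MOT directory header:

# CompressionFlag + RFU + DirectorySize + NumberOfObjects + DataCarouselPeriod
# + RFU + SegmentSize + DirectoryExtensionLength
fixed_header_bits = 1 + 1 + 30 + 16 + 24 + 3 + 13 + 16
assert fixed_header_bits // 8 == 13        # the "+ 13" in the new DirectorySize expression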
@@ -197,21 +197,40 @@ def encode_directorymode(objects, directory_parameters=None, segmenting_strategy
bits += entries

# segment and add directory datagroups with a new transport ID
continuity_directory = 0
directory_transport_id = generate_transport_id()
segments = _segment(bits.tobytes(), segmenting_strategy)
for i, segment in enumerate(segments):
header_group = Datagroup(directory_transport_id, DIRECTORY_UNCOMPRESSED, segment, i, i%16, last=True if i == len(segments) - 1 else False)
header_group = Datagroup(directory_transport_id, DIRECTORY_UNCOMPRESSED, segment, i, continuity_directory, last=True if i == len(segments) - 1 else False)
tmp = bitarray()
tmp.frombytes(header_group.tobytes())
tmp.frombytes(header_group.tobytes())
datagroups.append(header_group)
continuity_directory = (continuity_directory + 1) % 16

# add body datagroups
continuity_body = 0
for object in objects:
segments = _segment(object.get_body(), segmenting_strategy)
for i, segment in enumerate(segments):
body_group = Datagroup(object.get_transport_id(), BODY, segment, i, i%16, last=True if i == len(segments) - 1 else False)
body_group = Datagroup(object.get_transport_id(), BODY, segment, i, continuity_body, last=True if i == len(segments) - 1 else False)
datagroups.append(body_group)
continuity_body = (continuity_body + 1) % 16
# add empty body datagroups to assure continuity
if continuity_body != 0:
# segment header
bits = bitarray()
bits += int_to_bitarray(0, 3) # (0-2): Repetition Count remaining (0 = only broadcast)
bits += int_to_bitarray(0, 13) # (3-16): SegmentSize
dummysegment = bits.tobytes()
body_group = Datagroup(generate_transport_id(), BODY, dummysegment, 0, continuity_body, last=True)
datagroups.append(body_group)
continuity_body = (continuity_body + 1) % 16
if continuity_body != 0:
continuity_body = 15
body_group = Datagroup(generate_transport_id(), BODY, dummysegment, 0, continuity_body, last=True)
datagroups.append(body_group)

return datagroups

import select
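Reviewer reading of the continuity changes in the hunk above: the continuity index is a 4-bit counter, so the directory and body datagroups now keep separate counters that advance once per datagroup instead of reusing the per-object segment index, and zero-length dummy body segments are appended until the body counter returns to 0 so the next carousel cycle starts cleanly. A distilled sketch of the padding rule (not library API; the function name is hypothetical):

def dummy_continuity_indices(continuity_body):
    """Continuity indices of the dummy body datagroups appended by the hunk above."""
    padding = []
    c = continuity_body % 16
    if c != 0:
        padding.append(c)              # one dummy at the current index
        c = (c + 1) % 16
        if c != 0:
            padding.append(15)         # final dummy pinned to 15; next cycle starts at 0
    return padding

assert dummy_continuity_indices(0) == []
assert dummy_continuity_indices(15) == [15]
assert dummy_continuity_indices(5) == [5, 15]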
@@ -233,7 +252,7 @@ def decode_datagroups(data, error_callback=None, check_crc=True, resync=True):

if isinstance(data, bitarray):
i = 0
while i < data.length():
while i < len(data):
datagroup = Datagroup.frombits(data, i=i, check_crc=check_crc)
yield datagroup
i += (datagroup.size * 8)
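The len()-for-length() substitutions in this file are more than style: len() is guaranteed by the sequence protocol and works across bitarray releases, while the legacy length() method has been deprecated and newer releases of the library no longer provide it. For example:

from bitarray import bitarray

buf = bitarray('10110')
assert len(buf) == 5       # portable spelling
# buf.length() is the legacy method that current bitarray releases deprecate or omit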
@@ -248,27 +267,27 @@ def decode_datagroups(data, error_callback=None, check_crc=True, resync=True):
except:
reading = False
logger.exception("error")
if not buf.length():
if not len(buf):
logger.debug('buffer is at zero length')
return
i = 0
#logger.debug('chunking buffer of length %d bytes', buf.length()/8)
length = buf.length()/8
#logger.debug('chunking buffer of length %d bytes', len(buf)/8)
length = len(buf)/8
if length < 9:
continue
size = int(buf[59:72].to01(), 2)
if length < size:
#logger.debug('buffer still not at right size for datagroup size of %d bytes', size)
continue
while i < buf.length():
while i < len(buf):
try:
datagroup = Datagroup.frombits(buf, i=i, check_crc=check_crc)
yield datagroup
i = (datagroup.size * 8)
buf = buf[i:]
except IncompleteDatagroupError:
break
except InvalidCrcError, ice:
except (InvalidCrcError, ice):
if error_callback: error_callback(ice)
buf = buf[8:] # attempt to resync?
#i += 8
@@ -287,14 +306,14 @@ def decode_datagroups(data, error_callback=None, check_crc=True, resync=True):
buf.frombytes(p.data)

if p.last:
logger.debug('got packet %s - buffer now %d bytes', p, buf.length()/8)
logger.debug('got packet %s - buffer now %d bytes', p, len(buf)/8)
try:
datagroup = Datagroup.frombits(buf, i=i, check_crc=check_crc)
logger.debug('yielding datagroup: %s', datagroup)
yield datagroup
except IncompleteDatagroupError, ide:
except (IncompleteDatagroupError, ide):
if error_callback: error_callback(ide)
except InvalidCrcError, ice:
except (InvalidCrcError, ice):
if error_callback: error_callback(ice)
del buf
buf = bitarray()
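One conversion in the two decode_datagroups hunks above deserves a second look: the Python 2 form "except InvalidCrcError, ice:" has become "except (InvalidCrcError, ice):", which Python 3 parses as a tuple of exception classes. Since no name ice (or ide) is defined at that point, the except clause itself raises NameError the moment a matching exception propagates to it, and error_callback never sees the error. The Python 3 equivalent of the old binding syntax is "except InvalidCrcError as ice:". A minimal illustration with a stand-in exception class (not the module's own):

class InvalidCrcError(Exception):
    pass

def handle(raise_it, error_callback=print):
    try:
        if raise_it:
            raise InvalidCrcError('bad CRC')
    except InvalidCrcError as ice:     # Python 3 spelling of "except InvalidCrcError, ice:"
        error_callback(ice)

handle(True)    # prints: bad CRC
# With "except (InvalidCrcError, ice):" the except clause would raise NameError('ice')
# as soon as an InvalidCrcError actually reached it.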
@@ -341,24 +360,22 @@ def tobytes(self):
# datagroup header
bits += bitarray('0') # (0): ExtensionFlag - 0=no extension
bits += bitarray('1' if self.crc_enabled else '0') # (1): CrcFlag - true if there is a CRC at the end of the datagroup
bits += bitarray('0' if self.segment_index is None else '1') # (2): SegmentFlag - 1=segment header included
bits += bitarray('0' if self.transport_id is None else '1') # (3): UserAccessFlag - true
bits += bitarray('1') # (2): SegmentFlag - 1=segment header included
bits += bitarray('1') # (3): UserAccessFlag - true
bits += int_to_bitarray(self._type, 4) # (4-7): DataGroupType
bits += int_to_bitarray(self.continuity % 16, 4) # (8-11): ContinuityIndex
bits += int_to_bitarray(self.repetition, 4) # (12-15): RepetitionIndex - remaining = 0 (only this once)

# session header
# segment field
if self.segment_index is not None:
bits += bitarray('1' if self.last else '0') # (16): Last - true if the last segment
bits += int_to_bitarray(self.segment_index, 15) # (17-32): SegmentNumber
bits += bitarray('1' if self.last else '0') # (16): Last - true if the last segment
bits += int_to_bitarray(self.segment_index, 15) # (17-32): SegmentNumber

# user access field
if self.transport_id is not None:
bits += bitarray('000') # (33-35): RFA
bits += bitarray('1') # (36): TransportId - true to include Transport ID
bits += int_to_bitarray(2, 4) # (37-40): LengthIndicator - length of transport Id and End user address fields (will be 2 bytes as only transport ID defined)
bits += int_to_bitarray(self._transport_id, 16) # (41-56) transport ID
bits += bitarray('000') # (33-35): RFA
bits += bitarray('1') # (36): TransportId - true to include Transport ID
bits += int_to_bitarray(2, 4) # (37-40): LengthIndicator - length of transport Id and End user address fields (will be 2 bytes as only transport ID defined)
bits += int_to_bitarray(self._transport_id, 16) # (41-56) transport ID

# data field
tmp = bitarray()
@@ -367,72 +384,41 @@ def tobytes(self):

# CRC
crc = 0;
if self.crc_enabled: crc = crc16_11021(bits.tobytes())
if self.crc_enabled: crc = calculate_crc(bits.tobytes())
bits += int_to_bitarray(crc, 16)

return bits.tobytes()

@staticmethod
def frombits(bits, i=0, check_crc=True):
"""Parse a datagroup from a bitarray, with an optional offset"""

# use only the slice indicated by the offset
bits = bits[i:]

# check we have enough header first
ext_flag = bits[0]
crc_flag = bits[1]
seg_flag = bits[2]
uaf_flag = bits[3]
header_size = 16
if ext_flag:
header_size += 16
if seg_flag:
header_size += 16
if uaf_flag:
tid_present = bits[header_size+3]
uaf_sz = int(bits[header_size+4:header_size+8].to01(), 2)
header_size += 8+8*uaf_sz
else:
tid_present = False

min_size = header_size
if crc_flag:
min_size += 16
if bits.length() < min_size:
raise IncompleteDatagroupError

if (len(bits) - i) < ((9 + 2) * 8): raise IncompleteDatagroupError

# datagroup header
type = int(bits[4:8].to01(), 2)
continuity = int(bits[8:12].to01(), 2)
repetition = int(bits[12:16].to01(), 2)

# session header
# segment field
if seg_flag:
last = bits[16]
segment_index = int(bits[17:32].to01(), 2)
else:
last = False
segment_index = None

last = bits[16]
segment_index = int(bits[17:32].to01(), 2)

# user access field
if tid_present:
transport_id = int(bits[40:56].to01(), 2)
else:
transport_id = None

# extract data and compute CRC
if crc_flag:
hdr_plus_data = bits[:-16]
crc = int(bits[bits.length()-16:].to01(), 2)
if check_crc and crc != crc16_11021(hdr_plus_data.tobytes()):
raise InvalidCrcError(crc, crc_slice.tobytes())
else:
hdr_plus_data = bits
transport_id = int(bits[40:56].to01(), 2)

# data segment header
size = int(bits[59:72].to01(), 2) # get size to check we have a complete datagroup
if len(bits) < 72 + size * 8 + 16: raise IncompleteDatagroupError
data = bits[72 : 72 + (size*8)]
if check_crc:
crc = int(bits[72 + len(data) : 72 + len(data) + 16].to01(), 2)
calculated = calculate_crc(bits[:72+len(data)].tobytes())
if crc != calculated: raise InvalidCrcError(crc, bits[:72+len(data) + 16].tobytes())

datagroup = Datagroup(transport_id, type,
hdr_plus_data[header_size:].tobytes(), segment_index, continuity,
True, repetition, last)
datagroup = Datagroup(transport_id, type, data.tobytes(), segment_index, continuity, True, repetition, last)
logger.debug('parsed datagroup: %s', datagroup)

return datagroup
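With SegmentFlag and UserAccessFlag now always set in tobytes(), every datagroup carries the full fixed header, which is what lets the rewritten frombits() read the segment size at a fixed offset (bits 59 to 71) and use the simple minimum-length test of (9 + 2) * 8 bits. A worked check of that constant, using the field widths as written in the comments above:

datagroup_header = 2    # ExtensionFlag, CrcFlag, SegmentFlag, UserAccessFlag, type, continuity, repetition
segment_field    = 2    # Last flag + 15-bit SegmentNumber
user_access      = 3    # RFA, TransportId flag, LengthIndicator + 16-bit transport ID
segment_size_hdr = 2    # repetition count + 13-bit SegmentSize at the start of the data field
crc              = 2    # CRC-16 appended when CrcFlag is set

assert datagroup_header + segment_field + user_access + segment_size_hdr + crc == 9 + 2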
Expand All @@ -443,7 +429,7 @@ def __str__(self):
elif self._type == 6: type_description = 'MOT Directory (uncompressed)'
elif self._type == 7: type_description = 'MOT Directory (compressed)'
else: type_description = 'unknown'
return '[segment=%d bytes], type=%d [%s], transportid=%s, segmentindex=%s, continuity=%d, last=%s' % (len(self._data), self._type, type_description, self._transport_id, self.segment_index, self.continuity, self.last)
return '[segment=%d bytes], type=%d [%s], transportid=%d, segmentindex=%d, continuity=%d, last=%s' % (len(self._data), self._type, type_description, self._transport_id, self.segment_index, self.continuity, self.last)

def __repr__(self):
return '<DataGroup: %s>' % str(self)
@@ -489,5 +475,5 @@ def regenerate(self):
def __iter__(self):
return self.iterator

def next(self):
return self.iterator.next()
def __next__(self):
return next(self.iterator)
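The final hunk renames next() to __next__(), which is the method Python 3's iteration machinery actually looks up; the body delegates through the next() builtin, which works on any iterator in both language versions. A hypothetical minimal class (not the module's own) showing the same pattern:

class Cycler:
    """The Python 3 iterator protocol in miniature."""
    def __init__(self, items):
        self.iterator = iter(items)

    def __iter__(self):
        return self

    def __next__(self):                # looked up by for-loops and next() in Python 3
        return next(self.iterator)

assert list(Cycler('ab')) == ['a', 'b']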