mirror of
https://github.com/SickGear/SickGear.git
synced 2024-12-05 02:43:37 +00:00
980e05cc99
Backported 400 revisions from rev 1de4961-8897c5b (2018-2014). Move core/benchmark, core/cmd_line, core/memory, core/profiler and core/timeout to core/optional/* Remove metadata/qt* PORT: Version 2.0a3 (inline with 3.0a3 @ f80c7d5). Basic Support for XMP Packets. tga: improvements to adhere more closely to the spec. pdf: slightly improved parsing. rar: fix TypeError on unknown block types. Add MacRoman win32 codepage. tiff/exif: support SubIFDs and tiled images. Add method to export metadata in dictionary. mpeg_video: don't attempt to parse Stream past length. mpeg_video: parse ESCR correctly, add SCR value. Change centralise CustomFragments. field: don't set parser class if class is None, to enable autodetect. field: add value/display for CustomFragment. parser: inline warning to enable tracebacks in debug mode. Fix empty bytestrings in makePrintable. Fix contentSize in jpeg.py to account for image_data blocks. Fix the ELF parser. Enhance the AR archive parser. elf parser: fix wrong wrong fields order in parsing little endian section flags. elf parser: add s390 as a machine type. Flesh out mp4 parser. PORT: Version 2.0a1 (inline with 3.0a1). Major refactoring and PEP8. Fix ResourceWarning warnings on files. Add a close() method and support for the context manager protocol ("with obj: ...") to parsers, input and output streams. metadata: get comment from ZIP. Support for InputIOStream.read(0). Fix sizeGe when size is None. Remove unused new_seekable_field_set file. Remove parser Mapsforge .map. Remove parser Parallel Realities Starfighter .pak files. sevenzip: fix for newer archives. java: update access flags and modifiers for Java 1.7 and update description text for most recent Java. Support ustar prefix field in tar archives. Remove file_system* parsers. Remove misc parsers 3d0, 3ds, gnome_keyring, msoffice*, mstask, ole*, word*. Remove program parsers macho, nds, prc. Support non-8bit Character subclasses. Python parser supports Python 3.7. 
Enhance mpeg_ts parser to support MTS/M2TS. Support for creation date in tiff. Change don't hardcode errno constant. PORT: 1.9.1 Internal Only: The following are legacy reference to upstream commit messages. Relevant changes up to b0a115f8. Use integer division. Replace HACHOIR_ERRORS with Exception. Fix metadata.Data: make it sortable. Import fixes from e7de492. PORT: Version 2.0a1 (inline with 3.0a1 @ e9f8fad). Replace hachoir.core.field with hachoir.field Replace hachoir.core.stream with hachoir.stream Remove the compatibility module for PY1.5 to PY2.5. metadata: support TIFF picture. metadata: fix string normalization. metadata: fix datetime regex Fix hachoir bug #57. FileFromInputStream: fix comparison between None and an int. InputIOStream: open the file in binary mode.
300 lines
10 KiB
Python
300 lines
10 KiB
Python
"""
|
|
Abstract Syntax Notation One (ASN.1) parser.
|
|
|
|
Technical informations:
|
|
* PER standard
|
|
http://www.tu.int/ITU-T/studygroups/com17/languages/X.691-0207.pdf
|
|
* Python library
|
|
http://pyasn1.sourceforge.net/
|
|
* Specification of Abstract Syntax Notation One (ASN.1)
|
|
ISO/IEC 8824:1990 Information Technology
|
|
* Specification of Basic Encoding Rules (BER) for ASN.1
|
|
ISO/IEC 8825:1990 Information Technology
|
|
* OpenSSL asn1parser, use command:
|
|
openssl asn1parse -i -inform DER -in file.der
|
|
* ITU-U recommendations:
|
|
http://www.itu.int/rec/T-REC-X/en
|
|
(X.680, X.681, X.682, X.683, X.690, X.691, X.692, X.693, X.694)
|
|
* dumpasn1
|
|
http://www.cs.auckland.ac.nz/~pgut001/dumpasn1.c
|
|
|
|
General information:
|
|
* Wikipedia (english) article
|
|
http://en.wikipedia.org/wiki/Abstract_Syntax_Notation_One
|
|
* ASN.1 information site
|
|
http://asn1.elibel.tm.fr/en/
|
|
* ASN.1 consortium
|
|
http://www.asn1.org/
|
|
|
|
Encodings:
|
|
* Basic Encoding Rules (BER)
|
|
* Canonical Encoding Rules (CER) -- DER derivative that is not widely used
|
|
* Distinguished Encoding Rules (DER) -- used for encrypted applications
|
|
* XML Encoding Rules (XER)
|
|
* Packed Encoding Rules (PER) -- result in the fewest number of bytes
|
|
* Generic String Encoding Rules (GSER)
|
|
=> Are encodings compatibles? Which encodings are supported??
|
|
|
|
Author: Victor Stinner
|
|
Creation date: 24 september 2006
|
|
"""
|
|
|
|
from hachoir.parser import Parser
|
|
from hachoir.field import (FieldSet,
|
|
FieldError, ParserError,
|
|
Bit, Bits, Bytes, UInt8, GenericInteger, String,
|
|
Field, Enum, RawBytes)
|
|
from hachoir.core.endian import BIG_ENDIAN
|
|
from hachoir.core.tools import createDict, humanDatetime
|
|
from hachoir.stream import InputStreamError
|
|
from hachoir.core.text_handler import textHandler
|
|
|
|
|
|
# --- Field parser ---
|
|
|
|
class ASNInteger(Field):
    """ASN.1 variable-length integer (e.g. a BER/DER length field).

    Two encodings exist:
    - first byte in 0..127: the byte itself is the value;
    - first byte in 128..255: (byte & 127) is the number of following
      bytes that carry the big-endian value.
    """

    def __init__(self, parent, name, description=None):
        # Start with the single-byte short form; grown below if needed.
        Field.__init__(self, parent, name, 8, description)
        stream = self._parent.stream
        addr = self.absolute_address
        first = stream.readBits(addr, 8, BIG_ENDIAN)
        if first < 128:
            # Short form: the byte is the value itself.
            result = first
        else:
            payload_bits = (first & 127) * 8
            if payload_bits == 0:
                raise ParserError("ASN.1: invalid ASN integer size (zero)")
            if payload_bits > 64:
                # Arbitrary limit to catch errors
                raise ParserError("ASN.1: ASN integer is limited to 64 bits")
            self._size = 8 + payload_bits
            result = stream.readBits(addr + 8, payload_bits, BIG_ENDIAN)
        self.createValue = lambda: result
|
|
|
|
|
|
class OID_Integer(Bits):
    """Base-128 variable-length integer used inside ASN.1 object identifiers.

    Each byte carries 7 value bits; the high bit (0x80) is a continuation
    flag, set on every byte except the last one.
    """

    def __init__(self, parent, name, description=None):
        Bits.__init__(self, parent, name, 8, description)
        stream = self._parent.stream
        addr = self.absolute_address
        size = 8
        byte = stream.readBits(addr, 8, BIG_ENDIAN)
        value = byte & 127
        while 128 <= byte:
            # Continuation bit set: read another 7-bit group.
            addr += 8
            size += 8
            if 64 < size:
                # Arbitrary limit to catch errors
                raise ParserError("ASN.1: Object identifier is limited to 64 bits")
            byte = stream.readBits(addr, 8, BIG_ENDIAN)
            value = (value << 7) + (byte & 127)
        self._size = size
        self.createValue = lambda: value
|
|
|
|
|
|
def readSequence(self, content_size):
    """Yield SEQUENCE members until the object's declared size is consumed.

    The explicit content_size is unused: the enclosing field-set size
    already bounds the loop.
    """
    while self.size > self.current_size:
        yield Object(self, "item[]")
|
|
|
|
|
|
def readSet(self, content_size):
    """Parse a SET as one nested object spanning the whole content."""
    nbits = content_size * 8
    yield Object(self, "value", size=nbits)
|
|
|
|
|
|
def readASCIIString(self, content_size):
    """Yield the whole content as one ASCII-decoded string field."""
    charset = "ASCII"
    yield String(self, "value", content_size, charset=charset)
|
|
|
|
|
|
def readUTF8String(self, content_size):
    """Yield the whole content as one UTF-8-decoded string field."""
    charset = "UTF-8"
    yield String(self, "value", content_size, charset=charset)
|
|
|
|
|
|
def readBMPString(self, content_size):
    """Yield the whole content as one UTF-16-decoded string field."""
    charset = "UTF-16"
    yield String(self, "value", content_size, charset=charset)
|
|
|
|
|
|
def readBitString(self, content_size):
    """Parse a BIT STRING: one byte of unused-bit count, then the data."""
    yield UInt8(self, "padding_size", description="Number of unused bits")
    remaining = content_size - 1
    if remaining > 0:
        yield Bytes(self, "value", remaining)
|
|
|
|
|
|
def readOctetString(self, content_size):
    """Yield the raw OCTET STRING payload as a single bytes field."""
    nbytes = content_size
    yield Bytes(self, "value", nbytes)
|
|
|
|
|
|
def formatObjectID(fieldset):
    """Render an OBJECT IDENTIFIER field set as dotted-decimal text."""
    parts = [fieldset["first"].display]
    for field in fieldset:
        if field.name.startswith("item["):
            parts.append(str(field.value))
    return ".".join(parts)
|
|
|
|
|
|
def readObjectID(self, content_size):
    """Parse an OBJECT IDENTIFIER: packed first pair, then base-128 arcs."""
    yield textHandler(UInt8(self, "first"), formatFirstObjectID)
    while self.size > self.current_size:
        yield OID_Integer(self, "item[]")
|
|
|
|
|
|
def readBoolean(self, content_size):
    """Parse a BOOLEAN, which must occupy exactly one content byte."""
    if content_size != 1:
        raise ParserError("Overlong boolean: got %s bytes, expected 1 byte" % content_size)

    def as_bool(field):
        return str(bool(field.value))

    yield textHandler(UInt8(self, "value"), as_bool)
|
|
|
|
|
|
def readInteger(self, content_size):
    """Parse an INTEGER as a signed big-endian value of content_size bytes."""
    # Always signed?
    nbits = content_size * 8
    yield GenericInteger(self, "value", True, nbits)
|
|
|
|
|
|
# --- Format ---
|
|
|
|
def formatFirstObjectID(field):
    """Decode the packed first OID byte into its two leading arcs.

    The first byte encodes arc1 * 40 + arc2 per X.690.
    """
    arc1, arc2 = divmod(field.value, 40)
    return "%u.%u" % (arc1, arc2)
|
|
|
|
|
|
def formatValue(fieldset):
    """Describe the object by the display text of its "value" child."""
    value_field = fieldset["value"]
    return value_field.display
|
|
|
|
|
|
def formatUTCTime(fieldset):
    """Render an ASN.1 UTCTime string (YYMMDDHHMM[SS]Z or offset) as text."""
    import datetime
    raw = fieldset["value"].value
    # Two-digit year pivot: 00..49 -> 2000s, 50..99 -> 1900s.
    year = int(raw[0:2])
    year += 2000 if year < 50 else 1900
    month, day = int(raw[2:4]), int(raw[4:6])
    hour, minute = int(raw[6:8]), int(raw[8:10])
    if raw[-1] == "Z":
        dt = datetime.datetime(year, month, day, hour, minute, int(raw[10:12]))
    else:
        # Skip timezone...
        dt = datetime.datetime(year, month, day, hour, minute)
    return humanDatetime(dt)
|
|
|
|
|
|
# --- Object parser ---
|
|
|
|
class Object(FieldSet):
    """A generic ASN.1 BER/DER object: class/form/tag header, length, content.

    For universal objects the field name, content parser and description are
    looked up in TYPE_INFO; constructed context/private objects are parsed as
    sequences, and primitive ones as raw ASCII strings.
    """

    # tag number => (field name, content parser, description, description formatter)
    # NOTE(review): several descriptions previously contained a stray ", None"
    # inside the parentheses (e.g. "NULL (empty, None)"), apparently injected
    # by an automated 3-tuple -> 4-tuple port; restored here.
    TYPE_INFO = {
        0: ("end[]", None, "End (reserved for BER)", None),  # TODO: Write parser
        1: ("boolean[]", readBoolean, "Boolean", None),
        2: ("integer[]", readInteger, "Integer", None),
        3: ("bit_str[]", readBitString, "Bit string", None),
        4: ("octet_str[]", readOctetString, "Octet string", None),
        5: ("null[]", None, "NULL (empty)", None),
        6: ("obj_id[]", readObjectID, "Object identifier", formatObjectID),
        7: ("obj_desc[]", None, "Object descriptor", None),  # TODO: Write parser
        8: ("external[]", None, "External, instance of", None),  # TODO: Write parser # External?
        9: ("real[]", readASCIIString, "Real number", None),  # TODO: Write parser
        10: ("enum[]", readInteger, "Enumerated", None),
        11: ("embedded[]", None, "Embedded PDV", None),  # TODO: Write parser
        # NOTE(review): description says "Printable string" but the handler
        # decodes UTF-8 -- probably meant "UTF-8 string"; confirm upstream.
        12: ("utf8_str[]", readUTF8String, "Printable string", None),
        13: ("rel_obj_id[]", None, "Relative object identifier", None),  # TODO: Write parser
        14: ("time[]", None, "Time", None),  # TODO: Write parser
        # 15: invalid??? sequence of???
        16: ("seq[]", readSequence, "Sequence", None),
        17: ("set[]", readSet, "Set", None),
        18: ("num_str[]", readASCIIString, "Numeric string", None),
        19: ("print_str[]", readASCIIString, "Printable string", formatValue),
        20: ("teletex_str[]", readASCIIString, "Teletex (T61) string", None),
        21: ("videotex_str[]", readASCIIString, "Videotex string", None),
        22: ("ia5_str[]", readASCIIString, "IA5 string", formatValue),
        23: ("utc_time[]", readASCIIString, "UTC time", formatUTCTime),
        24: ("general_time[]", readASCIIString, "Generalized time", None),
        25: ("graphic_str[]", readASCIIString, "Graphic string", None),
        26: ("visible_str[]", readASCIIString, "Visible (ISO64) string", None),
        27: ("general_str[]", readASCIIString, "General string", None),
        28: ("universal_str[]", readASCIIString, "Universal string", None),
        29: ("unrestricted_str[]", readASCIIString, "Unrestricted string", None),
        30: ("bmp_str[]", readBMPString, "BMP string", None),
        # 31: multiple octet tag number, TODO: not supported

        # Extended tag values:
        # 31: Date
        # 32: Time of day
        # 33: Date-time
        # 34: Duration
    }
    TYPE_DESC = createDict(TYPE_INFO, 2)

    CLASS_DESC = {0: "universal", 1: "application", 2: "context", 3: "private"}
    FORM_DESC = {False: "primitive", True: "constructed"}

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # Low 5 bits of the first header byte hold the tag number.
        key = self["type"].value & 31
        if self['class'].value == 0:
            # universal object
            if key in self.TYPE_INFO:
                self._name, self._handler, self._description, create_desc = self.TYPE_INFO[key]
                if create_desc:
                    # Defer description to the formatter; clear the static one.
                    self.createDescription = lambda: "%s: %s" % (self.TYPE_INFO[key][2], create_desc(self))
                    self._description = None
            elif key == 31:
                raise ParserError("ASN.1 Object: tag bigger than 30 are not supported")
            else:
                self._handler = None
        elif self['form'].value:
            # constructed: treat as sequence
            self._name = 'seq[]'
            self._handler = readSequence
            self._description = 'constructed object type %i' % key
        else:
            # primitive, context/private
            self._name = 'raw[]'
            self._handler = readASCIIString
            self._description = '%s object type %i' % (self['class'].display, key)
        # Total size = header bits already parsed + declared content bytes.
        field = self["size"]
        self._size = field.address + field.size + field.value * 8

    def createFields(self):
        """Parse the identifier octet, the length, then the content."""
        yield Enum(Bits(self, "class", 2), self.CLASS_DESC)
        yield Enum(Bit(self, "form"), self.FORM_DESC)
        if self['class'].value == 0:
            yield Enum(Bits(self, "type", 5), self.TYPE_DESC)
        else:
            yield Bits(self, "type", 5)
        yield ASNInteger(self, "size", "Size in bytes")
        size = self["size"].value
        if size:
            if self._handler:
                # Delegate content parsing to the type-specific reader.
                for field in self._handler(self, size):
                    yield field
            else:
                yield RawBytes(self, "raw", size)
|
|
|
|
|
|
class ASN1File(Parser):
    """Parser for DER-encoded ASN.1 files: a single root object."""

    PARSER_TAGS = {
        "id": "asn1",
        "category": "container",
        "file_ext": ("der",),
        "min_size": 16,
        "description": "Abstract Syntax Notation One (ASN.1)"
    }
    endian = BIG_ENDIAN

    def validate(self):
        """Accept the stream only if one root object spans the whole file."""
        try:
            root = self[0]
        except (InputStreamError, FieldError):
            return "Unable to create root object"
        if root.size == self.size:
            return True
        return "Invalid root object size"

    def createFields(self):
        yield Object(self, "root")
|