Update hachoir 3.1.2 (f739b43) → 3.2.0 (38d759f).

This commit is contained in:
JackDandy 2023-10-08 00:04:41 +01:00
parent ecd70f546f
commit cbde47c95c
124 changed files with 1639 additions and 482 deletions

View file

@ -2,6 +2,7 @@
* Update Beautiful Soup 4.12.2 to 4.12.2 (30c58a1) * Update Beautiful Soup 4.12.2 to 4.12.2 (30c58a1)
* Update soupsieve 2.4.1 (2e66beb) to 2.5.0 (dc71495) * Update soupsieve 2.4.1 (2e66beb) to 2.5.0 (dc71495)
* Update hachoir 3.1.2 (f739b43) to 3.2.0 (38d759f)
### 3.30.1 (2023-10-02 22:50:00 UTC) ### 3.30.1 (2023-10-02 22:50:00 UTC)

View file

@ -1,2 +1,2 @@
VERSION = (3, 1, 2) VERSION = (3, 2, 0)
__version__ = ".".join(map(str, VERSION)) __version__ = ".".join(map(str, VERSION))

View file

@ -4,7 +4,7 @@ string, number, hexadecimal, etc.
""" """
from hachoir.core.endian import BIG_ENDIAN, LITTLE_ENDIAN, MIDDLE_ENDIAN from hachoir.core.endian import BIG_ENDIAN, LITTLE_ENDIAN, MIDDLE_ENDIAN
from struct import calcsize, unpack, error as struct_error from struct import calcsize, error as struct_error
def swap16(value): def swap16(value):
@ -292,20 +292,11 @@ def str2long(data, endian):
>>> str2long(b"\x0b\x0a\x0d\x0c", MIDDLE_ENDIAN) == 0x0a0b0c0d >>> str2long(b"\x0b\x0a\x0d\x0c", MIDDLE_ENDIAN) == 0x0a0b0c0d
True True
""" """
Removed:

    assert 1 <= len(data) <= 32  # arbitrary limit: 256 bits
    try:
        return unpack(_struct_format[endian][len(data)], data)[0]
    except KeyError:
        pass

    assert endian in (BIG_ENDIAN, LITTLE_ENDIAN, MIDDLE_ENDIAN)
    shift = 0
    value = 0
    if endian is BIG_ENDIAN:
        data = reversed(data)
    elif endian is MIDDLE_ENDIAN:
        data = reversed(strswapmid(data))
    for byte in data:
        value += (byte << shift)
        shift += 8
    return value

Added:

    if endian == LITTLE_ENDIAN:
        return int.from_bytes(data, "little")
    elif endian == BIG_ENDIAN:
        return int.from_bytes(data, "big")
    elif endian == MIDDLE_ENDIAN:
        return int.from_bytes(strswapmid(data), "big")
    else:
        raise ValueError("Invalid endian %s" % (endian,))
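
The rewrite above drops the struct.unpack lookup table in favour of the built-in int.from_bytes(). A minimal sketch of the equivalence; strswapmid_sketch is only an illustrative stand-in for hachoir's own strswapmid, and the middle-endian value matches the doctest kept in this hunk:

data = b"\x0b\x0a\x0d\x0c"

print(int.from_bytes(data, "little") == 0x0c0d0a0b)   # LITTLE_ENDIAN
print(int.from_bytes(data, "big") == 0x0b0a0d0c)      # BIG_ENDIAN

def strswapmid_sketch(buf):
    # assumption: MIDDLE_ENDIAN swaps each adjacent byte pair before a big-endian read
    return b"".join(buf[i + 1:i + 2] + buf[i:i + 1] for i in range(0, len(buf), 2))

print(int.from_bytes(strswapmid_sketch(data), "big") == 0x0a0b0c0d)   # MIDDLE_ENDIAN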

View file

@ -168,7 +168,7 @@ class Dict(object):
_index = index _index = index
if index < 0: if index < 0:
index += len(self._value_list) index += len(self._value_list)
if not(0 <= index <= len(self._value_list)): if not (0 <= index <= len(self._value_list)):
raise IndexError("Insert error: index '%s' is invalid" % _index) raise IndexError("Insert error: index '%s' is invalid" % _index)
for item_key, item_index in self._index.items(): for item_key, item_index in self._index.items():
if item_index >= index: if item_index >= index:

View file

@ -493,7 +493,7 @@ def timestampUNIX(value):
""" """
if not isinstance(value, (float, int)): if not isinstance(value, (float, int)):
raise TypeError("timestampUNIX(): an integer or float is required") raise TypeError("timestampUNIX(): an integer or float is required")
if not(0 <= value <= 2147483647): if not (0 <= value <= 2147483647):
raise ValueError("timestampUNIX(): value have to be in 0..2147483647") raise ValueError("timestampUNIX(): value have to be in 0..2147483647")
return UNIX_TIMESTAMP_T0 + timedelta(seconds=value) return UNIX_TIMESTAMP_T0 + timedelta(seconds=value)
@ -514,7 +514,7 @@ def timestampMac32(value):
""" """
if not isinstance(value, (float, int)): if not isinstance(value, (float, int)):
raise TypeError("an integer or float is required") raise TypeError("an integer or float is required")
if not(0 <= value <= 4294967295): if not (0 <= value <= 4294967295):
return "invalid Mac timestamp (%s)" % value return "invalid Mac timestamp (%s)" % value
return MAC_TIMESTAMP_T0 + timedelta(seconds=value) return MAC_TIMESTAMP_T0 + timedelta(seconds=value)

View file

@ -4,27 +4,27 @@ from hachoir.field.bit_field import Bit, Bits, RawBits # noqa
from hachoir.field.byte_field import Bytes, RawBytes # noqa from hachoir.field.byte_field import Bytes, RawBytes # noqa
from hachoir.field.sub_file import SubFile, CompressedField # noqa from hachoir.field.sub_file import SubFile, CompressedField # noqa
from hachoir.field.character import Character # noqa from hachoir.field.character import Character # noqa
from hachoir.field.integer import (Int8, Int16, Int24, Int32, Int64, # noqa from hachoir.field.integer import (Int8, Int16, Int24, Int32, Int64, # noqa
UInt8, UInt16, UInt24, UInt32, UInt64, UInt8, UInt16, UInt24, UInt32, UInt64,
GenericInteger) GenericInteger)
from hachoir.field.enum import Enum # noqa from hachoir.field.enum import Enum # noqa
from hachoir.field.string_field import (GenericString, # noqa from hachoir.field.string_field import (GenericString, # noqa
String, CString, UnixLine, String, CString, UnixLine,
PascalString8, PascalString16, PascalString8, PascalString16,
PascalString32) PascalString32)
from hachoir.field.padding import (PaddingBits, PaddingBytes, # noqa from hachoir.field.padding import (PaddingBits, PaddingBytes, # noqa
NullBits, NullBytes) NullBits, NullBytes)
# Functions # Functions
from hachoir.field.helper import (isString, isInteger, # noqa from hachoir.field.helper import (isString, isInteger, # noqa
createPaddingField, createNullField, createPaddingField, createNullField,
createRawField, writeIntoFile, createRawField, writeIntoFile,
createOrphanField) createOrphanField)
# FieldSet classes # FieldSet classes
from hachoir.field.fake_array import FakeArray # noqa from hachoir.field.fake_array import FakeArray # noqa
from hachoir.field.basic_field_set import (BasicFieldSet, # noqa from hachoir.field.basic_field_set import (BasicFieldSet, # noqa
ParserError, MatchError) ParserError, MatchError)
from hachoir.field.generic_field_set import GenericFieldSet # noqa from hachoir.field.generic_field_set import GenericFieldSet # noqa
from hachoir.field.seekable_field_set import SeekableFieldSet, RootSeekableFieldSet # noqa from hachoir.field.seekable_field_set import SeekableFieldSet, RootSeekableFieldSet # noqa
from hachoir.field.field_set import FieldSet # noqa from hachoir.field.field_set import FieldSet # noqa

View file

@ -20,7 +20,7 @@ class RawBytes(Field):
def __init__(self, parent, name, length, description="Raw data"): def __init__(self, parent, name, length, description="Raw data"):
assert issubclass(parent.__class__, Field) assert issubclass(parent.__class__, Field)
if not(0 < length <= MAX_LENGTH): if not (0 < length <= MAX_LENGTH):
raise FieldError("Invalid RawBytes length (%s)!" % length) raise FieldError("Invalid RawBytes length (%s)!" % length)
Field.__init__(self, parent, name, length * 8, description) Field.__init__(self, parent, name, length * 8, description)
self._display = None self._display = None

View file

@ -41,7 +41,7 @@ class Field(Logger):
None: field size is computed dynamically. None: field size is computed dynamically.
int: field size, in bits. int: field size, in bits.
callable: function that receives the same arguments as the constructor, callable: function that receives the same arguments as the constructor,
without ``parent``. without ``parent``.
""" """
is_field_set = False is_field_set = False
@ -232,7 +232,7 @@ class Field(Logger):
Args: Args:
key (str): relative or absolute path for the desired field. key (str): relative or absolute path for the desired field.
const (bool): For field sets, whether to consume additional input to const (bool): For field sets, whether to consume additional input to
find a matching field. find a matching field.
Returns: Returns:
Field: The field matching the provided path. Field: The field matching the provided path.

View file

@ -1,5 +1,5 @@
from hachoir.field import (MissingField, BasicFieldSet, Field, ParserError, from hachoir.field import (MissingField, BasicFieldSet, Field, ParserError,
createRawField, createNullField, createPaddingField, FakeArray) createRawField, createNullField, createPaddingField, FakeArray)
from hachoir.core.dict import Dict, UniqKeyError from hachoir.core.dict import Dict, UniqKeyError
from hachoir.core.tools import lowerBound, makeUnicode from hachoir.core.tools import lowerBound, makeUnicode
import hachoir.core.config as config import hachoir.core.config as config
@ -117,7 +117,7 @@ class GenericFieldSet(BasicFieldSet):
_getSize, doc="Size in bits, may create all fields to get size") _getSize, doc="Size in bits, may create all fields to get size")
def _getCurrentSize(self): def _getCurrentSize(self):
assert not(self.done) assert not (self.done)
return self._current_size return self._current_size
current_size = property(_getCurrentSize) current_size = property(_getCurrentSize)

View file

@ -1,8 +1,8 @@
from hachoir.field import (FieldError, from hachoir.field import (FieldError,
RawBits, RawBytes, RawBits, RawBytes,
PaddingBits, PaddingBytes, PaddingBits, PaddingBytes,
NullBits, NullBytes, NullBits, NullBytes,
GenericString, GenericInteger) GenericString, GenericInteger)
from hachoir.stream import FileOutputStream from hachoir.stream import FileOutputStream

View file

@ -23,7 +23,7 @@ class PaddingBits(Bits):
self._display_pattern = self.checkPattern() self._display_pattern = self.checkPattern()
def checkPattern(self): def checkPattern(self):
if not(config.check_padding_pattern): if not (config.check_padding_pattern):
return False return False
if self.pattern != 0: if self.pattern != 0:
return False return False
@ -72,7 +72,7 @@ class PaddingBytes(Bytes):
self._display_pattern = self.checkPattern() self._display_pattern = self.checkPattern()
def checkPattern(self): def checkPattern(self):
if not(config.check_padding_pattern): if not (config.check_padding_pattern):
return False return False
if self.pattern is None: if self.pattern is None:
return False return False

View file

@ -244,7 +244,7 @@ class GenericString(Bytes):
and err.end == len(text) \ and err.end == len(text) \
and self._charset == "UTF-16-LE": and self._charset == "UTF-16-LE":
try: try:
text = str(text + "\0", self._charset, "strict") text = str(text + b"\0", self._charset, "strict")
self.warning( self.warning(
"Fix truncated %s string: add missing nul byte" % self._charset) "Fix truncated %s string: add missing nul byte" % self._charset)
return text return text
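
The one-byte change above (b"\0" instead of "\0") matters because the value being repaired is still a bytes object at that point, so concatenating a str would raise TypeError instead of restoring the missing nul byte. A small sketch of the recovery path, with raw standing in for the field's truncated UTF-16-LE bytes (illustrative names only):

raw = "abc".encode("UTF-16-LE")[:-1]        # final nul byte lost -> 5 bytes

try:
    raw.decode("UTF-16-LE", "strict")
except UnicodeDecodeError as err:
    assert err.end == len(raw)              # error sits on the truncated last code unit
    fixed = (raw + b"\0").decode("UTF-16-LE", "strict")
    print(fixed)                            # abc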

View file

@ -1,6 +1,6 @@
from hachoir.core.tools import (humanDatetime, humanDuration, from hachoir.core.tools import (humanDatetime, humanDuration,
timestampUNIX, timestampMac32, timestampUUID60, timestampUNIX, timestampMac32, timestampUUID60,
timestampWin64, durationWin64, durationMillisWin64) timestampWin64, durationWin64, durationMillisWin64)
from hachoir.field import Bits, FieldSet from hachoir.field import Bits, FieldSet
from datetime import datetime from datetime import datetime
@ -61,7 +61,7 @@ class TimeDateMSDOS32(FieldSet):
def createValue(self): def createValue(self):
return datetime( return datetime(
1980 + self["year"].value, self["month"].value, self["day"].value, 1980 + self["year"].value, self["month"].value or 1, self["day"].value or 1,
self["hour"].value, self["minute"].value, 2 * self["second"].value) self["hour"].value, self["minute"].value, 2 * self["second"].value)
def createDisplay(self): def createDisplay(self):
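
The two `or 1` fallbacks above exist because the raw MS-DOS month and day bit fields can hold 0 in malformed files, while datetime() requires both to be at least 1. A minimal illustration of the guard, in plain Python outside hachoir:

from datetime import datetime

try:
    datetime(1980, 0, 0, 0, 0, 0)
except ValueError as exc:
    print(exc)                                           # e.g. "month must be in 1..12"

month, day = 0, 0
print(datetime(1980, month or 1, day or 1, 0, 0, 0))     # 1980-01-01 00:00:00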

View file

@ -7,7 +7,7 @@ class GenericVector(FieldSet):
# Sanity checks # Sanity checks
assert issubclass(item_class, Field) assert issubclass(item_class, Field)
assert isinstance(item_class.static_size, int) assert isinstance(item_class.static_size, int)
if not(0 < nb_items): if not (0 < nb_items):
raise ParserError('Unable to create empty vector "%s" in %s' raise ParserError('Unable to create empty vector "%s" in %s'
% (name, parent.path)) % (name, parent.path))
size = nb_items * item_class.static_size size = nb_items * item_class.static_size

View file

@ -3,7 +3,7 @@ from hachoir.metadata.safe import fault_tolerant, getValue
from hachoir.metadata.metadata import ( from hachoir.metadata.metadata import (
RootMetadata, Metadata, MultipleMetadata, registerExtractor) RootMetadata, Metadata, MultipleMetadata, registerExtractor)
from hachoir.parser.archive import (Bzip2Parser, CabFile, GzipParser, from hachoir.parser.archive import (Bzip2Parser, CabFile, GzipParser,
TarFile, ZipFile, MarFile) TarFile, ZipFile, MarFile)
from hachoir.core.tools import humanUnixAttributes from hachoir.core.tools import humanUnixAttributes

View file

@ -1,12 +1,12 @@
from hachoir.metadata.metadata import (registerExtractor, Metadata, from hachoir.metadata.metadata import (registerExtractor, Metadata,
RootMetadata, MultipleMetadata) RootMetadata, MultipleMetadata)
from hachoir.parser.audio import (AuFile, MpegAudioFile, RealAudioFile, from hachoir.parser.audio import (AuFile, MpegAudioFile, RealAudioFile,
AiffFile, FlacParser) AiffFile, FlacParser)
from hachoir.parser.container import OggFile, RealMediaFile from hachoir.parser.container import OggFile, RealMediaFile
from hachoir.core.tools import makePrintable, timedelta2seconds, humanBitRate from hachoir.core.tools import makePrintable, timedelta2seconds, humanBitRate
from datetime import timedelta from datetime import timedelta
from hachoir.metadata.metadata_item import (QUALITY_FAST, QUALITY_NORMAL, from hachoir.metadata.metadata_item import (QUALITY_FAST, QUALITY_NORMAL,
QUALITY_BEST) QUALITY_BEST)
from hachoir.metadata.safe import fault_tolerant, getValue from hachoir.metadata.safe import fault_tolerant, getValue

View file

@ -1,5 +1,5 @@
from hachoir.metadata.metadata import (registerExtractor, Metadata, from hachoir.metadata.metadata import (registerExtractor, Metadata,
RootMetadata, MultipleMetadata) RootMetadata, MultipleMetadata)
from hachoir.parser.image import ( from hachoir.parser.image import (
BmpFile, IcoFile, PcxFile, GifFile, PngFile, TiffFile, BmpFile, IcoFile, PcxFile, GifFile, PngFile, TiffFile,
XcfFile, TargaFile, WMF_File, PsdFile) XcfFile, TargaFile, WMF_File, PsdFile)

View file

@ -85,7 +85,7 @@ def processFile(values, filename,
with parser: with parser:
# Extract metadata # Extract metadata
extract_metadata = not(values.mime or values.type) extract_metadata = not (values.mime or values.type)
if extract_metadata: if extract_metadata:
try: try:
metadata = extractMetadata(parser, values.quality) metadata = extractMetadata(parser, values.quality)
@ -124,7 +124,7 @@ def processFile(values, filename,
def processFiles(values, filenames, display=True): def processFiles(values, filenames, display=True):
human = not(values.raw) human = not values.raw
ok = True ok = True
priority = int(values.level) * 100 + 99 priority = int(values.level) * 100 + 99
display_filename = (1 < len(filenames)) display_filename = (1 < len(filenames))

View file

@ -3,7 +3,7 @@ Extract metadata from RIFF file format: AVI video and WAV sound.
""" """
from hachoir.metadata.metadata import (Metadata, MultipleMetadata, from hachoir.metadata.metadata import (Metadata, MultipleMetadata,
registerExtractor) registerExtractor)
from hachoir.metadata.safe import fault_tolerant, getValue from hachoir.metadata.safe import fault_tolerant, getValue
from hachoir.parser.container.riff import RiffFile from hachoir.parser.container.riff import RiffFile
from hachoir.parser.video.fourcc import UNCOMPRESSED_AUDIO from hachoir.parser.video.fourcc import UNCOMPRESSED_AUDIO

View file

@ -1,6 +1,6 @@
from hachoir.field import MissingField from hachoir.field import MissingField
from hachoir.metadata.metadata import (registerExtractor, from hachoir.metadata.metadata import (registerExtractor,
Metadata, RootMetadata, MultipleMetadata) Metadata, RootMetadata, MultipleMetadata)
from hachoir.metadata.metadata_item import QUALITY_GOOD from hachoir.metadata.metadata_item import QUALITY_GOOD
from hachoir.metadata.safe import fault_tolerant from hachoir.metadata.safe import fault_tolerant
from hachoir.parser.video import AsfFile, FlvFile from hachoir.parser.video import AsfFile, FlvFile

View file

@ -2,5 +2,5 @@ from hachoir.parser.parser import ValidateError, HachoirParser, Parser # noqa
from hachoir.parser.parser_list import ParserList, HachoirParserList # noqa from hachoir.parser.parser_list import ParserList, HachoirParserList # noqa
from hachoir.parser.guess import QueryParser, guessParser, createParser # noqa from hachoir.parser.guess import QueryParser, guessParser, createParser # noqa
from hachoir.parser import (archive, audio, container, # noqa from hachoir.parser import (archive, audio, container, # noqa
file_system, image, game, misc, network, program, file_system, image, game, misc, network, program,
video) video)

View file

@ -1,5 +1,6 @@
from hachoir.parser.archive.ace import AceFile # noqa from hachoir.parser.archive.ace import AceFile # noqa
from hachoir.parser.archive.ar import ArchiveFile # noqa from hachoir.parser.archive.ar import ArchiveFile # noqa
from hachoir.parser.archive.arj import ArjParser # noqa
from hachoir.parser.archive.bomstore import BomFile # noqa from hachoir.parser.archive.bomstore import BomFile # noqa
from hachoir.parser.archive.bzip2_parser import Bzip2Parser # noqa from hachoir.parser.archive.bzip2_parser import Bzip2Parser # noqa
from hachoir.parser.archive.cab import CabFile # noqa from hachoir.parser.archive.cab import CabFile # noqa

View file

@ -13,10 +13,10 @@ Creation date: 19 january 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (StaticFieldSet, FieldSet, from hachoir.field import (StaticFieldSet, FieldSet,
Bit, Bits, NullBits, RawBytes, Enum, Bit, Bits, NullBits, RawBytes, Enum,
UInt8, UInt16, UInt32, UInt8, UInt16, UInt32,
PascalString8, PascalString16, String, PascalString8, PascalString16, String,
TimeDateMSDOS32) TimeDateMSDOS32)
from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.parser.common.msdos import MSDOSFileAttr32 from hachoir.parser.common.msdos import MSDOSFileAttr32

View file

@ -4,7 +4,7 @@ GNU ar archive : archive file (.a) and Debian (.deb) archive.
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
String, RawBytes, UnixLine) String, RawBytes, UnixLine)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN

View file

@ -0,0 +1,155 @@
"""
ARJ archive file parser
https://github.com/FarGroup/FarManager/blob/master/plugins/multiarc/arc.doc/arj.txt
"""
from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.field import (FieldSet, ParserError,
CString, Enum, RawBytes,
UInt8, UInt16, UInt32,
Bytes)
from hachoir.parser import Parser
HOST_OS = {
0: "MSDOS",
1: "PRIMOS",
2: "UNIX",
3: "AMIGA",
4: "MACDOS",
5: "OS/2",
6: "APPLE GS",
7: "ATARI ST",
8: "NEXT",
9: "VAX VMS",
10: "WIN95",
11: "WIN32",
}
FILE_TYPE = {
0: "BINARY",
1: "TEXT",
2: "COMMENT",
3: "DIRECTORY",
4: "VOLUME",
5: "CHAPTER",
}
MAGIC = b"\x60\xEA"
class BaseBlock(FieldSet):
@property
def isEmpty(self):
return self["basic_header_size"].value == 0
def _header_start_fields(self):
yield Bytes(self, "magic", len(MAGIC))
if self["magic"].value != MAGIC:
raise ParserError("Wrong header magic")
yield UInt16(self, "basic_header_size", "zero if end of archive")
if not self.isEmpty:
yield UInt8(self, "first_hdr_size")
yield UInt8(self, "archiver_version")
yield UInt8(self, "min_archiver_version")
yield Enum(UInt8(self, "host_os"), HOST_OS)
yield UInt8(self, "arj_flags")
def _header_end_fields(self):
yield UInt8(self, "last_chapter")
fhs = self["first_hdr_size"]
name_position = fhs.address // 8 + fhs.value
current_position = self["last_chapter"].address // 8 + 1
if name_position > current_position:
yield RawBytes(self, "reserved2", name_position - current_position)
yield CString(self, "filename", "File name", charset="ASCII")
yield CString(self, "comment", "Comment", charset="ASCII")
yield UInt32(self, "crc", "Header CRC")
i = 0
while not self.eof:
yield UInt16(self, f"extended_header_size_{i}")
cur_size = self[f"extended_header_size_{i}"].value
if cur_size == 0:
break
yield RawBytes(self, "extended_header_data", cur_size)
yield UInt32(self, f"extended_header_crc_{i}")
i += 1
def validate(self):
if self.stream.readBytes(0, 2) != MAGIC:
return "Invalid magic"
return True
class Header(BaseBlock):
def createFields(self):
yield from self._header_start_fields()
if not self.isEmpty:
yield UInt8(self, "security_version")
yield Enum(UInt8(self, "file_type"), FILE_TYPE)
yield UInt8(self, "reserved")
yield UInt32(self, "date_time_created")
yield UInt32(self, "date_time_modified")
yield UInt32(self, "archive_size")
yield UInt32(self, "security_envelope_file_position")
yield UInt16(self, "filespec_position")
yield UInt16(self, "security_envelope_data_len")
yield UInt8(self, "encryption_version")
yield from self._header_end_fields()
def createDescription(self):
if self.isEmpty:
return "Empty main header"
return "Main header of '%s'" % self["filename"].value
class Block(BaseBlock):
def createFields(self):
yield from self._header_start_fields()
if not self.isEmpty:
yield UInt8(self, "method")
yield Enum(UInt8(self, "file_type"), FILE_TYPE)
yield UInt8(self, "reserved")
yield UInt32(self, "date_time_modified")
yield UInt32(self, "compressed_size")
yield UInt32(self, "original_size")
yield UInt32(self, "original_file_crc")
yield UInt16(self, "filespec_position")
yield UInt16(self, "file_access_mode")
yield UInt8(self, "first_chapter")
yield from self._header_end_fields()
compressed_size = self["compressed_size"].value
if compressed_size > 0:
yield RawBytes(self, "compressed_data", compressed_size)
def createDescription(self):
if self.isEmpty:
return "Empty file header"
return "File header of '%s'" % self["filename"].value
class ArjParser(Parser):
endian = LITTLE_ENDIAN
PARSER_TAGS = {
"id": "arj",
"category": "archive",
"file_ext": ("arj",),
"min_size": 4 * 8,
"description": "ARJ archive"
}
def validate(self):
if self.stream.readBytes(0, 2) != MAGIC:
return "Invalid magic"
return True
def createFields(self):
yield Header(self, "header")
if not self["header"].isEmpty:
while not self.eof:
block = Block(self, "file_header[]")
yield block
if block.isEmpty:
break
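
With the new module registered through hachoir.parser.archive (see the ArjParser import added earlier in this commit), the generic entry points can return it directly. A minimal usage sketch, where example.arj is a hypothetical sample file:

from hachoir.parser import createParser

parser = createParser("example.arj")        # hypothetical path
if parser:
    with parser:
        for field in parser:
            print(field.path, field.description)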

View file

@ -11,7 +11,7 @@ Created: 2015-05-14
from hachoir.parser import HachoirParser from hachoir.parser import HachoirParser
from hachoir.field import (RootSeekableFieldSet, FieldSet, from hachoir.field import (RootSeekableFieldSet, FieldSet,
UInt32, Bytes, NullBytes, RawBytes) UInt32, Bytes, NullBytes, RawBytes)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN

View file

@ -7,9 +7,9 @@ Author: Victor Stinner, Robert Xiao
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.core.tools import paddingSize from hachoir.core.tools import paddingSize
from hachoir.field import (Field, FieldSet, GenericVector, from hachoir.field import (Field, FieldSet, GenericVector,
ParserError, String, ParserError, String,
PaddingBits, Bit, Bits, Character, PaddingBits, Bit, Bits, Character,
UInt32, Enum, CompressedField) UInt32, Enum, CompressedField)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.parser.archive.zlib import build_tree, HuffmanCode from hachoir.parser.archive.zlib import build_tree, HuffmanCode
@ -218,7 +218,7 @@ class Bzip2Parser(Parser):
def validate(self): def validate(self):
if self.stream.readBytes(0, 3) != b'BZh': if self.stream.readBytes(0, 3) != b'BZh':
return "Wrong file signature" return "Wrong file signature"
if not("1" <= self["blocksize"].value <= "9"): if not ("1" <= self["blocksize"].value <= "9"):
return "Wrong blocksize" return "Wrong blocksize"
return True return True

View file

@ -10,9 +10,9 @@ Creation date: 31 january 2007
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, Enum, from hachoir.field import (FieldSet, Enum,
CString, String, CString, String,
UInt8, UInt16, UInt32, Bit, Bits, PaddingBits, NullBits, UInt8, UInt16, UInt32, Bit, Bits, PaddingBits, NullBits,
DateTimeMSDOS32, RawBytes, CustomFragment) DateTimeMSDOS32, RawBytes, CustomFragment)
from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.tools import paddingSize from hachoir.core.tools import paddingSize

View file

@ -8,11 +8,12 @@ Creation date: July 18, 2007
""" """
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt32, Bit, Bits, PaddingBits, UInt32, Bit, Bits, PaddingBits,
RawBytes, ParserError) RawBytes, ParserError)
from hachoir.core.endian import MIDDLE_ENDIAN, LITTLE_ENDIAN from hachoir.core.endian import MIDDLE_ENDIAN, LITTLE_ENDIAN
from hachoir.core.tools import paddingSize from hachoir.core.tools import paddingSize
from hachoir.parser.archive.zlib import build_tree, HuffmanCode, extend_data from hachoir.parser.archive.zlib import build_tree, HuffmanCode, extend_data
import struct
class LZXPreTreeEncodedTree(FieldSet): class LZXPreTreeEncodedTree(FieldSet):
@ -146,6 +147,8 @@ class LZXBlock(FieldSet):
self.window_size = self.WINDOW_SIZE[self.compression_level] self.window_size = self.WINDOW_SIZE[self.compression_level]
self.block_type = self["block_type"].value self.block_type = self["block_type"].value
curlen = len(self.parent.uncompressed_data) curlen = len(self.parent.uncompressed_data)
intel_started = False # Do we perform Intel jump fixups on this block?
if self.block_type in (1, 2): # Verbatim or aligned offset block if self.block_type in (1, 2): # Verbatim or aligned offset block
if self.block_type == 2: if self.block_type == 2:
for i in range(8): for i in range(8):
@ -156,6 +159,8 @@ class LZXBlock(FieldSet):
yield LZXPreTreeEncodedTree(self, "main_tree_rest", self.window_size * 8) yield LZXPreTreeEncodedTree(self, "main_tree_rest", self.window_size * 8)
main_tree = build_tree( main_tree = build_tree(
self["main_tree_start"].lengths + self["main_tree_rest"].lengths) self["main_tree_start"].lengths + self["main_tree_rest"].lengths)
if self["main_tree_start"].lengths[0xE8]:
intel_started = True
yield LZXPreTreeEncodedTree(self, "length_tree", 249) yield LZXPreTreeEncodedTree(self, "length_tree", 249)
length_tree = build_tree(self["length_tree"].lengths) length_tree = build_tree(self["length_tree"].lengths)
current_decoded_size = 0 current_decoded_size = 0
@ -169,7 +174,7 @@ class LZXBlock(FieldSet):
field._description = "Literal value %r" % chr( field._description = "Literal value %r" % chr(
field.realvalue) field.realvalue)
current_decoded_size += 1 current_decoded_size += 1
self.parent.uncompressed_data += chr(field.realvalue) self.parent._lzx_window.append(field.realvalue)
yield field yield field
continue continue
position_header, length_header = divmod( position_header, length_header = divmod(
@ -243,8 +248,7 @@ class LZXBlock(FieldSet):
self.parent.r2 = self.parent.r1 self.parent.r2 = self.parent.r1
self.parent.r1 = self.parent.r0 self.parent.r1 = self.parent.r0
self.parent.r0 = position self.parent.r0 = position
self.parent.uncompressed_data = extend_data( extend_data(self.parent._lzx_window, length, position)
self.parent.uncompressed_data, length, position)
current_decoded_size += length current_decoded_size += length
elif self.block_type == 3: # Uncompressed block elif self.block_type == 3: # Uncompressed block
padding = paddingSize(self.address + self.current_size, 16) padding = paddingSize(self.address + self.current_size, 16)
@ -253,6 +257,7 @@ class LZXBlock(FieldSet):
else: else:
yield PaddingBits(self, "padding[]", 16) yield PaddingBits(self, "padding[]", 16)
self.endian = LITTLE_ENDIAN self.endian = LITTLE_ENDIAN
intel_started = True # apparently intel fixup may be needed on uncompressed blocks?
yield UInt32(self, "r[]", "New value of R0") yield UInt32(self, "r[]", "New value of R0")
yield UInt32(self, "r[]", "New value of R1") yield UInt32(self, "r[]", "New value of R1")
yield UInt32(self, "r[]", "New value of R2") yield UInt32(self, "r[]", "New value of R2")
@ -260,18 +265,50 @@ class LZXBlock(FieldSet):
self.parent.r1 = self["r[1]"].value self.parent.r1 = self["r[1]"].value
self.parent.r2 = self["r[2]"].value self.parent.r2 = self["r[2]"].value
yield RawBytes(self, "data", self.uncompressed_size) yield RawBytes(self, "data", self.uncompressed_size)
self.parent.uncompressed_data += self["data"].value self.parent._lzx_window += self["data"].value
if self["block_size"].value % 2: if self["block_size"].value % 2:
yield PaddingBits(self, "padding", 8) yield PaddingBits(self, "padding", 8)
else: else:
raise ParserError("Unknown block type %d!" % self.block_type) raise ParserError("Unknown block type %d!" % self.block_type)
# Fixup Intel jumps if necessary (fixups are only applied to the final output, not to the LZX window)
self.parent.uncompressed_data += self.parent._lzx_window[-self.uncompressed_size:]
self.parent._lzx_window = self.parent._lzx_window[-(1 << self.root.compr_level):]
if (
intel_started
and self.parent["filesize_indicator"].value
and self.parent["filesize"].value > 0
):
# Note that we're decoding a block-at-a-time instead of a frame-at-a-time,
# so we need to handle the frame boundaries carefully.
filesize = self.parent["filesize"].value
start_pos = max(0, curlen - 10) # We may need to correct something from the last block
end_pos = len(self.parent.uncompressed_data) - 10
while 1:
jmp_pos = self.parent.uncompressed_data.find(b"\xE8", start_pos, end_pos)
if jmp_pos == -1:
break
if (jmp_pos % 32768) >= (32768 - 10):
# jumps at the end of frames are not fixed up
start_pos = jmp_pos + 1
continue
abs_off, = struct.unpack("<i", self.parent.uncompressed_data[jmp_pos + 1:jmp_pos + 5])
if -jmp_pos <= abs_off < filesize:
if abs_off < 0:
rel_off = abs_off + filesize
else:
rel_off = abs_off - jmp_pos
self.parent.uncompressed_data[jmp_pos + 1:jmp_pos + 5] = struct.pack("<i", rel_off)
start_pos = jmp_pos + 5
class LZXStream(Parser): class LZXStream(Parser):
endian = MIDDLE_ENDIAN endian = MIDDLE_ENDIAN
def createFields(self): def createFields(self):
self.uncompressed_data = "" self.uncompressed_data = bytearray()
self._lzx_window = bytearray()
self.r0 = 1 self.r0 = 1
self.r1 = 1 self.r1 = 1
self.r2 = 1 self.r2 = 1
@ -291,6 +328,6 @@ class LZXStream(Parser):
def lzx_decompress(stream, window_bits): def lzx_decompress(stream, window_bits):
data = LZXStream(stream) data = LZXStream(stream)
data.compr_level = window_bits data.compr_level = window_bits
for unused in data: for _ in data:
pass pass
return data.uncompressed_data return data.uncompressed_data
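
The new fixup block above undoes LZX's Intel E8 preprocessing: the compressor rewrites the 32-bit operand of each CALL (0xE8) opcode as an absolute position, so the decompressor has to convert it back to a relative offset once the bytes land in uncompressed_data. A standalone sketch of that translation on a synthetic buffer (the real code above also tracks frame boundaries and the last 10 bytes of the previous block):

import struct

filesize = 0x1000
data = bytearray(b"\x90" * 16)
jmp_pos = 4
data[jmp_pos] = 0xE8
data[jmp_pos + 1:jmp_pos + 5] = struct.pack("<i", 0x0200)     # absolute target

abs_off, = struct.unpack("<i", bytes(data[jmp_pos + 1:jmp_pos + 5]))
if -jmp_pos <= abs_off < filesize:                            # same guard as above
    rel_off = abs_off + filesize if abs_off < 0 else abs_off - jmp_pos
    data[jmp_pos + 1:jmp_pos + 5] = struct.pack("<i", rel_off)

print(hex(struct.unpack("<i", bytes(data[jmp_pos + 1:jmp_pos + 5]))[0]))   # 0x1fc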

View file

@ -44,7 +44,7 @@ class MarFile(Parser):
return "Invalid magic" return "Invalid magic"
if self["version"].value != 3: if self["version"].value != 3:
return "Invalid version" return "Invalid version"
if not(1 <= self["nb_file"].value <= MAX_NB_FILE): if not (1 <= self["nb_file"].value <= MAX_NB_FILE):
return "Invalid number of file" return "Invalid number of file"
return True return True

View file

@ -7,7 +7,7 @@ Creation date: July 10, 2007
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.field import (RootSeekableFieldSet, FieldSet, from hachoir.field import (RootSeekableFieldSet, FieldSet,
String, CString, UInt32, RawBytes) String, CString, UInt32, RawBytes)
from hachoir.core.text_handler import displayHandler, filesizeHandler from hachoir.core.text_handler import displayHandler, filesizeHandler
from hachoir.core.tools import humanUnixAttributes from hachoir.core.tools import humanUnixAttributes
from hachoir.parser import HachoirParser from hachoir.parser import HachoirParser

View file

@ -7,10 +7,10 @@ Author: Christophe Gisquet
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (StaticFieldSet, FieldSet, from hachoir.field import (StaticFieldSet, FieldSet,
Bit, Bits, Enum, Bit, Bits, Enum,
UInt8, UInt16, UInt32, UInt64, UInt8, UInt16, UInt32, UInt64,
String, TimeDateMSDOS32, String, TimeDateMSDOS32,
NullBytes, NullBits, RawBytes) NullBytes, NullBits, RawBytes)
from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.parser.common.msdos import MSDOSFileAttr32 from hachoir.parser.common.msdos import MSDOSFileAttr32

View file

@ -6,9 +6,9 @@ Author: Victor Stinner, 1st December 2005.
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, UInt64, Enum, UInt8, UInt16, UInt32, UInt64, Enum,
NullBytes, Bytes, RawBytes, SubFile, NullBytes, Bytes, RawBytes, SubFile,
Character, CString, String) Character, CString, String)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.parser.archive.gzip_parser import GzipParser from hachoir.parser.archive.gzip_parser import GzipParser
from hachoir.parser.archive.bzip2_parser import Bzip2Parser from hachoir.parser.archive.bzip2_parser import Bzip2Parser

View file

@ -14,8 +14,8 @@ Date: February 26 2011
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (Field, FieldSet, ParserError, CString, from hachoir.field import (Field, FieldSet, ParserError, CString,
Enum, Bit, Bits, UInt8, UInt32, UInt64, Enum, Bit, Bits, UInt8, UInt32, UInt64,
Bytes, RawBytes, TimestampWin64) Bytes, RawBytes, TimestampWin64)
from hachoir.stream import StringInputStream from hachoir.stream import StringInputStream
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal

View file

@ -6,7 +6,7 @@ Author: Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
Enum, UInt8, SubFile, String, NullBytes) Enum, UInt8, SubFile, String, NullBytes)
from hachoir.core.tools import humanFilesize, paddingSize, timestampUNIX from hachoir.core.tools import humanFilesize, paddingSize, timestampUNIX
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
import re import re

View file

@ -7,11 +7,11 @@ Authors: Christophe Gisquet and Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
Bit, Bits, Enum, Bit, Bits, Enum,
TimeDateMSDOS32, SubFile, TimeDateMSDOS32, SubFile,
UInt8, UInt16, UInt32, UInt64, UInt8, UInt16, UInt32, UInt64,
String, PascalString16, String, PascalString16,
RawBytes) RawBytes)
from hachoir.stream.input import ReadStreamError from hachoir.stream.input import ReadStreamError
from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir.core.tools import makeUnicode from hachoir.core.tools import makeUnicode

View file

@ -7,20 +7,20 @@ Creation date: July 9 2007
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (Bit, Bits, Field, Int16, UInt32, from hachoir.field import (Bit, Bits, Field, Int16, UInt32,
Enum, FieldSet, GenericFieldSet, Enum, FieldSet, GenericFieldSet,
PaddingBits, ParserError, RawBytes) PaddingBits, ParserError, RawBytes)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.core.tools import paddingSize, alignValue from hachoir.core.tools import paddingSize, alignValue
def extend_data(data, length, offset): def extend_data(data: bytearray, length, offset):
"""Extend data using a length and an offset.""" """Extend data using a length and an offset, LZ-style."""
if length >= offset: if length >= offset:
new_data = data[-offset:] * (alignValue(length, offset) // offset) new_data = data[-offset:] * (alignValue(length, offset) // offset)
return data + new_data[:length] data += new_data[:length]
else: else:
return data + data[-offset:-offset + length] data += data[-offset:-offset + length]
def build_tree(lengths): def build_tree(lengths):
@ -136,9 +136,9 @@ class DeflateBlock(FieldSet):
CODE_LENGTH_ORDER = [16, 17, 18, 0, 8, 7, 9, CODE_LENGTH_ORDER = [16, 17, 18, 0, 8, 7, 9,
6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15] 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]
def __init__(self, parent, name, uncomp_data="", *args, **kwargs): def __init__(self, parent, name, uncomp_data=b"", *args, **kwargs):
FieldSet.__init__(self, parent, name, *args, **kwargs) FieldSet.__init__(self, parent, name, *args, **kwargs)
self.uncomp_data = uncomp_data self.uncomp_data = bytearray(uncomp_data)
def createFields(self): def createFields(self):
yield Bit(self, "final", "Is this the final block?") # BFINAL yield Bit(self, "final", "Is this the final block?") # BFINAL
@ -227,7 +227,7 @@ class DeflateBlock(FieldSet):
field._description = "Literal Code %r (Huffman Code %i)" % ( field._description = "Literal Code %r (Huffman Code %i)" % (
chr(value), field.value) chr(value), field.value)
yield field yield field
self.uncomp_data += chr(value) self.uncomp_data.append(value)
if value == 256: if value == 256:
field._description = "Block Terminator Code (256) (Huffman Code %i)" % field.value field._description = "Block Terminator Code (256) (Huffman Code %i)" % field.value
yield field yield field
@ -267,15 +267,14 @@ class DeflateBlock(FieldSet):
extrafield._description = "Distance Extra Bits (%i), total length %i" % ( extrafield._description = "Distance Extra Bits (%i), total length %i" % (
extrafield.value, distance) extrafield.value, distance)
yield extrafield yield extrafield
self.uncomp_data = extend_data( extend_data(self.uncomp_data, length, distance)
self.uncomp_data, length, distance)
class DeflateData(GenericFieldSet): class DeflateData(GenericFieldSet):
endian = LITTLE_ENDIAN endian = LITTLE_ENDIAN
def createFields(self): def createFields(self):
uncomp_data = "" uncomp_data = bytearray()
blk = DeflateBlock(self, "compressed_block[]", uncomp_data) blk = DeflateBlock(self, "compressed_block[]", uncomp_data)
yield blk yield blk
uncomp_data = blk.uncomp_data uncomp_data = blk.uncomp_data
@ -326,11 +325,11 @@ class ZlibData(Parser):
yield textHandler(UInt32(self, "data_checksum", "ADLER32 checksum of compressed data"), hexadecimal) yield textHandler(UInt32(self, "data_checksum", "ADLER32 checksum of compressed data"), hexadecimal)
def zlib_inflate(stream, wbits=None, prevdata=""): def zlib_inflate(stream, wbits=None):
if wbits is None or wbits >= 0: if wbits is None or wbits >= 0:
return ZlibData(stream)["data"].uncompressed_data return ZlibData(stream)["data"].uncompressed_data
else: else:
data = DeflateData(None, "root", stream, "", stream.askSize(None)) data = DeflateData(None, "root", stream, "", stream.askSize(None))
for unused in data: for _ in data:
pass pass
return data.uncompressed_data return data.uncompressed_data
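
extend_data() above now appends into the caller's bytearray in place instead of building and returning a new string, which is why DeflateBlock and the LZX code keep passing one shared bytearray around. A short usage sketch of the LZ-style overlapping copy:

from hachoir.parser.archive.zlib import extend_data

window = bytearray(b"abcabc")
extend_data(window, 5, 3)      # copy 5 bytes starting 3 back; the source overlaps the output
print(window)                  # bytearray(b'abcabcabcab')

extend_data(window, 2, 4)      # length < offset: a plain slice copy
print(window)                  # bytearray(b'abcabcabcabbc')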

View file

@ -7,9 +7,9 @@ Creation: 27 december 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt16, UInt32, Float80, TimestampMac32, UInt16, UInt32, Float80, TimestampMac32,
RawBytes, NullBytes, RawBytes, NullBytes,
String, Enum, PascalString32) String, Enum, PascalString32)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import filesizeHandler from hachoir.core.text_handler import filesizeHandler
from hachoir.core.tools import alignValue from hachoir.core.tools import alignValue

View file

@ -7,9 +7,9 @@ Author: Victor Stinner
""" """
from hachoir.field import (FieldSet, MatchError, ParserError, from hachoir.field import (FieldSet, MatchError, ParserError,
Enum, UInt8, UInt24, UInt32, Enum, UInt8, UInt24, UInt32,
CString, String, RawBytes, CString, String, RawBytes,
Bit, Bits, NullBytes, NullBits) Bit, Bits, NullBytes, NullBits)
from hachoir.core.text_handler import textHandler from hachoir.core.text_handler import textHandler
from hachoir.core.tools import humanDuration from hachoir.core.tools import humanDuration
from hachoir.core.endian import NETWORK_ENDIAN from hachoir.core.endian import NETWORK_ENDIAN
@ -451,7 +451,7 @@ class ID3_Chunk(FieldSet):
if size: if size:
cls = None cls = None
if not(is_compressed): if not is_compressed:
tag = self["tag"].value tag = self["tag"].value
if tag in ID3_Chunk.handler: if tag in ID3_Chunk.handler:
cls = ID3_Chunk.handler[tag] cls = ID3_Chunk.handler[tag]

View file

@ -10,8 +10,8 @@ Creation date: 19 august 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt8, UInt16, UInt32, Int32, UInt64, TimestampMac32, UInt8, UInt16, UInt32, Int32, UInt64, TimestampMac32,
String, Float32, NullBytes, Enum, RawBytes) String, Float32, NullBytes, Enum, RawBytes)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.tools import humanDuration from hachoir.core.tools import humanDuration
from hachoir.core.text_handler import displayHandler, filesizeHandler from hachoir.core.text_handler import displayHandler, filesizeHandler
@ -128,7 +128,7 @@ class DataObject(FieldSet):
yield padding yield padding
for i in range(self["entry_count"].value): for i in range(self["entry_count"].value):
yield UInt32(self, "index[" + str(i) + "]", "Index of the " + str(i) + "nth mhit") yield UInt32(self, "index[" + str(i) + "]", "Index of the " + str(i) + "nth mhit")
elif(self["type"].value < 15) or (self["type"].value > 17) or (self["type"].value >= 200): elif (self["type"].value < 15) or (self["type"].value > 17) or (self["type"].value >= 200):
yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]")
yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]")
yield UInt32(self, "position", "Position") yield UInt32(self, "position", "Position")

View file

@ -10,7 +10,7 @@ Creation: 27 december 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, Bits, ParserError, from hachoir.field import (FieldSet, Bits, ParserError,
String, UInt32, UInt24, UInt16, UInt8, Enum, RawBits, RawBytes) String, UInt32, UInt24, UInt16, UInt8, Enum, RawBits, RawBytes)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.core.tools import createDict, humanDurationNanosec from hachoir.core.tools import createDict, humanDurationNanosec
@ -29,7 +29,7 @@ class Integer(Bits):
while True: while True:
bits = stream.readBits(addr, 8, parent.endian) bits = stream.readBits(addr, 8, parent.endian)
value = (value << 7) + (bits & 127) value = (value << 7) + (bits & 127)
if not(bits & 128): if not (bits & 128):
break break
addr += 8 addr += 8
self._size += 8 self._size += 8

View file

@ -20,8 +20,8 @@ Creation: 18th February 2007
from math import log10 from math import log10
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
Bits, UInt16, UInt8, Bits, UInt16, UInt8,
RawBytes, String, GenericVector) RawBytes, String, GenericVector)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import textHandler from hachoir.core.text_handler import textHandler

View file

@ -9,8 +9,8 @@ Creation: 10th February 2007
""" """
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt32, UInt16, UInt8, Int8, Float32, UInt32, UInt16, UInt8, Int8, Float32,
RawBytes, String, GenericVector, ParserError) RawBytes, String, GenericVector, ParserError)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal

View file

@ -7,10 +7,10 @@ Author: Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
MissingField, ParserError, createOrphanField, MissingField, ParserError, createOrphanField,
Bit, Bits, Enum, Bit, Bits, Enum,
PaddingBits, PaddingBytes, PaddingBits, PaddingBytes,
RawBytes) RawBytes)
from hachoir.parser.audio.id3 import ID3v1, ID3v2 from hachoir.parser.audio.id3 import ID3v1, ID3v2
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.tools import humanFrequency, humanBitSize from hachoir.core.tools import humanFrequency, humanBitSize

View file

@ -10,9 +10,9 @@ Samples:
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt8, UInt16, UInt32, UInt8, UInt16, UInt32,
Bytes, RawBytes, String, Bytes, RawBytes, String,
PascalString8) PascalString8)
from hachoir.core.tools import humanFrequency from hachoir.core.tools import humanFrequency
from hachoir.core.text_handler import displayHandler from hachoir.core.text_handler import displayHandler
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN

View file

@ -11,10 +11,10 @@ Creation: 11th February 2007
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (StaticFieldSet, FieldSet, Field, from hachoir.field import (StaticFieldSet, FieldSet, Field,
Bit, Bits, Bit, Bits,
UInt32, UInt16, UInt8, Enum, UInt32, UInt16, UInt8, Enum,
PaddingBytes, RawBytes, NullBytes, PaddingBytes, RawBytes, NullBytes,
String, GenericVector, ParserError) String, GenericVector, ParserError)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.core.tools import alignValue from hachoir.core.tools import alignValue

View file

@ -15,9 +15,9 @@ Creation: 8th February 2007
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (StaticFieldSet, FieldSet, from hachoir.field import (StaticFieldSet, FieldSet,
Bit, RawBits, Bits, Bit, RawBits, Bits,
UInt32, UInt16, UInt8, Int8, Enum, UInt32, UInt16, UInt8, Int8, Enum,
RawBytes, String, GenericVector) RawBytes, String, GenericVector)
from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN
from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir.parser.audio.modplug import ParseModplugMetadata from hachoir.parser.audio.modplug import ParseModplugMetadata

View file

@ -1,5 +1,5 @@
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt16, UInt32, Enum, String, Bytes, Bits, TimestampUUID60) UInt16, UInt32, Enum, String, Bytes, Bits, TimestampUUID60)
from hachoir.parser.video.fourcc import video_fourcc_name from hachoir.parser.video.fourcc import video_fourcc_name
from hachoir.core.bits import str2hex from hachoir.core.bits import str2hex
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal

View file

@ -13,8 +13,8 @@ Creation date: 26 April 2008
""" """
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
Bit, Bits, UInt8, UInt32, Int16, UInt16, Float32, Float64, CString, Enum, Bit, Bits, UInt8, UInt32, Int16, UInt16, Float32, Float64, CString, Enum,
RawBytes, String) RawBytes, String)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.field.float import FloatExponent from hachoir.field.float import FloatExponent
from struct import unpack from struct import unpack

View file

@ -41,9 +41,9 @@ Creation date: 24 september 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
FieldError, ParserError, FieldError, ParserError,
Bit, Bits, Bytes, UInt8, GenericInteger, String, Bit, Bits, Bytes, UInt8, GenericInteger, String,
Field, Enum, RawBytes) Field, Enum, RawBytes)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.tools import createDict, humanDatetime from hachoir.core.tools import createDict, humanDatetime
from hachoir.stream import InputStreamError from hachoir.stream import InputStreamError

View file

@ -6,11 +6,11 @@
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, Link, from hachoir.field import (FieldSet, Link,
MissingField, ParserError, MissingField, ParserError,
Enum as _Enum, String as _String, Enum as _Enum, String as _String,
Float32, Float64, Float32, Float64,
NullBits, Bits, Bit, RawBytes, Bytes, NullBits, Bits, Bit, RawBytes, Bytes,
Int16, GenericInteger) Int16, GenericInteger)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.iso639 import ISO639_2 from hachoir.core.iso639 import ISO639_2
from hachoir.core.tools import humanDatetime from hachoir.core.tools import humanDatetime

View file

@ -20,10 +20,10 @@ Creation: 2 august 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.parser.common.win32 import GUID from hachoir.parser.common.win32 import GUID
from hachoir.field import (ParserError, FieldSet, MissingField, from hachoir.field import (ParserError, FieldSet, MissingField,
Enum, Enum,
Bit, NullBits, Bits, UInt8, Int16, UInt16, UInt24, Int32, UInt32, Int64, UInt64, TimestampMac32, Bit, NullBits, Bits, UInt8, Int16, UInt16, UInt24, Int32, UInt32, Int64, UInt64, TimestampMac32,
String, PascalString8, PascalString16, CString, String, PascalString8, PascalString16, CString,
RawBytes, NullBytes) RawBytes, NullBytes)
from hachoir.field.timestamp import timestampFactory from hachoir.field.timestamp import timestampFactory
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import textHandler from hachoir.core.text_handler import textHandler
@ -1312,7 +1312,7 @@ class MP4File(Parser):
if size < 8: if size < 8:
return "Invalid first atom size" return "Invalid first atom size"
tag = self.stream.readBytes(4 * 8, 4) tag = self.stream.readBytes(4 * 8, 4)
if tag not in (b"ftyp", b"moov", b"free"): if tag not in (b"ftyp", b"moov", b"free", b"skip"):
return "Unknown MOV file type" return "Unknown MOV file type"
return True return True
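
The extra b"skip" above lets validate() accept files whose first top-level atom is a skip (free space) atom. A standalone sketch of the same check outside hachoir's Parser API; looks_like_mp4 is purely illustrative:

import struct

def looks_like_mp4(header):
    # an MP4/MOV file starts with a 32-bit big-endian atom size and a 4-byte atom type
    if len(header) < 8:
        return False
    size, tag = struct.unpack(">I4s", header[:8])
    return size >= 8 and tag in (b"ftyp", b"moov", b"free", b"skip")

print(looks_like_mp4(b"\x00\x00\x00\x20ftypisom"))            # True
print(looks_like_mp4(b"\x00\x00\x00\x18skip" + bytes(20)))    # True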

View file

@ -6,9 +6,9 @@
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (Field, FieldSet, createOrphanField, from hachoir.field import (Field, FieldSet, createOrphanField,
NullBits, Bit, Bits, Enum, Fragment, MissingField, ParserError, NullBits, Bit, Bits, Enum, Fragment, MissingField, ParserError,
UInt8, UInt16, UInt24, UInt32, UInt64, UInt8, UInt16, UInt24, UInt32, UInt64,
RawBytes, String, PascalString32, NullBytes) RawBytes, String, PascalString32, NullBytes)
from hachoir.stream import FragmentedStream, InputStreamError from hachoir.stream import FragmentedStream, InputStreamError
from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN
from hachoir.core.tools import humanDurationNanosec from hachoir.core.tools import humanDurationNanosec

View file

@ -15,8 +15,8 @@ Samples:
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt16, UInt32, Bit, RawBits, UInt16, UInt32, Bit, RawBits,
RawBytes, String, PascalString8, PascalString16) RawBytes, String, PascalString8, PascalString16)
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN

View file

@ -29,10 +29,10 @@ Thanks to:
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, Enum, UInt8, UInt16, UInt32, Enum,
Bit, NullBits, NullBytes, Bit, NullBits, NullBytes,
RawBytes, String, PaddingBytes, RawBytes, String, PaddingBytes,
SubFile) SubFile)
from hachoir.core.tools import alignValue, humanDuration from hachoir.core.tools import alignValue, humanDuration
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import filesizeHandler, textHandler from hachoir.core.text_handler import filesizeHandler, textHandler

View file

@ -15,8 +15,8 @@ Creation date: 29 october 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
Bit, Bits, UInt8, UInt16, Int32, UInt32, Int64, CString, Enum, Bit, Bits, UInt8, UInt16, Int32, UInt32, Int64, CString, Enum,
Bytes, RawBytes, NullBits, String, SubFile) Bytes, RawBytes, NullBits, String, SubFile)
from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN
from hachoir.core.text_handler import textHandler, filesizeHandler from hachoir.core.text_handler import textHandler, filesizeHandler
from hachoir.core.tools import paddingSize, humanFrequency from hachoir.core.tools import paddingSize, humanFrequency

View file

@ -14,9 +14,9 @@ Sources:
from hachoir.parser import HachoirParser, Parser from hachoir.parser import HachoirParser, Parser
from hachoir.field import (RootSeekableFieldSet, SeekableFieldSet, FieldSet, ParserError, from hachoir.field import (RootSeekableFieldSet, SeekableFieldSet, FieldSet, ParserError,
Bit, Bits, UInt8, UInt16, UInt32, Bit, Bits, UInt8, UInt16, UInt32,
Enum, String, TimestampUnix32, RawBytes, Enum, String, TimestampUnix32, RawBytes,
NullBytes, PaddingBits, PaddingBytes, FragmentGroup, CustomFragment) NullBytes, PaddingBits, PaddingBytes, FragmentGroup, CustomFragment)
from hachoir.core.tools import (humanDuration, humanFilesize) from hachoir.core.tools import (humanDuration, humanFilesize)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler from hachoir.core.text_handler import textHandler
@ -240,11 +240,13 @@ class Inode(FieldSet):
return out return out
def is_fast_symlink(self): def is_fast_symlink(self):
Removed:

    self.seekByte(4 * 15 + 4)
    acl = UInt32(self, "file_acl")

Added:

    acl_addr = self.absolute_address + self.current_size
    # skip 15 blocks + version field
    acl_addr += (4 * 15 + 4) * 8
    acl = self.stream.readBits(acl_addr, 32, self.endian)
b = 0 b = 0
if acl.value > 0: if acl > 0:
b = (2 << self["/superblock/log_block_size"].value) b = (2 << self["/superblock/log_block_size"].value)
return (self['blocks'].value - b == 0) return (self['blocks'].value - b == 0)
@ -747,7 +749,7 @@ class EXT2_FS(HachoirParser, RootSeekableFieldSet):
def validate(self): def validate(self):
if self.stream.readBytes((1024 + 56) * 8, 2) != b"\x53\xEF": if self.stream.readBytes((1024 + 56) * 8, 2) != b"\x53\xEF":
return "Invalid magic number" return "Invalid magic number"
if not(0 <= self["superblock/log_block_size"].value <= 2): if not (0 <= self["superblock/log_block_size"].value <= 2):
return "Invalid (log) block size" return "Invalid (log) block size"
if self["superblock/inode_size"].value not in (0, 128): if self["superblock/inode_size"].value not in (0, 128):
return "Unsupported inode size" return "Unsupported inode size"

View file

@ -1,8 +1,8 @@
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, StaticFieldSet, from hachoir.field import (FieldSet, StaticFieldSet,
RawBytes, PaddingBytes, createPaddingField, Link, Fragment, RawBytes, PaddingBytes, createPaddingField, Link, Fragment,
Bit, Bits, UInt8, UInt16, UInt32, Bit, Bits, UInt8, UInt16, UInt32,
String, Bytes, NullBytes) String, Bytes, NullBytes)
from hachoir.field.integer import GenericInteger from hachoir.field.integer import GenericInteger
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal

View file

@ -11,8 +11,8 @@ Creation: 11 july 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
UInt8, UInt32, UInt64, Enum, UInt8, UInt32, UInt64, Enum,
NullBytes, RawBytes, String) NullBytes, RawBytes, String)
from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN

View file

@ -11,8 +11,8 @@ Creation date: 25 december 2006 (christmas ;-))
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (ParserError, GenericVector, from hachoir.field import (ParserError, GenericVector,
UInt32, String, UInt32, String,
Bytes, NullBytes, RawBytes) Bytes, NullBytes, RawBytes)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.tools import humanFilesize from hachoir.core.tools import humanFilesize
from hachoir.core.bits import str2hex from hachoir.core.bits import str2hex

View file

@ -14,8 +14,8 @@ Master Boot Record.
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
Enum, Bits, UInt8, UInt16, UInt32, Enum, Bits, UInt8, UInt16, UInt32,
RawBytes) RawBytes)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.tools import humanFilesize from hachoir.core.tools import humanFilesize
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal

View file

@ -13,9 +13,9 @@ Author: Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, Enum, from hachoir.field import (FieldSet, Enum,
UInt8, UInt16, UInt32, UInt64, TimestampWin64, UInt8, UInt16, UInt32, UInt64, TimestampWin64,
String, Bytes, Bit, Bits, String, Bytes, Bit, Bits,
NullBits, NullBytes, PaddingBytes, RawBytes) NullBits, NullBytes, PaddingBytes, RawBytes)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler
from hachoir.core.tools import humanFilesize, createDict from hachoir.core.tools import humanFilesize, createDict

View file

@ -22,7 +22,7 @@ Kurz.
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, Enum, from hachoir.field import (FieldSet, Enum,
UInt16, UInt32, String, RawBytes, NullBytes, SeekableFieldSet, Bit) UInt16, UInt32, String, RawBytes, NullBytes, SeekableFieldSet, Bit)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN

View file

@ -7,7 +7,7 @@ Creation date: 1 January 2007
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt8, UInt16, UInt32, GenericVector) UInt8, UInt16, UInt32, GenericVector)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN

View file

@ -7,8 +7,8 @@ Creation date: 2006-09-15
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, StaticFieldSet, from hachoir.field import (FieldSet, StaticFieldSet,
UInt8, UInt16, UInt32, UInt8, UInt16, UInt32,
String, PaddingBytes, Bytes, RawBytes) String, PaddingBytes, Bytes, RawBytes)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN

View file

@ -139,4 +139,7 @@ def createParser(filename, real_filename=None, tags=None):
if not tags: if not tags:
tags = [] tags = []
stream = FileInputStream(filename, real_filename, tags=tags) stream = FileInputStream(filename, real_filename, tags=tags)
return guessParser(stream) guess = guessParser(stream)
if guess is None:
stream.close()
return guess
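With this change createParser() closes the freshly opened stream itself when no parser matches, so callers only have to handle the None result. A usage sketch (the file name is hypothetical):

    from hachoir.parser import createParser

    parser = createParser("sample.bin")   # hypothetical path
    if parser is None:
        print("no parser found; createParser already closed the stream")
    else:
        print(parser.description)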

View file

@ -8,9 +8,9 @@ Creation: 16 december 2005
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt8, UInt16, UInt32, Bits, UInt8, UInt16, UInt32, Bits,
String, RawBytes, Enum, String, RawBytes, Enum,
PaddingBytes, NullBytes, createPaddingField) PaddingBytes, NullBytes, createPaddingField)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.parser.image.common import RGB, PaletteRGBA from hachoir.parser.image.common import RGB, PaletteRGBA

View file

@ -11,11 +11,11 @@ References:
""" """
from hachoir.field import (FieldSet, SeekableFieldSet, ParserError, from hachoir.field import (FieldSet, SeekableFieldSet, ParserError,
UInt8, UInt16, UInt32, UInt8, UInt16, UInt32,
Int8, Int16, Int32, Int8, Int16, Int32,
Float32, Float64, Float32, Float64,
Enum, String, Bytes, SubFile, Enum, String, Bytes, SubFile,
NullBits, NullBytes) NullBits, NullBytes)
from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN
from hachoir.core.tools import createDict from hachoir.core.tools import createDict

View file

@ -11,11 +11,11 @@ Author: Victor Stinner, Robert Xiao
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
Enum, UInt8, UInt16, Enum, UInt8, UInt16,
Bit, Bits, NullBytes, Bit, Bits, NullBytes,
String, PascalString8, Character, String, PascalString8, Character,
NullBits, RawBytes, NullBits, RawBytes,
CustomFragment) CustomFragment)
from hachoir.parser.image.common import PaletteRGB from hachoir.parser.image.common import PaletteRGB
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.tools import humanDuration, paddingSize from hachoir.core.tools import humanDuration, paddingSize

View file

@ -6,7 +6,7 @@ Author: Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, Enum, RawBytes) UInt8, UInt16, UInt32, Enum, RawBytes)
from hachoir.parser.image.common import PaletteRGBA from hachoir.parser.image.common import PaletteRGBA
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.parser.common.win32 import BitmapInfoHeader from hachoir.parser.common.win32 import BitmapInfoHeader

View file

@ -12,7 +12,7 @@ Author: Victor Stinner
""" """
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
UInt8, UInt16, String, RawBytes, NullBytes) UInt8, UInt16, String, RawBytes, NullBytes)
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal

View file

@ -16,9 +16,9 @@ Author: Victor Stinner, Robert Xiao
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, FieldError, from hachoir.field import (FieldSet, ParserError, FieldError,
UInt8, UInt16, Enum, Field, UInt8, UInt16, Enum, Field,
Bit, Bits, NullBits, NullBytes, PaddingBits, Bit, Bits, NullBits, NullBytes, PaddingBits,
String, RawBytes) String, RawBytes)
from hachoir.parser.image.common import PaletteRGB from hachoir.parser.image.common import PaletteRGB
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
@ -205,7 +205,7 @@ class SOSComponent(FieldSet):
def createFields(self): def createFields(self):
comp_id = UInt8(self, "component_id") comp_id = UInt8(self, "component_id")
yield comp_id yield comp_id
if not(1 <= comp_id.value <= self["../nr_components"].value): if not (1 <= comp_id.value <= self["../nr_components"].value):
raise ParserError("JPEG error: Invalid component-id") raise ParserError("JPEG error: Invalid component-id")
yield Bits(self, "dc_coding_table", 4, "DC entropy coding table destination selector") yield Bits(self, "dc_coding_table", 4, "DC entropy coding table destination selector")
yield Bits(self, "ac_coding_table", 4, "AC entropy coding table destination selector") yield Bits(self, "ac_coding_table", 4, "AC entropy coding table destination selector")
@ -387,7 +387,10 @@ class JpegImageData(FieldSet):
end = self.stream.searchBytes(b"\xff", start, MAX_FILESIZE * 8) end = self.stream.searchBytes(b"\xff", start, MAX_FILESIZE * 8)
if end is None: if end is None:
# this is a bad sign, since it means there is no terminator # this is a bad sign, since it means there is no terminator
# we ignore this; it likely means a truncated image # this likely means a truncated image:
# set the size to the remaining length of the stream
# to avoid being forced to parse subfields to calculate size
self._size = self.stream._size - self.absolute_address
break break
if self.stream.readBytes(end, 2) == b'\xff\x00': if self.stream.readBytes(end, 2) == b'\xff\x00':
# padding: false alarm # padding: false alarm
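The new fallback above sizes the truncated scan data as everything left in the stream; both quantities are bit counts, so the subtraction works directly on the stream size and the field's absolute address. A small arithmetic sketch with made-up numbers:

    # Hypothetical 1000-byte stream with image data starting at byte 600.
    stream_size = 1000 * 8              # hachoir stream sizes are in bits
    absolute_address = 600 * 8          # field start address, also in bits
    remaining = stream_size - absolute_address
    assert remaining == 400 * 8         # the truncated field spans the rest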

View file

@ -5,9 +5,9 @@ References:
""" """
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, Float32, Enum, UInt8, UInt16, UInt32, Float32, Enum,
SubFile, String, CString, PascalString8, SubFile, String, CString, PascalString8,
NullBytes, RawBytes) NullBytes, RawBytes)
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.core.tools import alignValue, createDict from hachoir.core.tools import alignValue, createDict
from hachoir.parser.image.iptc import IPTC from hachoir.parser.image.iptc import IPTC

View file

@ -10,12 +10,12 @@ Author: Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, Fragment, from hachoir.field import (FieldSet, Fragment,
ParserError, MissingField, ParserError, MissingField,
UInt8, UInt16, UInt32, UInt8, UInt16, UInt32,
String, CString, String, CString,
Bytes, RawBytes, Bytes, RawBytes,
Bit, NullBits, Bit, NullBits,
Enum, CompressedField) Enum, CompressedField)
from hachoir.parser.image.common import RGB from hachoir.parser.image.common import RGB
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.core.endian import NETWORK_ENDIAN from hachoir.core.endian import NETWORK_ENDIAN
@ -45,7 +45,7 @@ UNIT_NAME = {1: "Meter"}
COMPRESSION_NAME = { COMPRESSION_NAME = {
0: "deflate" # with 32K sliding window 0: "deflate" # with 32K sliding window
} }
MAX_CHUNK_SIZE = 5 * 1024 * 1024 # Maximum chunk size (5 MB) MAX_CHUNK_SIZE = 64 * 1024 * 1024 # Maximum chunk size heuristic (64 MB)
def headerParse(parent): def headerParse(parent):

View file

@ -7,7 +7,7 @@ Author: Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt16, UInt32, String, NullBytes, Enum, RawBytes) UInt16, UInt32, String, NullBytes, Enum, RawBytes)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.parser.image.photoshop_metadata import Photoshop8BIM from hachoir.parser.image.photoshop_metadata import Photoshop8BIM

View file

@ -16,8 +16,8 @@ Creation date: 26 december 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, StaticFieldSet, Enum, from hachoir.field import (FieldSet, StaticFieldSet, Enum,
MissingField, ParserError, MissingField, ParserError,
UInt32, Int32, UInt16, Int16, UInt8, NullBytes, RawBytes, String) UInt32, Int32, UInt16, Int16, UInt8, NullBytes, RawBytes, String)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.core.tools import createDict from hachoir.core.tools import createDict
@ -597,7 +597,7 @@ class WMF_File(Parser):
yield UInt32(self, "max_record_size", "The size of largest record in 16-bit words") yield UInt32(self, "max_record_size", "The size of largest record in 16-bit words")
yield UInt16(self, "nb_params", "Not Used (always 0)") yield UInt16(self, "nb_params", "Not Used (always 0)")
while not(self.eof): while not self.eof:
yield Function(self, "func[]") yield Function(self, "func[]")
def isEMF(self): def isEMF(self):

View file

@ -11,7 +11,7 @@ Author: Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (StaticFieldSet, FieldSet, ParserError, from hachoir.field import (StaticFieldSet, FieldSet, ParserError,
UInt8, UInt32, Enum, Float32, String, PascalString32, RawBytes) UInt8, UInt32, Enum, Float32, String, PascalString32, RawBytes)
from hachoir.parser.image.common import RGBA from hachoir.parser.image.common import RGBA
from hachoir.core.endian import NETWORK_ENDIAN from hachoir.core.endian import NETWORK_ENDIAN

View file

@ -16,3 +16,4 @@ from hachoir.parser.misc.word_doc import WordDocumentParser # noqa
from hachoir.parser.misc.word_2 import Word2DocumentParser # noqa from hachoir.parser.misc.word_2 import Word2DocumentParser # noqa
from hachoir.parser.misc.mstask import MSTaskFile # noqa from hachoir.parser.misc.mstask import MSTaskFile # noqa
from hachoir.parser.misc.mapsforge_map import MapsforgeMapFile # noqa from hachoir.parser.misc.mapsforge_map import MapsforgeMapFile # noqa
from hachoir.parser.misc.fit import FITFile # noqa

View file

@ -17,7 +17,7 @@ Created: 2008-09-21
from hachoir.parser import HachoirParser from hachoir.parser import HachoirParser
from hachoir.field import (RootSeekableFieldSet, FieldSet, Enum, from hachoir.field import (RootSeekableFieldSet, FieldSet, Enum,
Bits, GenericInteger, Float32, Float64, UInt8, UInt64, Bytes, NullBytes, RawBytes, String) Bits, GenericInteger, Float32, Float64, UInt8, UInt64, Bytes, NullBytes, RawBytes, String)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import displayHandler from hachoir.core.text_handler import displayHandler
from hachoir.core.tools import humanDatetime from hachoir.core.tools import humanDatetime

View file

@ -16,9 +16,9 @@ Creation date: 2007-03-04
""" """
from hachoir.field import (Field, FieldSet, ParserError, RootSeekableFieldSet, from hachoir.field import (Field, FieldSet, ParserError, RootSeekableFieldSet,
Int32, UInt16, UInt32, UInt64, Int32, UInt16, UInt32, UInt64,
RawBytes, PaddingBytes, RawBytes, PaddingBytes,
Enum, String) Enum, String)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.parser import HachoirParser from hachoir.parser import HachoirParser
from hachoir.parser.common.win32 import GUID from hachoir.parser.common.win32 import GUID

View file

@ -9,10 +9,10 @@ Created: 2010-09-01
from hachoir.parser import HachoirParser from hachoir.parser import HachoirParser
from hachoir.field import (RootSeekableFieldSet, FieldSet, from hachoir.field import (RootSeekableFieldSet, FieldSet,
NullBytes, RawBytes, PaddingBytes, Bytes, SubFile, String, PascalString8, NullBytes, RawBytes, PaddingBytes, Bytes, SubFile, String, PascalString8,
Bits, UInt8, UInt16, UInt32, Bits, UInt8, UInt16, UInt32,
Link, Link,
ParserError) ParserError)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import displayHandler from hachoir.core.text_handler import displayHandler
from hachoir.core.tools import paddingSize from hachoir.core.tools import paddingSize

View file

@ -7,8 +7,8 @@ Creation date: 28 september 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt32, Int32, String, Float32, UInt32, Int32, String, Float32,
RawBytes, PaddingBytes) RawBytes, PaddingBytes)
from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN
from hachoir.parser.misc.common import Vertex, MapUV from hachoir.parser.misc.common import Vertex, MapUV

View file

@ -5,7 +5,7 @@ Author: Victor Stinner
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (StaticFieldSet, FieldSet, from hachoir.field import (StaticFieldSet, FieldSet,
UInt16, UInt32, RawBytes, Enum, CString) UInt16, UInt32, RawBytes, Enum, CString)
from hachoir.parser.image.common import RGB from hachoir.parser.image.common import RGB
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal

View file

@ -0,0 +1,173 @@
"""
Garmin FIT file format parser.
Author: Sebastien Ponce <sebastien.ponce@cern.ch>
"""
from hachoir.parser import Parser
from hachoir.field import FieldSet, Int8, UInt8, Int16, UInt16, Int32, UInt32, Int64, UInt64, RawBytes, Bit, Bits, Bytes, String, Float32, Float64
from hachoir.core.endian import BIG_ENDIAN, LITTLE_ENDIAN
field_types = {
0: UInt8, # enum
1: Int8, # signed int of 8 bits
2: UInt8, # unsigned int of 8 bits
131: Int16, # signed int of 16 bits
132: UInt16, # unsigned int of 16 bits
133: Int32, # signed int of 32 bits
134: UInt32, # unsigned int of 32 bits
7: String, # string
136: Float32, # float
137: Float64, # double
10: UInt8, # unsigned int of 8 bits with 0 as invalid value
139: UInt16, # unsigned int of 16 bits with 0 as invalid value
140: UInt32, # unsigned int of 32 bits with 0 as invalid value
13: Bytes, # bytes
142: Int64, # signed int of 64 bits
143: UInt64, # unsigned int of 64 bits
144: UInt64 # unsigned int of 64 bits with 0 as invalid value
}
class Header(FieldSet):
endian = LITTLE_ENDIAN
def createFields(self):
yield UInt8(self, "size", "Header size")
yield UInt8(self, "protocol", "Protocol version")
yield UInt16(self, "profile", "Profile version")
yield UInt32(self, "datasize", "Data size")
yield RawBytes(self, "datatype", 4)
yield UInt16(self, "crc", "CRC of first 11 bytes or 0x0")
def createDescription(self):
return "Header of fit file. Data size is %d" % (self["datasize"].value)
class NormalRecordHeader(FieldSet):
def createFields(self):
yield Bit(self, "normal", "Normal header (0)")
yield Bit(self, "type", "Message type (0 data, 1 definition")
yield Bit(self, "typespecific", "0")
yield Bit(self, "reserved", "0")
yield Bits(self, "msgType", 4, description="Message type")
def createDescription(self):
return "Record header, this is a %s message" % ("definition" if self["type"].value else "data")
class FieldDefinition(FieldSet):
def createFields(self):
yield UInt8(self, "number", "Field definition number")
yield UInt8(self, "size", "Size in bytes")
yield UInt8(self, "type", "Base type")
def createDescription(self):
return "Field Definition. Number %d, Size %d" % (self["number"].value, self["size"].value)
class DefinitionMessage(FieldSet):
def createFields(self):
yield NormalRecordHeader(self, "RecordHeader")
yield UInt8(self, "reserved", "Reserved (0)")
yield UInt8(self, "architecture", "Architecture (0 little, 1 big endian")
self.endian = BIG_ENDIAN if self["architecture"].value else LITTLE_ENDIAN
yield UInt16(self, "msgNumber", "Message Number")
yield UInt8(self, "nbFields", "Number of fields")
for n in range(self["nbFields"].value):
yield FieldDefinition(self, "fieldDefinition[]")
def createDescription(self):
return "Definition Message. Contains %d fields" % (self["nbFields"].value)
class DataMessage(FieldSet):
def createFields(self):
hdr = NormalRecordHeader(self, "RecordHeader")
yield hdr
msgType = self["RecordHeader"]["msgType"].value
msgDef = self.parent.msgDefs[msgType]
for n in range(msgDef["nbFields"].value):
desc = msgDef["fieldDefinition[%d]" % n]
typ = field_types[desc["type"].value]
self.endian = BIG_ENDIAN if msgDef["architecture"].value else LITTLE_ENDIAN
if typ == String or typ == Bytes:
yield typ(self, "field%d" % n, desc["size"].value)
else:
if typ.static_size // 8 == desc["size"].value:
yield typ(self, "field%d" % n, desc["size"].value)
else:
for p in range(desc["size"].value * 8 // typ.static_size):
yield typ(self, "field%d[]" % n)
def createDescription(self):
return "Data Message"
class TimeStamp(FieldSet):
def createFields(self):
yield Bit(self, "timestamp", "TimeStamp (1)")
yield Bits(self, "msgType", 3, description="Message type")
yield Bits(self, "time", 4, description="TimeOffset")
def createDescription(self):
return "TimeStamp"
class CRC(FieldSet):
def createFields(self):
yield UInt16(self, "crc", "CRC")
def createDescription(self):
return "CRC"
class FITFile(Parser):
endian = BIG_ENDIAN
PARSER_TAGS = {
"id": "fit",
"category": "misc",
"file_ext": ("fit",),
"mime": ("application/fit",),
"min_size": 14 * 8,
"description": "Garmin binary fit format"
}
def __init__(self, *args, **kwargs):
Parser.__init__(self, *args, **kwargs)
self.msgDefs = {}
def validate(self):
s = self.stream.readBytes(0, 12)
if s[8:12] != b'.FIT':
return "Invalid header %d %d %d %d" % tuple([int(b) for b in s[8:12]])
return True
def createFields(self):
yield Header(self, "header")
while self.current_size < self["header"]["datasize"].value * 8:
b = self.stream.readBits(self.absolute_address + self.current_size, 2, self.endian)
if b == 1:
defMsg = DefinitionMessage(self, "definition[]")
msgType = defMsg["RecordHeader"]["msgType"].value
sizes = ''
ts = 0
for n in range(defMsg["nbFields"].value):
fname = "fieldDefinition[%d]" % n
size = defMsg[fname]["size"].value
ts += size
sizes += "%d/" % size
sizes += "%d" % ts
self.msgDefs[msgType] = defMsg
yield defMsg
elif b == 0:
yield DataMessage(self, "data[]")
else:
yield TimeStamp(self, "timestamp[]")
yield CRC(self, "crc")

View file

@ -12,10 +12,10 @@ Creation date: 2008-04-09
from hachoir.core.tools import paddingSize from hachoir.core.tools import paddingSize
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
Bit, NullBits, NullBytes, Bit, NullBits, NullBytes,
UInt8, UInt32, String, RawBytes, Enum, UInt8, UInt32, String, RawBytes, Enum,
TimestampUnix64, CompressedField, TimestampUnix64, CompressedField,
SubFile) SubFile)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
try: try:

View file

@ -12,11 +12,11 @@ Creation date: 2007-09-03
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
Bits, Int32, UInt16, UInt32, Bits, Int32, UInt16, UInt32,
NullBytes, RawBytes, PaddingBytes, String) NullBytes, RawBytes, PaddingBytes, String)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import (textHandler, hexadecimal, from hachoir.core.text_handler import (textHandler, hexadecimal,
displayHandler, humanFilesize) displayHandler, humanFilesize)
class FileEntry(FieldSet): class FileEntry(FieldSet):

View file

@ -24,11 +24,11 @@ Changes:
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
CString, String, CString, String,
UInt32, UInt16, UInt8, UInt32, UInt16, UInt8,
Bit, Bits, PaddingBits, Bit, Bits, PaddingBits,
TimestampWin64, DateTimeMSDOS32, TimestampWin64, DateTimeMSDOS32,
NullBytes, PaddingBytes, RawBytes, Enum) NullBytes, PaddingBytes, RawBytes, Enum)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.parser.common.win32 import GUID from hachoir.parser.common.win32 import GUID

View file

@ -10,8 +10,8 @@ References:
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (Bit, Bits, UInt8, UInt16, UInt32, Int32, UInt64, String, from hachoir.field import (Bit, Bits, UInt8, UInt16, UInt32, Int32, UInt64, String,
PaddingBits, PaddingBits,
Enum, Field, FieldSet, SeekableFieldSet, RootSeekableFieldSet) Enum, Field, FieldSet, SeekableFieldSet, RootSeekableFieldSet)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
@ -41,7 +41,7 @@ class UIntVbe(Field):
size += 1 size += 1
assert size < 100, "UIntVBE is too large" assert size < 100, "UIntVBE is too large"
if not(haveMoreData): if not haveMoreData:
break break
self._size = size * 8 self._size = size * 8
@ -71,7 +71,7 @@ class IntVbe(Field):
size += 1 size += 1
assert size < 100, "IntVBE is too large" assert size < 100, "IntVBE is too large"
if not(haveMoreData): if not haveMoreData:
break break
if isNegative: if isNegative:
@ -142,7 +142,7 @@ class TileHeader(FieldSet):
def createFields(self): def createFields(self):
numLevels = int(self.zoomIntervalCfg[ numLevels = int(self.zoomIntervalCfg[
"max_zoom_level"].value - self.zoomIntervalCfg["min_zoom_level"].value) + 1 "max_zoom_level"].value - self.zoomIntervalCfg["min_zoom_level"].value) + 1
assert(numLevels < 50) assert (numLevels < 50)
for i in range(numLevels): for i in range(numLevels):
yield TileZoomTable(self, "zoom_table_entry[]") yield TileZoomTable(self, "zoom_table_entry[]")
yield UIntVbe(self, "first_way_offset") yield UIntVbe(self, "first_way_offset")

View file

@ -11,8 +11,8 @@ Creation: 8 january 2005
""" """
from hachoir.field import (SubFile, FieldSet, from hachoir.field import (SubFile, FieldSet,
UInt8, UInt16, UInt32, Enum, String, CString, UInt8, UInt16, UInt32, Enum, String, CString,
Bits, RawBytes, CustomFragment) Bits, RawBytes, CustomFragment)
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal
from hachoir.parser.misc.ole2_util import OLE2FragmentParser, RawParser from hachoir.parser.misc.ole2_util import OLE2FragmentParser, RawParser
from hachoir.parser.misc.msoffice_summary import Summary, DocSummary, CompObj from hachoir.parser.misc.msoffice_summary import Summary, DocSummary, CompObj

View file

@ -9,11 +9,11 @@ Documents
""" """
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
SeekableFieldSet, SeekableFieldSet,
Bit, Bits, NullBits, Bit, Bits, NullBits,
UInt8, UInt16, UInt32, TimestampWin64, TimedeltaWin64, Enum, UInt8, UInt16, UInt32, TimestampWin64, TimedeltaWin64, Enum,
Bytes, RawBytes, NullBytes, PaddingBits, String, Bytes, RawBytes, NullBytes, PaddingBits, String,
Int8, Int32, Float32, Float64, PascalString32) Int8, Int32, Float32, Float64, PascalString32)
from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler
from hachoir.core.tools import createDict, paddingSize from hachoir.core.tools import createDict, paddingSize
from hachoir.parser.common.win32 import GUID, PascalStringWin32, CODEPAGE_CHARSET from hachoir.parser.common.win32 import GUID, PascalStringWin32, CODEPAGE_CHARSET

View file

@ -12,8 +12,8 @@ http://technet.microsoft.com/en-us/library/bb490996.aspx
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, RootSeekableFieldSet, from hachoir.field import (FieldSet, RootSeekableFieldSet,
UInt32, UInt16, UInt32, UInt16,
Bit, RawBits, RawBytes, Enum) Bit, RawBits, RawBytes, Enum)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.parser.common.win32 import PascalStringWin16, GUID from hachoir.parser.common.win32 import PascalStringWin16, GUID

View file

@ -211,7 +211,7 @@ class OLE2_File(HachoirParser, RootSeekableFieldSet):
return "Unknown major version (%s)" % self["header/ver_maj"].value return "Unknown major version (%s)" % self["header/ver_maj"].value
if self["header/endian"].value not in (b"\xFF\xFE", b"\xFE\xFF"): if self["header/endian"].value not in (b"\xFF\xFE", b"\xFE\xFF"):
return "Unknown endian (%s)" % self["header/endian"].raw_display return "Unknown endian (%s)" % self["header/endian"].raw_display
if not(MIN_BIG_BLOCK_LOG2 <= self["header/bb_shift"].value <= MAX_BIG_BLOCK_LOG2): if not (MIN_BIG_BLOCK_LOG2 <= self["header/bb_shift"].value <= MAX_BIG_BLOCK_LOG2):
return "Invalid (log 2 of) big block size (%s)" % self["header/bb_shift"].value return "Invalid (log 2 of) big block size (%s)" % self["header/bb_shift"].value
if self["header/bb_shift"].value < self["header/sb_shift"].value: if self["header/bb_shift"].value < self["header/sb_shift"].value:
return "Small block size (log2=%s) is bigger than big block size (log2=%s)!" \ return "Small block size (log2=%s) is bigger than big block size (log2=%s)!" \

View file

@ -12,8 +12,8 @@ Creation date: 2007-03-20
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, Enum, from hachoir.field import (FieldSet, Enum,
UInt8, UInt32, Bytes, RawBytes, NullBytes, UInt8, UInt32, Bytes, RawBytes, NullBytes,
Bit, Bits, PaddingBits, CString) Bit, Bits, PaddingBits, CString)
from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler
from hachoir.core.tools import paddingSize from hachoir.core.tools import paddingSize

View file

@ -44,7 +44,7 @@ def getElementEnd(s, limit=b' ', offset=0):
class PDFNumber(Field): class PDFNumber(Field):
LIMITS = [b'[', b'/', b'\x0D', b']'] LIMITS = [b'[', b'/', b'\x0A', b'\x0D', b'>', b']']
""" """
sprintf("%i") or sprinf("%.?f") sprintf("%i") or sprinf("%.?f")
""" """
@ -81,18 +81,18 @@ class PDFString(Field):
def __init__(self, parent, name, desc=None): def __init__(self, parent, name, desc=None):
Field.__init__(self, parent, name, description=desc) Field.__init__(self, parent, name, description=desc)
val = "" val = bytearray()
count = 1 count = 1
off = 1 off = 1
while not parent.eof: while not parent.eof:
char = parent.stream.readBytes(self.absolute_address + 8 * off, 1) char = parent.stream.readBytes(self.absolute_address + 8 * off, 1)
# Non-ASCII # Non-ASCII
if not char.isalpha() or char == '\\': if not char.isalpha() or char == b'\\':
off += 1 off += 1
continue continue
if char == '(': if char == b'(':
count += 1 count += 1
if char == ')': if char == b')':
count -= 1 count -= 1
# Parenthesis block = 0 => end of string # Parenthesis block = 0 => end of string
if count == 0: if count == 0:
@ -101,13 +101,15 @@ class PDFString(Field):
# Add it to the string # Add it to the string
val += char val += char
off += 1
val = bytes(val)
self._size = 8 * off self._size = 8 * off
self.createValue = lambda: val self.createValue = lambda: val
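The switch from a str accumulator to a bytearray is the usual Python 3 fix: readBytes() returns bytes, which cannot be appended to a str and never compare equal to one. A tiny sketch of the pitfall, outside of any parser:

    chunk = b"("                     # what stream.readBytes() returns
    val = bytearray()
    val += chunk                     # bytes append cleanly to a bytearray
    assert bytes(val) == b"("
    assert chunk == b"(" and chunk != "("   # bytes never equal str in Python 3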
class PDFName(Field): class PDFName(Field):
LIMITS = [b'[', b'/', b'<', b']'] LIMITS = [b'[', b'/', b'<', b'>', b']']
""" """
String starting with '/', where characters may be written using their String starting with '/', where characters may be written using their
ASCII code (example: '#20' would be ' ' ASCII code (example: '#20' would be ' '
@ -145,7 +147,7 @@ class PDFID(Field):
def __init__(self, parent, name, desc=None): def __init__(self, parent, name, desc=None):
Field.__init__(self, parent, name, description=desc) Field.__init__(self, parent, name, description=desc)
self._size = 8 * getElementEnd(parent, '>') self._size = 8 * getElementEnd(parent, b'>')
self.createValue = lambda: parent.stream.readBytes( self.createValue = lambda: parent.stream.readBytes(
self.absolute_address + 8, (self._size // 8) - 1) self.absolute_address + 8, (self._size // 8) - 1)
@ -254,7 +256,7 @@ def parsePDFType(s):
else: else:
# First parse size # First parse size
size = getElementEnd(s) size = getElementEnd(s)
for limit in ['/', '>', '<']: for limit in [b'/', b'>', b'<']:
other_size = getElementEnd(s, limit) other_size = getElementEnd(s, limit)
if other_size is not None: if other_size is not None:
other_size -= 1 other_size -= 1
@ -424,7 +426,7 @@ class Catalog(FieldSet):
new_length = getElementEnd(self, limit) new_length = getElementEnd(self, limit)
if length is None or (new_length is not None and new_length - len(limit) < length): if length is None or (new_length is not None and new_length - len(limit) < length):
length = new_length - len(limit) length = new_length - len(limit)
yield String(self, "object", length, strip=' ') yield String(self, "object", length, strip=' \n')
if self.stream.readBytes(self.absolute_address + self.current_size, 2) == b'<<': if self.stream.readBytes(self.absolute_address + self.current_size, 2) == b'<<':
yield PDFDictionary(self, "key_list") yield PDFDictionary(self, "key_list")
# End of catalog: this one has "endobj" # End of catalog: this one has "endobj"
@ -441,9 +443,9 @@ class Trailer(FieldSet):
yield RawBytes(self, "marker", len(self.MAGIC)) yield RawBytes(self, "marker", len(self.MAGIC))
yield WhiteSpace(self, "sep[]") yield WhiteSpace(self, "sep[]")
yield String(self, "start_attribute_marker", 2) yield String(self, "start_attribute_marker", 2)
yield WhiteSpace(self, "sep[]")
addr = self.absolute_address + self.current_size addr = self.absolute_address + self.current_size
while self.stream.readBytes(addr, 2) != b'>>': while self.stream.readBytes(addr, 2) != b'>>':
yield WhiteSpace(self, "sep[]")
t = PDFName(self, "type[]") t = PDFName(self, "type[]")
yield t yield t
name = t.value.decode() name = t.value.decode()
@ -462,6 +464,7 @@ class Trailer(FieldSet):
yield PDFDictionary(self, "decrypt") yield PDFDictionary(self, "decrypt")
else: else:
raise ParserError("Don't know trailer type '%s'" % name) raise ParserError("Don't know trailer type '%s'" % name)
yield WhiteSpace(self, "sep[]")
addr = self.absolute_address + self.current_size addr = self.absolute_address + self.current_size
yield String(self, "end_attribute_marker", 2) yield String(self, "end_attribute_marker", 2)
yield LineEnd(self, "line_end[]") yield LineEnd(self, "line_end[]")

View file

@ -7,8 +7,8 @@ Creation date: 08 jul 2007
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, from hachoir.field import (FieldSet,
UInt8, UInt16, UInt24, UInt32, UInt64, Enum, UInt8, UInt16, UInt24, UInt32, UInt64, Enum,
CString, String, PaddingBytes, RawBytes, NullBytes) CString, String, PaddingBytes, RawBytes, NullBytes)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.tools import paddingSize, humanFilesize from hachoir.core.tools import paddingSize, humanFilesize
from hachoir.parser.common.win32 import GUID from hachoir.parser.common.win32 import GUID

View file

@ -9,7 +9,7 @@ Author: Christophe Gisquet <christophe.gisquet@free.fr>
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
String, RawBytes) String, RawBytes)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.core.tools import makePrintable, timestampUNIX, humanFilesize from hachoir.core.tools import makePrintable, timestampUNIX, humanFilesize

View file

@ -2,6 +2,8 @@
TrueType Font parser. TrueType Font parser.
Documents: Documents:
- "The OpenType Specification"
https://docs.microsoft.com/en-us/typography/opentype/spec/
- "An Introduction to TrueType Fonts: A look inside the TTF format" - "An Introduction to TrueType Fonts: A look inside the TTF format"
written by "NRSI: Computers & Writing Systems" written by "NRSI: Computers & Writing Systems"
http://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&item_id=IWS-Chapter08 http://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&item_id=IWS-Chapter08
@ -11,11 +13,26 @@ Creation date: 2007-02-08
""" """
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (
UInt16, UInt32, Bit, Bits, FieldSet,
PaddingBits, NullBytes, ParserError,
String, RawBytes, Bytes, Enum, UInt8,
TimestampMac32) UInt16,
UInt24,
UInt32,
Int16,
Bit,
Bits,
PaddingBits,
NullBytes,
String,
RawBytes,
Bytes,
Enum,
TimestampMac32,
GenericVector,
PascalString8,
)
from hachoir.core.endian import BIG_ENDIAN from hachoir.core.endian import BIG_ENDIAN
from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler
@ -69,11 +86,65 @@ CHARSET_MAP = {
3: {1: "UTF-16-BE"}, 3: {1: "UTF-16-BE"},
} }
PERMISSIONS = {
0: "Installable embedding",
2: "Restricted License embedding",
4: "Preview & Print embedding",
8: "Editable embedding",
}
class TableHeader(FieldSet): FWORD = Int16
UFWORD = UInt16
class Tag(String):
def __init__(self, parent, name, description=None):
String.__init__(self, parent, name, 4, description)
class Version16Dot16(FieldSet):
static_size = 32
def createFields(self): def createFields(self):
yield String(self, "tag", 4) yield UInt16(self, "major")
yield UInt16(self, "minor")
def createValue(self):
return float("%u.%x" % (self["major"].value, self["minor"].value))
class Fixed(FieldSet):
def createFields(self):
yield UInt16(self, "int_part")
yield UInt16(self, "float_part")
def createValue(self):
return self["int_part"].value + float(self["float_part"].value) / 65536
class Tuple(FieldSet):
def __init__(self, parent, name, axisCount):
super().__init__(parent, name, description="Tuple Record")
self.axisCount = axisCount
def createFields(self):
for _ in range(self.axisCount):
yield (Fixed(self, "coordinate[]"))
class F2DOT14(FieldSet):
static_size = 16
def createFields(self):
yield Int16(self, "int_part")
def createValue(self):
return self["int_part"].value / 16384
class TableHeader(FieldSet):
def createFields(self):
yield Tag(self, "tag")
yield textHandler(UInt32(self, "checksum"), hexadecimal) yield textHandler(UInt32(self, "checksum"), hexadecimal)
yield UInt32(self, "offset") yield UInt32(self, "offset")
yield filesizeHandler(UInt32(self, "size")) yield filesizeHandler(UInt32(self, "size"))
@ -83,7 +154,6 @@ class TableHeader(FieldSet):
class NameHeader(FieldSet): class NameHeader(FieldSet):
def createFields(self): def createFields(self):
yield Enum(UInt16(self, "platformID"), PLATFORM_NAME) yield Enum(UInt16(self, "platformID"), PLATFORM_NAME)
yield UInt16(self, "encodingID") yield UInt16(self, "encodingID")
@ -135,7 +205,7 @@ def parseFontHeader(self):
yield Bits(self, "adobe", 2, "(used by Adobe)") yield Bits(self, "adobe", 2, "(used by Adobe)")
yield UInt16(self, "unit_per_em", "Units per em") yield UInt16(self, "unit_per_em", "Units per em")
if not(16 <= self["unit_per_em"].value <= 16384): if not (16 <= self["unit_per_em"].value <= 16384):
raise ParserError("TTF: Invalid unit/em value") raise ParserError("TTF: Invalid unit/em value")
yield UInt32(self, "created_high") yield UInt32(self, "created_high")
yield TimestampMac32(self, "created") yield TimestampMac32(self, "created")
@ -162,17 +232,273 @@ def parseFontHeader(self):
yield UInt16(self, "glyph_format", "(=0)") yield UInt16(self, "glyph_format", "(=0)")
class AxisValueMap(FieldSet):
static_size = 32
def createFields(self):
yield F2DOT14(self, "fromCoordinate")
yield F2DOT14(self, "toCoordinate")
class SegmentMaps(FieldSet):
def createFields(self):
yield UInt16(
self, "positionMapCount", "The number of correspondence pairs for this axis"
)
for _ in range(self["positionMapCount"].value):
yield (AxisValueMap(self, "axisValueMaps[]"))
def parseAvar(self):
yield UInt16(self, "majorVersion", "Major version")
yield UInt16(self, "minorVersion", "Minor version")
yield PaddingBits(self, "reserved[]", 16)
yield UInt16(self, "axisCount", "The number of variation axes for this font")
for _ in range(self["axisCount"].value):
yield (SegmentMaps(self, "segmentMaps[]"))
class VariationAxisRecord(FieldSet):
def createFields(self):
yield Tag(self, "axisTag", "Tag identifying the design variation for the axis")
yield Fixed(self, "minValue", "The minimum coordinate value for the axis")
yield Fixed(self, "defaultValue", "The default coordinate value for the axis")
yield Fixed(self, "maxValue", "The maximum coordinate value for the axis")
yield PaddingBits(self, "reservedFlags", 15)
yield Bit(
self, "hidden", "The axis should not be exposed directly in user interfaces"
)
yield UInt16(
self,
"axisNameID",
"The name ID for entries in the 'name' table that provide a display name for this axis",
)
class InstanceRecord(FieldSet):
def __init__(self, parent, name, axisCount, hasPSNameID=False):
super().__init__(parent, name, description="Instance record")
self.axisCount = axisCount
self.hasPSNameID = hasPSNameID
def createFields(self):
yield UInt16(
self, "subfamilyNameID", "Name ID for subfamily names for this instance"
)
yield PaddingBits(self, "reservedFlags", 16)
yield Tuple(self, "coordinates", axisCount=self.axisCount)
if self.hasPSNameID:
yield UInt16(
self,
"postScriptNameID",
"Name ID for PostScript names for this instance",
)
def parseFvar(self):
yield UInt16(self, "majorVersion", "Major version")
yield UInt16(self, "minorVersion", "Minor version")
yield UInt16(
self, "axisArrayOffset", "Offset to the start of the VariationAxisRecord array."
)
yield PaddingBits(self, "reserved[]", 16)
yield UInt16(self, "axisCount", "The number of variation axes for this font")
yield UInt16(self, "axisSize", "The size in bytes of each VariationAxisRecord")
yield UInt16(self, "instanceCount", "The number of named instances for this font")
yield UInt16(self, "instanceSize", "The size in bytes of each InstanceRecord")
if self["axisArrayOffset"].value > 16:
yield PaddingBits(self, "padding", 8 * (self["axisArrayOffset"].value - 16))
for _ in range(self["axisCount"].value):
yield (VariationAxisRecord(self, "axes[]"))
for _ in range(self["instanceCount"].value):
yield (
InstanceRecord(
self,
"instances[]",
axisCount=self["axisCount"].value,
hasPSNameID=(
self["instanceSize"].value == (2 * self["axisCount"].value + 6)
),
)
)
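The hasPSNameID test compares the declared instanceSize against the size of a record that carries the optional postScriptNameID: per the OpenType specification an instance record is two UInt16 fields, one 4-byte Fixed coordinate per axis, plus the optional extra UInt16. A worked example for a hypothetical three-axis font:

    axis_count = 3
    base = 2 + 2 + 4 * axis_count          # subfamilyNameID + flags + coords
    with_ps_name = base + 2                # optional postScriptNameID
    assert (base, with_ps_name) == (16, 18)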
class EncodingRecord(FieldSet):
static_size = 64
def createFields(self):
yield Enum(UInt16(self, "platformID"), PLATFORM_NAME)
yield UInt16(self, "encodingID")
self.offset = UInt32(self, "subtableOffset")
yield self.offset
class CmapTable0(FieldSet):
def createFields(self):
yield UInt16(self, "format", "Table format")
yield UInt16(self, "length", "Length in bytes")
yield UInt16(self, "language", "Language ID")
yield GenericVector(self, "mapping", 256, UInt8)
class CmapTable4(FieldSet):
def createFields(self):
yield UInt16(self, "format", "Table format")
yield UInt16(self, "length", "Length in bytes")
yield UInt16(self, "language", "Language ID")
yield UInt16(self, "segCountX2", "Twice the number of segments")
segments = self["segCountX2"].value // 2
yield UInt16(self, "searchRange")
yield UInt16(self, "entrySelector")
yield UInt16(self, "rangeShift")
yield GenericVector(self, "endCode", segments, UInt16)
yield PaddingBits(self, "reserved[]", 16)
yield GenericVector(self, "startCode", segments, UInt16)
yield GenericVector(self, "idDelta", segments, Int16)
yield GenericVector(self, "idRangeOffsets", segments, UInt16)
remainder = (self["length"].value - (self.current_size / 8)) / 2
if remainder:
yield GenericVector(self, "glyphIdArray", remainder, UInt16)
class CmapTable6(FieldSet):
def createFields(self):
yield UInt16(self, "format", "Table format")
yield UInt16(self, "length", "Length in bytes")
yield UInt16(self, "language", "Language ID")
yield UInt16(self, "firstCode", "First character code of subrange")
yield UInt16(self, "entryCount", "Number of character codes in subrange")
yield GenericVector(self, "glyphIdArray", self["entryCount"].value, UInt16)
class SequentialMapGroup(FieldSet):
def createFields(self):
yield UInt32(self, "startCharCode", "First character code in this group")
yield UInt32(self, "endCharCode", "First character code in this group")
yield UInt32(
self,
"startGlyphID",
"Glyph index corresponding to the starting character code",
)
class CmapTable12(FieldSet):
def createFields(self):
yield UInt16(self, "format", "Table format")
yield PaddingBits(self, "reserved[]", 16)
yield UInt32(self, "length", "Length in bytes")
yield UInt32(self, "language", "Language ID")
yield UInt32(self, "numGroups", "Number of groupings which follow")
for i in range(self["numGroups"].value):
yield SequentialMapGroup(self, "mapgroup[]")
class VariationSelector(FieldSet):
def createFields(self):
yield UInt24(self, "varSelector", "Variation selector")
yield UInt32(self, "defaultUVSOffset", "Offset to default UVS table")
yield UInt32(self, "nonDefaultUVSOffset", "Offset to non-default UVS table")
class CmapTable14(FieldSet):
def createFields(self):
yield UInt16(self, "format", "Table format")
yield UInt32(self, "length", "Length in bytes")
yield UInt32(
self, "numVarSelectorRecords", "Number of variation selector records"
)
for i in range(self["numVarSelectorRecords"].value):
yield VariationSelector(self, "variationSelector[]")
def parseCmap(self):
yield UInt16(self, "version")
numTables = UInt16(self, "numTables", "Number of encoding tables")
yield numTables
encodingRecords = []
for index in range(numTables.value):
entry = EncodingRecord(self, "encodingRecords[]")
yield entry
encodingRecords.append(entry)
encodingRecords.sort(key=lambda field: field["subtableOffset"].value)
last = None
for er in encodingRecords:
offset = er["subtableOffset"].value
if last and last == offset:
continue
last = offset
# Add padding if any
padding = self.seekByte(offset, relative=True, null=False)
if padding:
yield padding
format = UInt16(self, "format").value
if format == 0:
yield CmapTable0(self, "cmap table format 0")
elif format == 4:
yield CmapTable4(self, "cmap table format 4")
elif format == 6:
yield CmapTable6(self, "cmap table format 6")
elif format == 12:
yield CmapTable12(self, "cmap table format 12")
elif format == 14:
yield CmapTable14(self, "cmap table format 14")
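parseCmap() (like parseDSIG() below) sorts the encoding records by subtable offset and parses each distinct offset only once, padding up to it as needed. The same walk in plain Python, with hypothetical record data:

    records = [("Unicode BMP", 28), ("Windows BMP", 28), ("Mac Roman", 292)]
    records.sort(key=lambda rec: rec[1])
    last = None
    for name, offset in records:
        if last == offset:
            continue                 # shared subtable: already parsed
        last = offset
        print("parse subtable at offset", offset, "first referenced by", name)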
class SignatureRecord(FieldSet):
def createFields(self):
yield UInt16(self, "format", "Table format")
yield UInt16(self, "length", "Length of signature")
yield UInt16(self, "signatureBlockOffset", "Offset to signature block")
class SignatureBlock(FieldSet):
def createFields(self):
yield PaddingBits(self, "reserved[]", 32)
yield UInt32(
self,
"length",
"Length (in bytes) of the PKCS#7 packet in the signature field",
)
yield String(self, "signature", self["length"].value, "Signature block")
def parseDSIG(self):
yield UInt32(self, "version")
yield UInt16(self, "numSignatures", "Number of signatures in the table")
yield Bit(self, "flag", "Cannot be resigned")
yield PaddingBits(self, "reserved[]", 7)
entries = []
for i in range(self["numSignatures"].value):
record = SignatureRecord(self, "signatureRecords[]")
entries.append(record)
yield record
entries.sort(key=lambda field: field["signatureBlockOffset"].value)
last = None
for entry in entries:
offset = entry["signatureBlockOffset"].value
if last and last == offset:
continue
last = offset
# Add padding if any
padding = self.seekByte(offset, relative=True, null=False)
if padding:
yield padding
padding = (self.size - self.current_size) // 8
if padding:
yield NullBytes(self, "padding_end", padding)
def parseNames(self): def parseNames(self):
# Read header # Read header
yield UInt16(self, "format") yield UInt16(self, "format")
if self["format"].value != 0: if self["format"].value != 0:
raise ParserError("TTF (names): Invalid format (%u)" % raise ParserError("TTF (names): Invalid format (%u)" % self["format"].value)
self["format"].value)
yield UInt16(self, "count") yield UInt16(self, "count")
yield UInt16(self, "offset") yield UInt16(self, "offset")
if MAX_NAME_COUNT < self["count"].value: if MAX_NAME_COUNT < self["count"].value:
raise ParserError("Invalid number of names (%s)" raise ParserError("Invalid number of names (%s)" % self["count"].value)
% self["count"].value)
# Read name index # Read name index
entries = [] entries = []
@ -208,17 +534,210 @@ def parseNames(self):
# Read value # Read value
size = entry["length"].value size = entry["length"].value
if size: if size:
yield String(self, "value[]", size, entry.description, charset=entry.getCharset()) yield String(
self, "value[]", size, entry.description, charset=entry.getCharset()
)
padding = (self.size - self.current_size) // 8 padding = (self.size - self.current_size) // 8
if padding: if padding:
yield NullBytes(self, "padding_end", padding) yield NullBytes(self, "padding_end", padding)
def parseMaxp(self):
# Read header
yield Version16Dot16(self, "format", "format version")
yield UInt16(self, "numGlyphs", "Number of glyphs")
if self["format"].value >= 1:
yield UInt16(self, "maxPoints", "Maximum points in a non-composite glyph")
yield UInt16(self, "maxContours", "Maximum contours in a non-composite glyph")
yield UInt16(self, "maxCompositePoints", "Maximum points in a composite glyph")
yield UInt16(
self, "maxCompositeContours", "Maximum contours in a composite glyph"
)
yield UInt16(self, "maxZones", "Do instructions use the twilight zone?")
yield UInt16(self, "maxTwilightPoints", "Maximum points used in Z0")
yield UInt16(self, "maxStorage", "Number of Storage Area locations")
yield UInt16(self, "maxFunctionDefs", "Number of function definitions")
yield UInt16(self, "maxInstructionDefs", "Number of instruction definitions")
yield UInt16(self, "maxStackElements", "Maximum stack depth")
yield UInt16(
self, "maxSizeOfInstructions", "Maximum byte count for glyph instructions"
)
yield UInt16(
self,
"maxComponentElements",
"Maximum number of components at glyph top level",
)
yield UInt16(self, "maxComponentDepth", "Maximum level of recursion")
def parseHhea(self):
yield UInt16(self, "majorVersion", "Major version")
yield UInt16(self, "minorVersion", "Minor version")
yield FWORD(self, "ascender", "Typographic ascent")
yield FWORD(self, "descender", "Typographic descent")
yield FWORD(self, "lineGap", "Typographic linegap")
yield UFWORD(self, "advanceWidthMax", "Maximum advance width")
yield FWORD(self, "minLeftSideBearing", "Minimum left sidebearing value")
yield FWORD(self, "minRightSideBearing", "Minimum right sidebearing value")
yield FWORD(self, "xMaxExtent", "Maximum X extent")
yield Int16(self, "caretSlopeRise", "Caret slope rise")
yield Int16(self, "caretSlopeRun", "Caret slope run")
yield Int16(self, "caretOffset", "Caret offset")
yield GenericVector(self, "reserved", 4, Int16)
yield Int16(self, "metricDataFormat", "Metric data format")
yield UInt16(self, "numberOfHMetrics", "Number of horizontal metrics")
class fsType(FieldSet):
def createFields(self):
yield Enum(Bits(self, "usage_permissions", 4), PERMISSIONS)
yield PaddingBits(self, "reserved[]", 4)
yield Bit(self, "no_subsetting", "Font may not be subsetted prior to embedding")
yield Bit(
self,
"bitmap_embedding",
"Only bitmaps contained in the font may be embedded",
)
yield PaddingBits(self, "reserved[]", 6)
def parseOS2(self):
yield UInt16(self, "version", "Table version")
yield Int16(self, "xAvgCharWidth")
yield UInt16(self, "usWeightClass")
yield UInt16(self, "usWidthClass")
yield fsType(self, "fsType")
yield Int16(self, "ySubscriptXSize")
yield Int16(self, "ySubscriptYSize")
yield Int16(self, "ySubscriptXOffset")
yield Int16(self, "ySubscriptYOffset")
yield Int16(self, "ySuperscriptXSize")
yield Int16(self, "ySuperscriptYSize")
yield Int16(self, "ySuperscriptXOffset")
yield Int16(self, "ySuperscriptYOffset")
yield Int16(self, "yStrikeoutSize")
yield Int16(self, "yStrikeoutPosition")
yield Int16(self, "sFamilyClass")
yield GenericVector(self, "panose", 10, UInt8)
yield UInt32(self, "ulUnicodeRange1")
yield UInt32(self, "ulUnicodeRange2")
yield UInt32(self, "ulUnicodeRange3")
yield UInt32(self, "ulUnicodeRange4")
yield Tag(self, "achVendID", "Vendor ID")
yield UInt16(self, "fsSelection")
yield UInt16(self, "usFirstCharIndex")
yield UInt16(self, "usLastCharIndex")
yield Int16(self, "sTypoAscender")
yield Int16(self, "sTypoDescender")
yield Int16(self, "sTypoLineGap")
yield UInt16(self, "usWinAscent")
yield UInt16(self, "usWinDescent")
if self["version"].value >= 1:
yield UInt32(self, "ulCodePageRange1")
yield UInt32(self, "ulCodePageRange2")
if self["version"].value >= 2:
yield Int16(self, "sxHeight")
yield Int16(self, "sCapHeight")
yield UInt16(self, "usDefaultChar")
yield UInt16(self, "usBreakChar")
yield UInt16(self, "usMaxContext")
if self["version"].value >= 5:
yield UInt16(self, "usLowerOpticalPointSize")
yield UInt16(self, "usUpperOpticalPointSize")
def parsePost(self):
yield Version16Dot16(self, "version", "Table version")
yield Fixed(
self,
"italicAngle",
"Italic angle in counter-clockwise degrees from the vertical.",
)
yield FWORD(self, "underlinePosition", "Top of underline to baseline")
yield FWORD(self, "underlineThickness", "Suggested underline thickness")
yield UInt32(self, "isFixedPitch", "Is the font fixed pitch?")
yield UInt32(self, "minMemType42", "Minimum memory usage (OpenType)")
yield UInt32(self, "maxMemType42", "Maximum memory usage (OpenType)")
yield UInt32(self, "minMemType1", "Minimum memory usage (Type 1)")
yield UInt32(self, "maxMemType1", "Maximum memory usage (Type 1)")
if self["version"].value == 2.0:
yield UInt16(self, "numGlyphs")
indices = GenericVector(
self,
"Array of indices into the string data",
self["numGlyphs"].value,
UInt16,
"glyphNameIndex",
)
yield indices
for gid, index in enumerate(indices):
if index.value >= 258:
yield PascalString8(self, "glyphname[%i]" % gid)
elif self["version"].value == 2.0:
yield UInt16(self, "numGlyphs")
indices = GenericVector(
self,
"Difference between graphic index and standard order of glyph",
self["numGlyphs"].value,
UInt16,
"offset",
)
yield indices
# This is work-in-progress until I work out good ways to do random-access on offsets
parseScriptList = (
parseFeatureList
) = parseLookupList = parseFeatureVariationsTable = lambda x: None
def parseGSUB(self):
yield UInt16(self, "majorVersion", "Major version")
yield UInt16(self, "minorVersion", "Minor version")
SUBTABLES = [
("script list", parseScriptList),
("feature list", parseFeatureList),
("lookup list", parseLookupList),
]
offsets = []
for description, parser in SUBTABLES:
name = description.title().replace(" ", "")
offset = UInt16(
self, name[0].lower() + name[1:], "Offset to %s table" % description
)
yield offset
offsets.append((offset.value, parser))
if self["min_ver"].value == 1:
offset = UInt32(
self, "featureVariationsOffset", "Offset to feature variations table"
)
offsets.append((offset.value, parseFeatureVariationsTable))
offsets.sort(key=lambda field: field[0])
padding = self.seekByte(offsets[0][0], null=True)
if padding:
yield padding
lastOffset, first_parser = offsets[0]
for offset, parser in offsets[1:]:
# yield parser(self)
yield RawBytes(self, "content", offset - lastOffset)
lastOffset = offset
class Table(FieldSet): class Table(FieldSet):
TAG_INFO = { TAG_INFO = {
"DSIG": ("DSIG", "Digital Signature", parseDSIG),
"GSUB": ("GSUB", "Glyph Substitutions", parseGSUB),
"avar": ("avar", "Axis variation table", parseAvar),
"cmap": ("cmap", "Character to Glyph Index Mapping", parseCmap),
"fvar": ("fvar", "Font variations table", parseFvar),
"head": ("header", "Font header", parseFontHeader), "head": ("header", "Font header", parseFontHeader),
"hhea": ("hhea", "Horizontal Header", parseHhea),
"maxp": ("maxp", "Maximum Profile", parseMaxp),
"name": ("names", "Names", parseNames), "name": ("names", "Names", parseNames),
"OS/2": ("OS_2", "OS/2 and Windows Metrics", parseOS2),
"post": ("post", "PostScript", parsePost),
} }
def __init__(self, parent, name, table, **kw): def __init__(self, parent, name, table, **kw):
@ -251,10 +770,15 @@ class TrueTypeFontFile(Parser):
} }
def validate(self): def validate(self):
if self["maj_ver"].value != 1: if self["maj_ver"].value == 1 and self["min_ver"].value == 0:
return "Invalid major version (%u)" % self["maj_ver"].value pass
if self["min_ver"].value != 0: elif self["maj_ver"].value == 0x4F54 and self["min_ver"].value == 0x544F:
return "Invalid minor version (%u)" % self["min_ver"].value pass
else:
return "Invalid version (%u.%u)" % (
self["maj_ver"].value,
self["min_ver"].value,
)
if not (MIN_NB_TABLE <= self["nb_table"].value <= MAX_NB_TABLE): if not (MIN_NB_TABLE <= self["nb_table"].value <= MAX_NB_TABLE):
return "Invalid number of table (%u)" % self["nb_table"].value return "Invalid number of table (%u)" % self["nb_table"].value
return True return True
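The relaxed validate() above also accepts CFF-flavoured OpenType fonts, whose files begin with the tag b"OTTO" rather than version 1.0; read as two big-endian 16-bit fields, that tag is exactly the pair of values tested here. A quick check:

    tag = b"OTTO"
    maj_ver = int.from_bytes(tag[:2], "big")
    min_ver = int.from_bytes(tag[2:], "big")
    assert (maj_ver, min_ver) == (0x4F54, 0x544F)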

View file

@ -6,10 +6,10 @@ Documents:
""" """
from hachoir.field import (FieldSet, Enum, from hachoir.field import (FieldSet, Enum,
Bit, Bits, Bit, Bits,
UInt8, Int16, UInt16, UInt32, Int32, UInt8, Int16, UInt16, UInt32, Int32,
NullBytes, Bytes, RawBytes, NullBytes, Bytes, RawBytes,
DateTimeMSDOS32) DateTimeMSDOS32)
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.parser.misc.ole2_util import OLE2FragmentParser from hachoir.parser.misc.ole2_util import OLE2FragmentParser
from hachoir.core.tools import paddingSize from hachoir.core.tools import paddingSize

View file

@ -12,10 +12,10 @@ Documents:
""" """
from hachoir.field import (FieldSet, Enum, from hachoir.field import (FieldSet, Enum,
Bit, Bits, Bit, Bits,
UInt8, Int16, UInt16, UInt32, Int32, UInt8, Int16, UInt16, UInt32, Int32,
NullBytes, Bytes, RawBytes, PascalString8, CString, String, NullBytes, Bytes, RawBytes, PascalString8, CString, String,
TimestampMac32, TimestampWin64) TimestampMac32, TimestampWin64)
from hachoir.core.text_handler import displayHandler from hachoir.core.text_handler import displayHandler
from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.endian import LITTLE_ENDIAN
from hachoir.parser import guessParser from hachoir.parser import guessParser

View file

@ -14,9 +14,9 @@ Creation: 23 march 2006
from hachoir.parser import Parser from hachoir.parser import Parser
from hachoir.field import (FieldSet, ParserError, from hachoir.field import (FieldSet, ParserError,
Enum, Bytes, NullBytes, RawBytes, Enum, Bytes, NullBytes, RawBytes,
UInt8, UInt16, UInt32, Int32, TimestampUnix32, UInt8, UInt16, UInt32, Int32, TimestampUnix32,
Bit, Bits, NullBits) Bit, Bits, NullBits)
from hachoir.core.endian import NETWORK_ENDIAN, LITTLE_ENDIAN from hachoir.core.endian import NETWORK_ENDIAN, LITTLE_ENDIAN
from hachoir.core.tools import humanDuration from hachoir.core.tools import humanDuration
from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.text_handler import textHandler, hexadecimal

Some files were not shown because too many files have changed in this diff