Mirror of https://github.com/SickGear/SickGear.git (synced 2024-12-02 17:33:37 +00:00)
Change handle if BTS returns no data.
Change improve hachoir error handling with bad source metadata.
parent 64dd798ba8, commit aa8b750803
11 changed files with 208 additions and 90 deletions
@@ -1,4 +1,11 @@
-### 0.17.0 (2018-08-24 23:40:00 UTC)
+### 0.17.1 (2018-08-29 23:40:00 UTC)
+
+* Change replace imdb lib with imdb-pie 5.6.3 (df7411d1)
+* Change handle if BTS returns no data
+* Change improve hachoir error handling with bad source metadata
+
+
+### 0.17.0 (2018-08-24 23:40:00 UTC)
 
 * Change save config values only where reqd. reduces file by up to 75%
 * Add 'Map an NZBGet "DestDir"' setting to config/Search/NZB Results tab (select NZBGet)
_cleaner.py (25 additions, 0 deletions)
@@ -4,6 +4,31 @@ import os
 import shutil
 
 parent_dir = os.path.abspath(os.path.dirname(__file__))
+cleaned_file = os.path.abspath(os.path.join(parent_dir, '.cleaned003.tmp'))
+test = os.path.abspath(os.path.join(parent_dir, 'lib', 'imdb'))
+if not os.path.isfile(cleaned_file) or os.path.exists(test):
+    dead_dirs = [os.path.abspath(os.path.join(parent_dir, *d)) for d in [
+        ('lib', 'imdb'),
+    ]]
+
+    for dirpath, dirnames, filenames in os.walk(parent_dir):
+        for dead_dir in filter(lambda x: x in dead_dirs, [os.path.abspath(os.path.join(dirpath, d)) for d in dirnames]):
+            try:
+                shutil.rmtree(dead_dir)
+            except (StandardError, Exception):
+                pass
+
+        for filename in [fn for fn in filenames if os.path.splitext(fn)[-1].lower() in ('.pyc', '.pyo')]:
+            try:
+                os.remove(os.path.abspath(os.path.join(dirpath, filename)))
+            except (StandardError, Exception):
+                pass
+
+    with open(cleaned_file, 'wb') as fp:
+        fp.write('This file exists to prevent a rerun delete of *.pyc, *.pyo files')
+        fp.flush()
+        os.fsync(fp.fileno())
+
 cleaned_file = os.path.abspath(os.path.join(parent_dir, '.cleaned002.tmp'))
 test = os.path.abspath(os.path.join(parent_dir, 'lib', 'hachoir_core'))
 if not os.path.isfile(cleaned_file) or os.path.exists(test):
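A note on the broad `except (StandardError, Exception)` catch used throughout _cleaner.py: StandardError exists only on Python 2, where it is the base class of most built-in exceptions, so naming it alongside Exception keeps the Python 2 intent explicit while Exception does the real work. A minimal sketch of a portable equivalent (the alias trick is an assumption for illustration, not code from this commit):

# Sketch only: emulate the broad catch on both Python 2 and 3.
# Python 3 has no StandardError, so alias it to Exception first.
try:
    StandardError
except NameError:  # Python 3
    StandardError = Exception

import shutil

def remove_tree_quietly(path):
    # Best-effort delete, mirroring _cleaner.py's "never fail" behaviour
    try:
        shutil.rmtree(path)
    except (StandardError, Exception):
        pass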
@@ -93,7 +93,8 @@ class GenericFieldSet(BasicFieldSet):
 
     def __str__(self):
         return '<%s path=%s, current_size=%s, current length=%s>' % \
-            (self.__class__.__name__, self.path, self._current_size, len(self._fields))
+            (self.__class__.__name__, self.path,
+             self._current_size, len(self._fields))
 
     def __len__(self):
         """

@@ -106,23 +107,22 @@ class GenericFieldSet(BasicFieldSet):
 
     def _getCurrentLength(self):
         return len(self._fields)
 
     current_length = property(_getCurrentLength)
 
     def _getSize(self):
         if self._size is None:
             self._feedAll()
         return self._size
-    size = property(_getSize, doc="Size in bits, may create all fields to get size")
+    size = property(
+        _getSize, doc="Size in bits, may create all fields to get size")
 
     def _getCurrentSize(self):
-        assert not (self.done)
+        assert not(self.done)
         return self._current_size
 
     current_size = property(_getCurrentSize)
 
-    eof = property(lambda self: self._checkSize(self._current_size + 1, True) < 0)
+    eof = property(lambda self: self._checkSize(
+        self._current_size + 1, True) < 0)
 
     def _checkSize(self, size, strict):
         field = self

@@ -170,11 +170,13 @@ class GenericFieldSet(BasicFieldSet):
            _ = field.size
        except Exception as err:
            if field.is_field_set and field.current_length and field.eof:
-               self.warning("Error when getting size of '%s': %s" % (field.name, err))
+               self.warning(
+                   "Error when getting size of '%s': %s" % (field.name, err))
                field._stopFeeding()
                ask_stop = True
            else:
-               self.warning("Error when getting size of '%s': delete it" % field.name)
+               self.warning(
+                   "Error when getting size of '%s': delete it" % field.name)
                self.__is_feeding = False
                raise
        self.__is_feeding = False

@@ -190,8 +192,8 @@ class GenericFieldSet(BasicFieldSet):
         self._current_size += field.size
         try:
             self._fields.append(field._name, field)
-        except UniqKeyError, err:
-            self.warning("Duplicate field name " + unicode(err))
+        except UniqKeyError as err:
+            self.warning("Duplicate field name " + str(err))
             field._name += "[]"
             self.setUniqueFieldName(field)
             self._fields.append(field._name, field)

@@ -220,7 +222,7 @@ class GenericFieldSet(BasicFieldSet):
         return field
 
     def getField(self, key, const=True):
-        if isinstance(key, (int, long)):
+        if isinstance(key, int):
             if key < 0:
                 raise KeyError("Key must be positive!")
             if not const:

@@ -250,7 +252,7 @@ class GenericFieldSet(BasicFieldSet):
             self._fields.append(field._name, field)
             self._current_size = self._size
         else:
-            assert size < self._size or self._size is None
+            assert self._size is None or size < self._size
             self._size = size
         if self._size == self._current_size:
             self._field_generator = None

@@ -327,7 +329,7 @@ class GenericFieldSet(BasicFieldSet):
             return None
         try:
             while True:
-                field = self._field_generator.next()
+                field = next(self._field_generator)
                 self._addField(field)
                 if field.name == field_name:
                     return field

@@ -349,7 +351,7 @@ class GenericFieldSet(BasicFieldSet):
         oldlen = len(self._fields)
         try:
             for index in xrange(number):
-                self._addField(self._field_generator.next())
+                self._addField(next(self._field_generator))
         except StopIteration:
             self._stopFeeding()
         except Exception as err:

@@ -362,7 +364,7 @@ class GenericFieldSet(BasicFieldSet):
             return
         try:
             while True:
-                field = self._field_generator.next()
+                field = next(self._field_generator)
                 self._addField(field)
         except StopIteration:
             self._stopFeeding()

@@ -381,7 +383,7 @@ class GenericFieldSet(BasicFieldSet):
             if done == len(self._fields):
                 if self._field_generator is None:
                     break
-                self._addField(self._field_generator.next())
+                self._addField(next(self._field_generator))
             for field in self._fields.values[done:]:
                 yield field
                 done += 1

@@ -404,7 +406,6 @@ class GenericFieldSet(BasicFieldSet):
 
     def _isDone(self):
         return (self._field_generator is None)
-
     done = property(_isDone, doc="Boolean to know if parsing is done or not")
 
     #

@@ -445,7 +446,8 @@ class GenericFieldSet(BasicFieldSet):
         # TODO: Check in self and not self.field
         # Problem is that "generator is already executing"
         if name not in self._fields:
-            raise ParserError("Unable to replace %s: field doesn't exist!" % name)
+            raise ParserError(
+                "Unable to replace %s: field doesn't exist!" % name)
         assert 1 <= len(new_fields)
         old_field = self[name]
         total_size = sum((field.size for field in new_fields))

@@ -486,7 +488,8 @@ class GenericFieldSet(BasicFieldSet):
         if feed and self._field_generator is not None:
             self._feedAll()
         if address < self._current_size:
-            i = lowerBound(self._fields.values, lambda x: x.address + x.size <= address)
+            i = lowerBound(self._fields.values,
+                           lambda x: x.address + x.size <= address)
             if i is not None:
                 return self._fields.values[i]
         return None

@@ -499,8 +502,8 @@ class GenericFieldSet(BasicFieldSet):
         # Check size
         total_size = sum(field.size for field in new_fields)
         if old_field.size < total_size:
-            raise ParserError( \
-                "Unable to write fields at address %s " \
+            raise ParserError(
+                "Unable to write fields at address %s "
                 "(too big)!" % (address))
 
         # Need padding before?
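Several hunks above swap `self._field_generator.next()` for `next(self._field_generator)`. The bound .next() method exists only on Python 2 generators; the next() built-in (Python 2.6+) behaves identically on both major versions, so this is groundwork for a Python 3 port without changing behaviour. In isolation:

def fields():
    yield 1
    yield 2

gen = fields()
value = next(gen)       # works on Python 2.6+ and Python 3
# value = gen.next()    # Python 2 only; gone in Python 3
assert value == 1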
@@ -3,6 +3,7 @@ from hachoir.core.endian import endian_name
 from hachoir.core.tools import makePrintable, makeUnicode
 from hachoir.core.dict import Dict
 from hachoir.core.i18n import _
+from hachoir.core.error import error
 from hachoir.core.log import Logger
 from hachoir.metadata.metadata_item import (
     MIN_PRIORITY, MAX_PRIORITY, QUALITY_NORMAL)

@@ -351,13 +352,11 @@ def extractMetadata(parser, quality=QUALITY_NORMAL, **kwargs):
         meta_extract_error = False
     except Exception as err:
         error("Error during metadata extraction: %s" % unicode(err))
-    except Exception as err:
-        error("Error during metadata extraction: %s" % unicode(err))
 
     if meta_extract_error:
         try:
             parser.stream._input.close()
-        except:
+        except (StandardError, Exception):
             pass
         return None
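Two fixes land in extractMetadata: a duplicated `except Exception` clause is removed (only the first matching clause ever runs, so the copy was dead code), and when extraction fails the parser's underlying input is now closed before returning None, so a file with bad metadata is not left open and locked. A rough sketch of that close-on-failure shape, with stand-in names (do_extract and error below are placeholders, not this commit's functions):

def do_extract(parser):          # stand-in for the real hachoir extractor
    raise IOError("bad source metadata")

def error(msg):                  # stand-in for hachoir.core.error.error
    print(msg)

def extract_or_close(parser):
    metadata = None
    try:
        metadata = do_extract(parser)
    except Exception as err:
        error("Error during metadata extraction: %s" % err)

    if metadata is None:
        try:
            parser.stream._input.close()   # private attribute, as used in the diff
        except Exception:
            pass
        return None
    return metadata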
@@ -12,6 +12,7 @@ from hachoir.field import (FieldSet, ParserError,
                            UInt8, UInt16, UInt32, UInt64,
                            String, PascalString16,
                            RawBytes)
+from hachoir.stream.input import ReadStreamError
 from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal
 from hachoir.core.tools import makeUnicode
 from hachoir.core.endian import LITTLE_ENDIAN

@@ -153,6 +154,7 @@ class ExtraField(FieldSet):
 
+
 class ExtraFields(FieldSet):
 
     def createFields(self):
         while self.current_size < self.size:
             yield ExtraField(self, "extra[]")

@@ -183,6 +185,7 @@ class ZipCentralDirectory(FieldSet):
 
     def createFields(self):
         yield ZipVersion(self, "version_made_by", "Version made by")
+        # yield from ZipStartCommonFields(self)  # PY3
         for field in ZipStartCommonFields(self):
             yield field
 

@@ -266,7 +269,8 @@ class FileEntry(FieldSet):
         compression = self["compression"].value
         if compression == 0:
             return SubFile(self, "data", size, filename=self.filename)
-        compressed = SubFile(self, "compressed_data", size, filename=self.filename)
+        compressed = SubFile(self, "compressed_data",
+                             size, filename=self.filename)
         if compression == COMPRESSION_DEFLATE:
             return Deflate(compressed)
         else:

@@ -292,6 +296,7 @@ class FileEntry(FieldSet):
                 (size, data_desc["file_compressed_size"].value))
 
     def createFields(self):
+        # yield from ZipStartCommonFields(self)  # PY3
         for field in ZipStartCommonFields(self):
             yield field
         length = self["filename_length"].value

@@ -308,6 +313,7 @@ class FileEntry(FieldSet):
         if size > 0:
             yield self.data(size)
         elif self["flags/incomplete"].value:
+            # yield from self.resync()  # PY3
             for field in self.resync():
                 yield field
         if self["flags/has_descriptor"].value and self['crc32'].value == 0:

@@ -315,7 +321,7 @@ class FileEntry(FieldSet):
 
     def createDescription(self):
         return "File entry: %s (%s)" % \
             (self["filename"].value, self["compressed_size"].display)
 
     def validate(self):
         if self["compression"].value not in COMPRESSION_METHOD:

@@ -396,10 +402,24 @@ class ZipFile(Parser):
         "min_size": (4 + 26) * 8,  # header + file entry
         "description": "ZIP archive"
     }
+    CHUNK_TYPES = {
+        FileEntry.HEADER: (FileEntry, "file[]", None),
+        ZipDataDescriptor.HEADER: (ZipDataDescriptor, "spanning[]", None),
+        0x30304b50: (ZipDataDescriptor, "temporary_spanning[]", None),
+        ZipCentralDirectory.HEADER: (ZipCentralDirectory, "central_directory[]", None),
+        ZipEndCentralDirectory.HEADER: (ZipEndCentralDirectory, "end_central_directory", "End of central directory"),
+        Zip64EndCentralDirectory.HEADER: (Zip64EndCentralDirectory, "end64_central_directory", "ZIP64 end of central directory"),
+        ZipSignature.HEADER: (ZipSignature, "signature", "Signature"),
+        Zip64EndCentralDirectoryLocator.HEADER: (Zip64EndCentralDirectoryLocator, "end_locator", "ZIP64 Enf of central directory locator"),
+    }
 
     def validate(self):
-        if self["header[0]"].value != FileEntry.HEADER:
-            return "Invalid magic"
+        try:
+            if self["header[0]"].value != FileEntry.HEADER:
+                return "Invalid magic"
+        except Exception as err:
+            return "Unable to get header #0"
 
         try:
             file0 = self["file[0]"]
         except Exception as err:

@@ -407,6 +427,7 @@ class ZipFile(Parser):
         err = file0.validate()
         if err:
             return "File #0: %s" % err
+
         return True
 
     def createFields(self):

@@ -414,29 +435,30 @@ class ZipFile(Parser):
         self.signature = None
         self.central_directory = []
         while not self.eof:
-            header = textHandler(
+            skip = 0
+            while True:
+                try:
+                    header = self.stream.readBits(self.absolute_address + self.current_size + skip, 32, self.endian)
+                    if header in self.CHUNK_TYPES:
+                        break
+                    skipdelta = self.stream.searchBytes(b'PK', self.absolute_address + self.current_size + skip + 8)
+                    if skipdelta is None:
+                        if not self.current_size:
+                            raise ParserError("Failed to find any zip headers")
+                        return
+                    skip = skipdelta - (self.absolute_address + self.current_size)
+                except ReadStreamError:
+                    if not self.current_size:
+                        raise ParserError("Failed to read stream")
+                    return
+            if skip:
+                yield RawBytes(self, "unparsed[]", skip // 8)
+
+            yield textHandler(
                 UInt32(self, "header[]", "Header"), hexadecimal)
-            yield header
-            header = header.value
-            if header == FileEntry.HEADER:
-                yield FileEntry(self, "file[]")
-            elif header == ZipDataDescriptor.HEADER:
-                yield ZipDataDescriptor(self, "spanning[]")
-            elif header == 0x30304b50:
-                yield ZipDataDescriptor(self, "temporary_spanning[]")
-            elif header == ZipCentralDirectory.HEADER:
-                yield ZipCentralDirectory(self, "central_directory[]")
-            elif header == ZipEndCentralDirectory.HEADER:
-                yield ZipEndCentralDirectory(self, "end_central_directory", "End of central directory")
-            elif header == Zip64EndCentralDirectory.HEADER:
-                yield Zip64EndCentralDirectory(self, "end64_central_directory", "ZIP64 end of central directory")
-            elif header == ZipSignature.HEADER:
-                yield ZipSignature(self, "signature", "Signature")
-            elif header == Zip64EndCentralDirectoryLocator.HEADER:
-                yield Zip64EndCentralDirectoryLocator(self, "end_locator", "ZIP64 Enf of central directory locator")
-            else:
-                raise ParserError(
-                    "Error, unknown ZIP header (0x%08X)." % header)
+            ftype, fname, fdesc = self.CHUNK_TYPES[header]
+            yield ftype(self, fname, fdesc)
 
     def createMimeType(self):
         if self["file[0]/filename"].value == "mimetype":
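The createFields rewrite replaces the if/elif chain on the header value with the CHUNK_TYPES lookup table, and recovers from garbage between entries by scanning forward for the next 'PK' marker instead of raising ParserError. The table-driven dispatch idiom in isolation looks roughly like this (a sketch with a made-up chunk class; the two signatures shown are the standard ZIP local-file and central-directory magics):

# Sketch: table-driven dispatch instead of if/elif on a header value.
class FileChunk(object):
    def __init__(self, name, desc):
        self.name, self.desc = name, desc

CHUNK_TYPES = {
    0x04034b50: (FileChunk, "file[]", None),              # local file header
    0x02014b50: (FileChunk, "central_directory[]", None),
}

def build(header):
    try:
        ftype, fname, fdesc = CHUNK_TYPES[header]
    except KeyError:
        raise ValueError("unknown ZIP header 0x%08x" % header)
    return ftype(fname, fdesc)

print(build(0x04034b50).name)  # file[]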
@@ -91,7 +91,7 @@ class HachoirParser(object):
         try:
             self._mime_type = self.createMimeType()
         except Exception as err:
-            self.error("Error when creating MIME type: %s" % unicode(err))
+            error("Error when creating MIME type: %s" % unicode(err))
         if not self._mime_type \
                 and self.createMimeType != Parser.createMimeType:
             self._mime_type = Parser.createMimeType(self)
@@ -173,6 +173,7 @@ class SectionFlags(FieldSet):
 
+
 class SymbolStringTableOffset(UInt32):
 
     def createDisplay(self):
         section_index = self['/header/shstrndx'].value
         section = self['/section[' + str(section_index) + ']']

@@ -213,7 +214,7 @@ class SectionHeader32(FieldSet):
 
     def createDescription(self):
         return "Section header (name: %s, type: %s)" % \
             (self["name"].display, self["type"].display)
 
 
 class SectionHeader64(SectionHeader32):

@@ -316,7 +317,8 @@ class ElfFile(HachoirParser, RootSeekableFieldSet):
     endian = LITTLE_ENDIAN
 
     def __init__(self, stream, **args):
-        RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self))
+        RootSeekableFieldSet.__init__(
+            self, None, "root", stream, None, stream.askSize(self))
         HachoirParser.__init__(self, stream, **args)
 
     def validate(self):

@@ -354,7 +356,8 @@ class ElfFile(HachoirParser, RootSeekableFieldSet):
 
         for index in xrange(self["header/shnum"].value):
             field = self["section_header[" + str(index) + "]"]
-            if field['size'].value != 0:
+            if field['size'].value != 0 and field['type'].value != 8:
+                # skip NOBITS sections
                 self.seekByte(field['LMA'].value, relative=False)
                 yield RawBytes(self, "section[" + str(index) + "]", field['size'].value)
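The last ELF hunk skips sections whose header type is 8 (SHT_NOBITS): such sections, .bss being the usual example, declare a size but occupy no bytes in the file, so seeking to their address and yielding RawBytes would consume unrelated data or run past end of file. The guard reduces to something like:

SHT_NOBITS = 8  # section occupies no file space (e.g. .bss)

def section_spans(headers):
    # headers: iterable of dicts with 'size', 'type', 'offset' keys (assumed shape)
    for h in headers:
        if h['size'] != 0 and h['type'] != SHT_NOBITS:
            yield h['offset'], h['size']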
@@ -12,7 +12,7 @@ Creation date: 2007-01-19
 from hachoir.field import (FieldSet, ParserError, Enum,
                            Bit, Bits, SeekableFieldSet,
                            UInt16, UInt32, TimestampUnix32,
-                           RawBytes, PaddingBytes, NullBytes, NullBits,
+                           Bytes, RawBytes, PaddingBytes, NullBytes, NullBits,
                            CString, String)
 from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal
 from hachoir.core.tools import createDict, paddingSize, alignValue, makePrintable

@@ -82,6 +82,7 @@ FONT_SUBTYPE_NAME = {
 
+
 class VersionInfoBinary(FieldSet):
 
     def createFields(self):
         yield textHandler(UInt32(self, "magic", "File information magic (0xFEEF04BD)"), hexadecimal)
         if self["magic"].value != 0xFEEF04BD:

@@ -172,6 +173,7 @@ def parseIcon(parent):
 
+
 class WindowsString(FieldSet):
 
     def createFields(self):
         yield UInt16(self, "length", "Number of 16-bit characters")
         size = self["length"].value * 2

@@ -230,10 +232,25 @@ class Entry(FieldSet):
 
 
 class NameOffset(FieldSet):
 
+    def __init__(self, parent, name):
+        FieldSet.__init__(self, parent, name)
+        self.name_field = None
+
     def createFields(self):
-        yield UInt32(self, "name")
-        yield Bits(self, "offset", 31)
+        yield Bits(self, "name_offset", 31)
         yield Bit(self, "is_name")
+        yield Bits(self, "offset", 31)
+        yield Bit(self, "is_subdir")
+
+    def getResType(self):
+        return self.name_field.value
+
+    def createDescription(self):
+        if self["is_subdir"].value:
+            return "Sub-directory: %s at %s" % (self.name_field.display, self["offset"].value)
+        else:
+            return "Index: %s at %s" % (self.name_field.display, self["offset"].value)
 
 
 class IndexOffset(FieldSet):

@@ -244,18 +261,27 @@ class IndexOffset(FieldSet):
         self.res_type = res_type
 
     def createFields(self):
-        yield Enum(UInt32(self, "type"), self.TYPE_DESC)
+        if self.res_type is None:
+            # immediate subdirectory of the root
+            yield Enum(UInt32(self, "type"), self.TYPE_DESC)
+        else:
+            # sub-subdirectory, "type" field is just an ID
+            yield textHandler(UInt32(self, "type"), lambda field: "ID %d" % field.value)
         yield Bits(self, "offset", 31)
         yield Bit(self, "is_subdir")
 
+    def getResType(self):
+        return self["type"].value
+
     def createDescription(self):
         if self["is_subdir"].value:
             return "Sub-directory: %s at %s" % (self["type"].display, self["offset"].value)
         else:
-            return "Index: ID %s at %s" % (self["type"].display, self["offset"].value)
+            return "Index: %s at %s" % (self["type"].display, self["offset"].value)
 
 
 class ResourceContent(FieldSet):
 
     def __init__(self, parent, name, entry, size=None):
         FieldSet.__init__(self, parent, name, size=entry["size"].value * 8)
         self.entry = entry

@@ -273,6 +299,7 @@ class ResourceContent(FieldSet):
 
     def createFields(self):
         if self._parser:
+            # yield from self._parser(self)  # PY3
             for field in self._parser(self):
                 yield field
         else:

@@ -309,15 +336,25 @@ class Header(FieldSet):
         return text
 
 
-class Name(FieldSet):
-    def createFields(self):
-        yield UInt16(self, "length")
-        size = min(self["length"].value, 255)
-        if size:
-            yield String(self, "name", size, charset="UTF-16LE")
+class WidePascalString16(String):
+
+    def __init__(self, parent, name, description=None,
+                 strip=None, nbytes=None, truncate=None):
+        Bytes.__init__(self, parent, name, 1, description)
+
+        self._format = "WidePascalString16"
+        self._strip = strip
+        self._truncate = truncate
+        self._character_size = 2
+        self._charset = "UTF-16-LE"
+        self._content_offset = 2
+        self._content_size = self._character_size * self._parent.stream.readBits(
+            self.absolute_address, self._content_offset * 8, self._parent.endian)
+        self._size = (self._content_size + self.content_offset) * 8
 
 
 class Directory(FieldSet):
 
     def __init__(self, parent, name, res_type=None):
         FieldSet.__init__(self, parent, name)
         nb_entries = self["header/nb_name"].value + \

@@ -346,12 +383,21 @@ class Directory(FieldSet):
 
 
 class PE_Resource(SeekableFieldSet):
 
     def __init__(self, parent, name, section, size):
         SeekableFieldSet.__init__(self, parent, name, size=size)
         self.section = section
 
     def parseSub(self, directory, name, depth):
         indexes = []
+        for index in directory.array("name"):
+            if index["is_subdir"].value:
+                indexes.append(index)
+            self.seekByte(index["name_offset"].value)
+            field = WidePascalString16(self, name.replace("directory", "name"))
+            index.name_field = field
+            yield field
+
         for index in directory.array("index"):
             if index["is_subdir"].value:
                 indexes.append(index)

@@ -360,7 +406,7 @@ class PE_Resource(SeekableFieldSet):
         for index in indexes:
             self.seekByte(index["offset"].value)
             if depth == 1:
-                res_type = index["type"].value
+                res_type = index.getResType()
             else:
                 res_type = directory.res_type
             yield Directory(self, name, res_type)

@@ -461,6 +507,6 @@ class NE_VersionInfoNode(FieldSet):
 
     def createDescription(self):
         text = "Version info node: %s" % self["name"].value
         # if self["type"].value == self.TYPE_STRING and "value" in self:
         #     text += "=%s" % self["value"].value
         return text
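The resource-directory entries above pack a flag into the top bit of each 32-bit word, which is why the diff declares Bits(..., 31) followed by Bit(...): the low 31 bits hold an offset and the high bit says whether the entry points at a name or a subdirectory. Decoding the same layout from a raw dword (illustration only):

def split_dir_entry(dword):
    # PE resource directory entry: high bit = flag, low 31 bits = offset.
    offset = dword & 0x7fffffff
    is_flag = bool(dword & 0x80000000)
    return offset, is_flag

assert split_dir_entry(0x80000010) == (0x10, True)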
@@ -4,7 +4,6 @@ from hachoir.core.log import Logger
 from hachoir.core.bits import str2long
 from hachoir.core.i18n import getTerminalCharset
 from hachoir.core.tools import lowerBound
-from hachoir.core.i18n import _
 from hachoir.core.tools import alignValue
 from errno import ESPIPE
 from weakref import ref as weakref_ref

@@ -16,21 +15,25 @@ class InputStreamError(StreamError):
 
 
 class ReadStreamError(InputStreamError):
 
     def __init__(self, size, address, got=None):
         self.size = size
         self.address = address
         self.got = got
         if self.got is not None:
-            msg = _("Can't read %u bits at address %u (got %u bits)") % (self.size, self.address, self.got)
+            msg = "Can't read %u bits at address %u (got %u bits)" % (
+                self.size, self.address, self.got)
         else:
-            msg = _("Can't read %u bits at address %u") % (self.size, self.address)
+            msg = "Can't read %u bits at address %u" % (
+                self.size, self.address)
         InputStreamError.__init__(self, msg)
 
 
 class NullStreamError(InputStreamError):
 
     def __init__(self, source):
         self.source = source
-        msg = _("Input size is nul (source='%s')!") % self.source
+        msg = "Input size is nul (source='%s')!" % self.source
         InputStreamError.__init__(self, msg)

@@ -76,7 +79,6 @@ class FileFromInputStream:
             if shift:
                 raise InputStreamError("TODO: handle non-byte-aligned data")
             return data
-
         if self._size or size is not None and not self._from_end:
             # We don't want self.tell() to read anything
             # and the size must be known if we read until the end.

@@ -96,7 +98,8 @@ class FileFromInputStream:
             if size <= 0:
                 return ''
             data = '', ''
-            self._offset = max(0, self.stream._current_size // 8 + self._offset)
+            self._offset = max(
+                0, self.stream._current_size // 8 + self._offset)
             self._from_end = False
             bs = max(max_size, 1 << 16)
             while True:

@@ -129,7 +132,7 @@ class InputStream(Logger):
 
     def __init__(self, source=None, size=None, packets=None, **args):
         self.source = source
         self._size = size  # in bits
         if size == 0:
             raise NullStreamError(source)
         self.tags = tuple(args.get("tags", tuple()))

@@ -245,7 +248,8 @@ class InputStream(Logger):
         None else.
         """
         if start_address % 8:
-            raise InputStreamError("Unable to search bytes with address with bit granularity")
+            raise InputStreamError(
+                "Unable to search bytes with address with bit granularity")
         length = len(needle)
         size = max(3 * length, 4096)
         buffer = ''

@@ -334,7 +338,8 @@ class InputPipe(object):
             return ''
         buf = self.buffers[index]
         if buf is None:
-            raise InputStreamError(_("Error: Buffers too small. Can't seek backward."))
+            raise InputStreamError(
+                "Error: Buffers too small. Can't seek backward.")
         if self.last != index:
             next = buf[1]
             prev = buf[2]

@@ -366,7 +371,7 @@ class InputPipe(object):
 
     def read(self, size):
         end = self.address + size
-        for i in xrange(len(self.buffers), (end >> self.buffer_size) + 1):
+        for i in range(len(self.buffers), (end >> self.buffer_size) + 1):
             data = self._input.read(1 << self.buffer_size)
             if len(data) < 1 << self.buffer_size:
                 self.size = (len(self.buffers) << self.buffer_size) + len(data)

@@ -386,6 +391,7 @@ class InputPipe(object):
 
+
 class InputIOStream(InputStream):
 
     def __init__(self, input, size=None, **args):
         if not hasattr(input, "seek"):
             if size is None:

@@ -396,14 +402,15 @@ class InputIOStream(InputStream):
             try:
                 input.seek(0, 2)
                 size = input.tell() * 8
-            except IOError, err:
+            except IOError as err:
                 if err.errno == ESPIPE:
                     input = InputPipe(input, self._setSize)
                 else:
                     charset = getTerminalCharset()
                     errmsg = unicode(str(err), charset)
                     source = args.get("source", "<inputio:%r>" % input)
-                    raise InputStreamError(_("Unable to get size of %s: %s") % (source, errmsg))
+                    raise InputStreamError(
+                        "Unable to get size of %s: %s" % (source, errmsg))
         self._input = input
         InputStream.__init__(self, size=size, **args)

@@ -416,7 +423,6 @@ class InputIOStream(InputStream):
         if self._input.size:
             return 8 * self._input.size
         return 8 * self._input.current_size
-
     _current_size = property(__current_size)
 
     def read(self, address, size):

@@ -445,6 +451,7 @@ class InputIOStream(InputStream):
 
+
 class StringInputStream(InputStream):
 
     def __init__(self, data, source="<string>", **args):
         self.data = data
         InputStream.__init__(self, source=source, size=8 * len(data), **args)

@@ -464,6 +471,7 @@ class StringInputStream(InputStream):
 
+
 class InputSubStream(InputStream):
 
     def __init__(self, stream, offset, size=None, source=None, **args):
         if offset is None:
             offset = 0

@@ -499,6 +507,7 @@ def InputFieldStream(field, **args):
 
+
 class FragmentedStream(InputStream):
 
     def __init__(self, field, **args):
         self.stream = field.parent.stream
         data = field.getData()

@@ -573,6 +582,7 @@ class FragmentedStream(InputStream):
 
+
 class ConcatStream(InputStream):
     # TODO: concatene any number of any type of stream
 
     def __init__(self, streams, **args):
         if len(streams) > 2 or not streams[0].checked:
             raise NotImplementedError

@@ -583,7 +593,8 @@ class ConcatStream(InputStream):
         self.__streams = streams
         InputStream.__init__(self, **args)
 
-    _current_size = property(lambda self: self.__size0 + self.__streams[1]._current_size)
+    _current_size = property(
+        lambda self: self.__size0 + self.__streams[1]._current_size)
 
     def close(self):
         self.__streams = None
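As in the field-set module, most of these stream hunks are Python 3 groundwork: `except IOError, err:` becomes `except IOError as err:` (the comma form is a syntax error on Python 3), xrange becomes range, and the gettext `_()` wrappers are dropped along with the now-unused `from hachoir.core.i18n import _`. The exception-syntax change in isolation (a sketch, not this file's code):

import errno

def tell_size(fileobj):
    # py2/py3-compatible exception syntax, as adopted in the diff
    try:
        fileobj.seek(0, 2)
        return fileobj.tell() * 8
    except IOError as err:          # "except IOError, err:" fails on Python 3
        if err.errno == errno.ESPIPE:
            return None             # unseekable pipe; size unknown
        raise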
@@ -286,16 +286,18 @@ class Quality:
         try:
             parser = ek.ek(createParser, filename)
         except InputStreamError as e:
-            logger.log(msg % (filename, e.text), logger.WARNING)
+            logger.log(msg % (filename, ex(e)), logger.WARNING)
         except Exception as e:
             logger.log(msg % (filename, ex(e)), logger.ERROR)
             logger.log(traceback.format_exc(), logger.ERROR)
 
         if parser:
-            if '.avi' == filename[-4::].lower():
-                extract = extractMetadata(parser, scan_index=False)
-            else:
-                extract = extractMetadata(parser)
+            extract = None
+            try:
+                args = ({}, {'scan_index': False})['.avi' == filename[-4::].lower()]
+                extract = extractMetadata(parser, **args)
+            except (StandardError, Exception) as e:
+                logger.log(msg % (filename, ex(e)), logger.WARNING)
             if extract:
                 try:
                     height = extract.get('height')
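Two things change in Quality: the InputStreamError handler formats the exception with ex(e) instead of reading a `text` attribute the exception class no longer carries, and extractMetadata is wrapped in its own try/except so bad source metadata degrades to a warning instead of crashing quality detection. The `({}, {'scan_index': False})[condition]` expression is a tuple-indexing conditional; both lines below pick the same kwargs (filename here is a hypothetical example value):

filename = 'example.avi'  # hypothetical input
is_avi = '.avi' == filename[-4::].lower()

args = ({}, {'scan_index': False})[is_avi]        # bool indexes the tuple: False -> 0, True -> 1
same = {'scan_index': False} if is_avi else {}    # equivalent conditional expression
assert args == same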
@@ -63,7 +63,7 @@ class BTSceneProvider(generic.TorrentProvider):
 
         url = self.url
         response = self.get_url(url)
-        if self.should_skip():
+        if self.should_skip() or not response:
             return results
 
         form = re.findall('(?is)(<form[^>]+)', response)
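This is the 'handle if BTS returns no data' fix from the changelog: get_url can return None or an empty string on a failed request, and the old code fell through to re.findall on that value, raising a TypeError. Widening the early-return guard covers it; the shape of the fix in isolation:

import re

def parse_forms(response):
    # Bail out before regex work when the fetch produced nothing.
    if not response:
        return []
    return re.findall('(?is)(<form[^>]+)', response)

assert parse_forms(None) == []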