Change handle if BTS returns no data.

Change improve hachoir error handling with bad source metadata.
JackDandy committed 2018-08-28 12:37:15 +01:00
parent 64dd798ba8
commit aa8b750803
11 changed files with 208 additions and 90 deletions

View file

@@ -1,4 +1,11 @@
### 0.17.0 (2018-08-24 23:40:00 UTC)
### 0.17.1 (2018-08-29 23:40:00 UTC)
* Change replace imdb lib with imdb-pie 5.6.3 (df7411d1)
* Change handle if BTS returns no data
* Change improve hachoir error handling with bad source metadata
### 0.17.0 (2018-08-24 23:40:00 UTC)
* Change save config values only where reqd. reduces file by up to 75%
* Add 'Map an NZBGet "DestDir"' setting to config/Search/NZB Results tab (select NZBGet)

View file

@@ -4,6 +4,31 @@ import os
import shutil
parent_dir = os.path.abspath(os.path.dirname(__file__))
cleaned_file = os.path.abspath(os.path.join(parent_dir, '.cleaned003.tmp'))
test = os.path.abspath(os.path.join(parent_dir, 'lib', 'imdb'))
if not os.path.isfile(cleaned_file) or os.path.exists(test):
dead_dirs = [os.path.abspath(os.path.join(parent_dir, *d)) for d in [
('lib', 'imdb'),
]]
for dirpath, dirnames, filenames in os.walk(parent_dir):
for dead_dir in filter(lambda x: x in dead_dirs, [os.path.abspath(os.path.join(dirpath, d)) for d in dirnames]):
try:
shutil.rmtree(dead_dir)
except (StandardError, Exception):
pass
for filename in [fn for fn in filenames if os.path.splitext(fn)[-1].lower() in ('.pyc', '.pyo')]:
try:
os.remove(os.path.abspath(os.path.join(dirpath, filename)))
except (StandardError, Exception):
pass
with open(cleaned_file, 'wb') as fp:
fp.write('This file exists to prevent a rerun delete of *.pyc, *.pyo files')
fp.flush()
os.fsync(fp.fileno())
cleaned_file = os.path.abspath(os.path.join(parent_dir, '.cleaned002.tmp'))
test = os.path.abspath(os.path.join(parent_dir, 'lib', 'hachoir_core'))
if not os.path.isfile(cleaned_file) or os.path.exists(test):
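
The cleanup script above follows a sentinel-file pattern: walk the tree once, delete superseded library directories and stale bytecode, then write a marker file so later startups skip the walk. A minimal standalone sketch of that pattern (the clean_once name and sentinel filename are illustrative, not the project's actual helper):

import os
import shutil

def clean_once(parent_dir, dead_dirs, sentinel='.cleaned.tmp'):
    # Skip the walk entirely when a previous run left its sentinel behind.
    sentinel_path = os.path.join(parent_dir, sentinel)
    if os.path.isfile(sentinel_path):
        return
    targets = set(os.path.abspath(os.path.join(parent_dir, *d)) for d in dead_dirs)
    for dirpath, dirnames, filenames in os.walk(parent_dir):
        for name in dirnames:
            full = os.path.abspath(os.path.join(dirpath, name))
            if full in targets:
                shutil.rmtree(full, ignore_errors=True)  # superseded lib dir
        for name in filenames:
            if os.path.splitext(name)[-1].lower() in ('.pyc', '.pyo'):
                try:
                    os.remove(os.path.join(dirpath, name))  # stale bytecode
                except OSError:
                    pass
    with open(sentinel_path, 'wb') as fp:
        fp.write(b'cleanup done')  # marker prevents a rerun delete

# e.g. clean_once(os.path.dirname(os.path.abspath(__file__)), [('lib', 'imdb')])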

View file

@@ -93,7 +93,8 @@ class GenericFieldSet(BasicFieldSet):
def __str__(self):
return '<%s path=%s, current_size=%s, current length=%s>' % \
(self.__class__.__name__, self.path, self._current_size, len(self._fields))
(self.__class__.__name__, self.path,
self._current_size, len(self._fields))
def __len__(self):
"""
@@ -106,23 +107,22 @@ class GenericFieldSet(BasicFieldSet):
def _getCurrentLength(self):
return len(self._fields)
current_length = property(_getCurrentLength)
def _getSize(self):
if self._size is None:
self._feedAll()
return self._size
size = property(_getSize, doc="Size in bits, may create all fields to get size")
size = property(
_getSize, doc="Size in bits, may create all fields to get size")
def _getCurrentSize(self):
assert not(self.done)
return self._current_size
current_size = property(_getCurrentSize)
eof = property(lambda self: self._checkSize(self._current_size + 1, True) < 0)
eof = property(lambda self: self._checkSize(
self._current_size + 1, True) < 0)
def _checkSize(self, size, strict):
field = self
@@ -170,11 +170,13 @@ class GenericFieldSet(BasicFieldSet):
_ = field.size
except Exception as err:
if field.is_field_set and field.current_length and field.eof:
self.warning("Error when getting size of '%s': %s" % (field.name, err))
self.warning(
"Error when getting size of '%s': %s" % (field.name, err))
field._stopFeeding()
ask_stop = True
else:
self.warning("Error when getting size of '%s': delete it" % field.name)
self.warning(
"Error when getting size of '%s': delete it" % field.name)
self.__is_feeding = False
raise
self.__is_feeding = False
@@ -190,8 +192,8 @@ class GenericFieldSet(BasicFieldSet):
self._current_size += field.size
try:
self._fields.append(field._name, field)
except UniqKeyError, err:
self.warning("Duplicate field name " + unicode(err))
except UniqKeyError as err:
self.warning("Duplicate field name " + str(err))
field._name += "[]"
self.setUniqueFieldName(field)
self._fields.append(field._name, field)
@@ -220,7 +222,7 @@ class GenericFieldSet(BasicFieldSet):
return field
def getField(self, key, const=True):
if isinstance(key, (int, long)):
if isinstance(key, int):
if key < 0:
raise KeyError("Key must be positive!")
if not const:
@@ -250,7 +252,7 @@ class GenericFieldSet(BasicFieldSet):
self._fields.append(field._name, field)
self._current_size = self._size
else:
assert size < self._size or self._size is None
assert self._size is None or size < self._size
self._size = size
if self._size == self._current_size:
self._field_generator = None
@@ -327,7 +329,7 @@ class GenericFieldSet(BasicFieldSet):
return None
try:
while True:
field = self._field_generator.next()
field = next(self._field_generator)
self._addField(field)
if field.name == field_name:
return field
@@ -349,7 +351,7 @@ class GenericFieldSet(BasicFieldSet):
oldlen = len(self._fields)
try:
for index in xrange(number):
self._addField(self._field_generator.next())
self._addField(next(self._field_generator))
except StopIteration:
self._stopFeeding()
except Exception as err:
@@ -362,7 +364,7 @@ class GenericFieldSet(BasicFieldSet):
return
try:
while True:
field = self._field_generator.next()
field = next(self._field_generator)
self._addField(field)
except StopIteration:
self._stopFeeding()
@@ -381,7 +383,7 @@ class GenericFieldSet(BasicFieldSet):
if done == len(self._fields):
if self._field_generator is None:
break
self._addField(self._field_generator.next())
self._addField(next(self._field_generator))
for field in self._fields.values[done:]:
yield field
done += 1
@@ -404,7 +406,6 @@ class GenericFieldSet(BasicFieldSet):
def _isDone(self):
return (self._field_generator is None)
done = property(_isDone, doc="Boolean to know if parsing is done or not")
#
@@ -445,7 +446,8 @@ class GenericFieldSet(BasicFieldSet):
# TODO: Check in self and not self.field
# Problem is that "generator is already executing"
if name not in self._fields:
raise ParserError("Unable to replace %s: field doesn't exist!" % name)
raise ParserError(
"Unable to replace %s: field doesn't exist!" % name)
assert 1 <= len(new_fields)
old_field = self[name]
total_size = sum((field.size for field in new_fields))
@@ -486,7 +488,8 @@ class GenericFieldSet(BasicFieldSet):
if feed and self._field_generator is not None:
self._feedAll()
if address < self._current_size:
i = lowerBound(self._fields.values, lambda x: x.address + x.size <= address)
i = lowerBound(self._fields.values,
lambda x: x.address + x.size <= address)
if i is not None:
return self._fields.values[i]
return None
@@ -499,8 +502,8 @@ class GenericFieldSet(BasicFieldSet):
# Check size
total_size = sum(field.size for field in new_fields)
if old_field.size < total_size:
raise ParserError( \
"Unable to write fields at address %s " \
raise ParserError(
"Unable to write fields at address %s "
"(too big)!" % (address))
# Need padding before?
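
Most of the hunks above are Python 2/3 compatibility conversions rather than behaviour changes: gen.next() becomes the builtin next(gen), "except E, err" becomes "except E as err", and the Python-2-only long type is dropped from the isinstance check. A minimal illustration of the cross-version spellings (an illustrative function, not hachoir code):

def drain(gen):
    # Py2-only spellings: gen.next() and 'except StopIteration, err:'
    # The forms below parse and run on Python 2.6+ and Python 3.
    fields = []
    while True:
        try:
            fields.append(next(gen))
        except StopIteration:
            break
    return fields

print(drain(iter(range(3))))  # -> [0, 1, 2]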

View file

@@ -3,6 +3,7 @@ from hachoir.core.endian import endian_name
from hachoir.core.tools import makePrintable, makeUnicode
from hachoir.core.dict import Dict
from hachoir.core.i18n import _
from hachoir.core.error import error
from hachoir.core.log import Logger
from hachoir.metadata.metadata_item import (
MIN_PRIORITY, MAX_PRIORITY, QUALITY_NORMAL)
@@ -351,13 +352,11 @@ def extractMetadata(parser, quality=QUALITY_NORMAL, **kwargs):
meta_extract_error = False
except Exception as err:
error("Error during metadata extraction: %s" % unicode(err))
except Exception as err:
error("Error during metadata extraction: %s" % unicode(err))
if meta_extract_error:
try:
parser.stream._input.close()
except:
except (StandardError, Exception):
pass
return None
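
The extractMetadata hunk above is one half of the "bad source metadata" fix: a flag records whether extraction raised, and on failure the parser's underlying input is explicitly closed before returning None, so a corrupt file no longer leaks an open handle. A hedged sketch of the same pattern (the extract callable and the parser.stream._input attribute mirror the hunk, but this is not hachoir's public API):

def extract_safely(parser, extract):
    failed = True
    metadata = None
    try:
        metadata = extract(parser)   # the real work; may raise on bad input
        failed = False
    except Exception as err:
        print("Error during metadata extraction: %s" % err)
    if failed:
        try:
            parser.stream._input.close()  # release the file handle on failure
        except Exception:
            pass
        return None
    return metadata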

View file

@@ -12,6 +12,7 @@ from hachoir.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, UInt64,
String, PascalString16,
RawBytes)
from hachoir.stream.input import ReadStreamError
from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir.core.tools import makeUnicode
from hachoir.core.endian import LITTLE_ENDIAN
@@ -153,6 +154,7 @@ class ExtraField(FieldSet):
class ExtraFields(FieldSet):
def createFields(self):
while self.current_size < self.size:
yield ExtraField(self, "extra[]")
@@ -183,6 +185,7 @@ class ZipCentralDirectory(FieldSet):
def createFields(self):
yield ZipVersion(self, "version_made_by", "Version made by")
# yield from ZipStartCommonFields(self) # PY3
for field in ZipStartCommonFields(self):
yield field
@@ -266,7 +269,8 @@ class FileEntry(FieldSet):
compression = self["compression"].value
if compression == 0:
return SubFile(self, "data", size, filename=self.filename)
compressed = SubFile(self, "compressed_data", size, filename=self.filename)
compressed = SubFile(self, "compressed_data",
size, filename=self.filename)
if compression == COMPRESSION_DEFLATE:
return Deflate(compressed)
else:
@@ -292,6 +296,7 @@ class FileEntry(FieldSet):
(size, data_desc["file_compressed_size"].value))
def createFields(self):
# yield from ZipStartCommonFields(self) # PY3
for field in ZipStartCommonFields(self):
yield field
length = self["filename_length"].value
@@ -308,6 +313,7 @@ class FileEntry(FieldSet):
if size > 0:
yield self.data(size)
elif self["flags/incomplete"].value:
# yield from self.resync() # PY3
for field in self.resync():
yield field
if self["flags/has_descriptor"].value and self['crc32'].value == 0:
@@ -396,10 +402,24 @@ class ZipFile(Parser):
"min_size": (4 + 26) * 8, # header + file entry
"description": "ZIP archive"
}
CHUNK_TYPES = {
FileEntry.HEADER: (FileEntry, "file[]", None),
ZipDataDescriptor.HEADER: (ZipDataDescriptor, "spanning[]", None),
0x30304b50: (ZipDataDescriptor, "temporary_spanning[]", None),
ZipCentralDirectory.HEADER: (ZipCentralDirectory, "central_directory[]", None),
ZipEndCentralDirectory.HEADER: (ZipEndCentralDirectory, "end_central_directory", "End of central directory"),
Zip64EndCentralDirectory.HEADER: (Zip64EndCentralDirectory, "end64_central_directory", "ZIP64 end of central directory"),
ZipSignature.HEADER: (ZipSignature, "signature", "Signature"),
Zip64EndCentralDirectoryLocator.HEADER: (Zip64EndCentralDirectoryLocator, "end_locator", "ZIP64 End of central directory locator"),
}
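
CHUNK_TYPES turns the removed if/elif chain (visible at the end of this file's diff) into table-driven dispatch: a class-level dict keyed by the 4-byte header value, mapping to the field class, field name, and description to yield. A self-contained miniature of the idea, with hypothetical handlers in place of hachoir field classes:

# Hypothetical miniature of the table-driven dispatch used above.
CHUNKS = {
    0x04034b50: ('file[]', 'Local file header'),                # FileEntry.HEADER
    0x02014b50: ('central_directory[]', 'Central directory'),  # ZipCentralDirectory.HEADER
}

def dispatch(header):
    try:
        name, desc = CHUNKS[header]
    except KeyError:
        raise ValueError('unknown ZIP header (0x%08X)' % header)
    return name, desc

print(dispatch(0x04034b50))  # -> ('file[]', 'Local file header')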
def validate(self):
try:
if self["header[0]"].value != FileEntry.HEADER:
return "Invalid magic"
except Exception as err:
return "Unable to get header #0"
try:
file0 = self["file[0]"]
except Exception as err:
@@ -407,6 +427,7 @@ class ZipFile(Parser):
err = file0.validate()
if err:
return "File #0: %s" % err
return True
def createFields(self):
@@ -414,29 +435,30 @@ class ZipFile(Parser):
self.signature = None
self.central_directory = []
while not self.eof:
header = textHandler(
skip = 0
while True:
try:
header = self.stream.readBits(self.absolute_address + self.current_size + skip, 32, self.endian)
if header in self.CHUNK_TYPES:
break
skipdelta = self.stream.searchBytes(b'PK', self.absolute_address + self.current_size + skip + 8)
if skipdelta is None:
if not self.current_size:
raise ParserError("Failed to find any zip headers")
return
skip = skipdelta - (self.absolute_address + self.current_size)
except ReadStreamError:
if not self.current_size:
raise ParserError("Failed to read stream")
return
if skip:
yield RawBytes(self, "unparsed[]", skip // 8)
yield textHandler(
UInt32(self, "header[]", "Header"), hexadecimal)
yield header
header = header.value
if header == FileEntry.HEADER:
yield FileEntry(self, "file[]")
elif header == ZipDataDescriptor.HEADER:
yield ZipDataDescriptor(self, "spanning[]")
elif header == 0x30304b50:
yield ZipDataDescriptor(self, "temporary_spanning[]")
elif header == ZipCentralDirectory.HEADER:
yield ZipCentralDirectory(self, "central_directory[]")
elif header == ZipEndCentralDirectory.HEADER:
yield ZipEndCentralDirectory(self, "end_central_directory", "End of central directory")
elif header == Zip64EndCentralDirectory.HEADER:
yield Zip64EndCentralDirectory(self, "end64_central_directory", "ZIP64 end of central directory")
elif header == ZipSignature.HEADER:
yield ZipSignature(self, "signature", "Signature")
elif header == Zip64EndCentralDirectoryLocator.HEADER:
yield Zip64EndCentralDirectoryLocator(self, "end_locator", "ZIP64 End of central directory locator")
else:
raise ParserError(
"Error, unknown ZIP header (0x%08X)." % header)
ftype, fname, fdesc = self.CHUNK_TYPES[header]
yield ftype(self, fname, fdesc)
def createMimeType(self):
if self["file[0]/filename"].value == "mimetype":

View file

@@ -91,7 +91,7 @@ class HachoirParser(object):
try:
self._mime_type = self.createMimeType()
except Exception as err:
self.error("Error when creating MIME type: %s" % unicode(err))
error("Error when creating MIME type: %s" % unicode(err))
if not self._mime_type \
and self.createMimeType != Parser.createMimeType:
self._mime_type = Parser.createMimeType(self)

View file

@@ -173,6 +173,7 @@ class SectionFlags(FieldSet):
class SymbolStringTableOffset(UInt32):
def createDisplay(self):
section_index = self['/header/shstrndx'].value
section = self['/section[' + str(section_index) + ']']
@@ -316,7 +317,8 @@ class ElfFile(HachoirParser, RootSeekableFieldSet):
endian = LITTLE_ENDIAN
def __init__(self, stream, **args):
RootSeekableFieldSet.__init__(self, None, "root", stream, None, stream.askSize(self))
RootSeekableFieldSet.__init__(
self, None, "root", stream, None, stream.askSize(self))
HachoirParser.__init__(self, stream, **args)
def validate(self):
@@ -354,7 +356,8 @@ class ElfFile(HachoirParser, RootSeekableFieldSet):
for index in xrange(self["header/shnum"].value):
field = self["section_header[" + str(index) + "]"]
if field['size'].value != 0:
if field['size'].value != 0 and field['type'].value != 8:
# skip NOBITS sections
self.seekByte(field['LMA'].value, relative=False)
yield RawBytes(self, "section[" + str(index) + "]", field['size'].value)

View file

@@ -12,7 +12,7 @@ Creation date: 2007-01-19
from hachoir.field import (FieldSet, ParserError, Enum,
Bit, Bits, SeekableFieldSet,
UInt16, UInt32, TimestampUnix32,
RawBytes, PaddingBytes, NullBytes, NullBits,
Bytes, RawBytes, PaddingBytes, NullBytes, NullBits,
CString, String)
from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal
from hachoir.core.tools import createDict, paddingSize, alignValue, makePrintable
@@ -82,6 +82,7 @@ FONT_SUBTYPE_NAME = {
class VersionInfoBinary(FieldSet):
def createFields(self):
yield textHandler(UInt32(self, "magic", "File information magic (0xFEEF04BD)"), hexadecimal)
if self["magic"].value != 0xFEEF04BD:
@@ -172,6 +173,7 @@ def parseIcon(parent):
class WindowsString(FieldSet):
def createFields(self):
yield UInt16(self, "length", "Number of 16-bit characters")
size = self["length"].value * 2
@@ -230,10 +232,25 @@ class Entry(FieldSet):
class NameOffset(FieldSet):
def __init__(self, parent, name):
FieldSet.__init__(self, parent, name)
self.name_field = None
def createFields(self):
yield UInt32(self, "name")
yield Bits(self, "offset", 31)
yield Bits(self, "name_offset", 31)
yield Bit(self, "is_name")
yield Bits(self, "offset", 31)
yield Bit(self, "is_subdir")
def getResType(self):
return self.name_field.value
def createDescription(self):
if self["is_subdir"].value:
return "Sub-directory: %s at %s" % (self.name_field.display, self["offset"].value)
else:
return "Index: %s at %s" % (self.name_field.display, self["offset"].value)
class IndexOffset(FieldSet):
@@ -244,18 +261,27 @@ class IndexOffset(FieldSet):
self.res_type = res_type
def createFields(self):
if self.res_type is None:
# immediate subdirectory of the root
yield Enum(UInt32(self, "type"), self.TYPE_DESC)
else:
# sub-subdirectory, "type" field is just an ID
yield textHandler(UInt32(self, "type"), lambda field: "ID %d" % field.value)
yield Bits(self, "offset", 31)
yield Bit(self, "is_subdir")
def getResType(self):
return self["type"].value
def createDescription(self):
if self["is_subdir"].value:
return "Sub-directory: %s at %s" % (self["type"].display, self["offset"].value)
else:
return "Index: ID %s at %s" % (self["type"].display, self["offset"].value)
return "Index: %s at %s" % (self["type"].display, self["offset"].value)
class ResourceContent(FieldSet):
def __init__(self, parent, name, entry, size=None):
FieldSet.__init__(self, parent, name, size=entry["size"].value * 8)
self.entry = entry
@@ -273,6 +299,7 @@ class ResourceContent(FieldSet):
def createFields(self):
if self._parser:
# yield from self._parser(self) # PY3
for field in self._parser(self):
yield field
else:
@@ -309,15 +336,25 @@ class Header(FieldSet):
return text
class Name(FieldSet):
def createFields(self):
yield UInt16(self, "length")
size = min(self["length"].value, 255)
if size:
yield String(self, "name", size, charset="UTF-16LE")
class WidePascalString16(String):
def __init__(self, parent, name, description=None,
strip=None, nbytes=None, truncate=None):
Bytes.__init__(self, parent, name, 1, description)
self._format = "WidePascalString16"
self._strip = strip
self._truncate = truncate
self._character_size = 2
self._charset = "UTF-16-LE"
self._content_offset = 2
self._content_size = self._character_size * self._parent.stream.readBits(
self.absolute_address, self._content_offset * 8, self._parent.endian)
self._size = (self._content_size + self.content_offset) * 8
class Directory(FieldSet):
def __init__(self, parent, name, res_type=None):
FieldSet.__init__(self, parent, name)
nb_entries = self["header/nb_name"].value + \
@@ -346,12 +383,21 @@ class Directory(FieldSet):
class PE_Resource(SeekableFieldSet):
def __init__(self, parent, name, section, size):
SeekableFieldSet.__init__(self, parent, name, size=size)
self.section = section
def parseSub(self, directory, name, depth):
indexes = []
for index in directory.array("name"):
if index["is_subdir"].value:
indexes.append(index)
self.seekByte(index["name_offset"].value)
field = WidePascalString16(self, name.replace("directory", "name"))
index.name_field = field
yield field
for index in directory.array("index"):
if index["is_subdir"].value:
indexes.append(index)
@@ -360,7 +406,7 @@ class PE_Resource(SeekableFieldSet):
for index in indexes:
self.seekByte(index["offset"].value)
if depth == 1:
res_type = index["type"].value
res_type = index.getResType()
else:
res_type = directory.res_type
yield Directory(self, name, res_type)
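
The new WidePascalString16 reads the on-disk layout PE uses for resource names: a 16-bit count of UTF-16 code units followed by count * 2 bytes of UTF-16LE text; PE_Resource.parseSub seeks to each name_offset and decodes one per named entry. The same layout decoded from plain bytes (a sketch, not hachoir's field API):

import struct

def read_wide_pascal16(data, offset):
    # uint16 count of UTF-16 code units, then count * 2 bytes of UTF-16LE text
    (count,) = struct.unpack_from('<H', data, offset)
    start = offset + 2
    value = data[start:start + count * 2].decode('utf-16-le')
    return value, start + count * 2  # decoded name, offset past the string

blob = struct.pack('<H', 4) + 'ICON'.encode('utf-16-le')
print(read_wide_pascal16(blob, 0))  # -> ('ICON', 10)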

View file

@@ -4,7 +4,6 @@ from hachoir.core.log import Logger
from hachoir.core.bits import str2long
from hachoir.core.i18n import getTerminalCharset
from hachoir.core.tools import lowerBound
from hachoir.core.i18n import _
from hachoir.core.tools import alignValue
from errno import ESPIPE
from weakref import ref as weakref_ref
@@ -16,21 +15,25 @@ class InputStreamError(StreamError):
class ReadStreamError(InputStreamError):
def __init__(self, size, address, got=None):
self.size = size
self.address = address
self.got = got
if self.got is not None:
msg = _("Can't read %u bits at address %u (got %u bits)") % (self.size, self.address, self.got)
msg = "Can't read %u bits at address %u (got %u bits)" % (
self.size, self.address, self.got)
else:
msg = _("Can't read %u bits at address %u") % (self.size, self.address)
msg = "Can't read %u bits at address %u" % (
self.size, self.address)
InputStreamError.__init__(self, msg)
class NullStreamError(InputStreamError):
def __init__(self, source):
self.source = source
msg = _("Input size is nul (source='%s')!") % self.source
msg = "Input size is nul (source='%s')!" % self.source
InputStreamError.__init__(self, msg)
@@ -76,7 +79,6 @@ class FileFromInputStream:
if shift:
raise InputStreamError("TODO: handle non-byte-aligned data")
return data
if self._size or size is not None and not self._from_end:
# We don't want self.tell() to read anything
# and the size must be known if we read until the end.
@@ -96,7 +98,8 @@ class FileFromInputStream:
if size <= 0:
return ''
data = '', ''
self._offset = max(0, self.stream._current_size // 8 + self._offset)
self._offset = max(
0, self.stream._current_size // 8 + self._offset)
self._from_end = False
bs = max(max_size, 1 << 16)
while True:
@@ -245,7 +248,8 @@ class InputStream(Logger):
None else.
"""
if start_address % 8:
raise InputStreamError("Unable to search bytes with address with bit granularity")
raise InputStreamError(
"Unable to search bytes with address with bit granularity")
length = len(needle)
size = max(3 * length, 4096)
buffer = ''
@@ -334,7 +338,8 @@ class InputPipe(object):
return ''
buf = self.buffers[index]
if buf is None:
raise InputStreamError(_("Error: Buffers too small. Can't seek backward."))
raise InputStreamError(
"Error: Buffers too small. Can't seek backward.")
if self.last != index:
next = buf[1]
prev = buf[2]
@@ -366,7 +371,7 @@ class InputPipe(object):
def read(self, size):
end = self.address + size
for i in xrange(len(self.buffers), (end >> self.buffer_size) + 1):
for i in range(len(self.buffers), (end >> self.buffer_size) + 1):
data = self._input.read(1 << self.buffer_size)
if len(data) < 1 << self.buffer_size:
self.size = (len(self.buffers) << self.buffer_size) + len(data)
@@ -386,6 +391,7 @@ class InputPipe(object):
class InputIOStream(InputStream):
def __init__(self, input, size=None, **args):
if not hasattr(input, "seek"):
if size is None:
@@ -396,14 +402,15 @@ class InputIOStream(InputStream):
try:
input.seek(0, 2)
size = input.tell() * 8
except IOError, err:
except IOError as err:
if err.errno == ESPIPE:
input = InputPipe(input, self._setSize)
else:
charset = getTerminalCharset()
errmsg = unicode(str(err), charset)
source = args.get("source", "<inputio:%r>" % input)
raise InputStreamError(_("Unable to get size of %s: %s") % (source, errmsg))
raise InputStreamError(
"Unable to get size of %s: %s" % (source, errmsg))
self._input = input
InputStream.__init__(self, size=size, **args)
@@ -416,7 +423,6 @@ class InputIOStream(InputStream):
if self._input.size:
return 8 * self._input.size
return 8 * self._input.current_size
_current_size = property(__current_size)
def read(self, address, size):
@@ -445,6 +451,7 @@ class InputIOStream(InputStream):
class StringInputStream(InputStream):
def __init__(self, data, source="<string>", **args):
self.data = data
InputStream.__init__(self, source=source, size=8 * len(data), **args)
@@ -464,6 +471,7 @@ class StringInputStream(InputStream):
class InputSubStream(InputStream):
def __init__(self, stream, offset, size=None, source=None, **args):
if offset is None:
offset = 0
@@ -499,6 +507,7 @@ def InputFieldStream(field, **args):
class FragmentedStream(InputStream):
def __init__(self, field, **args):
self.stream = field.parent.stream
data = field.getData()
@@ -573,6 +582,7 @@ class FragmentedStream(InputStream):
class ConcatStream(InputStream):
# TODO: concatene any number of any type of stream
def __init__(self, streams, **args):
if len(streams) > 2 or not streams[0].checked:
raise NotImplementedError
@@ -583,7 +593,8 @@ class ConcatStream(InputStream):
self.__streams = streams
InputStream.__init__(self, **args)
_current_size = property(lambda self: self.__size0 + self.__streams[1]._current_size)
_current_size = property(
lambda self: self.__size0 + self.__streams[1]._current_size)
def close(self):
self.__streams = None

View file

@@ -286,16 +286,18 @@ class Quality:
try:
parser = ek.ek(createParser, filename)
except InputStreamError as e:
logger.log(msg % (filename, e.text), logger.WARNING)
logger.log(msg % (filename, ex(e)), logger.WARNING)
except Exception as e:
logger.log(msg % (filename, ex(e)), logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
if parser:
if '.avi' == filename[-4::].lower():
extract = extractMetadata(parser, scan_index=False)
else:
extract = extractMetadata(parser)
extract = None
try:
args = ({}, {'scan_index': False})['.avi' == filename[-4::].lower()]
extract = extractMetadata(parser, **args)
except (StandardError, Exception) as e:
logger.log(msg % (filename, ex(e)), logger.WARNING)
if extract:
try:
height = extract.get('height')
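
This hunk is the consumer-side hardening: extract starts as None, the kwargs are chosen per extension (.avi passes scan_index=False), and any extraction failure is logged as a warning instead of crashing the quality check. A hedged sketch with stand-ins for the SickGear logger and the hachoir call:

import logging

def probe_height(parser, filename, extract_metadata):
    # extract_metadata stands in for hachoir's extractMetadata.
    extract = None
    try:
        kwargs = {'scan_index': False} if filename.lower().endswith('.avi') else {}
        extract = extract_metadata(parser, **kwargs)
    except Exception as err:
        logging.warning('Unable to parse metadata from %s: %s', filename, err)
    if extract:
        return extract.get('height')
    return None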

View file

@@ -63,7 +63,7 @@ class BTSceneProvider(generic.TorrentProvider):
url = self.url
response = self.get_url(url)
if self.should_skip():
if self.should_skip() or not response:
return results
form = re.findall('(?is)(<form[^>]+)', response)
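
And the change named in the commit title: get_url can return no data, so the empty response is folded into the existing skip check before re.findall ever touches it. The guard in isolation (a sketch; provider stands in for the BTSceneProvider instance):

import re

def find_form(provider, url):
    response = provider.get_url(url)  # may be None when BTS returns no data
    if provider.should_skip() or not response:
        return None                   # bail out before re.findall sees None
    return re.findall('(?is)(<form[^>]+)', response)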