foo
\tbar\n \n baz ")
- # Everything outside the
tag is reformatted, but everything
- # inside is left alone.
- self.assertEqual(
- u'\n foo\n
\tbar\n \n \n baz\n
',
- soup.div.prettify())
-
- def test_prettify_accepts_formatter(self):
- soup = BeautifulSoup("foo")
- pretty = soup.prettify(formatter = lambda x: x.upper())
- self.assertTrue("FOO" in pretty)
-
- def test_prettify_outputs_unicode_by_default(self):
- soup = self.soup(" ")
- self.assertEqual(unicode, type(soup.prettify()))
-
- def test_prettify_can_encode_data(self):
- soup = self.soup(" ")
- self.assertEqual(bytes, type(soup.prettify("utf-8")))
-
- def test_html_entity_substitution_off_by_default(self):
- markup = u"Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu! "
- soup = self.soup(markup)
- encoded = soup.b.encode("utf-8")
- self.assertEqual(encoded, markup.encode('utf-8'))
-
- def test_encoding_substitution(self):
- # Here's the tag saying that a document is
- # encoded in Shift-JIS.
- meta_tag = (' ')
- soup = self.soup(meta_tag)
-
- # Parse the document, and the charset apprears unchanged.
- self.assertEqual(soup.meta['content'], 'text/html; charset=x-sjis')
-
- # Encode the document into some encoding, and the encoding is
- # substituted into the meta tag.
- utf_8 = soup.encode("utf-8")
- self.assertTrue(b"charset=utf-8" in utf_8)
-
- euc_jp = soup.encode("euc_jp")
- self.assertTrue(b"charset=euc_jp" in euc_jp)
-
- shift_jis = soup.encode("shift-jis")
- self.assertTrue(b"charset=shift-jis" in shift_jis)
-
- utf_16_u = soup.encode("utf-16").decode("utf-16")
- self.assertTrue("charset=utf-16" in utf_16_u)
-
- def test_encoding_substitution_doesnt_happen_if_tag_is_strained(self):
- markup = ('foo ')
-
- # Beautiful Soup used to try to rewrite the meta tag even if the
- # meta tag got filtered out by the strainer. This test makes
- # sure that doesn't happen.
- strainer = SoupStrainer('pre')
- soup = self.soup(markup, parse_only=strainer)
- self.assertEqual(soup.contents[0].name, 'pre')
-
-class TestEncoding(SoupTest):
- """Test the ability to encode objects into strings."""
-
- def test_unicode_string_can_be_encoded(self):
- html = u"\N{SNOWMAN} "
- soup = self.soup(html)
- self.assertEqual(soup.b.string.encode("utf-8"),
- u"\N{SNOWMAN}".encode("utf-8"))
-
- def test_tag_containing_unicode_string_can_be_encoded(self):
- html = u"\N{SNOWMAN} "
- soup = self.soup(html)
- self.assertEqual(
- soup.b.encode("utf-8"), html.encode("utf-8"))
-
- def test_encoding_substitutes_unrecognized_characters_by_default(self):
- html = u"\N{SNOWMAN} "
- soup = self.soup(html)
- self.assertEqual(soup.b.encode("ascii"), b"☃ ")
-
- def test_encoding_can_be_made_strict(self):
- html = u"\N{SNOWMAN} "
- soup = self.soup(html)
- self.assertRaises(
- UnicodeEncodeError, soup.encode, "ascii", errors="strict")
-
- def test_decode_contents(self):
- html = u"\N{SNOWMAN} "
- soup = self.soup(html)
- self.assertEqual(u"\N{SNOWMAN}", soup.b.decode_contents())
-
- def test_encode_contents(self):
- html = u"\N{SNOWMAN} "
- soup = self.soup(html)
- self.assertEqual(
- u"\N{SNOWMAN}".encode("utf8"), soup.b.encode_contents(
- encoding="utf8"))
-
- def test_deprecated_renderContents(self):
- html = u"\N{SNOWMAN} "
- soup = self.soup(html)
- self.assertEqual(
- u"\N{SNOWMAN}".encode("utf8"), soup.b.renderContents())
-
-class TestNavigableStringSubclasses(SoupTest):
-
- def test_cdata(self):
- # None of the current builders turn CDATA sections into CData
- # objects, but you can create them manually.
- soup = self.soup("")
- cdata = CData("foo")
- soup.insert(1, cdata)
- self.assertEqual(str(soup), "")
- self.assertEqual(soup.find(text="foo"), "foo")
- self.assertEqual(soup.contents[0], "foo")
-
- def test_cdata_is_never_formatted(self):
- """Text inside a CData object is passed into the formatter.
-
- But the return value is ignored.
- """
-
- self.count = 0
- def increment(*args):
- self.count += 1
- return "BITTER FAILURE"
-
- soup = self.soup("")
- cdata = CData("<><><>")
- soup.insert(1, cdata)
- self.assertEqual(
- b"<><>]]>", soup.encode(formatter=increment))
- self.assertEqual(1, self.count)
-
- def test_doctype_ends_in_newline(self):
- # Unlike other NavigableString subclasses, a DOCTYPE always ends
- # in a newline.
- doctype = Doctype("foo")
- soup = self.soup("")
- soup.insert(1, doctype)
- self.assertEqual(soup.encode(), b"\n")
-
-
-class TestSoupSelector(TreeTest):
-
- HTML = """
-
-
-
-The title
-
-
-
-
-
-
-
English
-
English UK
-
English US
-
French
-
-
-
-"""
-
- def setUp(self):
- self.soup = BeautifulSoup(self.HTML)
-
- def assertSelects(self, selector, expected_ids):
- el_ids = [el['id'] for el in self.soup.select(selector)]
- el_ids.sort()
- expected_ids.sort()
- self.assertEqual(expected_ids, el_ids,
- "Selector %s, expected [%s], got [%s]" % (
- selector, ', '.join(expected_ids), ', '.join(el_ids)
- )
- )
-
- assertSelect = assertSelects
-
- def assertSelectMultiple(self, *tests):
- for selector, expected_ids in tests:
- self.assertSelect(selector, expected_ids)
-
- def test_one_tag_one(self):
- els = self.soup.select('title')
- self.assertEqual(len(els), 1)
- self.assertEqual(els[0].name, 'title')
- self.assertEqual(els[0].contents, [u'The title'])
-
- def test_one_tag_many(self):
- els = self.soup.select('div')
- self.assertEqual(len(els), 3)
- for div in els:
- self.assertEqual(div.name, 'div')
-
- def test_tag_in_tag_one(self):
- els = self.soup.select('div div')
- self.assertSelects('div div', ['inner'])
-
- def test_tag_in_tag_many(self):
- for selector in ('html div', 'html body div', 'body div'):
- self.assertSelects(selector, ['main', 'inner', 'footer'])
-
- def test_tag_no_match(self):
- self.assertEqual(len(self.soup.select('del')), 0)
-
- def test_invalid_tag(self):
- self.assertRaises(ValueError, self.soup.select, 'tag%t')
-
- def test_header_tags(self):
- self.assertSelectMultiple(
- ('h1', ['header1']),
- ('h2', ['header2', 'header3']),
- )
-
- def test_class_one(self):
- for selector in ('.onep', 'p.onep', 'html p.onep'):
- els = self.soup.select(selector)
- self.assertEqual(len(els), 1)
- self.assertEqual(els[0].name, 'p')
- self.assertEqual(els[0]['class'], ['onep'])
-
- def test_class_mismatched_tag(self):
- els = self.soup.select('div.onep')
- self.assertEqual(len(els), 0)
-
- def test_one_id(self):
- for selector in ('div#inner', '#inner', 'div div#inner'):
- self.assertSelects(selector, ['inner'])
-
- def test_bad_id(self):
- els = self.soup.select('#doesnotexist')
- self.assertEqual(len(els), 0)
-
- def test_items_in_id(self):
- els = self.soup.select('div#inner p')
- self.assertEqual(len(els), 3)
- for el in els:
- self.assertEqual(el.name, 'p')
- self.assertEqual(els[1]['class'], ['onep'])
- self.assertFalse(els[0].has_attr('class'))
-
- def test_a_bunch_of_emptys(self):
- for selector in ('div#main del', 'div#main div.oops', 'div div#main'):
- self.assertEqual(len(self.soup.select(selector)), 0)
-
- def test_multi_class_support(self):
- for selector in ('.class1', 'p.class1', '.class2', 'p.class2',
- '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'):
- self.assertSelects(selector, ['pmulti'])
-
- def test_multi_class_selection(self):
- for selector in ('.class1.class3', '.class3.class2',
- '.class1.class2.class3'):
- self.assertSelects(selector, ['pmulti'])
-
- def test_child_selector(self):
- self.assertSelects('.s1 > a', ['s1a1', 's1a2'])
- self.assertSelects('.s1 > a span', ['s1a2s1'])
-
- def test_child_selector_id(self):
- self.assertSelects('.s1 > a#s1a2 span', ['s1a2s1'])
-
- def test_attribute_equals(self):
- self.assertSelectMultiple(
- ('p[class="onep"]', ['p1']),
- ('p[id="p1"]', ['p1']),
- ('[class="onep"]', ['p1']),
- ('[id="p1"]', ['p1']),
- ('link[rel="stylesheet"]', ['l1']),
- ('link[type="text/css"]', ['l1']),
- ('link[href="blah.css"]', ['l1']),
- ('link[href="no-blah.css"]', []),
- ('[rel="stylesheet"]', ['l1']),
- ('[type="text/css"]', ['l1']),
- ('[href="blah.css"]', ['l1']),
- ('[href="no-blah.css"]', []),
- ('p[href="no-blah.css"]', []),
- ('[href="no-blah.css"]', []),
- )
-
- def test_attribute_tilde(self):
- self.assertSelectMultiple(
- ('p[class~="class1"]', ['pmulti']),
- ('p[class~="class2"]', ['pmulti']),
- ('p[class~="class3"]', ['pmulti']),
- ('[class~="class1"]', ['pmulti']),
- ('[class~="class2"]', ['pmulti']),
- ('[class~="class3"]', ['pmulti']),
- ('a[rel~="friend"]', ['bob']),
- ('a[rel~="met"]', ['bob']),
- ('[rel~="friend"]', ['bob']),
- ('[rel~="met"]', ['bob']),
- )
-
- def test_attribute_startswith(self):
- self.assertSelectMultiple(
- ('[rel^="style"]', ['l1']),
- ('link[rel^="style"]', ['l1']),
- ('notlink[rel^="notstyle"]', []),
- ('[rel^="notstyle"]', []),
- ('link[rel^="notstyle"]', []),
- ('link[href^="bla"]', ['l1']),
- ('a[href^="http://"]', ['bob', 'me']),
- ('[href^="http://"]', ['bob', 'me']),
- ('[id^="p"]', ['pmulti', 'p1']),
- ('[id^="m"]', ['me', 'main']),
- ('div[id^="m"]', ['main']),
- ('a[id^="m"]', ['me']),
- )
-
- def test_attribute_endswith(self):
- self.assertSelectMultiple(
- ('[href$=".css"]', ['l1']),
- ('link[href$=".css"]', ['l1']),
- ('link[id$="1"]', ['l1']),
- ('[id$="1"]', ['l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1']),
- ('div[id$="1"]', []),
- ('[id$="noending"]', []),
- )
-
- def test_attribute_contains(self):
- self.assertSelectMultiple(
- # From test_attribute_startswith
- ('[rel*="style"]', ['l1']),
- ('link[rel*="style"]', ['l1']),
- ('notlink[rel*="notstyle"]', []),
- ('[rel*="notstyle"]', []),
- ('link[rel*="notstyle"]', []),
- ('link[href*="bla"]', ['l1']),
- ('a[href*="http://"]', ['bob', 'me']),
- ('[href*="http://"]', ['bob', 'me']),
- ('[id*="p"]', ['pmulti', 'p1']),
- ('div[id*="m"]', ['main']),
- ('a[id*="m"]', ['me']),
- # From test_attribute_endswith
- ('[href*=".css"]', ['l1']),
- ('link[href*=".css"]', ['l1']),
- ('link[id*="1"]', ['l1']),
- ('[id*="1"]', ['l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1']),
- ('div[id*="1"]', []),
- ('[id*="noending"]', []),
- # New for this test
- ('[href*="."]', ['bob', 'me', 'l1']),
- ('a[href*="."]', ['bob', 'me']),
- ('link[href*="."]', ['l1']),
- ('div[id*="n"]', ['main', 'inner']),
- ('div[id*="nn"]', ['inner']),
- )
-
- def test_attribute_exact_or_hypen(self):
- self.assertSelectMultiple(
- ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
- ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
- ('p[lang|="fr"]', ['lang-fr']),
- ('p[lang|="gb"]', []),
- )
-
- def test_attribute_exists(self):
- self.assertSelectMultiple(
- ('[rel]', ['l1', 'bob', 'me']),
- ('link[rel]', ['l1']),
- ('a[rel]', ['bob', 'me']),
- ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']),
- ('p[class]', ['p1', 'pmulti']),
- ('[blah]', []),
- ('p[blah]', []),
- )
-
- def test_nth_of_type(self):
- # Try to select first paragraph
- els = self.soup.select('div#inner p:nth-of-type(1)')
- self.assertEqual(len(els), 1)
- self.assertEqual(els[0].string, u'Some text')
-
- # Try to select third paragraph
- els = self.soup.select('div#inner p:nth-of-type(3)')
- self.assertEqual(len(els), 1)
- self.assertEqual(els[0].string, u'Another')
-
- # Try to select (non-existent!) fourth paragraph
- els = self.soup.select('div#inner p:nth-of-type(4)')
- self.assertEqual(len(els), 0)
-
- # Pass in an invalid value.
- self.assertRaises(
- ValueError, self.soup.select, 'div p:nth-of-type(0)')
-
- def test_nth_of_type_direct_descendant(self):
- els = self.soup.select('div#inner > p:nth-of-type(1)')
- self.assertEqual(len(els), 1)
- self.assertEqual(els[0].string, u'Some text')
-
- def test_id_child_selector_nth_of_type(self):
- self.assertSelects('#inner > p:nth-of-type(2)', ['p1'])
-
- def test_select_on_element(self):
- # Other tests operate on the tree; this operates on an element
- # within the tree.
- inner = self.soup.find("div", id="main")
- selected = inner.select("div")
- # The tag was selected. The ]]>
- ÐÐµÐ²Ð¸Ð´Ð¸Ð¼Ð°Ñ Ð±Ñ€Ð¾Ð½Ñ Ð´Ð»Ñ Ð’Ð°ÑˆÐµÐ³Ð¾ девайÑа.
-
-
Ð’Ñ‹ из тех, кто хотел бы защитить ÐºÐ¾Ñ€Ð¿ÑƒÑ Ð¸ диÑплей Ñвоего девайÑа от царапин и грÑзи, но при Ñтом не хотели бы нарушать ÑÑтетику дизайна девайÑа, например, чехлом? Ð’ÐµÐ»Ð¸ÐºÐ¾Ð»ÐµÐ¿Ð½Ð°Ñ Ð½Ð¾Ð²Ð¾ÑÑ‚ÑŒ Ð´Ð»Ñ Ð²Ð°Ñ - в РоÑÑии поÑвилÑÑ ÑпоÑоб надежной защиты девайÑов от царапин и грÑзи - защитный комплект SaveCase. Пленка, Ð²Ð¾ÑˆÐµÐ´ÑˆÐ°Ñ Ð² оÑнову данного защитного комплекта, ÑовмеÑтно Ñ Ð»Ð°Ð±Ð¾Ñ€Ð°Ñ‚Ð¾Ñ€Ð¸ÐµÐ¹ SavePatterns, изначально нашла Ñвое применение в автомобильной промышленноÑти Ð´Ð»Ñ Ð·Ð°Ñ‰Ð¸Ñ‚Ñ‹ дорогоÑтоÑщего лакокраÑочного Ð¿Ð¾ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ð°Ð²Ñ‚Ð¾Ð¼Ð¾Ð±Ð¸Ð»ÐµÐ¹. Теперь Ñто технологичеÑкое новшеÑтво применÑÑŽÑ‚ и Ð´Ð»Ñ Ð·Ð°Ñ‰Ð¸Ñ‚Ñ‹ девайÑов.
-
-
ОпиÑание Ñ Ð¾Ñ„Ð¸Ñ†Ð¸Ð°Ð»ÑŒÐ½Ð¾Ð³Ð¾ Ñайта www.savecase.ru :
-
УÑтановив защитный комплект SaveCase на Ñвой телефон, плеер, ноутбук или игровую конÑоль, Ð’Ñ‹ получаете 100% невидимую защиту от изноÑа, царапин и трещин на протÑжении вÑего Ñрока иÑпользованиÑ. SaveCase защищает ваше уÑтройÑтво Ñо вÑех Ñторон, Ð´ÐµÐ»Ð°Ñ ÐµÐ³Ð¾ практичеÑки неуÑзвимым. Ðе удивлÑйтеÑÑŒ, еÑли ваше уÑтройÑтво Ñ SaveCase, даже ÑпуÑÑ‚Ñ Ð¼ÐµÑÑцы иÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¾ÑтаетÑÑ Ñ‚Ð°ÐºÐ¸Ð¼ же как и в первый день, ведь Ð´Ð»Ñ Ñтого применÑетÑÑ Ð½ÐµÐ¾Ð±Ñ‹Ñ‡Ð°Ð¹Ð½Ð¾ прочный материал, в народе получивший название - "Ð»ÐµÐ³ÐºÐ°Ñ Ð±Ñ€Ð¾Ð½Ñ"! Мы наÑтолько уверены в качеÑтве защитного комплекта SaveCase, что даем на него пожизненную гарантию.
-
-
-
- ]]>
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: ZXLightControl v.1.0.0
-
- tag:touchnokia.ru,2009://1.814
-
- 2009-05-31T10:22:58Z
- 2009-05-31T10:23:41Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- ZXLightControl v.1.0.0 Уменьшено до 50%
360 x 640 (38,06 килобайт)
Уменьшено до 50%
360 x 640 (37,13 килобайт)
Разработчик: ZXStylesЯзык: EnglishСоÑтоÑние: FreeОпиÑание: Программа Ð´Ð»Ñ ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð´Ñветкой Ñкрана и клавиш на Ñмартфонах Symbian 9.2, 9.3, 9.4 Программа ÑвлÑетÑÑ Ð²Ñ‹Ð´ÐµÐ»ÐµÐ½Ð½Ñ‹Ð¼ в отдельное приложение блоком ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°Ñширенной подÑветкой из программы ZXReader.ВозможноÑти: * наÑтройка ÑркоÑти подÑветки Ñкрана * Отключение подÑветки клавиш * Удерживание подÑветки Ñкрана включенной беÑконечно ZXLightControl_v1.0.0_S60_5th_signed.zip ( 12,71 килобайт ) ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: Voice Inbox v.1.09.112
-
- tag:touchnokia.ru,2009://1.813
-
- 2009-05-31T09:57:06Z
- 2009-05-31T09:59:46Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Voice Inbox v.1.09.112 Уменьшено до 50%
360 x 640 (53,6 килобайт)
Уменьшено до 50%
360 x 640 (50,23 килобайт)
Разработчик: WebGateЯзык: РуÑÑкий ( Ð’ архиве лежит руÑификатор) СоÑтоÑние: FreeОпиÑание: Программа автоответчик[*] ЕÑли у Ð²Ð°Ñ Ñ€Ð°Ð·Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²Ð°Ð½ Ñмартфон то качаем и Ñтавим, еÑли нет то файл необходимо подпиÑать Voice_Inbox_v.1.09.112.zip ( 152,48 килобайт ) ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: Smart Movie v.4.15
-
- tag:touchnokia.ru,2009://1.812
-
- 2009-05-31T09:24:12Z
- 2009-05-31T09:26:29Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Smart Movie v.4.15 Уменьшено до 50%
360 x 640 (61,1 килобайт)
Уменьшено до 50%
360 x 640 (63,72 килобайт)
Разработчик: Lonely Cat GamesЯзык: EnglishСоÑтоÑние: TrialОпиÑание: Ðаонецто Ð²Ð·Ð»Ð¾Ð¼Ð°Ð½Ð°Ñ Ð²ÐµÑ€ÑÐ¸Ñ Ð’Ð¾Ð·Ð¼Ð¾Ð¶Ð½Ð¾Ñти SmartMovie: - Стандартный формат AVI - ИÑпользует подгружаемые кодеки - Портретный и пейзажный режимы проÑмотра видео - Первый мобильный плеер Ñ Ð¿Ð¾Ð´Ð´ÐµÑ€Ð¶ÐºÐ¾Ð¹ Ñубтитров, можно Ñмотреть видео на многих Ñзыках - Поддержка кодеков DirectShow - Очень быÑтрое конвертирование - БикубичеÑÐºÐ°Ñ Ð¸Ð½Ñ‚ÐµÑ€Ð¿Ð¾Ð»ÑÑ†Ð¸Ñ - макÑимальное качеÑтво ужатого видео - ÐаÑтраиваемые параметры качеÑтва/объема в Smart Movie - Богатые наÑтройки плеера Ðовое в SmartMovie v.4.15: - ВозможноÑÑ‚ÑŒ выбора папки воÑÐ¿Ñ€Ð¾Ð¸Ð·Ð²ÐµÐ´ÐµÐ½Ð¸Ñ - ЯпонÑÐºÐ°Ñ Ð¸ китайÑÐºÐ°Ñ ÐºÐ¾Ð´Ð¸Ñ€Ð¾Ð²ÐºÐ¸ Ð´Ð»Ñ Ñубтитров]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: PowerMP3 v.1.15 Beta
-
- tag:touchnokia.ru,2009://1.811
-
- 2009-05-31T09:13:04Z
- 2009-05-31T09:21:41Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- PowerMP3 v.1.15 Beta Уменьшено до 50%
360 x 640 (52,76 килобайт)
Уменьшено до 50%
360 x 640 (48,81 килобайт)
Разработчик: MobiFactorЯзык: РуÑÑкийСоÑтоÑние: FreeОпиÑание: Музыкальный плеер. Теперь Ñо Ñпециальными Ñкинами полноценно работает и на 5800.ОÑобенноÑти: - поддержка большинÑтва популÑрных аудио форматов (MP3/OGG/AAC/MP4) - отображение обложки альбома Ñ Ð¿Ð¾Ð¸Ñком в интернете - Ñффекты визуализации - поддержка Ñкинов - поддержка пультов AD-4Ñ… - Ñлип-таймер - воÑпроизведение по выбранным папкам - Ñоздание плей-лиÑтов - Ñквалайзер - раÑширенное ÑтереоÐовое в PowerMP3 v1.15: -ИÑправлена ошибка Ñ Ð¿ÐµÑ€ÐµÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸ÐµÐ¼ дорожек минигарнитурой -ИÑправлена куча небольших ошибок -Сделали ÑобÑтвенный Ñквалайзер Ð´Ð»Ñ Ð²Ñех телефонов -Добавлена поддержка папок. Теперь, когда вы удалÑете композиции из ÑпиÑка воÑпроизведениÑ, поÑвитÑÑ Ð´Ð¸Ð°Ð»Ð¾Ð³Ð¾Ð²Ð¾Ðµ окно, где вы можете выбрать Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¿ÐµÑни Ñ Ð´Ð¸Ñка PowerMP3_v.1.15b.zip ( 1,04 мегабайт ) ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: XpressCut v.0.1
-
- tag:touchnokia.ru,2009://1.810
-
- 2009-05-31T08:57:55Z
- 2009-05-31T09:00:27Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- XpressCut v.0.1 Разработчик : kAIST Язык : РуÑÑкийСоÑтоÑние : FreeОпиÑание : программа Ð´Ð»Ñ Nokia 5800 и N97, Ð¿Ñ€ÐµÐ´Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð½Ð°Ñ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñтой и быÑтрой нарезки mp3 файлов на звонок. Теперь нарезать любимую мелодию Ñебе или друзьÑм можно "на ходу" без помощи компьютера, вÑего за неÑколько Ñекунд. ДоÑтаточно открыть mp3 файл, уÑтановить точку начала, точку окончаниÑ, нажать "save" и рингтон готов! ИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ð¿Ð¾ применению : Ñверху "трека" раÑпологаетÑÑ Ð¿Ð¾Ð»Ð¾Ñа покрутки, перемеÑтив которую, выбирите нужную позицию. УÑтановите курÑор в нужную позицию, ткнув ÑтилуÑом на "треке". С меÑта, где уÑтановлен курÑор, трек можно проиграть, нажав на "play" (оÑтановить можно, нажав на ту же кнопку). УÑтановите метки начала и конца нарезки, воÑпользовавшиÑÑŒ ÑоответÑтвующими кнопками. ПоÑле Ñтого, можно нажать на "save". ПоÑле ÑÐ¾Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ñ‚Ñ€ÐµÐºÐ°, его можно проÑлушать. Ð’ÐИМÐÐИЕ!! Программа работает только Ñ mp3 файлами,Ñозданными Ñ Ð¿Ð¾ÑтоÑнным битрейтом! Ð”Ð»Ñ Ñ€Ð°Ð±Ð¾Ñ‚Ð¾ÑпоÑобноÑти нужен python не ниже верÑии 1.9.4. Python и программу Ñтавить на один и тот же диÑк! Уменьшено до 63%
640 x 360 (38,46 килобайт)
Уменьшено до 63%
640 x 360 (59,26 килобайт)
Python_1.9.5.zip ( 2,75 мегабайт ) XpressCut_0.1_ru.zip ( 142,57 килобайт ) ]]>
-
-
-
-
-
- Игры Ð´Ð»Ñ Nokia 5800 и Nokia N97: Asphalt 4 Elite Racing 3D v.1.0.1
-
- tag:touchnokia.ru,2009://1.808
-
- 2009-05-31T08:37:27Z
- 2009-05-31T08:44:42Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Asphalt 4 Elite Racing 3D v1.0.1 Разработчик: GameloftЯзык: ÐнглийÑкий (Ð’ sis верÑии РуÑÑкий)СоÑтоÑние: FreeОпиÑание: ДолгожданнаÑ, великолепнаÑ, Ð±Ñ€Ð¸Ð»Ð»Ð¸Ð°Ð½Ñ‚Ð¾Ð²Ð°Ñ 3D верÑÐ¸Ñ Ñамых лучших гонок Ð´Ð»Ñ Ð¼Ð¾Ð±Ð¸Ð»ÑŒÐ½Ð¾Ð³Ð¾ Asphalt 4: Elite Racing 3D! Ты до Ñих пор Ñто читаешь? БыÑтро качать, Ñ‚Ñ‹ не должен пропуÑтить Ñту игру! -10 удивительных автомобилей и мотоциклов, Ð²ÐºÐ»ÑŽÑ‡Ð°Ñ Ñ‚Ð°ÐºÐ¸Ðµ, как Bugatti Veyron, Ferrari F430 Spider, Nissan GT-R, Ducati 1098. -ГонÑй по шеÑти Ñамых горÑчих городам мира: Монте Карло, Беверли Хиллз, Шангай, Ðью-Йорк, Дюбай, Париж. -Ð£Ð´Ð¸Ð²Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ 3D графика проÑто захватывает дыхание! -Тюнингуй Ñвой автомобиль в ÑобÑтвенном гараже! -ÐаÑлаждайÑÑ Ð¿Ñтью различными режимами игры, такими как преÑледование, дрифт, и Ñбей вÑех. -ÐдаптируйÑÑ Ð² различных погодных уÑловиÑÑ…: Ñможешь ли управлÑÑ‚ÑŒ Ñвоим автомобилем в ливень? Уменьшено до 63%
640 x 360 (61,07 килобайт)
Уменьшено до 63%
640 x 360 (63,54 килобайт)
Asphalt4.zip ( 684,82 килобайт )И верÑÐ¸Ñ Asphalt 4 HD в sis Уменьшено до 63%
639 x 359 (72,42 килобайт)
Уменьшено до 63%
639 x 359 (60,41 килобайт)
Asphalt4.N5800.Unsigned.zip ( 5,35 мегабайт ) ]]>
-
-
-
-
-
- Игры Ð´Ð»Ñ Nokia 5800 и Nokia N97: 3D Fast and Furious
-
- tag:touchnokia.ru,2009://1.809
-
- 2009-05-30T19:47:24Z
- 2009-05-31T08:24:20Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 3D Fast and Furious Разработчик: i-playЯзык: ÐнглийÑкийСоÑтоÑние: ОпиÑание: Ð¥Ð¾Ñ€Ð¾ÑˆÐ°Ñ Ð¸Ð³Ñ€Ð° Ñ 3D графикой Уменьшено до 50%
360 x 640 (34,56 килобайт)
Уменьшено до 50%
360 x 640 (44,54 килобайт)
3D_Fast_and_Furious_n5800.zip ( 675,77 килобайт ) ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: Kaspersky Mobile Security 8.0 release v.8.0.51 - Обновление
-
- tag:touchnokia.ru,2009://1.807
-
- 2009-05-30T19:16:24Z
- 2009-05-30T19:46:19Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Kaspersky Mobile Security 8.0 release v.8.0.51 Разработчик: ЗÐО "Ð›Ð°Ð±Ð¾Ñ€Ð°Ñ‚Ð¾Ñ€Ð¸Ñ ÐšÐ°ÑперÑкого"Язык: РуÑÑкийСоÑтоÑние: Free (дейÑтвительна до 1.02.2048 года) ОпиÑание: ÐÐ½Ñ‚Ð¸Ð²Ð¸Ñ€ÑƒÑ ÐšÐ°ÑперÑкого® Mobile Ñто удобное и надежное решение Ð´Ð»Ñ Ð·Ð°Ñ‰Ð¸Ñ‚Ñ‹ Ñмартфонов под управлением Symbian OS и Windows Mobile от вредоноÑных программ Ð´Ð»Ñ Ð¼Ð¾Ð±Ð¸Ð»ÑŒÐ½Ñ‹Ñ… платформ. ПреимущеÑтва Защита от мобильных вируÑов в режиме реального времени Защита от Ñпама в SMS/EMS/MMS Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ черных и белых ÑпиÑков ÐÐµÐ·Ð°Ð¼ÐµÑ‚Ð½Ð°Ñ Ñ€Ð°Ð±Ð¾Ñ‚Ð° в фоновом режиме ÐвтоматичеÑкое обновление антивируÑных баз ОÑновные функции ÐнтивируÑÐ½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° вÑтроенной памÑти телефона, а также карт памÑти по требованию Проверка вÑех входÑщих или модифицируемых объектов без учаÑÑ‚Ð¸Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ ÐŸÐ¾Ð»Ð½Ð°Ñ Ð°Ð½Ñ‚Ð¸Ð²Ð¸Ñ€ÑƒÑÐ½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° по раÑпиÑанию в удобное Ð´Ð»Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð²Ñ€ÐµÐ¼Ñ Ð‘ÐµÑÐ¿Ð»Ð°Ñ‚Ð½Ð°Ñ Ñ‚ÐµÑ…Ð½Ð¸Ñ‡ÐµÑÐºÐ°Ñ Ð¿Ð¾Ð´Ð´ÐµÑ€Ð¶ÐºÐ° по вопроÑам уÑтановки, активации, наÑтройки и ÑкÑплуатации продукта Уменьшено до 50%
360 x 640 (42,68 килобайт)
Уменьшено до 50%
360 x 640 (69,83 килобайт)
kaspersky_mobil.zip ( 885,88 килобайт ) - в архиве находитÑÑ Ð¸Ð½ÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ð½Ð° английÑком по региÑтрации программы ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: Dudu Recorder v.3.20
-
- tag:touchnokia.ru,2009://1.806
-
- 2009-05-30T18:37:40Z
- 2009-05-30T18:39:45Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Dudu Recorder v.3.20 Разработчик: HongDi TechnologyЯзык: English СоÑтоÑние: Free (Ð´Ð»Ñ Ñ€ÐµÐ³Ð¸Ñтрации вводим 12090704 ОпиÑание: Маленький диктофон, имеющий функции: запиÑывает в формате ÐÐœR, умеет Ñортировать запиÑи, еÑÑ‚ÑŒ авто-Ñтарт, Ð½ÐµÐ¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð½Ð°Ñ Ð´Ð»Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾ÑÑ‚ÑŒ запиÑи (завиÑит от количеÑтва Ñвободного меÑта на диÑке), Ñкрывает запиÑи, выбор памÑти Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи. ЕÑли при уÑтановке выдает ошибка Ð¾Ð±Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ ÑƒÐ´Ð°Ð»Ð¸Ñ‚Ðµ предыдущие верÑии!Ð˜Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ð² v.3.20: -Добавлена защита паролем -горÑчии клавиши -Ñкрытый режим (Ð¿Ñ€Ð¸Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð½Ðµ видно в ÑпиÑке запущенных) Уменьшено до 50%
360 x 640 (49,76 килобайт)
Уменьшено до 50%
360 x 640 (50,93 килобайт)
dudu320.zip ( 114,99 килобайт ) ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: MyPhone v.2.11 - Обновление
-
- tag:touchnokia.ru,2009://1.805
-
- 2009-05-30T17:58:41Z
- 2009-05-30T17:59:38Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- MyPhone v.2.11 Разработчик: immmoooЯзык: ÐнглийÑкийСоÑтоÑние: TrialОпиÑание: ÐмулÑтор интерфейÑа iPhone на 5800 Уменьшено до 50%
360 x 640 (32,26 килобайт)
Уменьшено до 50%
360 x 640 (63,16 килобайт)
myphone.zip ( 410,09 килобайт ) ОбÑуждение ведем здеÑÑŒ ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: Eye Call v.1.04
-
- tag:touchnokia.ru,2009://1.804
-
- 2009-05-30T17:47:01Z
- 2009-05-30T17:58:12Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Eye Call v.1.04 Разработчик: EyeSight.Язык: ÐнглийÑкийСоÑтоÑние: FreeОпиÑание: Бывает так что кто-то звонит, а звук так неумеÑтен, а Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ вот Ñтой программы доÑтаточно махнуть рукой и телефон заглушит его. Также можно наÑтроить что-бы на 2-ой взмах отÑылалоÑÑŒ SMS звонÑщему.[*] ЕÑли у Ð²Ð°Ñ Ñ€Ð°Ð·Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²Ð°Ð½ Ñмартфон то качаем и Ñтавим, еÑли нет то файл необходимо подпиÑать Уменьшено до 50%
360 x 640 (30,56 килобайт)
Уменьшено до 50%
360 x 640 (47,39 килобайт)
eyesight.zip ( 1,34 мегабайт ) ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: OVI Store
-
- tag:touchnokia.ru,2009://1.803
-
- 2009-05-28T19:57:54Z
- 2009-05-28T19:59:03Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- OVI Store Разработчик: Язык: РуÑÑкийСоÑтоÑние: FreeОпиÑание: Программа Ð´Ð»Ñ ÑÐºÐ°Ñ‡Ð¸Ð²Ð°Ð½Ð¸Ñ ÐºÐ¾Ð½Ñ‚ÐµÐ½Ñ‚Ð° из Nokia OVI Store (программы, игры, картинки и Ñ‚.д) прÑмо на телефоне. ПриÑутÑтвует возможноÑÑ‚ÑŒ фильтрации предÑтавленного - только беÑплатные. Уменьшено до 50%
360 x 640 (61,05 килобайт)
Уменьшено до 50%
360 x 640 (25,44 килобайт)
OVIStore.zip ( 616,74 килобайт ) ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: Talkonaut v.5.51.68
-
- tag:touchnokia.ru,2009://1.802
-
- 2009-05-28T19:16:28Z
- 2009-05-28T19:18:50Z
-
- Talkonaut v.5.51.68Разработчик: talkonaut.comЯзык: РуÑÑкийСоÑтоÑние: FreeОпиÑание: Talkonaut - Ñто беÑплатный GoogleTalk/Jabber IM клиент Ñ Ð³Ð¾Ð»Ð¾Ñовыми функциÑми.Функции:беÑплатные звонки на GTalk, MSN Messenger, Yahoo, SIP;отправка и получение Ñообщений из: GTalk, Jabber, ICQ, MSN, AIM и Yahoo;дешевые звонки через GTalk;беÑплатные звонки Ñ Ð²Ð°ÑˆÐµÐ³Ð¾ ÑущеÑтвующего...
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Talkonaut v.5.51.68 Разработчик : talkonaut.comЯзык : РуÑÑкийСоÑтоÑние : FreeОпиÑание : Talkonaut - Ñто беÑплатный GoogleTalk/Jabber IM клиент Ñ Ð³Ð¾Ð»Ð¾Ñовыми функциÑми.Функции: беÑплатные звонки на GTalk, MSN Messenger, Yahoo, SIP; отправка и получение Ñообщений из: GTalk, Jabber, ICQ, MSN, AIM и Yahoo; дешевые звонки через GTalk; беÑплатные звонки Ñ Ð²Ð°ÑˆÐµÐ³Ð¾ ÑущеÑтвующего SIP-Ñчета; наличие Ñмайликов, Ñигналов, передачи файлов через Jabber; поддержка Jabber Service Discovery Ð´Ð»Ñ Ñ€Ð°Ð±Ð¾Ñ‚Ñ‹ Ñ Ð´Ñ€ÑƒÐ³Ð¸Ð¼Ð¸ IM клиентами, поиÑк контактов ICQ и отправка SMS Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ Mail.RU Agent; поддержка ÑÐ¶Ð°Ñ‚Ð¸Ñ Ñ‚Ñ€Ð°Ñ„Ð¸ÐºÐ° в 5-10 раз. Что нового: Добавлено раÑширение (PNG, JPG или GIF) к имени файла при Ñохранении аватара. Уменьшено до 50%
360 x 640 (74,56 килобайт)
Уменьшено до 50%
360 x 640 (34,58 килобайт)
talkonaut.zip ( 1,29 мегабайт ) ]]>
-
-
-
-
-
- Программы Ð´Ð»Ñ Nokia 5800 и Nokia N97: MobileAgent v.1.51
-
- tag:touchnokia.ru,2009://1.801
-
- 2009-05-27T20:38:05Z
- 2009-05-28T19:27:55Z
-
-
-
- iPod
- http://TouchNokia.ru
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- MobileAgent v.1.51
Разработчик: mail.ruЯзык: РуÑÑкийСоÑтоÑние: FreeОпиÑание: - ÐžÐ´Ð½Ð¾Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð°Ñ Ñ€Ð°Ð±Ð¾Ñ‚Ð° в агенте, аÑьке и Jabber/GTalk - передача файлов - анимированные Ñмайлы - доÑтуп к почте и многое другое...ИзменениÑ: - Ð°Ð½Ð¸Ð¼Ð°Ñ†Ð¸Ñ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñа; - поддержка ÑиÑтемных Ñкинов; - поддержка графичеÑких Ñкинов; - поддержка Jabber/GTalk/Я.Онлайн/Live Journal и Ñ‚.п. - раÑширенный поиÑк по Ðгенту; - автоÑтатуÑÑ‹ и автоответчик; - работа Ñ ÐºÐ°Ñ€Ñ‚Ð°Ð¼Ð¸ (пока МоÑква Питер детально, вÑÑ Ð Ð¾ÑÑÐ¸Ñ Ð² небольшом зуме); - возможноÑÑ‚ÑŒ заранее загрузить файлы кÑша МоÑквы и Питера; - показ пробок; - поиÑк по карте; - позиционирование по GPS или Ñотовым вышкам; - отправка Ñвоего меÑÑ‚Ð¾Ð¿Ð¾Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ ÑобеÑедникам; - проÑмотр чужих локаций; - виджет на рабочий Ñтол Ð´Ð»Ñ Ð±Ñ‹Ñтрого доÑтупа к данным о погоде, пробках и валюте. Уменьшено до 50%
360 x 640 (45,85 килобайт)
Уменьшено до 50%
360 x 640 (48,64 килобайт)
- mobileagent.zip ( 1,36 мегабайт )]]>
-
-
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_background_repeat_repeat_x.xml b/lib/feedparser/tests/wellformed/sanitize/style_background_repeat_repeat_x.xml
deleted file mode 100644
index a18a4ed2..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_background_repeat_repeat_x.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="background-repeat: repeat-x;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_background_url.xml b/lib/feedparser/tests/wellformed/sanitize/style_background_url.xml
deleted file mode 100644
index 6a11b45b..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_background_url.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="background-color: #000; background-image: url(/category/images/cafeneon_small.jpg); background-repeat: no-repeat;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_background_yellow.xml b/lib/feedparser/tests/wellformed/sanitize/style_background_yellow.xml
deleted file mode 100644
index a96c7fe3..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_background_yellow.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="background: yellow;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_0.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_0.xml
deleted file mode 100644
index 082aeb47..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border: 0;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_1px_solid_rgb_0_0_0_.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_1px_solid_rgb_0_0_0_.xml
deleted file mode 100644
index ead5a4df..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_1px_solid_rgb_0_0_0_.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border: 1px solid rgb(0, 0, 0);">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_3px_solid_ccc.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_3px_solid_ccc.xml
deleted file mode 100644
index 211b47f1..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_3px_solid_ccc.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border: 3px solid #ccc;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_0pt.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_0pt.xml
deleted file mode 100644
index 0a615704..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_0pt.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border-bottom: 0pt;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_dashed.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_dashed.xml
deleted file mode 100644
index 3938ecca..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_dashed.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border-bottom: dashed;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_dotted.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_dotted.xml
deleted file mode 100644
index 6f22f48f..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_bottom_dotted.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border-bottom: dotted;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_collapse_collapse.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_collapse_collapse.xml
deleted file mode 100644
index 6e6d62e9..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_collapse_collapse.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border-collapse: collapse;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_left_0pt.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_left_0pt.xml
deleted file mode 100644
index 3f00fe00..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_left_0pt.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border-left: 0pt;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_medium_none_.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_medium_none_.xml
deleted file mode 100644
index bea1e643..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_medium_none_.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border: medium none ;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_none_important.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_none_important.xml
deleted file mode 100644
index 1b9dfa94..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_none_important.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border: none !important;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_right_0pt.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_right_0pt.xml
deleted file mode 100644
index d9066f52..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_right_0pt.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border-right: 0pt;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_solid_2px_000000.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_solid_2px_000000.xml
deleted file mode 100644
index 2fc5f4b6..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_solid_2px_000000.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border: solid 2px #000000;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_border_top_0pt.xml b/lib/feedparser/tests/wellformed/sanitize/style_border_top_0pt.xml
deleted file mode 100644
index d1294711..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_border_top_0pt.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="border-top: 0pt;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_clear_both.xml b/lib/feedparser/tests/wellformed/sanitize/style_clear_both.xml
deleted file mode 100644
index 2efb73de..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_clear_both.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="clear: both;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_color_000080.xml b/lib/feedparser/tests/wellformed/sanitize/style_color_000080.xml
deleted file mode 100644
index a03fde1e..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_color_000080.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="color: #000080;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_color_008.xml b/lib/feedparser/tests/wellformed/sanitize/style_color_008.xml
deleted file mode 100644
index 4f412b30..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_color_008.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="color: #008;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_color_999999.xml b/lib/feedparser/tests/wellformed/sanitize/style_color_999999.xml
deleted file mode 100644
index e741bfa7..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_color_999999.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="color: #999999;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_color_blue.xml b/lib/feedparser/tests/wellformed/sanitize/style_color_blue.xml
deleted file mode 100644
index 06eb8fe7..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_color_blue.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="color: blue;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_color_maroon.xml b/lib/feedparser/tests/wellformed/sanitize/style_color_maroon.xml
deleted file mode 100644
index 42649ebc..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_color_maroon.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="color: maroon;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_color_red.xml b/lib/feedparser/tests/wellformed/sanitize/style_color_red.xml
deleted file mode 100644
index 6cacbbec..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_color_red.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="color: red;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_color_rgb_0_128_0_.xml b/lib/feedparser/tests/wellformed/sanitize/style_color_rgb_0_128_0_.xml
deleted file mode 100644
index b269a17f..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_color_rgb_0_128_0_.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="color: rgb(0, 128, 0);">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_color_teal.xml b/lib/feedparser/tests/wellformed/sanitize/style_color_teal.xml
deleted file mode 100644
index b5afc8f1..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_color_teal.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="color: teal;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_cursor_pointer.xml b/lib/feedparser/tests/wellformed/sanitize/style_cursor_pointer.xml
deleted file mode 100644
index ee69ca17..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_cursor_pointer.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="cursor: pointer;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_display_block.xml b/lib/feedparser/tests/wellformed/sanitize/style_display_block.xml
deleted file mode 100644
index 95642617..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_display_block.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="display: block;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_float_left.xml b/lib/feedparser/tests/wellformed/sanitize/style_float_left.xml
deleted file mode 100644
index b9e11fa7..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_float_left.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="float: left;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_float_right.xml b/lib/feedparser/tests/wellformed/sanitize/style_float_right.xml
deleted file mode 100644
index eb4009cf..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_float_right.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="float: right;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_family__comic_sans_ms.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_family__comic_sans_ms.xml
deleted file mode 100644
index 3ae327dc..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_family__comic_sans_ms.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-family : Comic Sans MS;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_family_arial_sans_serif.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_family_arial_sans_serif.xml
deleted file mode 100644
index 5b21c424..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_family_arial_sans_serif.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-family: arial,sans-serif;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_family_lucida_console_.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_family_lucida_console_.xml
deleted file mode 100644
index a8a6fe5f..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_family_lucida_console_.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-family: 'Lucida Console';">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_family_symbol.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_family_symbol.xml
deleted file mode 100644
index 0221e7a8..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_family_symbol.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-family: Symbol;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_size_0_9em.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_size_0_9em.xml
deleted file mode 100644
index 4b658fcf..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_size_0_9em.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-size: 0.9em;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_size_10pt.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_size_10pt.xml
deleted file mode 100644
index 27a79943..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_size_10pt.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-size: 10pt;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_size_10px.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_size_10px.xml
deleted file mode 100644
index ba00024a..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_size_10px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-size: 10px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_size_smaller.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_size_smaller.xml
deleted file mode 100644
index 04bf3641..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_size_smaller.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-size: smaller;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_style_italic.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_style_italic.xml
deleted file mode 100644
index 31aa7539..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_style_italic.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-style: italic;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_font_weight_bold.xml b/lib/feedparser/tests/wellformed/sanitize/style_font_weight_bold.xml
deleted file mode 100644
index f63a008f..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_font_weight_bold.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="font-weight: bold;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_height_100px.xml b/lib/feedparser/tests/wellformed/sanitize/style_height_100px.xml
deleted file mode 100644
index bba108a5..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_height_100px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="height: 100px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_height_2px.xml b/lib/feedparser/tests/wellformed/sanitize/style_height_2px.xml
deleted file mode 100644
index 4b229161..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_height_2px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="height: 2px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_letter_spacing_1px.xml b/lib/feedparser/tests/wellformed/sanitize/style_letter_spacing_1px.xml
deleted file mode 100644
index 23e7b90c..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_letter_spacing_1px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="letter-spacing:1px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_line_height_normal.xml b/lib/feedparser/tests/wellformed/sanitize/style_line_height_normal.xml
deleted file mode 100644
index 1637198b..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_line_height_normal.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="line-height: normal;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_0.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_0.xml
deleted file mode 100644
index 572664ff..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin: 0;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_0_15px_0_0.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_0_15px_0_0.xml
deleted file mode 100644
index 0ff41b46..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_0_15px_0_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin: 0 15px 0 0;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_0px_important.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_0px_important.xml
deleted file mode 100644
index 2c9fb9cd..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_0px_important.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin: 0px !important;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_5px.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_5px.xml
deleted file mode 100644
index e64fcfff..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_5px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin: 5px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_99999em.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_99999em.xml
deleted file mode 100644
index 09e9b2b7..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_99999em.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin: 99999em;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_bottom_0pt.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_bottom_0pt.xml
deleted file mode 100644
index 2b7f4a71..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_bottom_0pt.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin-bottom: 0pt;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_bottom_10px.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_bottom_10px.xml
deleted file mode 100644
index ab6801a8..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_bottom_10px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin-bottom: 10px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_left_5px.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_left_5px.xml
deleted file mode 100644
index be252c76..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_left_5px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin-left: 5px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_right_0px.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_right_0px.xml
deleted file mode 100644
index 14fc788c..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_right_0px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin-right: 0px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_top_0in.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_top_0in.xml
deleted file mode 100644
index 906cbb55..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_top_0in.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin-top: 0in;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_margin_top_10px.xml b/lib/feedparser/tests/wellformed/sanitize/style_margin_top_10px.xml
deleted file mode 100644
index 78fb3c85..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_margin_top_10px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="margin-top: 10px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_moz_background_clip_initial.xml b/lib/feedparser/tests/wellformed/sanitize/style_moz_background_clip_initial.xml
deleted file mode 100644
index f4ab5068..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_moz_background_clip_initial.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<tr style="background: yellow none repeat scroll 0%; -moz-background-clip: initial; -moz-background-origin: initial; -moz-background-inline-policy: initial;">Some text</tr>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_mso_ansi_language_nl.xml b/lib/feedparser/tests/wellformed/sanitize/style_mso_ansi_language_nl.xml
deleted file mode 100644
index 3fea651e..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_mso_ansi_language_nl.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="mso-ansi-language: NL;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_mso_bidi_font_weight_normal.xml b/lib/feedparser/tests/wellformed/sanitize/style_mso_bidi_font_weight_normal.xml
deleted file mode 100644
index d075aa0d..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_mso_bidi_font_weight_normal.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="mso-bidi-font-weight: normal;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_mso_highlight_yellow.xml b/lib/feedparser/tests/wellformed/sanitize/style_mso_highlight_yellow.xml
deleted file mode 100644
index b44c6dc4..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_mso_highlight_yellow.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="mso-highlight: yellow;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_mso_layout_grid_align_none.xml b/lib/feedparser/tests/wellformed/sanitize/style_mso_layout_grid_align_none.xml
deleted file mode 100644
index 275bfcae..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_mso_layout_grid_align_none.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="mso-layout-grid-align: none;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_mso_list_l0_level1_lfo1.xml b/lib/feedparser/tests/wellformed/sanitize/style_mso_list_l0_level1_lfo1.xml
deleted file mode 100644
index d743dd10..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_mso_list_l0_level1_lfo1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="mso-list: l0 level1 lfo1;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_mso_no_proof_yes.xml b/lib/feedparser/tests/wellformed/sanitize/style_mso_no_proof_yes.xml
deleted file mode 100644
index 8a9f5b1c..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_mso_no_proof_yes.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="mso-no-proof: yes;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_mso_spacerun_yes.xml b/lib/feedparser/tests/wellformed/sanitize/style_mso_spacerun_yes.xml
deleted file mode 100644
index a7ed12c6..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_mso_spacerun_yes.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="mso-spacerun: yes;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_mso_tab_count_3.xml b/lib/feedparser/tests/wellformed/sanitize/style_mso_tab_count_3.xml
deleted file mode 100644
index 665118d1..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_mso_tab_count_3.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="mso-tab-count: 3;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_overflow_auto.xml b/lib/feedparser/tests/wellformed/sanitize/style_overflow_auto.xml
deleted file mode 100644
index bc347ec5..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_overflow_auto.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="overflow: auto;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_padding_0.xml b/lib/feedparser/tests/wellformed/sanitize/style_padding_0.xml
deleted file mode 100644
index 7f3ee3fc..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_padding_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="padding: 0;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_padding_0_0_12px_12px.xml b/lib/feedparser/tests/wellformed/sanitize/style_padding_0_0_12px_12px.xml
deleted file mode 100644
index 4a4310cd..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_padding_0_0_12px_12px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="padding: 0 0 12px 12px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_padding_2ex.xml b/lib/feedparser/tests/wellformed/sanitize/style_padding_2ex.xml
deleted file mode 100644
index 6dc1b5b3..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_padding_2ex.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="padding: 2ex;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_padding_99999em.xml b/lib/feedparser/tests/wellformed/sanitize/style_padding_99999em.xml
deleted file mode 100644
index eca4144c..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_padding_99999em.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="padding: 99999em;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_padding_left_4px.xml b/lib/feedparser/tests/wellformed/sanitize/style_padding_left_4px.xml
deleted file mode 100644
index 7cbe16ef..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_padding_left_4px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="padding-left: 4px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_padding_right_0in.xml b/lib/feedparser/tests/wellformed/sanitize/style_padding_right_0in.xml
deleted file mode 100644
index 1077f170..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_padding_right_0in.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="padding-right: 0in;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_position_absolute.xml b/lib/feedparser/tests/wellformed/sanitize/style_position_absolute.xml
deleted file mode 100644
index 4b436e00..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_position_absolute.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="position: absolute;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_tab_stops_list_5in.xml b/lib/feedparser/tests/wellformed/sanitize/style_tab_stops_list_5in.xml
deleted file mode 100644
index 6bd2c9ab..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_tab_stops_list_5in.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="tab-stops: list .5in;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_text_align_center.xml b/lib/feedparser/tests/wellformed/sanitize/style_text_align_center.xml
deleted file mode 100644
index f3055eae..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_text_align_center.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="text-align: center;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_text_align_left.xml b/lib/feedparser/tests/wellformed/sanitize/style_text_align_left.xml
deleted file mode 100644
index cc9b3e52..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_text_align_left.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="text-align: left;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_text_align_right.xml b/lib/feedparser/tests/wellformed/sanitize/style_text_align_right.xml
deleted file mode 100644
index aaa3c757..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_text_align_right.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="text-align: right;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_text_decoration_underline.xml b/lib/feedparser/tests/wellformed/sanitize/style_text_decoration_underline.xml
deleted file mode 100644
index b2461ecd..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_text_decoration_underline.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="text-decoration: underline;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_text_indent_0_5in.xml b/lib/feedparser/tests/wellformed/sanitize/style_text_indent_0_5in.xml
deleted file mode 100644
index 87d1cacd..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_text_indent_0_5in.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="TEXT-INDENT: 0.5in;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_vertical_align_bottom.xml b/lib/feedparser/tests/wellformed/sanitize/style_vertical_align_bottom.xml
deleted file mode 100644
index 9c3da9f0..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_vertical_align_bottom.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="vertical-align: bottom;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_vertical_align_top.xml b/lib/feedparser/tests/wellformed/sanitize/style_vertical_align_top.xml
deleted file mode 100644
index 55b45cc1..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_vertical_align_top.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="vertical-align: top;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_white_space_nowrap.xml b/lib/feedparser/tests/wellformed/sanitize/style_white_space_nowrap.xml
deleted file mode 100644
index 61e0c04d..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_white_space_nowrap.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="white-space: nowrap;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_white_space_top.xml b/lib/feedparser/tests/wellformed/sanitize/style_white_space_top.xml
deleted file mode 100644
index c48d32a8..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_white_space_top.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="white-space: top;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/style_width_300px.xml b/lib/feedparser/tests/wellformed/sanitize/style_width_300px.xml
deleted file mode 100644
index f7ae6d70..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/style_width_300px.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
--
-
<span style="width: 300px;">Some text</span>
-
-
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/xml_declaration_unexpected_character.xml b/lib/feedparser/tests/wellformed/sanitize/xml_declaration_unexpected_character.xml
deleted file mode 100644
index 089dceae..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/xml_declaration_unexpected_character.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
- <!DOCTYPE ~
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/xml_malicious_comment.xml b/lib/feedparser/tests/wellformed/sanitize/xml_malicious_comment.xml
deleted file mode 100644
index db79e86c..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/xml_malicious_comment.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
- sa<!-- -- nonwhitespace >fe<script>alert(1);</script>
-
diff --git a/lib/feedparser/tests/wellformed/sanitize/xml_unclosed_comment.xml b/lib/feedparser/tests/wellformed/sanitize/xml_unclosed_comment.xml
deleted file mode 100644
index 75a64279..00000000
--- a/lib/feedparser/tests/wellformed/sanitize/xml_unclosed_comment.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
- safe<!--
-
diff --git a/lib/feedparser/tests/wellformed/sgml/charref_uppercase_x.xml b/lib/feedparser/tests/wellformed/sgml/charref_uppercase_x.xml
deleted file mode 100644
index 51a13b73..00000000
--- a/lib/feedparser/tests/wellformed/sgml/charref_uppercase_x.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
- a
-
-
diff --git a/lib/feedparser/tests/wellformed/xml/empty_xmlns_uri.xml b/lib/feedparser/tests/wellformed/xml/empty_xmlns_uri.xml
deleted file mode 100644
index da68ece2..00000000
--- a/lib/feedparser/tests/wellformed/xml/empty_xmlns_uri.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
- empty
-
-
diff --git a/lib/feedparser/tests/wellformed/xml/escaped_apos.xml b/lib/feedparser/tests/wellformed/xml/escaped_apos.xml
deleted file mode 100644
index 6340ca9b..00000000
--- a/lib/feedparser/tests/wellformed/xml/escaped_apos.xml
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
- it's correct
-
-
diff --git a/lib/feedparser/tests/wellformed/xml/xlink_ns_no_prefix.xml b/lib/feedparser/tests/wellformed/xml/xlink_ns_no_prefix.xml
deleted file mode 100644
index a0c376cd..00000000
--- a/lib/feedparser/tests/wellformed/xml/xlink_ns_no_prefix.xml
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
- xlink
-
-
diff --git a/lib/jsonrpclib/SimpleJSONRPCServer.py b/lib/jsonrpclib/SimpleJSONRPCServer.py
index e4b20afc..d76da73e 100644
--- a/lib/jsonrpclib/SimpleJSONRPCServer.py
+++ b/lib/jsonrpclib/SimpleJSONRPCServer.py
@@ -1,6 +1,6 @@
-import lib.jsonrpclib
-from lib.jsonrpclib import Fault
-from lib.jsonrpclib.jsonrpc import USE_UNIX_SOCKETS
+import jsonrpclib
+from jsonrpclib import Fault
+from jsonrpclib.jsonrpc import USE_UNIX_SOCKETS
import SimpleXMLRPCServer
import SocketServer
import socket
diff --git a/lib/jsonrpclib/__init__.py b/lib/jsonrpclib/__init__.py
index 92c29b39..6e884b83 100644
--- a/lib/jsonrpclib/__init__.py
+++ b/lib/jsonrpclib/__init__.py
@@ -1,7 +1,6 @@
-from config import Config
+from jsonrpclib.config import Config
config = Config.instance()
-from history import History
+from jsonrpclib.history import History
history = History.instance()
-import jsonrpc
-from jsonrpc import Server, MultiCall, Fault
-from jsonrpc import ProtocolError, loads, dumps
+from jsonrpclib.jsonrpc import Server, MultiCall, Fault
+from jsonrpclib.jsonrpc import ProtocolError, loads, dumps
diff --git a/lib/jsonrpclib/jsonclass.py b/lib/jsonrpclib/jsonclass.py
index 519bdec7..1d86d5fc 100644
--- a/lib/jsonrpclib/jsonclass.py
+++ b/lib/jsonrpclib/jsonclass.py
@@ -3,7 +3,7 @@ import inspect
import re
import traceback
-from lib.jsonrpclib import config
+from jsonrpclib import config
iter_types = [
types.DictType,
@@ -129,6 +129,13 @@ def load(obj):
except ImportError:
raise TranslationError('Could not import %s from module %s.' %
(json_class_name, json_module_tree))
+
+ # The returned class is the top-level module, not the one we really
+ # want. (E.g., if we import a.b.c, we now have a.) Walk through other
+ # path components to get to b and c.
+ for i in json_module_parts[1:]:
+ temp_module = getattr(temp_module, i)
+
json_class = getattr(temp_module, json_class_name)
# Creating the object...
new_obj = None
diff --git a/lib/jsonrpclib/jsonrpc.py b/lib/jsonrpclib/jsonrpc.py
index a4d4c33a..5bde5510 100644
--- a/lib/jsonrpclib/jsonrpc.py
+++ b/lib/jsonrpclib/jsonrpc.py
@@ -57,9 +57,9 @@ import string
import random
# Library includes
-import lib.jsonrpclib
-from lib.jsonrpclib import config
-from lib.jsonrpclib import history
+import jsonrpclib
+from jsonrpclib import config
+from jsonrpclib import history
# JSON library importing
cjson = None
@@ -71,7 +71,7 @@ except ImportError:
import json
except ImportError:
try:
- import lib.simplejson as json
+ import simplejson as json
except ImportError:
raise ImportError(
'You must have the cjson, json, or simplejson ' +
@@ -148,10 +148,15 @@ class JSONTarget(object):
return ''.join(self.data)
class Transport(TransportMixIn, XMLTransport):
- pass
+ def __init__(self):
+ TransportMixIn.__init__(self)
+ XMLTransport.__init__(self)
class SafeTransport(TransportMixIn, XMLSafeTransport):
- pass
+ def __init__(self):
+ TransportMixIn.__init__(self)
+ XMLSafeTransport.__init__(self)
+
from httplib import HTTP, HTTPConnection
from socket import socket
@@ -481,7 +486,7 @@ def dumps(params=[], methodname=None, methodresponse=None,
raise ValueError('Method name must be a string, or methodresponse '+
'must be set to True.')
if config.use_jsonclass == True:
- from lib.jsonrpclib import jsonclass
+ from jsonrpclib import jsonclass
params = jsonclass.dump(params)
if methodresponse is True:
if rpcid is None:
@@ -509,7 +514,7 @@ def loads(data):
# should return something like the following:
# { 'jsonrpc':'2.0', 'error': fault.error(), id: None }
if config.use_jsonclass == True:
- from lib.jsonrpclib import jsonclass
+ from jsonrpclib import jsonclass
result = jsonclass.load(result)
return result
diff --git a/lib/pynma/__init__.py b/lib/pynma/__init__.py
index f90424eb..f884b504 100644
--- a/lib/pynma/__init__.py
+++ b/lib/pynma/__init__.py
@@ -1,4 +1,5 @@
#!/usr/bin/python
-from pynma import PyNMA
+__version__ = '1.01'
+from .pynma import PyNMA
diff --git a/lib/pynma/pynma.py b/lib/pynma/pynma.py
index fc7d8de2..e735eddd 100644
--- a/lib/pynma/pynma.py
+++ b/lib/pynma/pynma.py
@@ -1,137 +1,151 @@
#!/usr/bin/python
+from . import __version__
from xml.dom.minidom import parseString
-from httplib import HTTPSConnection
-from urllib import urlencode
-__version__ = "0.1"
+import requests
-API_SERVER = 'nma.usk.bz'
-ADD_PATH = '/publicapi/notify'
-
-USER_AGENT="PyNMA/v%s"%__version__
-
-def uniq_preserve(seq): # Dave Kirby
- # Order preserving
- seen = set()
- return [x for x in seq if x not in seen and not seen.add(x)]
-
-def uniq(seq):
- # Not order preserving
- return {}.fromkeys(seq).keys()
class PyNMA(object):
- """PyNMA(apikey=[], developerkey=None)
+ """
+ http://www.notifymyandroid.com/api.jsp
+ PyNMA(apikey=None, developerkey=None)
takes 2 optional arguments:
- - (opt) apykey: might me a string containing 1 key or an array of keys
    - (opt) apikey: a string containing 1 key or an array of keys
- (opt) developerkey: where you can store your developer key
"""
- def __init__(self, apikey=[], developerkey=None):
+ def __init__(self, apikey=None, developerkey=None):
+
self._developerkey = None
self.developerkey(developerkey)
+
+ self.api_server = 'https://www.notifymyandroid.com'
+ self.add_path = '/publicapi/notify'
+ self.user_agent = 'PyNMA/v%s' % __version__
+
+ key = []
if apikey:
- if type(apikey) == str:
- apikey = [apikey]
- self._apikey = uniq(apikey)
+ key = (apikey, [apikey])[str == type(apikey)]
+
+ self._apikey = self.uniq(key)
+
+ @staticmethod
+ def uniq(seq):
+ # Not order preserving
+ return list({}.fromkeys(seq).keys())
def addkey(self, key):
- "Add a key (register ?)"
- if type(key) == str:
- if not key in self._apikey:
+ """
+ Add a key (register ?)
+ """
+ if str == type(key):
+ if key not in self._apikey:
self._apikey.append(key)
- elif type(key) == list:
+
+ elif list == type(key):
for k in key:
- if not k in self._apikey:
+ if k not in self._apikey:
self._apikey.append(k)
def delkey(self, key):
- "Removes a key (unregister ?)"
- if type(key) == str:
+ """
+ Removes a key (unregister ?)
+ """
+ if str == type(key):
if key in self._apikey:
self._apikey.remove(key)
- elif type(key) == list:
+
+ elif list == type(key):
for k in key:
if key in self._apikey:
self._apikey.remove(k)
def developerkey(self, developerkey):
- "Sets the developer key (and check it has the good length)"
- if type(developerkey) == str and len(developerkey) == 48:
+ """
+ Sets the developer key (and check it has the good length)
+ """
+ if str == type(developerkey) and 48 == len(developerkey):
self._developerkey = developerkey
- def push(self, application="", event="", description="", url="", priority=0, batch_mode=False):
- """Pushes a message on the registered API keys.
+ def push(self, application='', event='', description='', url='', content_type=None, priority=0, batch_mode=False, html=False):
+ """
+ Pushes a message on the registered API keys.
takes 5 arguments:
- (req) application: application name [256]
- (req) event: event name [1000]
- (req) description: description [10000]
- (opt) url: url [512]
+        - (opt) content_type: Content Type (accepts: None (plain text) or text/html)
- (opt) priority: from -2 (lowest) to 2 (highest) (def:0)
- (opt) batch_mode: call API 5 by 5 (def:False)
+ - (opt) html: shortcut for content_type=text/html
Warning: using batch_mode will return error only if all API keys are bad
- cf: http://nma.usk.bz/api.php
+ http://www.notifymyandroid.com/api.jsp
"""
- datas = {
- 'application': application[:256].encode('utf8'),
- 'event': event[:1024].encode('utf8'),
- 'description': description[:10000].encode('utf8'),
- 'priority': priority
- }
+ datas = {'application': application[:256].encode('utf8'),
+ 'event': event[:1000].encode('utf8'),
+ 'description': description[:10000].encode('utf8'),
+ 'priority': priority}
if url:
- datas['url'] = url[:512]
+ datas['url'] = url[:2000]
if self._developerkey:
datas['developerkey'] = self._developerkey
+ if 'text/html' == content_type or True == html: # Currently only accepted content type
+ datas['content-type'] = 'text/html'
+
results = {}
if not batch_mode:
for key in self._apikey:
datas['apikey'] = key
- res = self.callapi('POST', ADD_PATH, datas)
+ res = self.callapi('POST', self.add_path, datas)
results[key] = res
else:
- for i in range(0, len(self._apikey), 5):
- datas['apikey'] = ",".join(self._apikey[i:i+5])
- res = self.callapi('POST', ADD_PATH, datas)
- results[datas['apikey']] = res
+ datas['apikey'] = ','.join(self._apikey)
+ res = self.callapi('POST', self.add_path, datas)
+ results[datas['apikey']] = res
+
return results
-
+
def callapi(self, method, path, args):
- headers = { 'User-Agent': USER_AGENT }
- if method == "POST":
- headers['Content-type'] = "application/x-www-form-urlencoded"
- http_handler = HTTPSConnection(API_SERVER)
- http_handler.request(method, path, urlencode(args), headers)
- resp = http_handler.getresponse()
+ headers = {'User-Agent': self.user_agent}
+
+ if 'POST' == method:
+ headers['Content-type'] = 'application/x-www-form-urlencoded'
try:
- res = self._parse_reponse(resp.read())
- except Exception, e:
- res = {'type': "pynmaerror",
- 'code': 600,
- 'message': str(e)
- }
+ resp = requests.post('%s:443%s' % (self.api_server, path), data=args, headers=headers).text
+ res = self._parse_response(resp)
+ except Exception as e:
+ res = {'type': 'pynmaerror',
+ 'code': 600,
+ 'message': str(e)}
pass
-
+
return res
- def _parse_reponse(self, response):
+ @staticmethod
+ def _parse_response(response):
+
root = parseString(response).firstChild
+
for elem in root.childNodes:
- if elem.nodeType == elem.TEXT_NODE: continue
- if elem.tagName == 'success':
- res = dict(elem.attributes.items())
- res['message'] = ""
- res['type'] = elem.tagName
+ if elem.TEXT_NODE == elem.nodeType:
+ continue
+
+ if 'success' == elem.tagName:
+ res = dict(list(elem.attributes.items()))
+ res['message'] = ''
+ res['type'] = elem.tagName
return res
- if elem.tagName == 'error':
- res = dict(elem.attributes.items())
+
+ if 'error' == elem.tagName:
+ res = dict(list(elem.attributes.items()))
res['message'] = elem.firstChild.nodeValue
- res['type'] = elem.tagName
+ res['type'] = elem.tagName
return res
-
-
diff --git a/lib/requests/LICENSE b/lib/requests/LICENSE
new file mode 100644
index 00000000..a103fc91
--- /dev/null
+++ b/lib/requests/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2015 Kenneth Reitz
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/lib/requests/NOTICE b/lib/requests/NOTICE
new file mode 100644
index 00000000..f583e47a
--- /dev/null
+++ b/lib/requests/NOTICE
@@ -0,0 +1,54 @@
+Requests includes some vendorized python libraries to ease installation.
+
+Urllib3 License
+===============
+
+This is the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt),
+Modifications copyright 2012 Kenneth Reitz.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Chardet License
+===============
+
+This library is free software; you can redistribute it and/or
+modify it under the terms of the GNU Lesser General Public
+License as published by the Free Software Foundation; either
+version 2.1 of the License, or (at your option) any later version.
+
+This library is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+02110-1301 USA
+
+
+CA Bundle License
+=================
+
+This Source Code Form is subject to the terms of the Mozilla Public
+License, v. 2.0. If a copy of the MPL was not distributed with this
+file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/lib/requests/__init__.py b/lib/requests/__init__.py
index 46116bea..d2471284 100644
--- a/lib/requests/__init__.py
+++ b/lib/requests/__init__.py
@@ -6,7 +6,7 @@
# /
"""
-requests HTTP library
+Requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. Basic GET
@@ -42,8 +42,8 @@ is at
.
"""
__title__ = 'requests'
-__version__ = '2.6.2'
-__build__ = 0x020602
+__version__ = '2.7.0'
+__build__ = 0x020700
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015 Kenneth Reitz'
diff --git a/lib/requests/api.py b/lib/requests/api.py
index 98c92298..d40fa380 100644
--- a/lib/requests/api.py
+++ b/lib/requests/api.py
@@ -55,17 +55,18 @@ def request(method, url, **kwargs):
return response
-def get(url, **kwargs):
+def get(url, params=None, **kwargs):
"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response ` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
- return request('get', url, **kwargs)
+ return request('get', url, params=params, **kwargs)
def options(url, **kwargs):
diff --git a/lib/requests/auth.py b/lib/requests/auth.py
index 0ff9c298..03c3302a 100644
--- a/lib/requests/auth.py
+++ b/lib/requests/auth.py
@@ -179,7 +179,7 @@ class HTTPDigestAuth(AuthBase):
# Consume content and release the original connection
# to allow our new request to reuse the same one.
r.content
- r.raw.release_conn()
+ r.close()
prep = r.request.copy()
extract_cookies_to_jar(prep._cookies, r.request, r.raw)
prep.prepare_cookies(prep._cookies)
diff --git a/lib/requests/cookies.py b/lib/requests/cookies.py
index 1fbc934c..88b478c7 100644
--- a/lib/requests/cookies.py
+++ b/lib/requests/cookies.py
@@ -415,11 +415,14 @@ def morsel_to_cookie(morsel):
expires = None
if morsel['max-age']:
- expires = time.time() + morsel['max-age']
+ try:
+ expires = int(time.time() + int(morsel['max-age']))
+ except ValueError:
+ raise TypeError('max-age: %s must be integer' % morsel['max-age'])
elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
- expires = time.mktime(
- time.strptime(morsel['expires'], time_template)) - time.timezone
+ expires = int(time.mktime(
+ time.strptime(morsel['expires'], time_template)) - time.timezone)
return create_cookie(
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
diff --git a/lib/requests/models.py b/lib/requests/models.py
index 45b3ea96..7ab21f78 100644
--- a/lib/requests/models.py
+++ b/lib/requests/models.py
@@ -30,7 +30,8 @@ from .utils import (
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
- is_py2, chardet, json, builtin_str, basestring)
+ is_py2, chardet, builtin_str, basestring)
+from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
@@ -42,12 +43,11 @@ REDIRECT_STATI = (
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
+
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
-json_dumps = json.dumps
-
class RequestEncodingMixin(object):
@property
@@ -149,8 +149,7 @@ class RequestEncodingMixin(object):
else:
fdata = fp.read()
- rf = RequestField(name=k, data=fdata,
- filename=fn, headers=fh)
+ rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
@@ -207,17 +206,8 @@ class Request(RequestHooksMixin):
"""
- def __init__(self,
- method=None,
- url=None,
- headers=None,
- files=None,
- data=None,
- params=None,
- auth=None,
- cookies=None,
- hooks=None,
- json=None):
+ def __init__(self, method=None, url=None, headers=None, files=None,
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
# Default empty dicts for dict params.
data = [] if data is None else data
@@ -296,8 +286,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
- data=None, params=None, auth=None, cookies=None, hooks=None,
- json=None):
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
@@ -306,6 +295,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_cookies(cookies)
self.prepare_body(data, files, json)
self.prepare_auth(auth, url)
+
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
@@ -357,9 +347,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidURL(*e.args)
if not scheme:
- raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
- "Perhaps you meant http://{0}?".format(
- to_native_string(url, 'utf8')))
+ error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
+ error = error.format(to_native_string(url, 'utf8'))
+
+ raise MissingSchema(error)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
@@ -425,7 +416,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if json is not None:
content_type = 'application/json'
- body = json_dumps(json)
+ body = complexjson.dumps(json)
is_stream = all([
hasattr(data, '__iter__'),
@@ -537,16 +528,8 @@ class Response(object):
"""
__attrs__ = [
- '_content',
- 'status_code',
- 'headers',
- 'url',
- 'history',
- 'encoding',
- 'reason',
- 'cookies',
- 'elapsed',
- 'request',
+ '_content', 'status_code', 'headers', 'url', 'history',
+ 'encoding', 'reason', 'cookies', 'elapsed', 'request'
]
def __init__(self):
@@ -666,9 +649,10 @@ class Response(object):
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
+
def generate():
- try:
- # Special case for urllib3.
+ # Special case for urllib3.
+ if hasattr(self.raw, 'stream'):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
@@ -678,7 +662,7 @@ class Response(object):
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
- except AttributeError:
+ else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
@@ -809,14 +793,16 @@ class Response(object):
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
- return json.loads(self.content.decode(encoding), **kwargs)
+ return complexjson.loads(
+ self.content.decode(encoding), **kwargs
+ )
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
- return json.loads(self.text, **kwargs)
+ return complexjson.loads(self.text, **kwargs)
@property
def links(self):
@@ -856,4 +842,7 @@ class Response(object):
*Note: Should not normally need to be called explicitly.*
"""
+ if not self._content_consumed:
+ return self.raw.close()
+
return self.raw.release_conn()
diff --git a/lib/requests/packages/urllib3/__init__.py b/lib/requests/packages/urllib3/__init__.py
index 333060c2..f48ac4af 100644
--- a/lib/requests/packages/urllib3/__init__.py
+++ b/lib/requests/packages/urllib3/__init__.py
@@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
-__version__ = '1.10.3'
+__version__ = '1.10.4'
from .connectionpool import (
@@ -57,9 +57,10 @@ del NullHandler
import warnings
# SecurityWarning's always go off by default.
-warnings.simplefilter('always', exceptions.SecurityWarning)
+warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
-warnings.simplefilter('default', exceptions.InsecurePlatformWarning)
+warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
+ append=True)
def disable_warnings(category=exceptions.HTTPWarning):
"""
diff --git a/lib/requests/packages/urllib3/response.py b/lib/requests/packages/urllib3/response.py
index f1ea9bb5..24140c4c 100644
--- a/lib/requests/packages/urllib3/response.py
+++ b/lib/requests/packages/urllib3/response.py
@@ -126,14 +126,15 @@ class HTTPResponse(io.IOBase):
# Are we using the chunked-style of transfer encoding?
self.chunked = False
self.chunk_left = None
- tr_enc = self.headers.get('transfer-encoding', '')
- if tr_enc.lower() == "chunked":
+ tr_enc = self.headers.get('transfer-encoding', '').lower()
+ # Don't incur the penalty of creating a list and then discarding it
+ encodings = (enc.strip() for enc in tr_enc.split(","))
+ if "chunked" in encodings:
self.chunked = True
# We certainly don't want to preload content when the response is chunked.
- if not self.chunked:
- if preload_content and not self._body:
- self._body = self.read(decode_content=decode_content)
+ if not self.chunked and preload_content and not self._body:
+ self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
"""
@@ -179,9 +180,8 @@ class HTTPResponse(io.IOBase):
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get('content-encoding', '').lower()
- if self._decoder is None:
- if content_encoding in self.CONTENT_DECODERS:
- self._decoder = _get_decoder(content_encoding)
+ if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
def _decode(self, data, decode_content, flush_decoder):
"""
@@ -299,10 +299,9 @@ class HTTPResponse(io.IOBase):
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
- self._init_decoder()
if self.chunked:
- for line in self.read_chunked(amt):
- yield self._decode(line, decode_content, True)
+ for line in self.read_chunked(amt, decode_content=decode_content):
+ yield line
else:
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
@@ -387,48 +386,70 @@ class HTTPResponse(io.IOBase):
b[:len(temp)] = temp
return len(temp)
- def read_chunked(self, amt=None):
- # FIXME: Rewrite this method and make it a class with
- # a better structured logic.
+ def _update_chunk_length(self):
+ # First, we'll figure out length of a chunk and then
+ # we'll try to read it from socket.
+ if self.chunk_left is not None:
+ return
+ line = self._fp.fp.readline()
+ line = line.split(b';', 1)[0]
+ try:
+ self.chunk_left = int(line, 16)
+ except ValueError:
+ # Invalid chunked protocol response, abort.
+ self.close()
+ raise httplib.IncompleteRead(line)
+
+ def _handle_chunk(self, amt):
+ returned_chunk = None
+ if amt is None:
+ chunk = self._fp._safe_read(self.chunk_left)
+ returned_chunk = chunk
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ elif amt < self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self.chunk_left = self.chunk_left - amt
+ returned_chunk = value
+ elif amt == self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ returned_chunk = value
+ else: # amt > self.chunk_left
+ returned_chunk = self._fp._safe_read(self.chunk_left)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ return returned_chunk
+
+ def read_chunked(self, amt=None, decode_content=None):
+ """
+ Similar to :meth:`HTTPResponse.read`, but with an additional
+ parameter: ``decode_content``.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ self._init_decoder()
+ # FIXME: Rewrite this method and make it a class with a better structured logic.
if not self.chunked:
raise ResponseNotChunked("Response is not chunked. "
"Header 'transfer-encoding: chunked' is missing.")
+
+ if self._original_response and self._original_response._method.upper() == 'HEAD':
+ # Don't bother reading the body of a HEAD request.
+ # FIXME: Can we do this somehow without accessing private httplib _method?
+ self._original_response.close()
+ return
+
while True:
- # First, we'll figure out length of a chunk and then
- # we'll try to read it from socket.
- if self.chunk_left is None:
- line = self._fp.fp.readline()
- line = line.decode()
- # See RFC 7230: Chunked Transfer Coding.
- i = line.find(';')
- if i >= 0:
- line = line[:i] # Strip chunk-extensions.
- try:
- self.chunk_left = int(line, 16)
- except ValueError:
- # Invalid chunked protocol response, abort.
- self.close()
- raise httplib.IncompleteRead(''.join(line))
- if self.chunk_left == 0:
- break
- if amt is None:
- chunk = self._fp._safe_read(self.chunk_left)
- yield chunk
- self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
- self.chunk_left = None
- elif amt < self.chunk_left:
- value = self._fp._safe_read(amt)
- self.chunk_left = self.chunk_left - amt
- yield value
- elif amt == self.chunk_left:
- value = self._fp._safe_read(amt)
- self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
- self.chunk_left = None
- yield value
- else: # amt > self.chunk_left
- yield self._fp._safe_read(self.chunk_left)
- self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
- self.chunk_left = None
+ self._update_chunk_length()
+ if self.chunk_left == 0:
+ break
+ chunk = self._handle_chunk(amt)
+ yield self._decode(chunk, decode_content=decode_content,
+ flush_decoder=True)
# Chunk content ends with \r\n: discard it.
while True:
@@ -440,5 +461,6 @@ class HTTPResponse(io.IOBase):
break
# We read everything; close the "file".
+ if self._original_response:
+ self._original_response.close()
self.release_conn()
-
diff --git a/lib/requests/packages/urllib3/util/url.py b/lib/requests/packages/urllib3/util/url.py
index b2ec834f..e58050cd 100644
--- a/lib/requests/packages/urllib3/util/url.py
+++ b/lib/requests/packages/urllib3/util/url.py
@@ -15,6 +15,8 @@ class Url(namedtuple('Url', url_attrs)):
def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
query=None, fragment=None):
+ if path and not path.startswith('/'):
+ path = '/' + path
return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
query, fragment)
diff --git a/lib/shove/__init__.py b/lib/shove/__init__.py
deleted file mode 100644
index 3be119b4..00000000
--- a/lib/shove/__init__.py
+++ /dev/null
@@ -1,519 +0,0 @@
-# -*- coding: utf-8 -*-
-'''Common object storage frontend.'''
-
-import os
-import zlib
-import urllib
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-from collections import deque
-
-try:
- # Import store and cache entry points if setuptools installed
- import pkg_resources
- stores = dict((_store.name, _store) for _store in
- pkg_resources.iter_entry_points('shove.stores'))
- caches = dict((_cache.name, _cache) for _cache in
- pkg_resources.iter_entry_points('shove.caches'))
- # Pass if nothing loaded
- if not stores and not caches:
- raise ImportError()
-except ImportError:
- # Static store backend registry
- stores = dict(
- bsddb='shove.store.bsdb:BsdStore',
- cassandra='shove.store.cassandra:CassandraStore',
- dbm='shove.store.dbm:DbmStore',
- durus='shove.store.durusdb:DurusStore',
- file='shove.store.file:FileStore',
- firebird='shove.store.db:DbStore',
- ftp='shove.store.ftp:FtpStore',
- hdf5='shove.store.hdf5:HDF5Store',
- leveldb='shove.store.leveldbstore:LevelDBStore',
- memory='shove.store.memory:MemoryStore',
- mssql='shove.store.db:DbStore',
- mysql='shove.store.db:DbStore',
- oracle='shove.store.db:DbStore',
- postgres='shove.store.db:DbStore',
- redis='shove.store.redisdb:RedisStore',
- s3='shove.store.s3:S3Store',
- simple='shove.store.simple:SimpleStore',
- sqlite='shove.store.db:DbStore',
- svn='shove.store.svn:SvnStore',
- zodb='shove.store.zodb:ZodbStore',
- )
- # Static cache backend registry
- caches = dict(
- bsddb='shove.cache.bsdb:BsdCache',
- file='shove.cache.file:FileCache',
- filelru='shove.cache.filelru:FileLRUCache',
- firebird='shove.cache.db:DbCache',
- memcache='shove.cache.memcached:MemCached',
- memlru='shove.cache.memlru:MemoryLRUCache',
- memory='shove.cache.memory:MemoryCache',
- mssql='shove.cache.db:DbCache',
- mysql='shove.cache.db:DbCache',
- oracle='shove.cache.db:DbCache',
- postgres='shove.cache.db:DbCache',
- redis='shove.cache.redisdb:RedisCache',
- simple='shove.cache.simple:SimpleCache',
- simplelru='shove.cache.simplelru:SimpleLRUCache',
- sqlite='shove.cache.db:DbCache',
- )
-
-
-def getbackend(uri, engines, **kw):
- '''
- Loads the right backend based on a URI.
-
- @param uri Instance or name string
- @param engines A dictionary of scheme/class pairs
- '''
- if isinstance(uri, basestring):
- mod = engines[uri.split('://', 1)[0]]
- # Load module if setuptools not present
- if isinstance(mod, basestring):
- # Isolate classname from dot path
- module, klass = mod.split(':')
- # Load module
- mod = getattr(__import__(module, '', '', ['']), klass)
- # Load appropriate class from setuptools entry point
- else:
- mod = mod.load()
- # Return instance
- return mod(uri, **kw)
- # No-op for existing instances
- return uri
-
-
-def synchronized(func):
- '''
- Decorator to lock and unlock a method (Phillip J. Eby).
-
- @param func Method to decorate
- '''
- def wrapper(self, *__args, **__kw):
- self._lock.acquire()
- try:
- return func(self, *__args, **__kw)
- finally:
- self._lock.release()
- wrapper.__name__ = func.__name__
- wrapper.__dict__ = func.__dict__
- wrapper.__doc__ = func.__doc__
- return wrapper
-
-
-class Base(object):
-
- '''Base Mapping class.'''
-
- def __init__(self, engine, **kw):
- '''
- @keyword compress True, False, or an integer compression level (1-9).
- '''
- self._compress = kw.get('compress', False)
- self._protocol = kw.get('protocol', pickle.HIGHEST_PROTOCOL)
-
- def __getitem__(self, key):
- raise NotImplementedError()
-
- def __setitem__(self, key, value):
- raise NotImplementedError()
-
- def __delitem__(self, key):
- raise NotImplementedError()
-
- def __contains__(self, key):
- try:
- value = self[key]
- except KeyError:
- return False
- return True
-
- def get(self, key, default=None):
- '''
- Fetch a given key from the mapping. If the key does not exist,
- return the default.
-
- @param key Keyword of item in mapping.
- @param default Default value (default: None)
- '''
- try:
- return self[key]
- except KeyError:
- return default
-
- def dumps(self, value):
- '''Optionally serializes and compresses an object.'''
- # Serialize everything but ASCII strings
- value = pickle.dumps(value, protocol=self._protocol)
- if self._compress:
- level = 9 if self._compress is True else self._compress
- value = zlib.compress(value, level)
- return value
-
- def loads(self, value):
- '''Deserializes and optionally decompresses an object.'''
- if self._compress:
- try:
- value = zlib.decompress(value)
- except zlib.error:
- pass
- value = pickle.loads(value)
- return value
-
-
-class BaseStore(Base):
-
- '''Base Store class (based on UserDict.DictMixin).'''
-
- def __init__(self, engine, **kw):
- super(BaseStore, self).__init__(engine, **kw)
- self._store = None
-
- def __cmp__(self, other):
- if other is None:
- return False
- if isinstance(other, BaseStore):
- return cmp(dict(self.iteritems()), dict(other.iteritems()))
-
- def __del__(self):
- # __init__ didn't succeed, so don't bother closing
- if not hasattr(self, '_store'):
- return
- self.close()
-
- def __iter__(self):
- for k in self.keys():
- yield k
-
- def __len__(self):
- return len(self.keys())
-
- def __repr__(self):
- return repr(dict(self.iteritems()))
-
- def close(self):
- '''Closes internal store and clears object references.'''
- try:
- self._store.close()
- except AttributeError:
- pass
- self._store = None
-
- def clear(self):
- '''Removes all keys and values from a store.'''
- for key in self.keys():
- del self[key]
-
- def items(self):
- '''Returns a list with all key/value pairs in the store.'''
- return list(self.iteritems())
-
- def iteritems(self):
- '''Lazily returns all key/value pairs in a store.'''
- for k in self:
- yield (k, self[k])
-
- def iterkeys(self):
- '''Lazy returns all keys in a store.'''
- return self.__iter__()
-
- def itervalues(self):
- '''Lazily returns all values in a store.'''
- for _, v in self.iteritems():
- yield v
-
- def keys(self):
- '''Returns a list with all keys in a store.'''
- raise NotImplementedError()
-
- def pop(self, key, *args):
- '''
- Removes and returns a value from a store.
-
- @param args Default to return if key not present.
- '''
- if len(args) > 1:
- raise TypeError('pop expected at most 2 arguments, got ' + repr(
- 1 + len(args))
- )
- try:
- value = self[key]
- # Return default if key not in store
- except KeyError:
- if args:
- return args[0]
- del self[key]
- return value
-
- def popitem(self):
- '''Removes and returns a key, value pair from a store.'''
- try:
- k, v = self.iteritems().next()
- except StopIteration:
- raise KeyError('Store is empty.')
- del self[k]
- return (k, v)
-
- def setdefault(self, key, default=None):
- '''
- Returns the value corresponding to an existing key or sets the
- to key to the default and returns the default.
-
- @param default Default value (default: None)
- '''
- try:
- return self[key]
- except KeyError:
- self[key] = default
- return default
-
- def update(self, other=None, **kw):
- '''
- Adds to or overwrites the values in this store with values from
- another store.
-
- other Another store
- kw Additional keys and values to store
- '''
- if other is None:
- pass
- elif hasattr(other, 'iteritems'):
- for k, v in other.iteritems():
- self[k] = v
- elif hasattr(other, 'keys'):
- for k in other.keys():
- self[k] = other[k]
- else:
- for k, v in other:
- self[k] = v
- if kw:
- self.update(kw)
-
- def values(self):
- '''Returns a list with all values in a store.'''
- return list(v for _, v in self.iteritems())
-
-
-class Shove(BaseStore):
-
- '''Common object frontend class.'''
-
- def __init__(self, store='simple://', cache='simple://', **kw):
- super(Shove, self).__init__(store, **kw)
- # Load store
- self._store = getbackend(store, stores, **kw)
- # Load cache
- self._cache = getbackend(cache, caches, **kw)
- # Buffer for lazy writing and setting for syncing frequency
- self._buffer, self._sync = dict(), kw.get('sync', 2)
-
- def __getitem__(self, key):
- '''Gets a item from shove.'''
- try:
- return self._cache[key]
- except KeyError:
- # Synchronize cache and store
- self.sync()
- value = self._store[key]
- self._cache[key] = value
- return value
-
- def __setitem__(self, key, value):
- '''Sets an item in shove.'''
- self._cache[key] = self._buffer[key] = value
- # When the buffer reaches self._limit, writes the buffer to the store
- if len(self._buffer) >= self._sync:
- self.sync()
-
- def __delitem__(self, key):
- '''Deletes an item from shove.'''
- try:
- del self._cache[key]
- except KeyError:
- pass
- self.sync()
- del self._store[key]
-
- def keys(self):
- '''Returns a list of keys in shove.'''
- self.sync()
- return self._store.keys()
-
- def sync(self):
- '''Writes buffer to store.'''
- for k, v in self._buffer.iteritems():
- self._store[k] = v
- self._buffer.clear()
-
- def close(self):
- '''Finalizes and closes shove.'''
- # If close has been called, pass
- if self._store is not None:
- try:
- self.sync()
- except AttributeError:
- pass
- self._store.close()
- self._store = self._cache = self._buffer = None
-
-
-class FileBase(Base):
-
- '''Base class for file based storage.'''
-
- def __init__(self, engine, **kw):
- super(FileBase, self).__init__(engine, **kw)
- if engine.startswith('file://'):
- engine = urllib.url2pathname(engine.split('://')[1])
- self._dir = engine
- # Create directory
- if not os.path.exists(self._dir):
- self._createdir()
-
- def __getitem__(self, key):
- # (per Larry Meyn)
- try:
- item = open(self._key_to_file(key), 'rb')
- data = item.read()
- item.close()
- return self.loads(data)
- except:
- raise KeyError(key)
-
- def __setitem__(self, key, value):
- # (per Larry Meyn)
- try:
- item = open(self._key_to_file(key), 'wb')
- item.write(self.dumps(value))
- item.close()
- except (IOError, OSError):
- raise KeyError(key)
-
- def __delitem__(self, key):
- try:
- os.remove(self._key_to_file(key))
- except (IOError, OSError):
- raise KeyError(key)
-
- def __contains__(self, key):
- return os.path.exists(self._key_to_file(key))
-
- def __len__(self):
- return len(os.listdir(self._dir))
-
- def _createdir(self):
- '''Creates the store directory.'''
- try:
- os.makedirs(self._dir)
- except OSError:
- raise EnvironmentError(
- 'Cache directory "%s" does not exist and ' \
- 'could not be created' % self._dir
- )
-
- def _key_to_file(self, key):
- '''Gives the filesystem path for a key.'''
- return os.path.join(self._dir, urllib.quote_plus(key))
-
- def keys(self):
- '''Returns a list of keys in the store.'''
- return [urllib.unquote_plus(name) for name in os.listdir(self._dir)]
-
-
-class SimpleBase(Base):
-
- '''Single-process in-memory store base class.'''
-
- def __init__(self, engine, **kw):
- super(SimpleBase, self).__init__(engine, **kw)
- self._store = dict()
-
- def __getitem__(self, key):
- try:
- return self._store[key]
- except:
- raise KeyError(key)
-
- def __setitem__(self, key, value):
- self._store[key] = value
-
- def __delitem__(self, key):
- try:
- del self._store[key]
- except:
- raise KeyError(key)
-
- def __len__(self):
- return len(self._store)
-
- def keys(self):
- '''Returns a list of keys in the store.'''
- return self._store.keys()
-
-
-class LRUBase(SimpleBase):
-
- def __init__(self, engine, **kw):
- super(LRUBase, self).__init__(engine, **kw)
- self._max_entries = kw.get('max_entries', 300)
- self._hits = 0
- self._misses = 0
- self._queue = deque()
- self._refcount = dict()
-
- def __getitem__(self, key):
- try:
- value = super(LRUBase, self).__getitem__(key)
- self._hits += 1
- except KeyError:
- self._misses += 1
- raise
- self._housekeep(key)
- return value
-
- def __setitem__(self, key, value):
- super(LRUBase, self).__setitem__(key, value)
- self._housekeep(key)
- if len(self._store) > self._max_entries:
- while len(self._store) > self._max_entries:
- k = self._queue.popleft()
- self._refcount[k] -= 1
- if not self._refcount[k]:
- super(LRUBase, self).__delitem__(k)
- del self._refcount[k]
-
- def _housekeep(self, key):
- self._queue.append(key)
- self._refcount[key] = self._refcount.get(key, 0) + 1
- if len(self._queue) > self._max_entries * 4:
- self._purge_queue()
-
- def _purge_queue(self):
- for i in [None] * len(self._queue):
- k = self._queue.popleft()
- if self._refcount[k] == 1:
- self._queue.append(k)
- else:
- self._refcount[k] -= 1
-
-
-class DbBase(Base):
-
- '''Database common base class.'''
-
- def __init__(self, engine, **kw):
- super(DbBase, self).__init__(engine, **kw)
-
- def __delitem__(self, key):
- self._store.delete(self._store.c.key == key).execute()
-
- def __len__(self):
- return self._store.count().execute().fetchone()[0]
-
-
-__all__ = ['Shove']
diff --git a/lib/shove/cache/__init__.py b/lib/shove/cache/__init__.py
deleted file mode 100644
index 40a96afc..00000000
--- a/lib/shove/cache/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
diff --git a/lib/shove/cache/db.py b/lib/shove/cache/db.py
deleted file mode 100644
index 21fea01f..00000000
--- a/lib/shove/cache/db.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Database object cache.
-
-The shove psuedo-URL used for database object caches is the format used by
-SQLAlchemy:
-
-://:@:/
-
- is the database engine. The engines currently supported SQLAlchemy are
-sqlite, mysql, postgres, oracle, mssql, and firebird.
- is the database account user name
- is the database accound password
- is the database location
- is the database port
- is the name of the specific database
-
-For more information on specific databases see:
-
-http://www.sqlalchemy.org/docs/dbengine.myt#dbengine_supported
-'''
-
-import time
-import random
-from datetime import datetime
-try:
- from sqlalchemy import (
- MetaData, Table, Column, String, Binary, DateTime, select, update,
- insert, delete,
- )
- from shove import DbBase
-except ImportError:
- raise ImportError('Requires SQLAlchemy >= 0.4')
-
-__all__ = ['DbCache']
-
-
-class DbCache(DbBase):
-
- '''database cache backend'''
-
- def __init__(self, engine, **kw):
- super(DbCache, self).__init__(engine, **kw)
- # Get table name
- tablename = kw.get('tablename', 'cache')
- # Bind metadata
- self._metadata = MetaData(engine)
- # Make cache table
- self._store = Table(tablename, self._metadata,
- Column('key', String(60), primary_key=True, nullable=False),
- Column('value', Binary, nullable=False),
- Column('expires', DateTime, nullable=False),
- )
- # Create cache table if it does not exist
- if not self._store.exists():
- self._store.create()
- # Set maximum entries
- self._max_entries = kw.get('max_entries', 300)
- # Maximum number of entries to cull per call if cache is full
- self._maxcull = kw.get('maxcull', 10)
- # Set timeout
- self.timeout = kw.get('timeout', 300)
-
- def __getitem__(self, key):
- row = select(
- [self._store.c.value, self._store.c.expires],
- self._store.c.key == key
- ).execute().fetchone()
- if row is not None:
- # Remove if item expired
- if row.expires < datetime.now().replace(microsecond=0):
- del self[key]
- raise KeyError(key)
- return self.loads(str(row.value))
- raise KeyError(key)
-
- def __setitem__(self, key, value):
- timeout, value, cache = self.timeout, self.dumps(value), self._store
- # Cull if too many items
- if len(self) >= self._max_entries:
- self._cull()
- # Generate expiration time
- expires = datetime.fromtimestamp(
- time.time() + timeout
- ).replace(microsecond=0)
- # Update database if key already present
- if key in self:
- update(
- cache,
- cache.c.key == key,
- dict(value=value, expires=expires),
- ).execute()
- # Insert new key if key not present
- else:
- insert(
- cache, dict(key=key, value=value, expires=expires)
- ).execute()
-
- def _cull(self):
- '''Remove items in cache to make more room.'''
- cache, maxcull = self._store, self._maxcull
- # Remove items that have timed out
- now = datetime.now().replace(microsecond=0)
- delete(cache, cache.c.expires < now).execute()
- # Remove any items over the maximum allowed number in the cache
- if len(self) >= self._max_entries:
- # Upper limit for key query
- ul = maxcull * 2
- # Get list of keys
- keys = [
- i[0] for i in select(
- [cache.c.key], limit=ul
- ).execute().fetchall()
- ]
- # Get some keys at random
- delkeys = list(random.choice(keys) for i in xrange(maxcull))
- delete(cache, cache.c.key.in_(delkeys)).execute()
diff --git a/lib/shove/cache/file.py b/lib/shove/cache/file.py
deleted file mode 100644
index 7b9a4ae7..00000000
--- a/lib/shove/cache/file.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-File-based cache
-
-shove's psuedo-URL for file caches follows the form:
-
-file://
-
-Where the path is a URL path to a directory on a local filesystem.
-Alternatively, a native pathname to the directory can be passed as the 'engine'
-argument.
-'''
-
-import time
-
-from shove import FileBase
-from shove.cache.simple import SimpleCache
-
-
-class FileCache(FileBase, SimpleCache):
-
- '''File-based cache backend'''
-
- def __init__(self, engine, **kw):
- super(FileCache, self).__init__(engine, **kw)
-
- def __getitem__(self, key):
- try:
- exp, value = super(FileCache, self).__getitem__(key)
- # Remove item if time has expired.
- if exp < time.time():
- del self[key]
- raise KeyError(key)
- return value
- except:
- raise KeyError(key)
-
- def __setitem__(self, key, value):
- if len(self) >= self._max_entries:
- self._cull()
- super(FileCache, self).__setitem__(
- key, (time.time() + self.timeout, value)
- )
-
-
-__all__ = ['FileCache']
diff --git a/lib/shove/cache/filelru.py b/lib/shove/cache/filelru.py
deleted file mode 100644
index de076613..00000000
--- a/lib/shove/cache/filelru.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-File-based LRU cache
-
-shove's psuedo-URL for file caches follows the form:
-
-file://
-
-Where the path is a URL path to a directory on a local filesystem.
-Alternatively, a native pathname to the directory can be passed as the 'engine'
-argument.
-'''
-
-from shove import FileBase
-from shove.cache.simplelru import SimpleLRUCache
-
-
-class FileCache(FileBase, SimpleLRUCache):
-
- '''File-based LRU cache backend'''
-
-
-__all__ = ['FileCache']
diff --git a/lib/shove/cache/memcached.py b/lib/shove/cache/memcached.py
deleted file mode 100644
index aedfe282..00000000
--- a/lib/shove/cache/memcached.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-"memcached" cache.
-
-The shove psuedo-URL for a memcache cache is:
-
-memcache://
-'''
-
-try:
- import memcache
-except ImportError:
- raise ImportError("Memcache cache requires the 'memcache' library")
-
-from shove import Base
-
-
-class MemCached(Base):
-
- '''Memcached cache backend'''
-
- def __init__(self, engine, **kw):
- super(MemCached, self).__init__(engine, **kw)
- if engine.startswith('memcache://'):
- engine = engine.split('://')[1]
- self._store = memcache.Client(engine.split(';'))
- # Set timeout
- self.timeout = kw.get('timeout', 300)
-
- def __getitem__(self, key):
- value = self._store.get(key)
- if value is None:
- raise KeyError(key)
- return self.loads(value)
-
- def __setitem__(self, key, value):
- self._store.set(key, self.dumps(value), self.timeout)
-
- def __delitem__(self, key):
- self._store.delete(key)
-
-
-__all__ = ['MemCached']
diff --git a/lib/shove/cache/memlru.py b/lib/shove/cache/memlru.py
deleted file mode 100644
index 7db61ec5..00000000
--- a/lib/shove/cache/memlru.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Thread-safe in-memory cache using LRU.
-
-The shove psuedo-URL for a memory cache is:
-
-memlru://
-'''
-
-import copy
-import threading
-
-from shove import synchronized
-from shove.cache.simplelru import SimpleLRUCache
-
-
-class MemoryLRUCache(SimpleLRUCache):
-
- '''Thread-safe in-memory cache backend using LRU.'''
-
- def __init__(self, engine, **kw):
- super(MemoryLRUCache, self).__init__(engine, **kw)
- self._lock = threading.Condition()
-
- @synchronized
- def __setitem__(self, key, value):
- super(MemoryLRUCache, self).__setitem__(key, value)
-
- @synchronized
- def __getitem__(self, key):
- return copy.deepcopy(super(MemoryLRUCache, self).__getitem__(key))
-
- @synchronized
- def __delitem__(self, key):
- super(MemoryLRUCache, self).__delitem__(key)
-
-
-__all__ = ['MemoryLRUCache']
diff --git a/lib/shove/cache/memory.py b/lib/shove/cache/memory.py
deleted file mode 100644
index e70f9bbb..00000000
--- a/lib/shove/cache/memory.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Thread-safe in-memory cache.
-
-The shove psuedo-URL for a memory cache is:
-
-memory://
-'''
-
-import copy
-import threading
-
-from shove import synchronized
-from shove.cache.simple import SimpleCache
-
-
-class MemoryCache(SimpleCache):
-
- '''Thread-safe in-memory cache backend.'''
-
- def __init__(self, engine, **kw):
- super(MemoryCache, self).__init__(engine, **kw)
- self._lock = threading.Condition()
-
- @synchronized
- def __setitem__(self, key, value):
- super(MemoryCache, self).__setitem__(key, value)
-
- @synchronized
- def __getitem__(self, key):
- return copy.deepcopy(super(MemoryCache, self).__getitem__(key))
-
- @synchronized
- def __delitem__(self, key):
- super(MemoryCache, self).__delitem__(key)
-
-
-__all__ = ['MemoryCache']
diff --git a/lib/shove/cache/redisdb.py b/lib/shove/cache/redisdb.py
deleted file mode 100644
index c53536c1..00000000
--- a/lib/shove/cache/redisdb.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Redis-based object cache
-
-The shove psuedo-URL for a redis cache is:
-
-redis://:/
-'''
-
-import urlparse
-
-try:
- import redis
-except ImportError:
- raise ImportError('This store requires the redis library')
-
-from shove import Base
-
-
-class RedisCache(Base):
-
- '''Redis cache backend'''
-
- init = 'redis://'
-
- def __init__(self, engine, **kw):
- super(RedisCache, self).__init__(engine, **kw)
- spliturl = urlparse.urlsplit(engine)
- host, port = spliturl[1].split(':')
- db = spliturl[2].replace('/', '')
- self._store = redis.Redis(host, int(port), db)
- # Set timeout
- self.timeout = kw.get('timeout', 300)
-
- def __getitem__(self, key):
- return self.loads(self._store[key])
-
- def __setitem__(self, key, value):
- self._store.setex(key, self.dumps(value), self.timeout)
-
- def __delitem__(self, key):
- self._store.delete(key)
-
-
-__all__ = ['RedisCache']
diff --git a/lib/shove/cache/simple.py b/lib/shove/cache/simple.py
deleted file mode 100644
index 6855603e..00000000
--- a/lib/shove/cache/simple.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Single-process in-memory cache.
-
-The shove psuedo-URL for a simple cache is:
-
-simple://
-'''
-
-import time
-import random
-
-from shove import SimpleBase
-
-
-class SimpleCache(SimpleBase):
-
- '''Single-process in-memory cache.'''
-
- def __init__(self, engine, **kw):
- super(SimpleCache, self).__init__(engine, **kw)
- # Get random seed
- random.seed()
- # Set maximum number of items to cull if over max
- self._maxcull = kw.get('maxcull', 10)
- # Set max entries
- self._max_entries = kw.get('max_entries', 300)
- # Set timeout
- self.timeout = kw.get('timeout', 300)
-
- def __getitem__(self, key):
- exp, value = super(SimpleCache, self).__getitem__(key)
- # Delete if item timed out.
- if exp < time.time():
- super(SimpleCache, self).__delitem__(key)
- raise KeyError(key)
- return value
-
- def __setitem__(self, key, value):
- # Cull values if over max # of entries
- if len(self) >= self._max_entries:
- self._cull()
- # Set expiration time and value
- exp = time.time() + self.timeout
- super(SimpleCache, self).__setitem__(key, (exp, value))
-
- def _cull(self):
- '''Remove items in cache to make room.'''
- num, maxcull = 0, self._maxcull
- # Cull number of items allowed (set by self._maxcull)
- for key in self.keys():
- # Remove only maximum # of items allowed by maxcull
- if num <= maxcull:
- # Remove items if expired
- try:
- self[key]
- except KeyError:
- num += 1
- else:
- break
- # Remove any additional items up to max # of items allowed by maxcull
- while len(self) >= self._max_entries and num <= maxcull:
- # Cull remainder of allowed quota at random
- del self[random.choice(self.keys())]
- num += 1
-
-
-__all__ = ['SimpleCache']
diff --git a/lib/shove/cache/simplelru.py b/lib/shove/cache/simplelru.py
deleted file mode 100644
index fbb6e446..00000000
--- a/lib/shove/cache/simplelru.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Single-process in-memory LRU cache.
-
-The shove psuedo-URL for a simple cache is:
-
-simplelru://
-'''
-
-from shove import LRUBase
-
-
-class SimpleLRUCache(LRUBase):
-
- '''In-memory cache that purges based on least recently used item.'''
-
-
-__all__ = ['SimpleLRUCache']
diff --git a/lib/shove/store/__init__.py b/lib/shove/store/__init__.py
deleted file mode 100644
index 5d639a07..00000000
--- a/lib/shove/store/__init__.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from urllib import url2pathname
-from shove.store.simple import SimpleStore
-
-
-class ClientStore(SimpleStore):
-
- '''Base class for stores where updates have to be committed.'''
-
- def __init__(self, engine, **kw):
- super(ClientStore, self).__init__(engine, **kw)
- if engine.startswith(self.init):
- self._engine = url2pathname(engine.split('://')[1])
-
- def __getitem__(self, key):
- return self.loads(super(ClientStore, self).__getitem__(key))
-
- def __setitem__(self, key, value):
- super(ClientStore, self).__setitem__(key, self.dumps(value))
-
-
-class SyncStore(ClientStore):
-
- '''Base class for stores where updates have to be committed.'''
-
- def __getitem__(self, key):
- return self.loads(super(SyncStore, self).__getitem__(key))
-
- def __setitem__(self, key, value):
- super(SyncStore, self).__setitem__(key, value)
- try:
- self.sync()
- except AttributeError:
- pass
-
- def __delitem__(self, key):
- super(SyncStore, self).__delitem__(key)
- try:
- self.sync()
- except AttributeError:
- pass
-
-
-__all__ = [
- 'bsdb', 'db', 'dbm', 'durusdb', 'file', 'ftp', 'memory', 's3', 'simple',
- 'svn', 'zodb', 'redisdb', 'hdf5db', 'leveldbstore', 'cassandra',
-]
diff --git a/lib/shove/store/bsdb.py b/lib/shove/store/bsdb.py
deleted file mode 100644
index d1f9c6dc..00000000
--- a/lib/shove/store/bsdb.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Berkeley Source Database Store.
-
-shove's psuedo-URL for BSDDB stores follows the form:
-
-bsddb://
-
-Where the path is a URL path to a Berkeley database. Alternatively, the native
-pathname to a Berkeley database can be passed as the 'engine' parameter.
-'''
-try:
- import bsddb
-except ImportError:
- raise ImportError('requires bsddb library')
-
-import threading
-
-from shove import synchronized
-from shove.store import SyncStore
-
-
-class BsdStore(SyncStore):
-
- '''Class for Berkeley Source Database Store.'''
-
- init = 'bsddb://'
-
- def __init__(self, engine, **kw):
- super(BsdStore, self).__init__(engine, **kw)
- self._store = bsddb.hashopen(self._engine)
- self._lock = threading.Condition()
- self.sync = self._store.sync
-
- @synchronized
- def __getitem__(self, key):
- return super(BsdStore, self).__getitem__(key)
-
- @synchronized
- def __setitem__(self, key, value):
- super(BsdStore, self).__setitem__(key, value)
-
- @synchronized
- def __delitem__(self, key):
- super(BsdStore, self).__delitem__(key)
-
-
-__all__ = ['BsdStore']
diff --git a/lib/shove/store/cassandra.py b/lib/shove/store/cassandra.py
deleted file mode 100644
index 1f6532ee..00000000
--- a/lib/shove/store/cassandra.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Cassandra-based object store
-
-The shove psuedo-URL for a cassandra-based store is:
-
-cassandra://://
-'''
-
-import urlparse
-
-try:
- import pycassa
-except ImportError:
- raise ImportError('This store requires the pycassa library')
-
-from shove import BaseStore
-
-
-class CassandraStore(BaseStore):
-
- '''Cassandra based store'''
-
- init = 'cassandra://'
-
- def __init__(self, engine, **kw):
- super(CassandraStore, self).__init__(engine, **kw)
- spliturl = urlparse.urlsplit(engine)
- _, keyspace, column_family = spliturl[2].split('/')
- try:
- self._pool = pycassa.connect(keyspace, [spliturl[1]])
- self._store = pycassa.ColumnFamily(self._pool, column_family)
- except pycassa.InvalidRequestException:
- from pycassa.system_manager import SystemManager
- system_manager = SystemManager(spliturl[1])
- system_manager.create_keyspace(
- keyspace,
- pycassa.system_manager.SIMPLE_STRATEGY,
- {'replication_factor': str(kw.get('replication', 1))}
- )
- system_manager.create_column_family(keyspace, column_family)
- self._pool = pycassa.connect(keyspace, [spliturl[1]])
- self._store = pycassa.ColumnFamily(self._pool, column_family)
-
- def __getitem__(self, key):
- try:
- item = self._store.get(key).get(key)
- if item is not None:
- return self.loads(item)
- raise KeyError(key)
- except pycassa.NotFoundException:
- raise KeyError(key)
-
- def __setitem__(self, key, value):
- self._store.insert(key, dict(key=self.dumps(value)))
-
- def __delitem__(self, key):
- # beware eventual consistency
- try:
- self._store.remove(key)
- except pycassa.NotFoundException:
- raise KeyError(key)
-
- def clear(self):
- # beware eventual consistency
- self._store.truncate()
-
- def keys(self):
- return list(i[0] for i in self._store.get_range())
-
-
-__all__ = ['CassandraStore']
diff --git a/lib/shove/store/db.py b/lib/shove/store/db.py
deleted file mode 100644
index 0004e6f8..00000000
--- a/lib/shove/store/db.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Database object store.
-
-The shove psuedo-URL used for database object stores is the format used by
-SQLAlchemy:
-
-://:@:/
-
- is the database engine. The engines currently supported SQLAlchemy are
-sqlite, mysql, postgres, oracle, mssql, and firebird.
- is the database account user name
- is the database accound password
- is the database location
- is the database port
- is the name of the specific database
-
-For more information on specific databases see:
-
-http://www.sqlalchemy.org/docs/dbengine.myt#dbengine_supported
-'''
-
-try:
- from sqlalchemy import MetaData, Table, Column, String, Binary, select
- from shove import BaseStore, DbBase
-except ImportError, e:
- raise ImportError('Error: ' + e + ' Requires SQLAlchemy >= 0.4')
-
-
-class DbStore(BaseStore, DbBase):
-
- '''Database cache backend.'''
-
- def __init__(self, engine, **kw):
- super(DbStore, self).__init__(engine, **kw)
- # Get tablename
- tablename = kw.get('tablename', 'store')
- # Bind metadata
- self._metadata = MetaData(engine)
- # Make store table
- self._store = Table(tablename, self._metadata,
- Column('key', String(255), primary_key=True, nullable=False),
- Column('value', Binary, nullable=False),
- )
- # Create store table if it does not exist
- if not self._store.exists():
- self._store.create()
-
- def __getitem__(self, key):
- row = select(
- [self._store.c.value], self._store.c.key == key,
- ).execute().fetchone()
- if row is not None:
- return self.loads(str(row.value))
- raise KeyError(key)
-
- def __setitem__(self, k, v):
- v, store = self.dumps(v), self._store
- # Update database if key already present
- if k in self:
- store.update(store.c.key == k).execute(value=v)
- # Insert new key if key not present
- else:
- store.insert().execute(key=k, value=v)
-
- def keys(self):
- '''Returns a list of keys in the store.'''
- return list(i[0] for i in select(
- [self._store.c.key]
- ).execute().fetchall())
-
-
-__all__ = ['DbStore']
diff --git a/lib/shove/store/dbm.py b/lib/shove/store/dbm.py
deleted file mode 100644
index 323d2484..00000000
--- a/lib/shove/store/dbm.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-DBM Database Store.
-
-shove's psuedo-URL for DBM stores follows the form:
-
-dbm://
-
-Where is a URL path to a DBM database. Alternatively, the native
-pathname to a DBM database can be passed as the 'engine' parameter.
-'''
-
-import anydbm
-
-from shove.store import SyncStore
-
-
-class DbmStore(SyncStore):
-
- '''Class for variants of the DBM database.'''
-
- init = 'dbm://'
-
- def __init__(self, engine, **kw):
- super(DbmStore, self).__init__(engine, **kw)
- self._store = anydbm.open(self._engine, 'c')
- try:
- self.sync = self._store.sync
- except AttributeError:
- pass
-
-
-__all__ = ['DbmStore']
diff --git a/lib/shove/store/durusdb.py b/lib/shove/store/durusdb.py
deleted file mode 100644
index 8e27670e..00000000
--- a/lib/shove/store/durusdb.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Durus object database frontend.
-
-shove's psuedo-URL for Durus stores follows the form:
-
-durus://
-
-
-Where the path is a URL path to a durus FileStorage database. Alternatively, a
-native pathname to a durus database can be passed as the 'engine' parameter.
-'''
-
-try:
- from durus.connection import Connection
- from durus.file_storage import FileStorage
-except ImportError:
- raise ImportError('Requires Durus library')
-
-from shove.store import SyncStore
-
-
-class DurusStore(SyncStore):
-
- '''Class for Durus object database frontend.'''
-
- init = 'durus://'
-
- def __init__(self, engine, **kw):
- super(DurusStore, self).__init__(engine, **kw)
- self._db = FileStorage(self._engine)
- self._connection = Connection(self._db)
- self.sync = self._connection.commit
- self._store = self._connection.get_root()
-
- def close(self):
- '''Closes all open storage and connections.'''
- self.sync()
- self._db.close()
- super(DurusStore, self).close()
-
-
-__all__ = ['DurusStore']
diff --git a/lib/shove/store/file.py b/lib/shove/store/file.py
deleted file mode 100644
index e66e9c4f..00000000
--- a/lib/shove/store/file.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Filesystem-based object store
-
-shove's psuedo-URL for filesystem-based stores follows the form:
-
-file://
-
-Where the path is a URL path to a directory on a local filesystem.
-Alternatively, a native pathname to the directory can be passed as the 'engine'
-argument.
-'''
-
-from shove import BaseStore, FileBase
-
-
-class FileStore(FileBase, BaseStore):
-
- '''File-based store.'''
-
- def __init__(self, engine, **kw):
- super(FileStore, self).__init__(engine, **kw)
-
-
-__all__ = ['FileStore']
diff --git a/lib/shove/store/ftp.py b/lib/shove/store/ftp.py
deleted file mode 100644
index c2d4aec6..00000000
--- a/lib/shove/store/ftp.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-FTP-accessed stores
-
-shove's URL for FTP accessed stores follows the standard form for FTP URLs
-defined in RFC-1738:
-
-ftp://:@:/
-'''
-
-import urlparse
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
-from ftplib import FTP, error_perm
-
-from shove import BaseStore
-
-
-class FtpStore(BaseStore):
-
- def __init__(self, engine, **kw):
- super(FtpStore, self).__init__(engine, **kw)
- user = kw.get('user', 'anonymous')
- password = kw.get('password', '')
- spliturl = urlparse.urlsplit(engine)
- # Set URL, path, and strip 'ftp://' off
- base, path = spliturl[1], spliturl[2] + '/'
- if '@' in base:
- auth, base = base.split('@')
- user, password = auth.split(':')
- self._store = FTP(base, user, password)
- # Change to remote path if it exits
- try:
- self._store.cwd(path)
- except error_perm:
- self._makedir(path)
- self._base, self._user, self._password = base, user, password
- self._updated, self ._keys = True, None
-
- def __getitem__(self, key):
- try:
- local = StringIO()
- # Download item
- self._store.retrbinary('RETR %s' % key, local.write)
- self._updated = False
- return self.loads(local.getvalue())
- except:
- raise KeyError(key)
-
- def __setitem__(self, key, value):
- local = StringIO(self.dumps(value))
- self._store.storbinary('STOR %s' % key, local)
- self._updated = True
-
- def __delitem__(self, key):
- try:
- self._store.delete(key)
- self._updated = True
- except:
- raise KeyError(key)
-
- def _makedir(self, path):
- '''Makes remote paths on an FTP server.'''
- paths = list(reversed([i for i in path.split('/') if i != '']))
- while paths:
- tpath = paths.pop()
- self._store.mkd(tpath)
- self._store.cwd(tpath)
-
- def keys(self):
- '''Returns a list of keys in a store.'''
- if self._updated or self._keys is None:
- rlist, nlist = list(), list()
- # Remote directory listing
- self._store.retrlines('LIST -a', rlist.append)
- for rlisting in rlist:
- # Split remote file based on whitespace
- rfile = rlisting.split()
- # Append tuple of remote item type & name
- if rfile[-1] not in ('.', '..') and rfile[0].startswith('-'):
- nlist.append(rfile[-1])
- self._keys = nlist
- return self._keys
-
-
-__all__ = ['FtpStore']
diff --git a/lib/shove/store/hdf5.py b/lib/shove/store/hdf5.py
deleted file mode 100644
index a9b618e5..00000000
--- a/lib/shove/store/hdf5.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-HDF5 Database Store.
-
-shove's psuedo-URL for HDF5 stores follows the form:
-
-hdf5:///
-
-Where is a URL path to a HDF5 database. Alternatively, the native
-pathname to a HDF5 database can be passed as the 'engine' parameter.
- is the name of the database.
-'''
-
-try:
- import h5py
-except ImportError:
- raise ImportError('This store requires h5py library')
-
-from shove.store import ClientStore
-
-
-class HDF5Store(ClientStore):
-
- '''LevelDB based store'''
-
- init = 'hdf5://'
-
- def __init__(self, engine, **kw):
- super(HDF5Store, self).__init__(engine, **kw)
- engine, group = self._engine.rsplit('/')
- self._store = h5py.File(engine).require_group(group).attrs
-
-
-__all__ = ['HDF5Store']
diff --git a/lib/shove/store/leveldbstore.py b/lib/shove/store/leveldbstore.py
deleted file mode 100644
index ca73a494..00000000
--- a/lib/shove/store/leveldbstore.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-LevelDB Database Store.
-
-shove's psuedo-URL for LevelDB stores follows the form:
-
-leveldb://
-
-Where is a URL path to a LevelDB database. Alternatively, the native
-pathname to a LevelDB database can be passed as the 'engine' parameter.
-'''
-
-try:
- import leveldb
-except ImportError:
- raise ImportError('This store requires py-leveldb library')
-
-from shove.store import ClientStore
-
-
-class LevelDBStore(ClientStore):
-
- '''LevelDB based store'''
-
- init = 'leveldb://'
-
- def __init__(self, engine, **kw):
- super(LevelDBStore, self).__init__(engine, **kw)
- self._store = leveldb.LevelDB(self._engine)
-
- def __getitem__(self, key):
- item = self.loads(self._store.Get(key))
- if item is not None:
- return item
- raise KeyError(key)
-
- def __setitem__(self, key, value):
- self._store.Put(key, self.dumps(value))
-
- def __delitem__(self, key):
- self._store.Delete(key)
-
- def keys(self):
- return list(k for k in self._store.RangeIter(include_value=False))
-
-
-__all__ = ['LevelDBStore']
diff --git a/lib/shove/store/memory.py b/lib/shove/store/memory.py
deleted file mode 100644
index 525ae69e..00000000
--- a/lib/shove/store/memory.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Thread-safe in-memory store.
-
-The shove psuedo-URL for a memory store is:
-
-memory://
-'''
-
-import copy
-import threading
-
-from shove import synchronized
-from shove.store.simple import SimpleStore
-
-
-class MemoryStore(SimpleStore):
-
- '''Thread-safe in-memory store.'''
-
- def __init__(self, engine, **kw):
- super(MemoryStore, self).__init__(engine, **kw)
- self._lock = threading.Condition()
-
- @synchronized
- def __getitem__(self, key):
- return copy.deepcopy(super(MemoryStore, self).__getitem__(key))
-
- @synchronized
- def __setitem__(self, key, value):
- super(MemoryStore, self).__setitem__(key, value)
-
- @synchronized
- def __delitem__(self, key):
- super(MemoryStore, self).__delitem__(key)
-
-
-__all__ = ['MemoryStore']
diff --git a/lib/shove/store/redisdb.py b/lib/shove/store/redisdb.py
deleted file mode 100644
index 67fa2ebd..00000000
--- a/lib/shove/store/redisdb.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Redis-based object store
-
-The shove psuedo-URL for a redis-based store is:
-
-redis://:/
-'''
-
-import urlparse
-
-try:
- import redis
-except ImportError:
- raise ImportError('This store requires the redis library')
-
-from shove.store import ClientStore
-
-
-class RedisStore(ClientStore):
-
- '''Redis based store'''
-
- init = 'redis://'
-
- def __init__(self, engine, **kw):
- super(RedisStore, self).__init__(engine, **kw)
- spliturl = urlparse.urlsplit(engine)
- host, port = spliturl[1].split(':')
- db = spliturl[2].replace('/', '')
- self._store = redis.Redis(host, int(port), db)
-
- def __contains__(self, key):
- return self._store.exists(key)
-
- def clear(self):
- self._store.flushdb()
-
- def keys(self):
- return self._store.keys()
-
- def setdefault(self, key, default=None):
- return self._store.getset(key, default)
-
- def update(self, other=None, **kw):
- args = kw if other is not None else other
- self._store.mset(args)
-
-
-__all__ = ['RedisStore']
diff --git a/lib/shove/store/s3.py b/lib/shove/store/s3.py
deleted file mode 100644
index dbf12f21..00000000
--- a/lib/shove/store/s3.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-S3-accessed stores
-
-shove's psuedo-URL for stores found on Amazon.com's S3 web service follows this
-form:
-
-s3://:@
-
- is the Access Key issued by Amazon
- is the Secret Access Key issued by Amazon
- is the name of the bucket accessed through the S3 service
-'''
-
-try:
- from boto.s3.connection import S3Connection
- from boto.s3.key import Key
-except ImportError:
- raise ImportError('Requires boto library')
-
-from shove import BaseStore
-
-
-class S3Store(BaseStore):
-
- def __init__(self, engine=None, **kw):
- super(S3Store, self).__init__(engine, **kw)
- # key = Access Key, secret=Secret Access Key, bucket=bucket name
- key, secret, bucket = kw.get('key'), kw.get('secret'), kw.get('bucket')
- if engine is not None:
- auth, bucket = engine.split('://')[1].split('@')
- key, secret = auth.split(':')
- # kw 'secure' = (True or False, use HTTPS)
- self._conn = S3Connection(key, secret, kw.get('secure', False))
- buckets = self._conn.get_all_buckets()
- # Use bucket if it exists
- for b in buckets:
- if b.name == bucket:
- self._store = b
- break
- # Create bucket if it doesn't exist
- else:
- self._store = self._conn.create_bucket(bucket)
- # Set bucket permission ('private', 'public-read',
- # 'public-read-write', 'authenticated-read'
- self._store.set_acl(kw.get('acl', 'private'))
- # Updated flag used for avoiding network calls
- self._updated, self._keys = True, None
-
- def __getitem__(self, key):
- rkey = self._store.lookup(key)
- if rkey is None:
- raise KeyError(key)
- # Fetch string
- value = self.loads(rkey.get_contents_as_string())
- # Flag that the store has not been updated
- self._updated = False
- return value
-
- def __setitem__(self, key, value):
- rkey = Key(self._store)
- rkey.key = key
- rkey.set_contents_from_string(self.dumps(value))
- # Flag that the store has been updated
- self._updated = True
-
- def __delitem__(self, key):
- try:
- self._store.delete_key(key)
- # Flag that the store has been updated
- self._updated = True
- except:
- raise KeyError(key)
-
- def keys(self):
- '''Returns a list of keys in the store.'''
- return list(i[0] for i in self.items())
-
- def items(self):
- '''Returns a list of items from the store.'''
- if self._updated or self._keys is None:
- self._keys = self._store.get_all_keys()
- return list((str(k.key), k) for k in self._keys)
-
- def iteritems(self):
- '''Lazily returns items from the store.'''
- for k in self.items():
- yield (k.key, k)
-
-
-__all__ = ['S3Store']
diff --git a/lib/shove/store/simple.py b/lib/shove/store/simple.py
deleted file mode 100644
index 8f7ebb33..00000000
--- a/lib/shove/store/simple.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Single-process in-memory store.
-
-The shove psuedo-URL for a simple store is:
-
-simple://
-'''
-
-from shove import BaseStore, SimpleBase
-
-
-class SimpleStore(SimpleBase, BaseStore):
-
- '''Single-process in-memory store.'''
-
- def __init__(self, engine, **kw):
- super(SimpleStore, self).__init__(engine, **kw)
-
-
-__all__ = ['SimpleStore']
diff --git a/lib/shove/store/svn.py b/lib/shove/store/svn.py
deleted file mode 100644
index 5bb8c33e..00000000
--- a/lib/shove/store/svn.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-subversion managed store.
-
-The shove psuedo-URL used for a subversion store that is password protected is:
-
-svn::?url=
-
-or for non-password protected repositories:
-
-svn://?url=
-
- is the local repository copy
- is the URL of the subversion repository
-'''
-
-import os
-import urllib
-import threading
-
-try:
- import pysvn
-except ImportError:
- raise ImportError('Requires Python Subversion library')
-
-from shove import BaseStore, synchronized
-
-
-class SvnStore(BaseStore):
-
- '''Class for subversion store.'''
-
- def __init__(self, engine=None, **kw):
- super(SvnStore, self).__init__(engine, **kw)
- # Get path, url from keywords if used
- path, url = kw.get('path'), kw.get('url')
- # Get username. password from keywords if used
- user, password = kw.get('user'), kw.get('password')
- # Process psuedo URL if used
- if engine is not None:
- path, query = engine.split('n://')[1].split('?')
- url = query.split('=')[1]
- # Check for username, password
- if '@' in path:
- auth, path = path.split('@')
- user, password = auth.split(':')
- path = urllib.url2pathname(path)
- # Create subversion client
- self._client = pysvn.Client()
- # Assign username, password
- if user is not None:
- self._client.set_username(user)
- if password is not None:
- self._client.set_password(password)
- # Verify that store exists in repository
- try:
- self._client.info2(url)
- # Create store in repository if it doesn't exist
- except pysvn.ClientError:
- self._client.mkdir(url, 'Adding directory')
- # Verify that local copy exists
- try:
- if self._client.info(path) is None:
- self._client.checkout(url, path)
- # Check it out if it doesn't exist
- except pysvn.ClientError:
- self._client.checkout(url, path)
- self._path, self._url = path, url
- # Lock
- self._lock = threading.Condition()
-
- @synchronized
- def __getitem__(self, key):
- try:
- return self.loads(self._client.cat(self._key_to_file(key)))
- except:
- raise KeyError(key)
-
- @synchronized
- def __setitem__(self, key, value):
- fname = self._key_to_file(key)
- # Write value to file
- open(fname, 'wb').write(self.dumps(value))
- # Add to repository
- if key not in self:
- self._client.add(fname)
- self._client.checkin([fname], 'Adding %s' % fname)
-
- @synchronized
- def __delitem__(self, key):
- try:
- fname = self._key_to_file(key)
- self._client.remove(fname)
- # Remove deleted value from repository
- self._client.checkin([fname], 'Removing %s' % fname)
- except:
- raise KeyError(key)
-
- def _key_to_file(self, key):
- '''Gives the filesystem path for a key.'''
- return os.path.join(self._path, urllib.quote_plus(key))
-
- @synchronized
- def keys(self):
- '''Returns a list of keys in the subversion repository.'''
- return list(str(i.name.split('/')[-1]) for i
- in self._client.ls(self._path))
-
-
-__all__ = ['SvnStore']
diff --git a/lib/shove/store/zodb.py b/lib/shove/store/zodb.py
deleted file mode 100644
index 43768dde..00000000
--- a/lib/shove/store/zodb.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-'''
-Zope Object Database store frontend.
-
-shove's psuedo-URL for ZODB stores follows the form:
-
-zodb:
-
-
-Where the path is a URL path to a ZODB FileStorage database. Alternatively, a
-native pathname to a ZODB database can be passed as the 'engine' argument.
-'''
-
-try:
- import transaction
- from ZODB import FileStorage, DB
-except ImportError:
- raise ImportError('Requires ZODB library')
-
-from shove.store import SyncStore
-
-
-class ZodbStore(SyncStore):
-
- '''ZODB store front end.'''
-
- init = 'zodb://'
-
- def __init__(self, engine, **kw):
- super(ZodbStore, self).__init__(engine, **kw)
- # Handle psuedo-URL
- self._storage = FileStorage.FileStorage(self._engine)
- self._db = DB(self._storage)
- self._connection = self._db.open()
- self._store = self._connection.root()
- # Keeps DB in synch through commits of transactions
- self.sync = transaction.commit
-
- def close(self):
- '''Closes all open storage and connections.'''
- self.sync()
- super(ZodbStore, self).close()
- self._connection.close()
- self._db.close()
- self._storage.close()
-
-
-__all__ = ['ZodbStore']
diff --git a/lib/shove/tests/__init__.py b/lib/shove/tests/__init__.py
deleted file mode 100644
index 40a96afc..00000000
--- a/lib/shove/tests/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
diff --git a/lib/shove/tests/test_bsddb_store.py b/lib/shove/tests/test_bsddb_store.py
deleted file mode 100644
index 3de7896e..00000000
--- a/lib/shove/tests/test_bsddb_store.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestBsdbStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('bsddb://test.db', compress=True)
-
- def tearDown(self):
- import os
- self.store.close()
- os.remove('test.db')
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_cassandra_store.py b/lib/shove/tests/test_cassandra_store.py
deleted file mode 100644
index a5c60f6a..00000000
--- a/lib/shove/tests/test_cassandra_store.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestCassandraStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- from pycassa.system_manager import SystemManager
- system_manager = SystemManager('localhost:9160')
- try:
- system_manager.create_column_family('Foo', 'shove')
- except:
- pass
- self.store = Shove('cassandra://localhost:9160/Foo/shove')
-
- def tearDown(self):
- self.store.clear()
- self.store.close()
- from pycassa.system_manager import SystemManager
- system_manager = SystemManager('localhost:9160')
- system_manager.drop_column_family('Foo', 'shove')
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
-# def test_clear(self):
-# self.store['max'] = 3
-# self.store['min'] = 6
-# self.store['pow'] = 7
-# self.store.clear()
-# self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
-# def test_popitem(self):
-# self.store['max'] = 3
-# self.store['min'] = 6
-# self.store['pow'] = 7
-# item = self.store.popitem()
-# self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
-# self.store['pow'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store.setdefault('pow', 8), 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_db_cache.py b/lib/shove/tests/test_db_cache.py
deleted file mode 100644
index 9dd27a06..00000000
--- a/lib/shove/tests/test_db_cache.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestDbCache(unittest.TestCase):
-
- initstring = 'sqlite:///'
-
- def setUp(self):
- from shove.cache.db import DbCache
- self.cache = DbCache(self.initstring)
-
- def tearDown(self):
- self.cache = None
-
- def test_getitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_setitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_delitem(self):
- self.cache['test'] = 'test'
- del self.cache['test']
- self.assertEqual('test' in self.cache, False)
-
- def test_get(self):
- self.assertEqual(self.cache.get('min'), None)
-
- def test_timeout(self):
- import time
- from shove.cache.db import DbCache
- cache = DbCache(self.initstring, timeout=1)
- cache['test'] = 'test'
- time.sleep(2)
-
- def tmp():
- cache['test']
- self.assertRaises(KeyError, tmp)
-
- def test_cull(self):
- from shove.cache.db import DbCache
- cache = DbCache(self.initstring, max_entries=1)
- cache['test'] = 'test'
- cache['test2'] = 'test'
- cache['test2'] = 'test'
- self.assertEquals(len(cache), 1)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_db_store.py b/lib/shove/tests/test_db_store.py
deleted file mode 100644
index 1d9ad616..00000000
--- a/lib/shove/tests/test_db_store.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestDbStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('sqlite://', compress=True)
-
- def tearDown(self):
- self.store.close()
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_dbm_store.py b/lib/shove/tests/test_dbm_store.py
deleted file mode 100644
index e64ac9e7..00000000
--- a/lib/shove/tests/test_dbm_store.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestDbmStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('dbm://test.dbm', compress=True)
-
- def tearDown(self):
- import os
- self.store.close()
- try:
- os.remove('test.dbm.db')
- except OSError:
- pass
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.setdefault('how', 8)
- self.assertEqual(self.store['how'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_durus_store.py b/lib/shove/tests/test_durus_store.py
deleted file mode 100644
index 006fcc41..00000000
--- a/lib/shove/tests/test_durus_store.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestDurusStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('durus://test.durus', compress=True)
-
- def tearDown(self):
- import os
- self.store.close()
- os.remove('test.durus')
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_file_cache.py b/lib/shove/tests/test_file_cache.py
deleted file mode 100644
index b288ce82..00000000
--- a/lib/shove/tests/test_file_cache.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestFileCache(unittest.TestCase):
-
- initstring = 'file://test'
-
- def setUp(self):
- from shove.cache.file import FileCache
- self.cache = FileCache(self.initstring)
-
- def tearDown(self):
- import os
- self.cache = None
- for x in os.listdir('test'):
- os.remove(os.path.join('test', x))
- os.rmdir('test')
-
- def test_getitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_setitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_delitem(self):
- self.cache['test'] = 'test'
- del self.cache['test']
- self.assertEqual('test' in self.cache, False)
-
- def test_get(self):
- self.assertEqual(self.cache.get('min'), None)
-
- def test_timeout(self):
- import time
- from shove.cache.file import FileCache
- cache = FileCache(self.initstring, timeout=1)
- cache['test'] = 'test'
- time.sleep(2)
-
- def tmp():
- cache['test']
- self.assertRaises(KeyError, tmp)
-
- def test_cull(self):
- from shove.cache.file import FileCache
- cache = FileCache(self.initstring, max_entries=1)
- cache['test'] = 'test'
- cache['test2'] = 'test'
- num = len(cache)
- self.assertEquals(num, 1)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_file_store.py b/lib/shove/tests/test_file_store.py
deleted file mode 100644
index 35643ced..00000000
--- a/lib/shove/tests/test_file_store.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestFileStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('file://test', compress=True)
-
- def tearDown(self):
- import os
- self.store.close()
- for x in os.listdir('test'):
- os.remove(os.path.join('test', x))
- os.rmdir('test')
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.store.sync()
- tstore.sync()
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_ftp_store.py b/lib/shove/tests/test_ftp_store.py
deleted file mode 100644
index 17679a2c..00000000
--- a/lib/shove/tests/test_ftp_store.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestFtpStore(unittest.TestCase):
-
- ftpstring = 'put ftp string here'
-
- def setUp(self):
- from shove import Shove
- self.store = Shove(self.ftpstring, compress=True)
-
- def tearDown(self):
- self.store.clear()
- self.store.close()
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.store.sync()
- tstore.sync()
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store.sync()
- self.assertEqual(len(self.store), 2)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store.sync()
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- item = self.store.popitem()
- self.store.sync()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.store.sync()
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.sync()
- self.store.update(tstore)
- self.store.sync()
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_hdf5_store.py b/lib/shove/tests/test_hdf5_store.py
deleted file mode 100644
index b1342ecf..00000000
--- a/lib/shove/tests/test_hdf5_store.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest2
-
-
-class TestHDF5Store(unittest2.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('hdf5://test.hdf5/test')
-
- def tearDown(self):
- import os
- self.store.close()
- try:
- os.remove('test.hdf5')
- except OSError:
- pass
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.setdefault('bow', 8)
- self.assertEqual(self.store['bow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-if __name__ == '__main__':
- unittest2.main()
diff --git a/lib/shove/tests/test_leveldb_store.py b/lib/shove/tests/test_leveldb_store.py
deleted file mode 100644
index b3a3d177..00000000
--- a/lib/shove/tests/test_leveldb_store.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest2
-
-
-class TestLevelDBStore(unittest2.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('leveldb://test', compress=True)
-
- def tearDown(self):
- import shutil
- shutil.rmtree('test')
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.setdefault('bow', 8)
- self.assertEqual(self.store['bow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest2.main()
diff --git a/lib/shove/tests/test_memcached_cache.py b/lib/shove/tests/test_memcached_cache.py
deleted file mode 100644
index 98f0b96d..00000000
--- a/lib/shove/tests/test_memcached_cache.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestMemcached(unittest.TestCase):
-
- initstring = 'memcache://localhost:11211'
-
- def setUp(self):
- from shove.cache.memcached import MemCached
- self.cache = MemCached(self.initstring)
-
- def tearDown(self):
- self.cache = None
-
- def test_getitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_setitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_delitem(self):
- self.cache['test'] = 'test'
- del self.cache['test']
- self.assertEqual('test' in self.cache, False)
-
- def test_get(self):
- self.assertEqual(self.cache.get('min'), None)
-
- def test_timeout(self):
- import time
- from shove.cache.memcached import MemCached
- cache = MemCached(self.initstring, timeout=1)
- cache['test'] = 'test'
- time.sleep(1)
-
- def tmp():
- cache['test']
- self.assertRaises(KeyError, tmp)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_memory_cache.py b/lib/shove/tests/test_memory_cache.py
deleted file mode 100644
index 87749cdb..00000000
--- a/lib/shove/tests/test_memory_cache.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestMemoryCache(unittest.TestCase):
-
- initstring = 'memory://'
-
- def setUp(self):
- from shove.cache.memory import MemoryCache
- self.cache = MemoryCache(self.initstring)
-
- def tearDown(self):
- self.cache = None
-
- def test_getitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_setitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_delitem(self):
- self.cache['test'] = 'test'
- del self.cache['test']
- self.assertEqual('test' in self.cache, False)
-
- def test_get(self):
- self.assertEqual(self.cache.get('min'), None)
-
- def test_timeout(self):
- import time
- from shove.cache.memory import MemoryCache
- cache = MemoryCache(self.initstring, timeout=1)
- cache['test'] = 'test'
- time.sleep(1)
-
- def tmp():
- cache['test']
- self.assertRaises(KeyError, tmp)
-
- def test_cull(self):
- from shove.cache.memory import MemoryCache
- cache = MemoryCache(self.initstring, max_entries=1)
- cache['test'] = 'test'
- cache['test2'] = 'test'
- cache['test2'] = 'test'
- self.assertEquals(len(cache), 1)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_memory_store.py b/lib/shove/tests/test_memory_store.py
deleted file mode 100644
index 12e505dd..00000000
--- a/lib/shove/tests/test_memory_store.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestMemoryStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('memory://', compress=True)
-
- def tearDown(self):
- self.store.close()
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.store.sync()
- tstore.sync()
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_redis_cache.py b/lib/shove/tests/test_redis_cache.py
deleted file mode 100644
index c8e9b8db..00000000
--- a/lib/shove/tests/test_redis_cache.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestRedisCache(unittest.TestCase):
-
- initstring = 'redis://localhost:6379/0'
-
- def setUp(self):
- from shove.cache.redisdb import RedisCache
- self.cache = RedisCache(self.initstring)
-
- def tearDown(self):
- self.cache = None
-
- def test_getitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_setitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_delitem(self):
- self.cache['test'] = 'test'
- del self.cache['test']
- self.assertEqual('test' in self.cache, False)
-
- def test_get(self):
- self.assertEqual(self.cache.get('min'), None)
-
- def test_timeout(self):
- import time
- from shove.cache.redisdb import RedisCache
- cache = RedisCache(self.initstring, timeout=1)
- cache['test'] = 'test'
- time.sleep(3)
- def tmp(): #@IgnorePep8
- return cache['test']
- self.assertRaises(KeyError, tmp)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_redis_store.py b/lib/shove/tests/test_redis_store.py
deleted file mode 100644
index 06b1e0e9..00000000
--- a/lib/shove/tests/test_redis_store.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestRedisStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('redis://localhost:6379/0')
-
- def tearDown(self):
- self.store.clear()
- self.store.close()
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store.setdefault('pow', 8), 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_s3_store.py b/lib/shove/tests/test_s3_store.py
deleted file mode 100644
index 8a0f08d7..00000000
--- a/lib/shove/tests/test_s3_store.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestS3Store(unittest.TestCase):
-
- s3string = 's3 test string here'
-
- def setUp(self):
- from shove import Shove
- self.store = Shove(self.s3string, compress=True)
-
- def tearDown(self):
- self.store.clear()
- self.store.close()
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.store.sync()
- tstore.sync()
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store.sync()
- self.assertEqual(len(self.store), 2)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store.sync()
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- item = self.store.popitem()
- self.store.sync()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.store.sync()
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.sync()
- self.store.update(tstore)
- self.store.sync()
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_simple_cache.py b/lib/shove/tests/test_simple_cache.py
deleted file mode 100644
index 8cd1830c..00000000
--- a/lib/shove/tests/test_simple_cache.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestSimpleCache(unittest.TestCase):
-
- initstring = 'simple://'
-
- def setUp(self):
- from shove.cache.simple import SimpleCache
- self.cache = SimpleCache(self.initstring)
-
- def tearDown(self):
- self.cache = None
-
- def test_getitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_setitem(self):
- self.cache['test'] = 'test'
- self.assertEqual(self.cache['test'], 'test')
-
- def test_delitem(self):
- self.cache['test'] = 'test'
- del self.cache['test']
- self.assertEqual('test' in self.cache, False)
-
- def test_get(self):
- self.assertEqual(self.cache.get('min'), None)
-
- def test_timeout(self):
- import time
- from shove.cache.simple import SimpleCache
- cache = SimpleCache(self.initstring, timeout=1)
- cache['test'] = 'test'
- time.sleep(1)
-
- def tmp():
- cache['test']
- self.assertRaises(KeyError, tmp)
-
- def test_cull(self):
- from shove.cache.simple import SimpleCache
- cache = SimpleCache(self.initstring, max_entries=1)
- cache['test'] = 'test'
- cache['test2'] = 'test'
- cache['test2'] = 'test'
- self.assertEquals(len(cache), 1)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_simple_store.py b/lib/shove/tests/test_simple_store.py
deleted file mode 100644
index d2431ec5..00000000
--- a/lib/shove/tests/test_simple_store.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestSimpleStore(unittest.TestCase):
-
- def setUp(self):
- from shove import Shove
- self.store = Shove('simple://', compress=True)
-
- def tearDown(self):
- self.store.close()
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.store.sync()
- tstore.sync()
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_svn_store.py b/lib/shove/tests/test_svn_store.py
deleted file mode 100644
index b3103816..00000000
--- a/lib/shove/tests/test_svn_store.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestSvnStore(unittest.TestCase):
-
- svnstring = 'SVN test string here'
-
- def setUp(self):
- from shove import Shove
- self.store = Shove(self.svnstring, compress=True)
-
- def tearDown(self):
- self.store.clear()
- self.store.close()
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.store.sync()
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.store.sync()
- tstore.sync()
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store.sync()
- self.assertEqual(len(self.store), 2)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store.sync()
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- item = self.store.popitem()
- self.store.sync()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.store.sync()
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.sync()
- self.store.update(tstore)
- self.store.sync()
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.sync()
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/shove/tests/test_zodb_store.py b/lib/shove/tests/test_zodb_store.py
deleted file mode 100644
index 9d979fea..00000000
--- a/lib/shove/tests/test_zodb_store.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import unittest
-
-
-class TestZodbStore(unittest.TestCase):
-
- init = 'zodb://test.db'
-
- def setUp(self):
- from shove import Shove
- self.store = Shove(self.init, compress=True)
-
- def tearDown(self):
- self.store.close()
- import os
- os.remove('test.db')
- os.remove('test.db.index')
- os.remove('test.db.tmp')
- os.remove('test.db.lock')
-
- def test__getitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__setitem__(self):
- self.store['max'] = 3
- self.assertEqual(self.store['max'], 3)
-
- def test__delitem__(self):
- self.store['max'] = 3
- del self.store['max']
- self.assertEqual('max' in self.store, False)
-
- def test_get(self):
- self.store['max'] = 3
- self.assertEqual(self.store.get('min'), None)
-
- def test__cmp__(self):
- from shove import Shove
- tstore = Shove()
- self.store['max'] = 3
- tstore['max'] = 3
- self.assertEqual(self.store, tstore)
-
- def test__len__(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.assertEqual(len(self.store), 2)
-
- def test_close(self):
- self.store.close()
- self.assertEqual(self.store, None)
-
- def test_clear(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- self.store.clear()
- self.assertEqual(len(self.store), 0)
-
- def test_items(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.items())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iteritems(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iteritems())
- self.assertEqual(('min', 6) in slist, True)
-
- def test_iterkeys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.iterkeys())
- self.assertEqual('min' in slist, True)
-
- def test_itervalues(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = list(self.store.itervalues())
- self.assertEqual(6 in slist, True)
-
- def test_pop(self):
- self.store['max'] = 3
- self.store['min'] = 6
- item = self.store.pop('min')
- self.assertEqual(item, 6)
-
- def test_popitem(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- item = self.store.popitem()
- self.assertEqual(len(item) + len(self.store), 4)
-
- def test_setdefault(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['powl'] = 7
- self.store.setdefault('pow', 8)
- self.assertEqual(self.store['pow'], 8)
-
- def test_update(self):
- from shove import Shove
- tstore = Shove()
- tstore['max'] = 3
- tstore['min'] = 6
- tstore['pow'] = 7
- self.store['max'] = 2
- self.store['min'] = 3
- self.store['pow'] = 7
- self.store.update(tstore)
- self.assertEqual(self.store['min'], 6)
-
- def test_values(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.values()
- self.assertEqual(6 in slist, True)
-
- def test_keys(self):
- self.store['max'] = 3
- self.store['min'] = 6
- self.store['pow'] = 7
- slist = self.store.keys()
- self.assertEqual('min' in slist, True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib/simplejson/__init__.py b/lib/simplejson/__init__.py
index d5b4d399..b8d50978 100644
--- a/lib/simplejson/__init__.py
+++ b/lib/simplejson/__init__.py
@@ -14,15 +14,15 @@ Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
- >>> print json.dumps("\"foo\bar")
+ >>> print(json.dumps("\"foo\bar"))
"\"foo\bar"
- >>> print json.dumps(u'\u1234')
+ >>> print(json.dumps(u'\u1234'))
"\u1234"
- >>> print json.dumps('\\')
+ >>> print(json.dumps('\\'))
"\\"
- >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+ >>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
{"a": 0, "b": 0, "c": 0}
- >>> from StringIO import StringIO
+ >>> from simplejson.compat import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
@@ -31,14 +31,14 @@ Encoding basic Python object hierarchies::
Compact encoding::
>>> import simplejson as json
- >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+ >>> obj = [1,2,3,{'4': 5, '6': 7}]
+ >>> json.dumps(obj, separators=(',',':'), sort_keys=True)
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
- >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
- >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
+ >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' '))
{
"4": 5,
"6": 7
@@ -52,7 +52,7 @@ Decoding JSON::
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
- >>> from StringIO import StringIO
+ >>> from simplejson.compat import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
@@ -68,8 +68,8 @@ Specializing JSON object decoding::
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
- >>> import decimal
- >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
+ >>> from decimal import Decimal
+ >>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
@@ -95,18 +95,38 @@ Using simplejson.tool from the shell to validate and pretty-print::
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
- Expecting property name: line 1 column 2 (char 2)
+ Expecting property name: line 1 column 3 (char 2)
"""
-__version__ = '2.0.9'
+from __future__ import absolute_import
+__version__ = '3.7.3'
__all__ = [
'dump', 'dumps', 'load', 'loads',
- 'JSONDecoder', 'JSONEncoder',
+ 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
+ 'OrderedDict', 'simple_first',
]
__author__ = 'Bob Ippolito '
-from decoder import JSONDecoder
-from encoder import JSONEncoder
+from decimal import Decimal
+
+from .scanner import JSONDecodeError
+from .decoder import JSONDecoder
+from .encoder import JSONEncoder, JSONEncoderForHTML
+def _import_OrderedDict():
+ import collections
+ try:
+ return collections.OrderedDict
+ except AttributeError:
+ from . import ordered_dict
+ return ordered_dict.OrderedDict
+OrderedDict = _import_OrderedDict()
+
+def _import_c_make_encoder():
+ try:
+ from ._speedups import make_encoder
+ return make_encoder
+ except ImportError:
+ return None
_default_encoder = JSONEncoder(
skipkeys=False,
@@ -117,56 +137,117 @@ _default_encoder = JSONEncoder(
separators=None,
encoding='utf-8',
default=None,
+ use_decimal=True,
+ namedtuple_as_object=True,
+ tuple_as_array=True,
+ bigint_as_string=False,
+ item_sort_key=None,
+ for_json=False,
+ ignore_nan=False,
+ int_as_string_bitcount=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
- allow_nan=True, cls=None, indent=None, separators=None,
- encoding='utf-8', default=None, **kw):
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, use_decimal=True,
+ namedtuple_as_object=True, tuple_as_array=True,
+ bigint_as_string=False, sort_keys=False, item_sort_key=None,
+ for_json=False, ignore_nan=False, int_as_string_bitcount=None, **kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
- If ``skipkeys`` is true then ``dict`` keys that are not basic types
+ If *skipkeys* is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
- If ``ensure_ascii`` is false, then the some chunks written to ``fp``
+ If *ensure_ascii* is false, then some chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
- If ``check_circular`` is false, then the circular reference check
+ If *check_circular* is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
- If ``allow_nan`` is false, then it will be a ``ValueError`` to
+ If *allow_nan* is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
- in strict compliance of the JSON specification, instead of using the
- JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+ in strict compliance of the original JSON specification, instead of using
+ the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). See
+ *ignore_nan* for ECMA-262 compliant behavior.
- If ``indent`` is a non-negative integer, then JSON array elements and object
- members will be pretty-printed with that indent level. An indent level
- of 0 will only insert newlines. ``None`` is the most compact representation.
+ If *indent* is a string, then JSON array elements and object members
+ will be pretty-printed with a newline followed by that string repeated
+ for each level of nesting. ``None`` (the default) selects the most compact
+ representation without any newlines. For backwards compatibility with
+ versions of simplejson earlier than 2.1.0, an integer is also accepted
+ and is converted to a string with that many spaces.
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
+ If specified, *separators* should be an
+ ``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
+ if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
+ compact JSON representation, you should specify ``(',', ':')`` to eliminate
+ whitespace.
- ``encoding`` is the character encoding for str instances, default is UTF-8.
+ *encoding* is the character encoding for str instances, default is UTF-8.
- ``default(obj)`` is a function that should return a serializable version
- of obj or raise TypeError. The default simply raises TypeError.
+ *default(obj)* is a function that should return a serializable version
+ of obj or raise ``TypeError``. The default simply raises ``TypeError``.
+
+ If *use_decimal* is true (default: ``True``) then decimal.Decimal
+ will be natively serialized to JSON with full precision.
+
+ If *namedtuple_as_object* is true (default: ``True``),
+ :class:`tuple` subclasses with ``_asdict()`` methods will be encoded
+ as JSON objects.
+
+ If *tuple_as_array* is true (default: ``True``),
+ :class:`tuple` (and subclasses) will be encoded as JSON arrays.
+
+ If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
+ or lower than -2**53 will be encoded as strings. This is to avoid the
+ rounding that happens in Javascript otherwise. Note that this is still a
+ lossy operation that will not round-trip correctly and should be used
+ sparingly.
+
+ If *int_as_string_bitcount* is a positive number (n), then int of size
+ greater than or equal to 2**n or lower than or equal to -2**n will be
+ encoded as strings.
+
+ If specified, *item_sort_key* is a callable used to sort the items in
+ each dictionary. This is useful if you want to sort items other than
+ in alphabetical order by key. This option takes precedence over
+ *sort_keys*.
+
+ If *sort_keys* is true (default: ``False``), the output of dictionaries
+ will be sorted by item.
+
+ If *for_json* is true (default: ``False``), objects with a ``for_json()``
+ method will use the return value of that method for encoding as JSON
+ instead of the object.
+
+ If *ignore_nan* is true (default: ``False``), then out of range
+ :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
+ ``null`` in compliance with the ECMA-262 specification. If true, this will
+ override *allow_nan*.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
- the ``cls`` kwarg.
+ the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead
+ of subclassing whenever possible.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
- encoding == 'utf-8' and default is None and not kw):
+ encoding == 'utf-8' and default is None and use_decimal
+ and namedtuple_as_object and tuple_as_array
+ and not bigint_as_string and not sort_keys
+ and not item_sort_key and not for_json
+ and not ignore_nan and int_as_string_bitcount is None
+ and not kw
+ ):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
@@ -174,7 +255,16 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding,
- default=default, **kw).iterencode(obj)
+ default=default, use_decimal=use_decimal,
+ namedtuple_as_object=namedtuple_as_object,
+ tuple_as_array=tuple_as_array,
+ bigint_as_string=bigint_as_string,
+ sort_keys=sort_keys,
+ item_sort_key=item_sort_key,
+ for_json=for_json,
+ ignore_nan=ignore_nan,
+ int_as_string_bitcount=int_as_string_bitcount,
+ **kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
@@ -182,8 +272,11 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
- allow_nan=True, cls=None, indent=None, separators=None,
- encoding='utf-8', default=None, **kw):
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, use_decimal=True,
+ namedtuple_as_object=True, tuple_as_array=True,
+ bigint_as_string=False, sort_keys=False, item_sort_key=None,
+ for_json=False, ignore_nan=False, int_as_string_bitcount=None, **kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is false then ``dict`` keys that are not basic types
@@ -203,30 +296,77 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
- If ``indent`` is a non-negative integer, then JSON array elements and
- object members will be pretty-printed with that indent level. An indent
- level of 0 will only insert newlines. ``None`` is the most compact
- representation.
+ If ``indent`` is a string, then JSON array elements and object members
+ will be pretty-printed with a newline followed by that string repeated
+ for each level of nesting. ``None`` (the default) selects the most compact
+ representation without any newlines. For backwards compatibility with
+ versions of simplejson earlier than 2.1.0, an integer is also accepted
+ and is converted to a string with that many spaces.
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
+ If specified, ``separators`` should be an
+ ``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
+ if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
+ compact JSON representation, you should specify ``(',', ':')`` to eliminate
+ whitespace.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
+ If *use_decimal* is true (default: ``True``) then decimal.Decimal
+ will be natively serialized to JSON with full precision.
+
+ If *namedtuple_as_object* is true (default: ``True``),
+ :class:`tuple` subclasses with ``_asdict()`` methods will be encoded
+ as JSON objects.
+
+ If *tuple_as_array* is true (default: ``True``),
+ :class:`tuple` (and subclasses) will be encoded as JSON arrays.
+
+ If *bigint_as_string* is true (not the default), ints 2**53 and higher
+ or lower than -2**53 will be encoded as strings. This is to avoid the
+ rounding that happens in Javascript otherwise.
+
+ If *int_as_string_bitcount* is a positive number (n), then int of size
+ greater than or equal to 2**n or lower than or equal to -2**n will be
+ encoded as strings.
+
+ If specified, *item_sort_key* is a callable used to sort the items in
+ each dictionary. This is useful if you want to sort items other than
+ in alphabetical order by key. This option takes precedence over
+ *sort_keys*.
+
+ If *sort_keys* is true (default: ``False``), the output of dictionaries
+ will be sorted by item.
+
+ If *for_json* is true (default: ``False``), objects with a ``for_json()``
+ method will use the return value of that method for encoding as JSON
+ instead of the object.
+
+ If *ignore_nan* is true (default: ``False``), then out of range
+ :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
+ ``null`` in compliance with the ECMA-262 specification. If true, this will
+ override *allow_nan*.
+
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
- the ``cls`` kwarg.
+ the ``cls`` kwarg. NOTE: You should use *default* instead of subclassing
+ whenever possible.
"""
# cached encoder
- if (not skipkeys and ensure_ascii and
+ if (
+ not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
- encoding == 'utf-8' and default is None and not kw):
+ encoding == 'utf-8' and default is None and use_decimal
+ and namedtuple_as_object and tuple_as_array
+ and not bigint_as_string and not sort_keys
+ and not item_sort_key and not for_json
+ and not ignore_nan and int_as_string_bitcount is None
+ and not kw
+ ):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
@@ -234,85 +374,191 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
+ use_decimal=use_decimal,
+ namedtuple_as_object=namedtuple_as_object,
+ tuple_as_array=tuple_as_array,
+ bigint_as_string=bigint_as_string,
+ sort_keys=sort_keys,
+ item_sort_key=item_sort_key,
+ for_json=for_json,
+ ignore_nan=ignore_nan,
+ int_as_string_bitcount=int_as_string_bitcount,
**kw).encode(obj)
-_default_decoder = JSONDecoder(encoding=None, object_hook=None)
+_default_decoder = JSONDecoder(encoding=None, object_hook=None,
+ object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, **kw):
+ parse_int=None, parse_constant=None, object_pairs_hook=None,
+ use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
+ **kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
- If the contents of ``fp`` is encoded with an ASCII based encoding other
- than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
- be specified. Encodings that are not ASCII based (such as UCS-2) are
- not allowed, and should be wrapped with
- ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
- object and passed to ``loads()``
+ *encoding* determines the encoding used to interpret any
+ :class:`str` objects decoded by this instance (``'utf-8'`` by
+ default). It has no effect when decoding :class:`unicode` objects.
- ``object_hook`` is an optional function that will be called with the
- result of any object literal decode (a ``dict``). The return value of
- ``object_hook`` will be used instead of the ``dict``. This feature
- can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+ Note that currently only encodings that are a superset of ASCII work,
+ strings of other encodings should be passed in as :class:`unicode`.
+
+ *object_hook*, if specified, will be called with the result of every
+ JSON object decoded and its return value will be used in place of the
+ given :class:`dict`. This can be used to provide custom
+ deserializations (e.g. to support JSON-RPC class hinting).
+
+ *object_pairs_hook* is an optional function that will be called with
+ the result of any object literal decode with an ordered list of pairs.
+ The return value of *object_pairs_hook* will be used instead of the
+ :class:`dict`. This feature can be used to implement custom decoders
+ that rely on the order that the key and value pairs are decoded (for
+ example, :func:`collections.OrderedDict` will remember the order of
+ insertion). If *object_hook* is also defined, the *object_pairs_hook*
+ takes priority.
+
+ *parse_float*, if specified, will be called with the string of every
+ JSON float to be decoded. By default, this is equivalent to
+ ``float(num_str)``. This can be used to use another datatype or parser
+ for JSON floats (e.g. :class:`decimal.Decimal`).
+
+ *parse_int*, if specified, will be called with the string of every
+ JSON int to be decoded. By default, this is equivalent to
+ ``int(num_str)``. This can be used to use another datatype or parser
+ for JSON integers (e.g. :class:`float`).
+
+ *parse_constant*, if specified, will be called with one of the
+ following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
+ can be used to raise an exception if invalid JSON numbers are
+ encountered.
+
+ If *use_decimal* is true (default: ``False``) then it implies
+ parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
- kwarg.
+ kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
+ of subclassing whenever possible.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
- parse_constant=parse_constant, **kw)
+ parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
+ use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, **kw):
+ parse_int=None, parse_constant=None, object_pairs_hook=None,
+ use_decimal=False, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
- If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
- other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
- must be specified. Encodings that are not ASCII based (such as UCS-2)
- are not allowed and should be decoded to ``unicode`` first.
+ *encoding* determines the encoding used to interpret any
+ :class:`str` objects decoded by this instance (``'utf-8'`` by
+ default). It has no effect when decoding :class:`unicode` objects.
- ``object_hook`` is an optional function that will be called with the
- result of any object literal decode (a ``dict``). The return value of
- ``object_hook`` will be used instead of the ``dict``. This feature
- can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+ Note that currently only encodings that are a superset of ASCII work,
+ strings of other encodings should be passed in as :class:`unicode`.
- ``parse_float``, if specified, will be called with the string
- of every JSON float to be decoded. By default this is equivalent to
- float(num_str). This can be used to use another datatype or parser
- for JSON floats (e.g. decimal.Decimal).
+ *object_hook*, if specified, will be called with the result of every
+ JSON object decoded and its return value will be used in place of the
+ given :class:`dict`. This can be used to provide custom
+ deserializations (e.g. to support JSON-RPC class hinting).
- ``parse_int``, if specified, will be called with the string
- of every JSON int to be decoded. By default this is equivalent to
- int(num_str). This can be used to use another datatype or parser
- for JSON integers (e.g. float).
+ *object_pairs_hook* is an optional function that will be called with
+ the result of any object literal decode with an ordered list of pairs.
+ The return value of *object_pairs_hook* will be used instead of the
+ :class:`dict`. This feature can be used to implement custom decoders
+ that rely on the order that the key and value pairs are decoded (for
+ example, :func:`collections.OrderedDict` will remember the order of
+ insertion). If *object_hook* is also defined, the *object_pairs_hook*
+ takes priority.
- ``parse_constant``, if specified, will be called with one of the
- following strings: -Infinity, Infinity, NaN, null, true, false.
- This can be used to raise an exception if invalid JSON numbers
- are encountered.
+ *parse_float*, if specified, will be called with the string of every
+ JSON float to be decoded. By default, this is equivalent to
+ ``float(num_str)``. This can be used to use another datatype or parser
+ for JSON floats (e.g. :class:`decimal.Decimal`).
+
+ *parse_int*, if specified, will be called with the string of every
+ JSON int to be decoded. By default, this is equivalent to
+ ``int(num_str)``. This can be used to use another datatype or parser
+ for JSON integers (e.g. :class:`float`).
+
+ *parse_constant*, if specified, will be called with one of the
+ following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
+ can be used to raise an exception if invalid JSON numbers are
+ encountered.
+
+ If *use_decimal* is true (default: ``False``) then it implies
+ parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
- kwarg.
+ kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
+ of subclassing whenever possible.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
- parse_constant is None and not kw):
+ parse_constant is None and object_pairs_hook is None
+ and not use_decimal and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
+ if object_pairs_hook is not None:
+ kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
+ if use_decimal:
+ if parse_float is not None:
+ raise TypeError("use_decimal=True implies parse_float=Decimal")
+ kw['parse_float'] = Decimal
return cls(encoding=encoding, **kw).decode(s)
+
+
+def _toggle_speedups(enabled):
+ from . import decoder as dec
+ from . import encoder as enc
+ from . import scanner as scan
+ c_make_encoder = _import_c_make_encoder()
+ if enabled:
+ dec.scanstring = dec.c_scanstring or dec.py_scanstring
+ enc.c_make_encoder = c_make_encoder
+ enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
+ enc.py_encode_basestring_ascii)
+ scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
+ else:
+ dec.scanstring = dec.py_scanstring
+ enc.c_make_encoder = None
+ enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
+ scan.make_scanner = scan.py_make_scanner
+ dec.make_scanner = scan.make_scanner
+ global _default_decoder
+ _default_decoder = JSONDecoder(
+ encoding=None,
+ object_hook=None,
+ object_pairs_hook=None,
+ )
+ global _default_encoder
+ _default_encoder = JSONEncoder(
+ skipkeys=False,
+ ensure_ascii=True,
+ check_circular=True,
+ allow_nan=True,
+ indent=None,
+ separators=None,
+ encoding='utf-8',
+ default=None,
+ )
+
+def simple_first(kv):
+ """Helper function to pass to item_sort_key to sort simple
+ elements to the top, then container elements.
+ """
+ return (isinstance(kv[1], (list, dict, tuple)), kv[0])
diff --git a/lib/simplejson/_speedups.c b/lib/simplejson/_speedups.c
index 23b5f4a6..bc1648ae 100644
--- a/lib/simplejson/_speedups.c
+++ b/lib/simplejson/_speedups.c
@@ -1,18 +1,85 @@
+/* -*- mode: C; c-file-style: "python"; c-basic-offset: 4 -*- */
#include "Python.h"
#include "structmember.h"
-#if PY_VERSION_HEX < 0x02060000 && !defined(Py_TYPE)
+
+#if PY_MAJOR_VERSION >= 3
+#define PyInt_FromSsize_t PyLong_FromSsize_t
+#define PyInt_AsSsize_t PyLong_AsSsize_t
+#define PyString_Check PyBytes_Check
+#define PyString_GET_SIZE PyBytes_GET_SIZE
+#define PyString_AS_STRING PyBytes_AS_STRING
+#define PyString_FromStringAndSize PyBytes_FromStringAndSize
+#define PyInt_Check(obj) 0
+#define PyInt_CheckExact(obj) 0
+#define JSON_UNICHR Py_UCS4
+#define JSON_InternFromString PyUnicode_InternFromString
+#define JSON_Intern_GET_SIZE PyUnicode_GET_SIZE
+#define JSON_ASCII_Check PyUnicode_Check
+#define JSON_ASCII_AS_STRING PyUnicode_AsUTF8
+#define PyInt_Type PyLong_Type
+#define PyInt_FromString PyLong_FromString
+#define PY2_UNUSED
+#define PY3_UNUSED UNUSED
+#define JSON_NewEmptyUnicode() PyUnicode_New(0, 127)
+#else /* PY_MAJOR_VERSION >= 3 */
+#define PY2_UNUSED UNUSED
+#define PY3_UNUSED
+#define PyUnicode_READY(obj) 0
+#define PyUnicode_KIND(obj) (sizeof(Py_UNICODE))
+#define PyUnicode_DATA(obj) ((void *)(PyUnicode_AS_UNICODE(obj)))
+#define PyUnicode_READ(kind, data, index) ((JSON_UNICHR)((const Py_UNICODE *)(data))[(index)])
+#define PyUnicode_GetLength PyUnicode_GET_SIZE
+#define JSON_UNICHR Py_UNICODE
+#define JSON_ASCII_Check PyString_Check
+#define JSON_ASCII_AS_STRING PyString_AS_STRING
+#define JSON_InternFromString PyString_InternFromString
+#define JSON_Intern_GET_SIZE PyString_GET_SIZE
+#define JSON_NewEmptyUnicode() PyUnicode_FromUnicode(NULL, 0)
+#endif /* PY_MAJOR_VERSION < 3 */
+
+#if PY_VERSION_HEX < 0x02070000
+#if !defined(PyOS_string_to_double)
+#define PyOS_string_to_double json_PyOS_string_to_double
+static double
+json_PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception);
+static double
+json_PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception)
+{
+ double x;
+ assert(endptr == NULL);
+ assert(overflow_exception == NULL);
+ PyFPE_START_PROTECT("json_PyOS_string_to_double", return -1.0;)
+ x = PyOS_ascii_atof(s);
+ PyFPE_END_PROTECT(x)
+ return x;
+}
+#endif
+#endif /* PY_VERSION_HEX < 0x02070000 */
+
+#if PY_VERSION_HEX < 0x02060000
+#if !defined(Py_TYPE)
#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
#endif
-#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
+#if !defined(Py_SIZE)
+#define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size)
+#endif
+#if !defined(PyVarObject_HEAD_INIT)
+#define PyVarObject_HEAD_INIT(type, size) PyObject_HEAD_INIT(type) size,
+#endif
+#endif /* PY_VERSION_HEX < 0x02060000 */
+
+#if PY_VERSION_HEX < 0x02050000
+#if !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
#define PyInt_FromSsize_t PyInt_FromLong
#define PyInt_AsSsize_t PyInt_AsLong
#endif
-#ifndef Py_IS_FINITE
+#if !defined(Py_IS_FINITE)
#define Py_IS_FINITE(X) (!Py_IS_INFINITY(X) && !Py_IS_NAN(X))
#endif
+#endif /* PY_VERSION_HEX < 0x02050000 */
#ifdef __GNUC__
#define UNUSED __attribute__((__unused__))
@@ -27,23 +94,55 @@ typedef int Py_ssize_t;
#define PyEncoder_Check(op) PyObject_TypeCheck(op, &PyEncoderType)
#define PyEncoder_CheckExact(op) (Py_TYPE(op) == &PyEncoderType)
+#define JSON_ALLOW_NAN 1
+#define JSON_IGNORE_NAN 2
+
static PyTypeObject PyScannerType;
static PyTypeObject PyEncoderType;
+typedef struct {
+ PyObject *large_strings; /* A list of previously accumulated large strings */
+ PyObject *small_strings; /* Pending small strings */
+} JSON_Accu;
+
+static int
+JSON_Accu_Init(JSON_Accu *acc);
+static int
+JSON_Accu_Accumulate(JSON_Accu *acc, PyObject *unicode);
+static PyObject *
+JSON_Accu_FinishAsList(JSON_Accu *acc);
+static void
+JSON_Accu_Destroy(JSON_Accu *acc);
+
+#define ERR_EXPECTING_VALUE "Expecting value"
+#define ERR_ARRAY_DELIMITER "Expecting ',' delimiter or ']'"
+#define ERR_ARRAY_VALUE_FIRST "Expecting value or ']'"
+#define ERR_OBJECT_DELIMITER "Expecting ',' delimiter or '}'"
+#define ERR_OBJECT_PROPERTY "Expecting property name enclosed in double quotes"
+#define ERR_OBJECT_PROPERTY_FIRST "Expecting property name enclosed in double quotes or '}'"
+#define ERR_OBJECT_PROPERTY_DELIMITER "Expecting ':' delimiter"
+#define ERR_STRING_UNTERMINATED "Unterminated string starting at"
+#define ERR_STRING_CONTROL "Invalid control character %r at"
+#define ERR_STRING_ESC1 "Invalid \\X escape sequence %r"
+#define ERR_STRING_ESC4 "Invalid \\uXXXX escape sequence"
+
typedef struct _PyScannerObject {
PyObject_HEAD
PyObject *encoding;
PyObject *strict;
PyObject *object_hook;
+ PyObject *pairs_hook;
PyObject *parse_float;
PyObject *parse_int;
PyObject *parse_constant;
+ PyObject *memo;
} PyScannerObject;
static PyMemberDef scanner_members[] = {
{"encoding", T_OBJECT, offsetof(PyScannerObject, encoding), READONLY, "encoding"},
{"strict", T_OBJECT, offsetof(PyScannerObject, strict), READONLY, "strict"},
{"object_hook", T_OBJECT, offsetof(PyScannerObject, object_hook), READONLY, "object_hook"},
+ {"object_pairs_hook", T_OBJECT, offsetof(PyScannerObject, pairs_hook), READONLY, "object_pairs_hook"},
{"parse_float", T_OBJECT, offsetof(PyScannerObject, parse_float), READONLY, "parse_float"},
{"parse_int", T_OBJECT, offsetof(PyScannerObject, parse_int), READONLY, "parse_int"},
{"parse_constant", T_OBJECT, offsetof(PyScannerObject, parse_constant), READONLY, "parse_constant"},
@@ -59,35 +158,73 @@ typedef struct _PyEncoderObject {
PyObject *key_separator;
PyObject *item_separator;
PyObject *sort_keys;
- PyObject *skipkeys;
+ PyObject *key_memo;
+ PyObject *encoding;
+ PyObject *Decimal;
+ PyObject *skipkeys_bool;
+ int skipkeys;
int fast_encode;
- int allow_nan;
+ /* 0, JSON_ALLOW_NAN, JSON_IGNORE_NAN */
+ int allow_or_ignore_nan;
+ int use_decimal;
+ int namedtuple_as_object;
+ int tuple_as_array;
+ PyObject *max_long_size;
+ PyObject *min_long_size;
+ PyObject *item_sort_key;
+ PyObject *item_sort_kw;
+ int for_json;
} PyEncoderObject;
static PyMemberDef encoder_members[] = {
{"markers", T_OBJECT, offsetof(PyEncoderObject, markers), READONLY, "markers"},
{"default", T_OBJECT, offsetof(PyEncoderObject, defaultfn), READONLY, "default"},
{"encoder", T_OBJECT, offsetof(PyEncoderObject, encoder), READONLY, "encoder"},
+    {"encoding", T_OBJECT, offsetof(PyEncoderObject, encoding), READONLY, "encoding"},
{"indent", T_OBJECT, offsetof(PyEncoderObject, indent), READONLY, "indent"},
{"key_separator", T_OBJECT, offsetof(PyEncoderObject, key_separator), READONLY, "key_separator"},
{"item_separator", T_OBJECT, offsetof(PyEncoderObject, item_separator), READONLY, "item_separator"},
{"sort_keys", T_OBJECT, offsetof(PyEncoderObject, sort_keys), READONLY, "sort_keys"},
- {"skipkeys", T_OBJECT, offsetof(PyEncoderObject, skipkeys), READONLY, "skipkeys"},
+    /* Python 2.5 does not support T_BOOL */
+ {"skipkeys", T_OBJECT, offsetof(PyEncoderObject, skipkeys_bool), READONLY, "skipkeys"},
+ {"key_memo", T_OBJECT, offsetof(PyEncoderObject, key_memo), READONLY, "key_memo"},
+ {"item_sort_key", T_OBJECT, offsetof(PyEncoderObject, item_sort_key), READONLY, "item_sort_key"},
+ {"max_long_size", T_OBJECT, offsetof(PyEncoderObject, max_long_size), READONLY, "max_long_size"},
+ {"min_long_size", T_OBJECT, offsetof(PyEncoderObject, min_long_size), READONLY, "min_long_size"},
{NULL}
};
+static PyObject *
+join_list_unicode(PyObject *lst);
+static PyObject *
+JSON_ParseEncoding(PyObject *encoding);
+static PyObject *
+JSON_UnicodeFromChar(JSON_UNICHR c);
+static PyObject *
+maybe_quote_bigint(PyEncoderObject* s, PyObject *encoded, PyObject *obj);
static Py_ssize_t
-ascii_escape_char(Py_UNICODE c, char *output, Py_ssize_t chars);
+ascii_char_size(JSON_UNICHR c);
+static Py_ssize_t
+ascii_escape_char(JSON_UNICHR c, char *output, Py_ssize_t chars);
static PyObject *
ascii_escape_unicode(PyObject *pystr);
static PyObject *
ascii_escape_str(PyObject *pystr);
static PyObject *
py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr);
-void init_speedups(void);
+#if PY_MAJOR_VERSION < 3
+static PyObject *
+join_list_string(PyObject *lst);
static PyObject *
scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr);
static PyObject *
+scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_ssize_t *next_end_ptr);
+static PyObject *
+_parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr);
+#endif
+static PyObject *
+scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr);
+static PyObject *
scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr);
static PyObject *
_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx);
@@ -107,14 +244,16 @@ static void
encoder_dealloc(PyObject *self);
static int
encoder_clear(PyObject *self);
-static int
-encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ssize_t indent_level);
-static int
-encoder_listencode_obj(PyEncoderObject *s, PyObject *rval, PyObject *obj, Py_ssize_t indent_level);
-static int
-encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ssize_t indent_level);
static PyObject *
-_encoded_const(PyObject *const);
+encoder_stringify_key(PyEncoderObject *s, PyObject *key);
+static int
+encoder_listencode_list(PyEncoderObject *s, JSON_Accu *rval, PyObject *seq, Py_ssize_t indent_level);
+static int
+encoder_listencode_obj(PyEncoderObject *s, JSON_Accu *rval, PyObject *obj, Py_ssize_t indent_level);
+static int
+encoder_listencode_dict(PyEncoderObject *s, JSON_Accu *rval, PyObject *dct, Py_ssize_t indent_level);
+static PyObject *
+_encoded_const(PyObject *obj);
static void
raise_errmsg(char *msg, PyObject *s, Py_ssize_t end);
static PyObject *
@@ -125,25 +264,185 @@ static PyObject *
_convertPyInt_FromSsize_t(Py_ssize_t *size_ptr);
static PyObject *
encoder_encode_float(PyEncoderObject *s, PyObject *obj);
+static int
+_is_namedtuple(PyObject *obj);
+static int
+_has_for_json_hook(PyObject *obj);
+static PyObject *
+moduleinit(void);
#define S_CHAR(c) (c >= ' ' && c <= '~' && c != '\\' && c != '"')
#define IS_WHITESPACE(c) (((c) == ' ') || ((c) == '\t') || ((c) == '\n') || ((c) == '\r'))
#define MIN_EXPANSION 6
-#ifdef Py_UNICODE_WIDE
-#define MAX_EXPANSION (2 * MIN_EXPANSION)
+
+static int
+JSON_Accu_Init(JSON_Accu *acc)
+{
+ /* Lazily allocated */
+ acc->large_strings = NULL;
+ acc->small_strings = PyList_New(0);
+ if (acc->small_strings == NULL)
+ return -1;
+ return 0;
+}
+
+static int
+flush_accumulator(JSON_Accu *acc)
+{
+ Py_ssize_t nsmall = PyList_GET_SIZE(acc->small_strings);
+ if (nsmall) {
+ int ret;
+ PyObject *joined;
+ if (acc->large_strings == NULL) {
+ acc->large_strings = PyList_New(0);
+ if (acc->large_strings == NULL)
+ return -1;
+ }
+#if PY_MAJOR_VERSION >= 3
+ joined = join_list_unicode(acc->small_strings);
+#else /* PY_MAJOR_VERSION >= 3 */
+ joined = join_list_string(acc->small_strings);
+#endif /* PY_MAJOR_VERSION < 3 */
+ if (joined == NULL)
+ return -1;
+ if (PyList_SetSlice(acc->small_strings, 0, nsmall, NULL)) {
+ Py_DECREF(joined);
+ return -1;
+ }
+ ret = PyList_Append(acc->large_strings, joined);
+ Py_DECREF(joined);
+ return ret;
+ }
+ return 0;
+}
+
+static int
+JSON_Accu_Accumulate(JSON_Accu *acc, PyObject *unicode)
+{
+ Py_ssize_t nsmall;
+#if PY_MAJOR_VERSION >= 3
+ assert(PyUnicode_Check(unicode));
+#else /* PY_MAJOR_VERSION >= 3 */
+ assert(JSON_ASCII_Check(unicode) || PyUnicode_Check(unicode));
+#endif /* PY_MAJOR_VERSION < 3 */
+
+ if (PyList_Append(acc->small_strings, unicode))
+ return -1;
+ nsmall = PyList_GET_SIZE(acc->small_strings);
+ /* Each item in a list of unicode objects has an overhead (in 64-bit
+ * builds) of:
+ * - 8 bytes for the list slot
+ * - 56 bytes for the header of the unicode object
+ * that is, 64 bytes. 100000 such objects waste more than 6MB
+ * compared to a single concatenated string.
+ */
+ if (nsmall < 100000)
+ return 0;
+ return flush_accumulator(acc);
+}
+
+static PyObject *
+JSON_Accu_FinishAsList(JSON_Accu *acc)
+{
+ int ret;
+ PyObject *res;
+
+ ret = flush_accumulator(acc);
+ Py_CLEAR(acc->small_strings);
+ if (ret) {
+ Py_CLEAR(acc->large_strings);
+ return NULL;
+ }
+ res = acc->large_strings;
+ acc->large_strings = NULL;
+ if (res == NULL)
+ return PyList_New(0);
+ return res;
+}
+
+static void
+JSON_Accu_Destroy(JSON_Accu *acc)
+{
+ Py_CLEAR(acc->small_strings);
+ Py_CLEAR(acc->large_strings);
+}
+
+static int
+IS_DIGIT(JSON_UNICHR c)
+{
+ return c >= '0' && c <= '9';
+}
+
+static PyObject *
+JSON_UnicodeFromChar(JSON_UNICHR c)
+{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *rval = PyUnicode_New(1, c);
+ if (rval)
+ PyUnicode_WRITE(PyUnicode_KIND(rval), PyUnicode_DATA(rval), 0, c);
+ return rval;
+#else /* PY_MAJOR_VERSION >= 3 */
+ return PyUnicode_FromUnicode(&c, 1);
+#endif /* PY_MAJOR_VERSION < 3 */
+}
+
+static PyObject *
+maybe_quote_bigint(PyEncoderObject* s, PyObject *encoded, PyObject *obj)
+{
+ if (s->max_long_size != Py_None && s->min_long_size != Py_None) {
+ if (PyObject_RichCompareBool(obj, s->max_long_size, Py_GE) ||
+ PyObject_RichCompareBool(obj, s->min_long_size, Py_LE)) {
+#if PY_MAJOR_VERSION >= 3
+ PyObject* quoted = PyUnicode_FromFormat("\"%U\"", encoded);
#else
-#define MAX_EXPANSION MIN_EXPANSION
+ PyObject* quoted = PyString_FromFormat("\"%s\"",
+ PyString_AsString(encoded));
#endif
+ Py_DECREF(encoded);
+ encoded = quoted;
+ }
+ }
+
+ return encoded;
+}
+
+static int
+_is_namedtuple(PyObject *obj)
+{
+ int rval = 0;
+ PyObject *_asdict = PyObject_GetAttrString(obj, "_asdict");
+ if (_asdict == NULL) {
+ PyErr_Clear();
+ return 0;
+ }
+ rval = PyCallable_Check(_asdict);
+ Py_DECREF(_asdict);
+ return rval;
+}
+
+static int
+_has_for_json_hook(PyObject *obj)
+{
+ int rval = 0;
+ PyObject *for_json = PyObject_GetAttrString(obj, "for_json");
+ if (for_json == NULL) {
+ PyErr_Clear();
+ return 0;
+ }
+ rval = PyCallable_Check(for_json);
+ Py_DECREF(for_json);
+ return rval;
+}
static int
_convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr)
{
/* PyObject to Py_ssize_t converter */
*size_ptr = PyInt_AsSsize_t(o);
- if (*size_ptr == -1 && PyErr_Occurred());
- return 1;
- return 0;
+ if (*size_ptr == -1 && PyErr_Occurred())
+ return 0;
+ return 1;
}
static PyObject *
@@ -154,44 +453,74 @@ _convertPyInt_FromSsize_t(Py_ssize_t *size_ptr)
}
static Py_ssize_t
-ascii_escape_char(Py_UNICODE c, char *output, Py_ssize_t chars)
+ascii_escape_char(JSON_UNICHR c, char *output, Py_ssize_t chars)
{
/* Escape unicode code point c to ASCII escape sequences
in char *output. output must have at least 12 bytes unused to
accommodate an escaped surrogate pair "\uXXXX\uXXXX" */
- output[chars++] = '\\';
- switch (c) {
- case '\\': output[chars++] = (char)c; break;
- case '"': output[chars++] = (char)c; break;
- case '\b': output[chars++] = 'b'; break;
- case '\f': output[chars++] = 'f'; break;
- case '\n': output[chars++] = 'n'; break;
- case '\r': output[chars++] = 'r'; break;
- case '\t': output[chars++] = 't'; break;
- default:
-#ifdef Py_UNICODE_WIDE
- if (c >= 0x10000) {
- /* UTF-16 surrogate pair */
- Py_UNICODE v = c - 0x10000;
- c = 0xd800 | ((v >> 10) & 0x3ff);
+ if (S_CHAR(c)) {
+ output[chars++] = (char)c;
+ }
+ else {
+ output[chars++] = '\\';
+ switch (c) {
+ case '\\': output[chars++] = (char)c; break;
+ case '"': output[chars++] = (char)c; break;
+ case '\b': output[chars++] = 'b'; break;
+ case '\f': output[chars++] = 'f'; break;
+ case '\n': output[chars++] = 'n'; break;
+ case '\r': output[chars++] = 'r'; break;
+ case '\t': output[chars++] = 't'; break;
+ default:
+#if defined(Py_UNICODE_WIDE) || PY_MAJOR_VERSION >= 3
+ if (c >= 0x10000) {
+ /* UTF-16 surrogate pair */
+ JSON_UNICHR v = c - 0x10000;
+ c = 0xd800 | ((v >> 10) & 0x3ff);
+ output[chars++] = 'u';
+ output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf];
+ output[chars++] = "0123456789abcdef"[(c >> 8) & 0xf];
+ output[chars++] = "0123456789abcdef"[(c >> 4) & 0xf];
+ output[chars++] = "0123456789abcdef"[(c ) & 0xf];
+ c = 0xdc00 | (v & 0x3ff);
+ output[chars++] = '\\';
+ }
+#endif
output[chars++] = 'u';
output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf];
output[chars++] = "0123456789abcdef"[(c >> 8) & 0xf];
output[chars++] = "0123456789abcdef"[(c >> 4) & 0xf];
output[chars++] = "0123456789abcdef"[(c ) & 0xf];
- c = 0xdc00 | (v & 0x3ff);
- output[chars++] = '\\';
- }
-#endif
- output[chars++] = 'u';
- output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf];
- output[chars++] = "0123456789abcdef"[(c >> 8) & 0xf];
- output[chars++] = "0123456789abcdef"[(c >> 4) & 0xf];
- output[chars++] = "0123456789abcdef"[(c ) & 0xf];
+ }
}
return chars;
}
+static Py_ssize_t
+ascii_char_size(JSON_UNICHR c)
+{
+ if (S_CHAR(c)) {
+ return 1;
+ }
+ else if (c == '\\' ||
+ c == '"' ||
+ c == '\b' ||
+ c == '\f' ||
+ c == '\n' ||
+ c == '\r' ||
+ c == '\t') {
+ return 2;
+ }
+#if defined(Py_UNICODE_WIDE) || PY_MAJOR_VERSION >= 3
+ else if (c >= 0x10000U) {
+ return 2 * MIN_EXPANSION;
+ }
+#endif
+ else {
+ return MIN_EXPANSION;
+ }
+}
+
static PyObject *
ascii_escape_unicode(PyObject *pystr)
{
@@ -199,57 +528,62 @@ ascii_escape_unicode(PyObject *pystr)
Py_ssize_t i;
Py_ssize_t input_chars;
Py_ssize_t output_size;
- Py_ssize_t max_output_size;
Py_ssize_t chars;
+ PY2_UNUSED int kind;
+ void *data;
PyObject *rval;
char *output;
- Py_UNICODE *input_unicode;
- input_chars = PyUnicode_GET_SIZE(pystr);
- input_unicode = PyUnicode_AS_UNICODE(pystr);
+ if (PyUnicode_READY(pystr))
+ return NULL;
- /* One char input can be up to 6 chars output, estimate 4 of these */
- output_size = 2 + (MIN_EXPANSION * 4) + input_chars;
- max_output_size = 2 + (input_chars * MAX_EXPANSION);
+ kind = PyUnicode_KIND(pystr);
+ data = PyUnicode_DATA(pystr);
+ input_chars = PyUnicode_GetLength(pystr);
+ output_size = 2;
+ for (i = 0; i < input_chars; i++) {
+ output_size += ascii_char_size(PyUnicode_READ(kind, data, i));
+ }
+#if PY_MAJOR_VERSION >= 3
+ rval = PyUnicode_New(output_size, 127);
+ if (rval == NULL) {
+ return NULL;
+ }
+ assert(PyUnicode_KIND(rval) == PyUnicode_1BYTE_KIND);
+ output = (char *)PyUnicode_DATA(rval);
+#else
rval = PyString_FromStringAndSize(NULL, output_size);
if (rval == NULL) {
return NULL;
}
output = PyString_AS_STRING(rval);
+#endif
chars = 0;
output[chars++] = '"';
for (i = 0; i < input_chars; i++) {
- Py_UNICODE c = input_unicode[i];
- if (S_CHAR(c)) {
- output[chars++] = (char)c;
- }
- else {
- chars = ascii_escape_char(c, output, chars);
- }
- if (output_size - chars < (1 + MAX_EXPANSION)) {
- /* There's more than four, so let's resize by a lot */
- Py_ssize_t new_output_size = output_size * 2;
- /* This is an upper bound */
- if (new_output_size > max_output_size) {
- new_output_size = max_output_size;
- }
- /* Make sure that the output size changed before resizing */
- if (new_output_size != output_size) {
- output_size = new_output_size;
- if (_PyString_Resize(&rval, output_size) == -1) {
- return NULL;
- }
- output = PyString_AS_STRING(rval);
- }
- }
+ chars = ascii_escape_char(PyUnicode_READ(kind, data, i), output, chars);
}
output[chars++] = '"';
- if (_PyString_Resize(&rval, chars) == -1) {
- return NULL;
- }
+ assert(chars == output_size);
return rval;
}
+#if PY_MAJOR_VERSION >= 3
+
+static PyObject *
+ascii_escape_str(PyObject *pystr)
+{
+ PyObject *rval;
+ PyObject *input = PyUnicode_DecodeUTF8(PyString_AS_STRING(pystr), PyString_GET_SIZE(pystr), NULL);
+ if (input == NULL)
+ return NULL;
+ rval = ascii_escape_unicode(input);
+ Py_DECREF(input);
+ return rval;
+}
+
+#else /* PY_MAJOR_VERSION >= 3 */
+
static PyObject *
ascii_escape_str(PyObject *pystr)
{
@@ -264,98 +598,208 @@ ascii_escape_str(PyObject *pystr)
input_chars = PyString_GET_SIZE(pystr);
input_str = PyString_AS_STRING(pystr);
+ output_size = 2;
/* Fast path for a string that's already ASCII */
for (i = 0; i < input_chars; i++) {
- Py_UNICODE c = (Py_UNICODE)(unsigned char)input_str[i];
- if (!S_CHAR(c)) {
- /* If we have to escape something, scan the string for unicode */
- Py_ssize_t j;
- for (j = i; j < input_chars; j++) {
- c = (Py_UNICODE)(unsigned char)input_str[j];
- if (c > 0x7f) {
- /* We hit a non-ASCII character, bail to unicode mode */
- PyObject *uni;
- uni = PyUnicode_DecodeUTF8(input_str, input_chars, "strict");
- if (uni == NULL) {
- return NULL;
- }
- rval = ascii_escape_unicode(uni);
- Py_DECREF(uni);
- return rval;
- }
+ JSON_UNICHR c = (JSON_UNICHR)input_str[i];
+ if (c > 0x7f) {
+ /* We hit a non-ASCII character, bail to unicode mode */
+ PyObject *uni;
+ uni = PyUnicode_DecodeUTF8(input_str, input_chars, "strict");
+ if (uni == NULL) {
+ return NULL;
}
- break;
+ rval = ascii_escape_unicode(uni);
+ Py_DECREF(uni);
+ return rval;
}
+ output_size += ascii_char_size(c);
}
- if (i == input_chars) {
- /* Input is already ASCII */
- output_size = 2 + input_chars;
- }
- else {
- /* One char input can be up to 6 chars output, estimate 4 of these */
- output_size = 2 + (MIN_EXPANSION * 4) + input_chars;
- }
rval = PyString_FromStringAndSize(NULL, output_size);
if (rval == NULL) {
return NULL;
}
+ chars = 0;
output = PyString_AS_STRING(rval);
- output[0] = '"';
-
- /* We know that everything up to i is ASCII already */
- chars = i + 1;
- memcpy(&output[1], input_str, i);
-
- for (; i < input_chars; i++) {
- Py_UNICODE c = (Py_UNICODE)(unsigned char)input_str[i];
- if (S_CHAR(c)) {
- output[chars++] = (char)c;
- }
- else {
- chars = ascii_escape_char(c, output, chars);
- }
- /* An ASCII char can't possibly expand to a surrogate! */
- if (output_size - chars < (1 + MIN_EXPANSION)) {
- /* There's more than four, so let's resize by a lot */
- output_size *= 2;
- if (output_size > 2 + (input_chars * MIN_EXPANSION)) {
- output_size = 2 + (input_chars * MIN_EXPANSION);
- }
- if (_PyString_Resize(&rval, output_size) == -1) {
- return NULL;
- }
- output = PyString_AS_STRING(rval);
- }
+ output[chars++] = '"';
+ for (i = 0; i < input_chars; i++) {
+ chars = ascii_escape_char((JSON_UNICHR)input_str[i], output, chars);
}
output[chars++] = '"';
- if (_PyString_Resize(&rval, chars) == -1) {
- return NULL;
- }
+ assert(chars == output_size);
return rval;
}
+#endif /* PY_MAJOR_VERSION < 3 */
+
+static PyObject *
+encoder_stringify_key(PyEncoderObject *s, PyObject *key)
+{
+ if (PyUnicode_Check(key)) {
+ Py_INCREF(key);
+ return key;
+ }
+ else if (PyString_Check(key)) {
+#if PY_MAJOR_VERSION >= 3
+ return PyUnicode_Decode(
+ PyString_AS_STRING(key),
+ PyString_GET_SIZE(key),
+ JSON_ASCII_AS_STRING(s->encoding),
+ NULL);
+#else /* PY_MAJOR_VERSION >= 3 */
+ Py_INCREF(key);
+ return key;
+#endif /* PY_MAJOR_VERSION < 3 */
+ }
+ else if (PyFloat_Check(key)) {
+ return encoder_encode_float(s, key);
+ }
+ else if (key == Py_True || key == Py_False || key == Py_None) {
+ /* This must come before the PyInt_Check because
+ True and False are also 1 and 0.*/
+ return _encoded_const(key);
+ }
+ else if (PyInt_Check(key) || PyLong_Check(key)) {
+ if (!(PyInt_CheckExact(key) || PyLong_CheckExact(key))) {
+ /* See #118, do not trust custom str/repr */
+ PyObject *res;
+ PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyLong_Type, key, NULL);
+ if (tmp == NULL) {
+ return NULL;
+ }
+ res = PyObject_Str(tmp);
+ Py_DECREF(tmp);
+ return res;
+ }
+ else {
+ return PyObject_Str(key);
+ }
+ }
+ else if (s->use_decimal && PyObject_TypeCheck(key, (PyTypeObject *)s->Decimal)) {
+ return PyObject_Str(key);
+ }
+ else if (s->skipkeys) {
+ Py_INCREF(Py_None);
+ return Py_None;
+ }
+ PyErr_SetString(PyExc_TypeError, "keys must be a string");
+ return NULL;
+}
+
+static PyObject *
+encoder_dict_iteritems(PyEncoderObject *s, PyObject *dct)
+{
+ PyObject *items;
+ PyObject *iter = NULL;
+ PyObject *lst = NULL;
+ PyObject *item = NULL;
+ PyObject *kstr = NULL;
+ static PyObject *sortfun = NULL;
+ static PyObject *sortargs = NULL;
+
+ if (sortargs == NULL) {
+ sortargs = PyTuple_New(0);
+ if (sortargs == NULL)
+ return NULL;
+ }
+
+ if (PyDict_CheckExact(dct))
+ items = PyDict_Items(dct);
+ else
+ items = PyMapping_Items(dct);
+ if (items == NULL)
+ return NULL;
+ iter = PyObject_GetIter(items);
+ Py_DECREF(items);
+ if (iter == NULL)
+ return NULL;
+ if (s->item_sort_kw == Py_None)
+ return iter;
+ lst = PyList_New(0);
+ if (lst == NULL)
+ goto bail;
+ while ((item = PyIter_Next(iter))) {
+ PyObject *key, *value;
+ if (!PyTuple_Check(item) || Py_SIZE(item) != 2) {
+ PyErr_SetString(PyExc_ValueError, "items must return 2-tuples");
+ goto bail;
+ }
+ key = PyTuple_GET_ITEM(item, 0);
+ if (key == NULL)
+ goto bail;
+#if PY_MAJOR_VERSION < 3
+ else if (PyString_Check(key)) {
+ /* item can be added as-is */
+ }
+#endif /* PY_MAJOR_VERSION < 3 */
+ else if (PyUnicode_Check(key)) {
+ /* item can be added as-is */
+ }
+ else {
+ PyObject *tpl;
+ kstr = encoder_stringify_key(s, key);
+ if (kstr == NULL)
+ goto bail;
+ else if (kstr == Py_None) {
+ /* skipkeys */
+ Py_DECREF(kstr);
+ continue;
+ }
+ value = PyTuple_GET_ITEM(item, 1);
+ if (value == NULL)
+ goto bail;
+ tpl = PyTuple_Pack(2, kstr, value);
+ if (tpl == NULL)
+ goto bail;
+ Py_CLEAR(kstr);
+ Py_DECREF(item);
+ item = tpl;
+ }
+ if (PyList_Append(lst, item))
+ goto bail;
+ Py_DECREF(item);
+ }
+ Py_CLEAR(iter);
+ if (PyErr_Occurred())
+ goto bail;
+ sortfun = PyObject_GetAttrString(lst, "sort");
+ if (sortfun == NULL)
+ goto bail;
+ if (!PyObject_Call(sortfun, sortargs, s->item_sort_kw))
+ goto bail;
+ Py_CLEAR(sortfun);
+ iter = PyObject_GetIter(lst);
+ Py_CLEAR(lst);
+ return iter;
+bail:
+ Py_XDECREF(sortfun);
+ Py_XDECREF(kstr);
+ Py_XDECREF(item);
+ Py_XDECREF(lst);
+ Py_XDECREF(iter);
+ return NULL;
+}
static void
raise_errmsg(char *msg, PyObject *s, Py_ssize_t end)
{
- /* Use the Python function simplejson.decoder.errmsg to raise a nice
- looking ValueError exception */
- static PyObject *errmsg_fn = NULL;
- PyObject *pymsg;
- if (errmsg_fn == NULL) {
- PyObject *decoder = PyImport_ImportModule("simplejson.decoder");
- if (decoder == NULL)
+ /* Use JSONDecodeError exception to raise a nice looking ValueError subclass */
+ static PyObject *JSONDecodeError = NULL;
+ PyObject *exc;
+ if (JSONDecodeError == NULL) {
+ PyObject *scanner = PyImport_ImportModule("simplejson.scanner");
+ if (scanner == NULL)
return;
- errmsg_fn = PyObject_GetAttrString(decoder, "errmsg");
- Py_DECREF(decoder);
- if (errmsg_fn == NULL)
+ JSONDecodeError = PyObject_GetAttrString(scanner, "JSONDecodeError");
+ Py_DECREF(scanner);
+ if (JSONDecodeError == NULL)
return;
}
- pymsg = PyObject_CallFunction(errmsg_fn, "(zOO&)", msg, s, _convertPyInt_FromSsize_t, &end);
- if (pymsg) {
- PyErr_SetObject(PyExc_ValueError, pymsg);
- Py_DECREF(pymsg);
+ exc = PyObject_CallFunction(JSONDecodeError, "(zOO&)", msg, s, _convertPyInt_FromSsize_t, &end);
+ if (exc) {
+ PyErr_SetObject(JSONDecodeError, exc);
+ Py_DECREF(exc);
}
}
@@ -365,7 +809,7 @@ join_list_unicode(PyObject *lst)
/* return u''.join(lst) */
static PyObject *joinfn = NULL;
if (joinfn == NULL) {
- PyObject *ustr = PyUnicode_FromUnicode(NULL, 0);
+ PyObject *ustr = JSON_NewEmptyUnicode();
if (ustr == NULL)
return NULL;
@@ -377,6 +821,9 @@ join_list_unicode(PyObject *lst)
return PyObject_CallFunctionObjArgs(joinfn, lst, NULL);
}
+#if PY_MAJOR_VERSION >= 3
+#define join_list_string join_list_unicode
+#else /* PY_MAJOR_VERSION >= 3 */
static PyObject *
join_list_string(PyObject *lst)
{
@@ -394,9 +841,11 @@ join_list_string(PyObject *lst)
}
return PyObject_CallFunctionObjArgs(joinfn, lst, NULL);
}
+#endif /* PY_MAJOR_VERSION < 3 */
static PyObject *
-_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) {
+_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx)
+{
/* return (rval, idx) tuple, stealing reference to rval */
PyObject *tpl;
PyObject *pyidx;
@@ -404,6 +853,7 @@ _build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) {
steal a reference to rval, returns (rval, idx)
*/
if (rval == NULL) {
+ assert(PyErr_Occurred());
return NULL;
}
pyidx = PyInt_FromSsize_t(idx);
@@ -422,6 +872,21 @@ _build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) {
return tpl;
}
+#define APPEND_OLD_CHUNK \
+ if (chunk != NULL) { \
+ if (chunks == NULL) { \
+ chunks = PyList_New(0); \
+ if (chunks == NULL) { \
+ goto bail; \
+ } \
+ } \
+ if (PyList_Append(chunks, chunk)) { \
+ goto bail; \
+ } \
+ Py_CLEAR(chunk); \
+ }
+
+#if PY_MAJOR_VERSION < 3
static PyObject *
scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_ssize_t *next_end_ptr)
{
@@ -440,25 +905,28 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
Py_ssize_t next = begin;
int has_unicode = 0;
char *buf = PyString_AS_STRING(pystr);
- PyObject *chunks = PyList_New(0);
- if (chunks == NULL) {
+ PyObject *chunks = NULL;
+ PyObject *chunk = NULL;
+ PyObject *strchunk = NULL;
+
+ if (len == end) {
+ raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin);
goto bail;
}
- if (end < 0 || len <= end) {
+ else if (end < 0 || len < end) {
PyErr_SetString(PyExc_ValueError, "end is out of bounds");
goto bail;
}
while (1) {
/* Find the end of the string or the next escape */
Py_UNICODE c = 0;
- PyObject *chunk = NULL;
for (next = end; next < len; next++) {
c = (unsigned char)buf[next];
if (c == '"' || c == '\\') {
break;
}
else if (strict && c <= 0x1f) {
- raise_errmsg("Invalid control character at", pystr, next);
+ raise_errmsg(ERR_STRING_CONTROL, pystr, next);
goto bail;
}
else if (c > 0x7f) {
@@ -466,12 +934,24 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
}
}
if (!(c == '"' || c == '\\')) {
- raise_errmsg("Unterminated string starting at", pystr, begin);
+ raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin);
goto bail;
}
/* Pick up this chunk if it's not zero length */
if (next != end) {
- PyObject *strchunk = PyString_FromStringAndSize(&buf[end], next - end);
+ APPEND_OLD_CHUNK
+#if PY_MAJOR_VERSION >= 3
+ if (!has_unicode) {
+ chunk = PyUnicode_DecodeASCII(&buf[end], next - end, NULL);
+ }
+ else {
+ chunk = PyUnicode_Decode(&buf[end], next - end, encoding, NULL);
+ }
+ if (chunk == NULL) {
+ goto bail;
+ }
+#else /* PY_MAJOR_VERSION >= 3 */
+ strchunk = PyString_FromStringAndSize(&buf[end], next - end);
if (strchunk == NULL) {
goto bail;
}
@@ -485,11 +965,7 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
else {
chunk = strchunk;
}
- if (PyList_Append(chunks, chunk)) {
- Py_DECREF(chunk);
- goto bail;
- }
- Py_DECREF(chunk);
+#endif /* PY_MAJOR_VERSION < 3 */
}
next++;
if (c == '"') {
@@ -497,7 +973,7 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
break;
}
if (next == len) {
- raise_errmsg("Unterminated string starting at", pystr, begin);
+ raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin);
goto bail;
}
c = buf[next];
@@ -516,7 +992,7 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
default: c = 0;
}
if (c == 0) {
- raise_errmsg("Invalid \\escape", pystr, end - 2);
+ raise_errmsg(ERR_STRING_ESC1, pystr, end - 2);
goto bail;
}
}
@@ -525,12 +1001,12 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
next++;
end = next + 4;
if (end >= len) {
- raise_errmsg("Invalid \\uXXXX escape", pystr, next - 1);
+ raise_errmsg(ERR_STRING_ESC4, pystr, next - 1);
goto bail;
}
/* Decode 4 hex digits */
for (; next < end; next++) {
- Py_UNICODE digit = buf[next];
+ JSON_UNICHR digit = (JSON_UNICHR)buf[next];
c <<= 4;
switch (digit) {
case '0': case '1': case '2': case '3': case '4':
@@ -543,28 +1019,21 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
case 'F':
c |= (digit - 'A' + 10); break;
default:
- raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5);
+ raise_errmsg(ERR_STRING_ESC4, pystr, end - 5);
goto bail;
}
}
-#ifdef Py_UNICODE_WIDE
+#if (PY_MAJOR_VERSION >= 3 || defined(Py_UNICODE_WIDE))
/* Surrogate pair */
if ((c & 0xfc00) == 0xd800) {
- Py_UNICODE c2 = 0;
- if (end + 6 >= len) {
- raise_errmsg("Unpaired high surrogate", pystr, end - 5);
- goto bail;
- }
- if (buf[next++] != '\\' || buf[next++] != 'u') {
- raise_errmsg("Unpaired high surrogate", pystr, end - 5);
- goto bail;
- }
- end += 6;
- /* Decode 4 hex digits */
- for (; next < end; next++) {
- c2 <<= 4;
- Py_UNICODE digit = buf[next];
- switch (digit) {
+ if (end + 6 < len && buf[next] == '\\' && buf[next+1] == 'u') {
+ JSON_UNICHR c2 = 0;
+ end += 6;
+ /* Decode 4 hex digits */
+ for (next += 2; next < end; next++) {
+ c2 <<= 4;
+ JSON_UNICHR digit = buf[next];
+ switch (digit) {
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
c2 |= (digit - '0'); break;
@@ -575,27 +1044,34 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
case 'F':
c2 |= (digit - 'A' + 10); break;
default:
- raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5);
+ raise_errmsg(ERR_STRING_ESC4, pystr, end - 5);
goto bail;
+ }
+ }
+ if ((c2 & 0xfc00) != 0xdc00) {
+ /* not a low surrogate, rewind */
+ end -= 6;
+ next = end;
+ }
+ else {
+ c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00));
}
}
- if ((c2 & 0xfc00) != 0xdc00) {
- raise_errmsg("Unpaired high surrogate", pystr, end - 5);
- goto bail;
- }
- c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00));
}
- else if ((c & 0xfc00) == 0xdc00) {
- raise_errmsg("Unpaired low surrogate", pystr, end - 5);
- goto bail;
- }
-#endif
+#endif /* PY_MAJOR_VERSION >= 3 || Py_UNICODE_WIDE */
}
if (c > 0x7f) {
has_unicode = 1;
}
+ APPEND_OLD_CHUNK
+#if PY_MAJOR_VERSION >= 3
+ chunk = JSON_UnicodeFromChar(c);
+ if (chunk == NULL) {
+ goto bail;
+ }
+#else /* PY_MAJOR_VERSION >= 3 */
if (has_unicode) {
- chunk = PyUnicode_FromUnicode(&c, 1);
+ chunk = JSON_UnicodeFromChar(c);
if (chunk == NULL) {
goto bail;
}
@@ -607,26 +1083,33 @@ scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_s
goto bail;
}
}
- if (PyList_Append(chunks, chunk)) {
- Py_DECREF(chunk);
- goto bail;
- }
- Py_DECREF(chunk);
+#endif
}
- rval = join_list_string(chunks);
- if (rval == NULL) {
- goto bail;
+ if (chunks == NULL) {
+ if (chunk != NULL)
+ rval = chunk;
+ else
+ rval = JSON_NewEmptyUnicode();
}
- Py_CLEAR(chunks);
+ else {
+ APPEND_OLD_CHUNK
+ rval = join_list_string(chunks);
+ if (rval == NULL) {
+ goto bail;
+ }
+ Py_CLEAR(chunks);
+ }
+
*next_end_ptr = end;
return rval;
bail:
*next_end_ptr = -1;
+ Py_XDECREF(chunk);
Py_XDECREF(chunks);
return NULL;
}
-
+#endif /* PY_MAJOR_VERSION < 3 */
static PyObject *
scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr)
@@ -640,47 +1123,50 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
Return value is a new PyUnicode
*/
PyObject *rval;
- Py_ssize_t len = PyUnicode_GET_SIZE(pystr);
Py_ssize_t begin = end - 1;
Py_ssize_t next = begin;
- const Py_UNICODE *buf = PyUnicode_AS_UNICODE(pystr);
- PyObject *chunks = PyList_New(0);
- if (chunks == NULL) {
+ PY2_UNUSED int kind = PyUnicode_KIND(pystr);
+ Py_ssize_t len = PyUnicode_GetLength(pystr);
+ void *buf = PyUnicode_DATA(pystr);
+ PyObject *chunks = NULL;
+ PyObject *chunk = NULL;
+
+ if (len == end) {
+ raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin);
goto bail;
}
- if (end < 0 || len <= end) {
+ else if (end < 0 || len < end) {
PyErr_SetString(PyExc_ValueError, "end is out of bounds");
goto bail;
}
while (1) {
/* Find the end of the string or the next escape */
- Py_UNICODE c = 0;
- PyObject *chunk = NULL;
+ JSON_UNICHR c = 0;
for (next = end; next < len; next++) {
- c = buf[next];
+ c = PyUnicode_READ(kind, buf, next);
if (c == '"' || c == '\\') {
break;
}
else if (strict && c <= 0x1f) {
- raise_errmsg("Invalid control character at", pystr, next);
+ raise_errmsg(ERR_STRING_CONTROL, pystr, next);
goto bail;
}
}
if (!(c == '"' || c == '\\')) {
- raise_errmsg("Unterminated string starting at", pystr, begin);
+ raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin);
goto bail;
}
/* Pick up this chunk if it's not zero length */
if (next != end) {
- chunk = PyUnicode_FromUnicode(&buf[end], next - end);
+ APPEND_OLD_CHUNK
+#if PY_MAJOR_VERSION < 3
+ chunk = PyUnicode_FromUnicode(&((const Py_UNICODE *)buf)[end], next - end);
+#else
+ chunk = PyUnicode_Substring(pystr, end, next);
+#endif
if (chunk == NULL) {
goto bail;
}
- if (PyList_Append(chunks, chunk)) {
- Py_DECREF(chunk);
- goto bail;
- }
- Py_DECREF(chunk);
}
next++;
if (c == '"') {
@@ -688,10 +1174,10 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
break;
}
if (next == len) {
- raise_errmsg("Unterminated string starting at", pystr, begin);
+ raise_errmsg(ERR_STRING_UNTERMINATED, pystr, begin);
goto bail;
}
- c = buf[next];
+ c = PyUnicode_READ(kind, buf, next);
if (c != 'u') {
/* Non-unicode backslash escapes */
end = next + 1;
@@ -707,7 +1193,7 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
default: c = 0;
}
if (c == 0) {
- raise_errmsg("Invalid \\escape", pystr, end - 2);
+ raise_errmsg(ERR_STRING_ESC1, pystr, end - 2);
goto bail;
}
}
@@ -716,12 +1202,12 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
next++;
end = next + 4;
if (end >= len) {
- raise_errmsg("Invalid \\uXXXX escape", pystr, next - 1);
+ raise_errmsg(ERR_STRING_ESC4, pystr, next - 1);
goto bail;
}
/* Decode 4 hex digits */
for (; next < end; next++) {
- Py_UNICODE digit = buf[next];
+ JSON_UNICHR digit = PyUnicode_READ(kind, buf, next);
c <<= 4;
switch (digit) {
case '0': case '1': case '2': case '3': case '4':
@@ -734,28 +1220,23 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
case 'F':
c |= (digit - 'A' + 10); break;
default:
- raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5);
+ raise_errmsg(ERR_STRING_ESC4, pystr, end - 5);
goto bail;
}
}
-#ifdef Py_UNICODE_WIDE
+#if PY_MAJOR_VERSION >= 3 || defined(Py_UNICODE_WIDE)
/* Surrogate pair */
if ((c & 0xfc00) == 0xd800) {
- Py_UNICODE c2 = 0;
- if (end + 6 >= len) {
- raise_errmsg("Unpaired high surrogate", pystr, end - 5);
- goto bail;
- }
- if (buf[next++] != '\\' || buf[next++] != 'u') {
- raise_errmsg("Unpaired high surrogate", pystr, end - 5);
- goto bail;
- }
- end += 6;
- /* Decode 4 hex digits */
- for (; next < end; next++) {
- c2 <<= 4;
- Py_UNICODE digit = buf[next];
- switch (digit) {
+ JSON_UNICHR c2 = 0;
+ if (end + 6 < len &&
+ PyUnicode_READ(kind, buf, next) == '\\' &&
+ PyUnicode_READ(kind, buf, next + 1) == 'u') {
+ end += 6;
+ /* Decode 4 hex digits */
+ for (next += 2; next < end; next++) {
+ JSON_UNICHR digit = PyUnicode_READ(kind, buf, next);
+ c2 <<= 4;
+ switch (digit) {
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
c2 |= (digit - '0'); break;
@@ -766,42 +1247,48 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
case 'F':
c2 |= (digit - 'A' + 10); break;
default:
- raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5);
+ raise_errmsg(ERR_STRING_ESC4, pystr, end - 5);
goto bail;
+ }
+ }
+ if ((c2 & 0xfc00) != 0xdc00) {
+ /* not a low surrogate, rewind */
+ end -= 6;
+ next = end;
+ }
+ else {
+ c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00));
}
}
- if ((c2 & 0xfc00) != 0xdc00) {
- raise_errmsg("Unpaired high surrogate", pystr, end - 5);
- goto bail;
- }
- c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00));
- }
- else if ((c & 0xfc00) == 0xdc00) {
- raise_errmsg("Unpaired low surrogate", pystr, end - 5);
- goto bail;
}
#endif
}
- chunk = PyUnicode_FromUnicode(&c, 1);
+ APPEND_OLD_CHUNK
+ chunk = JSON_UnicodeFromChar(c);
if (chunk == NULL) {
goto bail;
}
- if (PyList_Append(chunks, chunk)) {
- Py_DECREF(chunk);
- goto bail;
- }
- Py_DECREF(chunk);
}
- rval = join_list_unicode(chunks);
- if (rval == NULL) {
- goto bail;
+ if (chunks == NULL) {
+ if (chunk != NULL)
+ rval = chunk;
+ else
+ rval = JSON_NewEmptyUnicode();
+ }
+ else {
+ APPEND_OLD_CHUNK
+ rval = join_list_unicode(chunks);
+ if (rval == NULL) {
+ goto bail;
+ }
+ Py_CLEAR(chunks);
}
- Py_DECREF(chunks);
*next_end_ptr = end;
return rval;
bail:
*next_end_ptr = -1;
+ Py_XDECREF(chunk);
Py_XDECREF(chunks);
return NULL;
}
@@ -834,12 +1321,16 @@ py_scanstring(PyObject* self UNUSED, PyObject *args)
if (encoding == NULL) {
encoding = DEFAULT_ENCODING;
}
- if (PyString_Check(pystr)) {
- rval = scanstring_str(pystr, end, encoding, strict, &next_end);
- }
- else if (PyUnicode_Check(pystr)) {
+ if (PyUnicode_Check(pystr)) {
rval = scanstring_unicode(pystr, end, strict, &next_end);
}
+#if PY_MAJOR_VERSION < 3
+ /* Using a bytes input is unsupported for scanning in Python 3.
+ It is coerced to str in the decoder before it gets here. */
+ else if (PyString_Check(pystr)) {
+ rval = scanstring_str(pystr, end, encoding, strict, &next_end);
+ }
+#endif
else {
PyErr_Format(PyExc_TypeError,
"first argument must be a string, not %.80s",
@@ -891,9 +1382,11 @@ scanner_traverse(PyObject *self, visitproc visit, void *arg)
Py_VISIT(s->encoding);
Py_VISIT(s->strict);
Py_VISIT(s->object_hook);
+ Py_VISIT(s->pairs_hook);
Py_VISIT(s->parse_float);
Py_VISIT(s->parse_int);
Py_VISIT(s->parse_constant);
+ Py_VISIT(s->memo);
return 0;
}
@@ -906,52 +1399,83 @@ scanner_clear(PyObject *self)
Py_CLEAR(s->encoding);
Py_CLEAR(s->strict);
Py_CLEAR(s->object_hook);
+ Py_CLEAR(s->pairs_hook);
Py_CLEAR(s->parse_float);
Py_CLEAR(s->parse_int);
Py_CLEAR(s->parse_constant);
+ Py_CLEAR(s->memo);
return 0;
}
+#if PY_MAJOR_VERSION < 3
static PyObject *
-_parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
+_parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
+{
/* Read a JSON object from PyString pystr.
idx is the index of the first character after the opening curly brace.
*next_idx_ptr is a return-by-reference index to the first character after
the closing curly brace.
- Returns a new PyObject (usually a dict, but object_hook can change that)
+ Returns a new PyObject (usually a dict, but object_hook or
+ object_pairs_hook can change that)
*/
char *str = PyString_AS_STRING(pystr);
Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1;
- PyObject *rval = PyDict_New();
+ PyObject *rval = NULL;
+ PyObject *pairs = NULL;
+ PyObject *item;
PyObject *key = NULL;
PyObject *val = NULL;
- char *encoding = PyString_AS_STRING(s->encoding);
+ char *encoding = JSON_ASCII_AS_STRING(s->encoding);
int strict = PyObject_IsTrue(s->strict);
+ int has_pairs_hook = (s->pairs_hook != Py_None);
+ int did_parse = 0;
Py_ssize_t next_idx;
- if (rval == NULL)
- return NULL;
+ if (has_pairs_hook) {
+ pairs = PyList_New(0);
+ if (pairs == NULL)
+ return NULL;
+ }
+ else {
+ rval = PyDict_New();
+ if (rval == NULL)
+ return NULL;
+ }
/* skip whitespace after { */
while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
/* only loop if the object is non-empty */
if (idx <= end_idx && str[idx] != '}') {
+ int trailing_delimiter = 0;
while (idx <= end_idx) {
+ PyObject *memokey;
+ trailing_delimiter = 0;
+
/* read key */
if (str[idx] != '"') {
- raise_errmsg("Expecting property name", pystr, idx);
+ raise_errmsg(ERR_OBJECT_PROPERTY, pystr, idx);
goto bail;
}
key = scanstring_str(pystr, idx + 1, encoding, strict, &next_idx);
if (key == NULL)
goto bail;
+ memokey = PyDict_GetItem(s->memo, key);
+ if (memokey != NULL) {
+ Py_INCREF(memokey);
+ Py_DECREF(key);
+ key = memokey;
+ }
+ else {
+ if (PyDict_SetItem(s->memo, key, key) < 0)
+ goto bail;
+ }
idx = next_idx;
/* skip whitespace between key and : delimiter, read :, skip whitespace */
while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
if (idx > end_idx || str[idx] != ':') {
- raise_errmsg("Expecting : delimiter", pystr, idx);
+ raise_errmsg(ERR_OBJECT_PROPERTY_DELIMITER, pystr, idx);
goto bail;
}
idx++;
@@ -962,36 +1486,70 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_
if (val == NULL)
goto bail;
- if (PyDict_SetItem(rval, key, val) == -1)
- goto bail;
-
- Py_CLEAR(key);
- Py_CLEAR(val);
+ if (has_pairs_hook) {
+ item = PyTuple_Pack(2, key, val);
+ if (item == NULL)
+ goto bail;
+ Py_CLEAR(key);
+ Py_CLEAR(val);
+ if (PyList_Append(pairs, item) == -1) {
+ Py_DECREF(item);
+ goto bail;
+ }
+ Py_DECREF(item);
+ }
+ else {
+ if (PyDict_SetItem(rval, key, val) < 0)
+ goto bail;
+ Py_CLEAR(key);
+ Py_CLEAR(val);
+ }
idx = next_idx;
/* skip whitespace before } or , */
while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
/* bail if the object is closed or we didn't get the , delimiter */
+ did_parse = 1;
if (idx > end_idx) break;
if (str[idx] == '}') {
break;
}
else if (str[idx] != ',') {
- raise_errmsg("Expecting , delimiter", pystr, idx);
+ raise_errmsg(ERR_OBJECT_DELIMITER, pystr, idx);
goto bail;
}
idx++;
/* skip whitespace after , delimiter */
while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ trailing_delimiter = 1;
+ }
+ if (trailing_delimiter) {
+ raise_errmsg(ERR_OBJECT_PROPERTY, pystr, idx);
+ goto bail;
}
}
/* verify that idx < end_idx, str[idx] should be '}' */
if (idx > end_idx || str[idx] != '}') {
- raise_errmsg("Expecting object", pystr, end_idx);
+ if (did_parse) {
+ raise_errmsg(ERR_OBJECT_DELIMITER, pystr, idx);
+ } else {
+ raise_errmsg(ERR_OBJECT_PROPERTY_FIRST, pystr, idx);
+ }
goto bail;
}
+
+ /* if pairs_hook is not None: rval = object_pairs_hook(pairs) */
+ if (s->pairs_hook != Py_None) {
+ val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL);
+ if (val == NULL)
+ goto bail;
+ Py_DECREF(pairs);
+ *next_idx_ptr = idx + 1;
+ return val;
+ }
+
/* if object_hook is not None: rval = object_hook(rval) */
if (s->object_hook != Py_None) {
val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL);
@@ -1004,14 +1562,17 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_
*next_idx_ptr = idx + 1;
return rval;
bail:
+ Py_XDECREF(rval);
Py_XDECREF(key);
Py_XDECREF(val);
- Py_DECREF(rval);
+ Py_XDECREF(pairs);
return NULL;
}
+#endif /* PY_MAJOR_VERSION < 3 */
static PyObject *
-_parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
+_parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
+{
/* Read a JSON object from PyUnicode pystr.
idx is the index of the first character after the opening curly brace.
*next_idx_ptr is a return-by-reference index to the first character after
@@ -1019,78 +1580,141 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss
Returns a new PyObject (usually a dict, but object_hook can change that)
*/
- Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
- Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1;
- PyObject *val = NULL;
- PyObject *rval = PyDict_New();
+ void *str = PyUnicode_DATA(pystr);
+ Py_ssize_t end_idx = PyUnicode_GetLength(pystr) - 1;
+ PY2_UNUSED int kind = PyUnicode_KIND(pystr);
+ PyObject *rval = NULL;
+ PyObject *pairs = NULL;
+ PyObject *item;
PyObject *key = NULL;
+ PyObject *val = NULL;
int strict = PyObject_IsTrue(s->strict);
+ int has_pairs_hook = (s->pairs_hook != Py_None);
+ int did_parse = 0;
Py_ssize_t next_idx;
- if (rval == NULL)
- return NULL;
+
+ if (has_pairs_hook) {
+ pairs = PyList_New(0);
+ if (pairs == NULL)
+ return NULL;
+ }
+ else {
+ rval = PyDict_New();
+ if (rval == NULL)
+ return NULL;
+ }
/* skip whitespace after { */
- while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++;
/* only loop if the object is non-empty */
- if (idx <= end_idx && str[idx] != '}') {
+ if (idx <= end_idx && PyUnicode_READ(kind, str, idx) != '}') {
+ int trailing_delimiter = 0;
while (idx <= end_idx) {
+ PyObject *memokey;
+ trailing_delimiter = 0;
+
/* read key */
- if (str[idx] != '"') {
- raise_errmsg("Expecting property name", pystr, idx);
+ if (PyUnicode_READ(kind, str, idx) != '"') {
+ raise_errmsg(ERR_OBJECT_PROPERTY, pystr, idx);
goto bail;
}
key = scanstring_unicode(pystr, idx + 1, strict, &next_idx);
if (key == NULL)
goto bail;
+ memokey = PyDict_GetItem(s->memo, key);
+ if (memokey != NULL) {
+ Py_INCREF(memokey);
+ Py_DECREF(key);
+ key = memokey;
+ }
+ else {
+ if (PyDict_SetItem(s->memo, key, key) < 0)
+ goto bail;
+ }
idx = next_idx;
- /* skip whitespace between key and : delimiter, read :, skip whitespace */
- while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
- if (idx > end_idx || str[idx] != ':') {
- raise_errmsg("Expecting : delimiter", pystr, idx);
+ /* skip whitespace between key and : delimiter, read :, skip
+ whitespace */
+ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++;
+ if (idx > end_idx || PyUnicode_READ(kind, str, idx) != ':') {
+ raise_errmsg(ERR_OBJECT_PROPERTY_DELIMITER, pystr, idx);
goto bail;
}
idx++;
- while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++;
/* read any JSON term */
val = scan_once_unicode(s, pystr, idx, &next_idx);
if (val == NULL)
goto bail;
- if (PyDict_SetItem(rval, key, val) == -1)
- goto bail;
-
- Py_CLEAR(key);
- Py_CLEAR(val);
+ if (has_pairs_hook) {
+ item = PyTuple_Pack(2, key, val);
+ if (item == NULL)
+ goto bail;
+ Py_CLEAR(key);
+ Py_CLEAR(val);
+ if (PyList_Append(pairs, item) == -1) {
+ Py_DECREF(item);
+ goto bail;
+ }
+ Py_DECREF(item);
+ }
+ else {
+ if (PyDict_SetItem(rval, key, val) < 0)
+ goto bail;
+ Py_CLEAR(key);
+ Py_CLEAR(val);
+ }
idx = next_idx;
/* skip whitespace before } or , */
- while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++;
- /* bail if the object is closed or we didn't get the , delimiter */
+ /* bail if the object is closed or we didn't get the ,
+ delimiter */
+ did_parse = 1;
if (idx > end_idx) break;
- if (str[idx] == '}') {
+ if (PyUnicode_READ(kind, str, idx) == '}') {
break;
}
- else if (str[idx] != ',') {
- raise_errmsg("Expecting , delimiter", pystr, idx);
+ else if (PyUnicode_READ(kind, str, idx) != ',') {
+ raise_errmsg(ERR_OBJECT_DELIMITER, pystr, idx);
goto bail;
}
idx++;
/* skip whitespace after , delimiter */
- while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++;
+ trailing_delimiter = 1;
+ }
+ if (trailing_delimiter) {
+ raise_errmsg(ERR_OBJECT_PROPERTY, pystr, idx);
+ goto bail;
}
}
/* verify that idx < end_idx, str[idx] should be '}' */
- if (idx > end_idx || str[idx] != '}') {
- raise_errmsg("Expecting object", pystr, end_idx);
+ if (idx > end_idx || PyUnicode_READ(kind, str, idx) != '}') {
+ if (did_parse) {
+ raise_errmsg(ERR_OBJECT_DELIMITER, pystr, idx);
+ } else {
+ raise_errmsg(ERR_OBJECT_PROPERTY_FIRST, pystr, idx);
+ }
goto bail;
}
+ /* if pairs_hook is not None: rval = object_pairs_hook(pairs) */
+ if (s->pairs_hook != Py_None) {
+ val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL);
+ if (val == NULL)
+ goto bail;
+ Py_DECREF(pairs);
+ *next_idx_ptr = idx + 1;
+ return val;
+ }
+
/* if object_hook is not None: rval = object_hook(rval) */
if (s->object_hook != Py_None) {
val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL);
@@ -1103,14 +1727,17 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss
*next_idx_ptr = idx + 1;
return rval;
bail:
+ Py_XDECREF(rval);
Py_XDECREF(key);
Py_XDECREF(val);
- Py_DECREF(rval);
+ Py_XDECREF(pairs);
return NULL;
}
+#if PY_MAJOR_VERSION < 3
static PyObject *
-_parse_array_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
+_parse_array_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
+{
/* Read a JSON array from PyString pystr.
idx is the index of the first character after the opening brace.
*next_idx_ptr is a return-by-reference index to the first character after
@@ -1131,12 +1758,14 @@ _parse_array_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t
/* only loop if the array is non-empty */
if (idx <= end_idx && str[idx] != ']') {
+ int trailing_delimiter = 0;
while (idx <= end_idx) {
-
+ trailing_delimiter = 0;
/* read any JSON term and de-tuplefy the (rval, idx) */
val = scan_once_str(s, pystr, idx, &next_idx);
- if (val == NULL)
+ if (val == NULL) {
goto bail;
+ }
if (PyList_Append(rval, val) == -1)
goto bail;
@@ -1153,19 +1782,28 @@ _parse_array_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t
break;
}
else if (str[idx] != ',') {
- raise_errmsg("Expecting , delimiter", pystr, idx);
+ raise_errmsg(ERR_ARRAY_DELIMITER, pystr, idx);
goto bail;
}
idx++;
/* skip whitespace after , */
while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ trailing_delimiter = 1;
+ }
+ if (trailing_delimiter) {
+ raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx);
+ goto bail;
}
}
/* verify that idx < end_idx, str[idx] should be ']' */
if (idx > end_idx || str[idx] != ']') {
- raise_errmsg("Expecting object", pystr, end_idx);
+ if (PyList_GET_SIZE(rval)) {
+ raise_errmsg(ERR_ARRAY_DELIMITER, pystr, idx);
+ } else {
+ raise_errmsg(ERR_ARRAY_VALUE_FIRST, pystr, idx);
+ }
goto bail;
}
*next_idx_ptr = idx + 1;
@@ -1175,9 +1813,11 @@ bail:
Py_DECREF(rval);
return NULL;
}
+#endif /* PY_MAJOR_VERSION < 3 */
static PyObject *
-_parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
+_parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
+{
/* Read a JSON array from PyString pystr.
idx is the index of the first character after the opening brace.
*next_idx_ptr is a return-by-reference index to the first character after
@@ -1185,8 +1825,9 @@ _parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssi
Returns a new PyList
*/
- Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
- Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1;
+ PY2_UNUSED int kind = PyUnicode_KIND(pystr);
+ void *str = PyUnicode_DATA(pystr);
+ Py_ssize_t end_idx = PyUnicode_GetLength(pystr) - 1;
PyObject *val = NULL;
PyObject *rval = PyList_New(0);
Py_ssize_t next_idx;
@@ -1194,16 +1835,18 @@ _parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssi
return NULL;
/* skip whitespace after [ */
- while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++;
/* only loop if the array is non-empty */
- if (idx <= end_idx && str[idx] != ']') {
+ if (idx <= end_idx && PyUnicode_READ(kind, str, idx) != ']') {
+ int trailing_delimiter = 0;
while (idx <= end_idx) {
-
+ trailing_delimiter = 0;
/* read any JSON term */
val = scan_once_unicode(s, pystr, idx, &next_idx);
- if (val == NULL)
+ if (val == NULL) {
goto bail;
+ }
if (PyList_Append(rval, val) == -1)
goto bail;
@@ -1212,27 +1855,36 @@ _parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssi
idx = next_idx;
/* skip whitespace between term and , */
- while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++;
/* bail if the array is closed or we didn't get the , delimiter */
if (idx > end_idx) break;
- if (str[idx] == ']') {
+ if (PyUnicode_READ(kind, str, idx) == ']') {
break;
}
- else if (str[idx] != ',') {
- raise_errmsg("Expecting , delimiter", pystr, idx);
+ else if (PyUnicode_READ(kind, str, idx) != ',') {
+ raise_errmsg(ERR_ARRAY_DELIMITER, pystr, idx);
goto bail;
}
idx++;
/* skip whitespace after , */
- while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
+ while (idx <= end_idx && IS_WHITESPACE(PyUnicode_READ(kind, str, idx))) idx++;
+ trailing_delimiter = 1;
+ }
+ if (trailing_delimiter) {
+ raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx);
+ goto bail;
}
}
/* verify that idx < end_idx, str[idx] should be ']' */
- if (idx > end_idx || str[idx] != ']') {
- raise_errmsg("Expecting object", pystr, end_idx);
+ if (idx > end_idx || PyUnicode_READ(kind, str, idx) != ']') {
+ if (PyList_GET_SIZE(rval)) {
+ raise_errmsg(ERR_ARRAY_DELIMITER, pystr, idx);
+ } else {
+ raise_errmsg(ERR_ARRAY_VALUE_FIRST, pystr, idx);
+ }
goto bail;
}
*next_idx_ptr = idx + 1;
@@ -1244,7 +1896,8 @@ bail:
}
static PyObject *
-_parse_constant(PyScannerObject *s, char *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
+_parse_constant(PyScannerObject *s, char *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
+{
/* Read a JSON constant from PyString pystr.
constant is the constant string that was found
("NaN", "Infinity", "-Infinity").
@@ -1257,20 +1910,22 @@ _parse_constant(PyScannerObject *s, char *constant, Py_ssize_t idx, Py_ssize_t *
PyObject *cstr;
PyObject *rval;
/* constant is "NaN", "Infinity", or "-Infinity" */
- cstr = PyString_InternFromString(constant);
+ cstr = JSON_InternFromString(constant);
if (cstr == NULL)
return NULL;
/* rval = parse_constant(constant) */
rval = PyObject_CallFunctionObjArgs(s->parse_constant, cstr, NULL);
- idx += PyString_GET_SIZE(cstr);
+ idx += JSON_Intern_GET_SIZE(cstr);
Py_DECREF(cstr);
*next_idx_ptr = idx;
return rval;
}
+#if PY_MAJOR_VERSION < 3
static PyObject *
-_match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr) {
+_match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr)
+{
/* Read a JSON number from PyString pystr.
idx is the index of the first character of the number
*next_idx_ptr is a return-by-reference index to the first character after
@@ -1289,11 +1944,11 @@ _match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssiz
/* read a sign if it's there, make sure it's not the end of the string */
if (str[idx] == '-') {
- idx++;
- if (idx > end_idx) {
- PyErr_SetNone(PyExc_StopIteration);
+ if (idx >= end_idx) {
+ raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx);
return NULL;
}
+ idx++;
}
/* read as many integer digits as we find as long as it doesn't start with 0 */
@@ -1307,7 +1962,7 @@ _match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssiz
}
/* no integer digits, error */
else {
- PyErr_SetNone(PyExc_StopIteration);
+ raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx);
return NULL;
}
@@ -1350,7 +2005,12 @@ _match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssiz
rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL);
}
else {
- rval = PyFloat_FromDouble(PyOS_ascii_atof(PyString_AS_STRING(numstr)));
+ /* rval = PyFloat_FromDouble(PyOS_ascii_atof(PyString_AS_STRING(numstr))); */
+ double d = PyOS_string_to_double(PyString_AS_STRING(numstr),
+ NULL, NULL);
+ if (d == -1.0 && PyErr_Occurred())
+ return NULL;
+ rval = PyFloat_FromDouble(d);
}
}
else {
@@ -1366,9 +2026,11 @@ _match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssiz
*next_idx_ptr = idx;
return rval;
}
+#endif /* PY_MAJOR_VERSION < 3 */
static PyObject *
-_match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr) {
+_match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr)
+{
/* Read a JSON number from PyUnicode pystr.
idx is the index of the first character of the number
*next_idx_ptr is a return-by-reference index to the first character after
@@ -1378,57 +2040,68 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_
PyInt, PyLong, or PyFloat.
May return other types if parse_int or parse_float are set
*/
- Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
- Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1;
+ PY2_UNUSED int kind = PyUnicode_KIND(pystr);
+ void *str = PyUnicode_DATA(pystr);
+ Py_ssize_t end_idx = PyUnicode_GetLength(pystr) - 1;
Py_ssize_t idx = start;
int is_float = 0;
+ JSON_UNICHR c;
PyObject *rval;
PyObject *numstr;
/* read a sign if it's there, make sure it's not the end of the string */
- if (str[idx] == '-') {
- idx++;
- if (idx > end_idx) {
- PyErr_SetNone(PyExc_StopIteration);
+ if (PyUnicode_READ(kind, str, idx) == '-') {
+ if (idx >= end_idx) {
+ raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx);
return NULL;
}
+ idx++;
}
/* read as many integer digits as we find as long as it doesn't start with 0 */
- if (str[idx] >= '1' && str[idx] <= '9') {
- idx++;
- while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++;
- }
- /* if it starts with 0 we only expect one integer digit */
- else if (str[idx] == '0') {
+ c = PyUnicode_READ(kind, str, idx);
+ if (c == '0') {
+ /* if it starts with 0 we only expect one integer digit */
idx++;
}
- /* no integer digits, error */
+ else if (IS_DIGIT(c)) {
+ idx++;
+ while (idx <= end_idx && IS_DIGIT(PyUnicode_READ(kind, str, idx))) {
+ idx++;
+ }
+ }
else {
- PyErr_SetNone(PyExc_StopIteration);
+ /* no integer digits, error */
+ raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx);
return NULL;
}
/* if the next char is '.' followed by a digit then read all float digits */
- if (idx < end_idx && str[idx] == '.' && str[idx + 1] >= '0' && str[idx + 1] <= '9') {
+ if (idx < end_idx &&
+ PyUnicode_READ(kind, str, idx) == '.' &&
+ IS_DIGIT(PyUnicode_READ(kind, str, idx + 1))) {
is_float = 1;
idx += 2;
- while (idx < end_idx && str[idx] >= '0' && str[idx] <= '9') idx++;
+ while (idx <= end_idx && IS_DIGIT(PyUnicode_READ(kind, str, idx))) idx++;
}
/* if the next char is 'e' or 'E' then maybe read the exponent (or backtrack) */
- if (idx < end_idx && (str[idx] == 'e' || str[idx] == 'E')) {
+ if (idx < end_idx &&
+ (PyUnicode_READ(kind, str, idx) == 'e' ||
+ PyUnicode_READ(kind, str, idx) == 'E')) {
Py_ssize_t e_start = idx;
idx++;
/* read an exponent sign if present */
- if (idx < end_idx && (str[idx] == '-' || str[idx] == '+')) idx++;
+ if (idx < end_idx &&
+ (PyUnicode_READ(kind, str, idx) == '-' ||
+ PyUnicode_READ(kind, str, idx) == '+')) idx++;
/* read all digits */
- while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++;
+ while (idx <= end_idx && IS_DIGIT(PyUnicode_READ(kind, str, idx))) idx++;
/* if we got a digit, then parse as float. if not, backtrack */
- if (str[idx - 1] >= '0' && str[idx - 1] <= '9') {
+ if (IS_DIGIT(PyUnicode_READ(kind, str, idx - 1))) {
is_float = 1;
}
else {
@@ -1437,7 +2110,11 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_
}
/* copy the section we determined to be a number */
- numstr = PyUnicode_FromUnicode(&str[start], idx - start);
+#if PY_MAJOR_VERSION >= 3
+ numstr = PyUnicode_Substring(pystr, start, idx);
+#else
+ numstr = PyUnicode_FromUnicode(&((Py_UNICODE *)str)[start], idx - start);
+#endif
if (numstr == NULL)
return NULL;
if (is_float) {
@@ -1446,7 +2123,11 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_
rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL);
}
else {
+#if PY_MAJOR_VERSION >= 3
+ rval = PyFloat_FromString(numstr);
+#else
rval = PyFloat_FromString(numstr, NULL);
+#endif
}
}
else {
@@ -1458,6 +2139,7 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_
return rval;
}
+#if PY_MAJOR_VERSION < 3
static PyObject *
scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
{
@@ -1470,69 +2152,100 @@ scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *n
*/
char *str = PyString_AS_STRING(pystr);
Py_ssize_t length = PyString_GET_SIZE(pystr);
- if (idx >= length) {
- PyErr_SetNone(PyExc_StopIteration);
+ PyObject *rval = NULL;
+ int fallthrough = 0;
+ if (idx < 0 || idx >= length) {
+ raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx);
return NULL;
}
switch (str[idx]) {
case '"':
/* string */
- return scanstring_str(pystr, idx + 1,
- PyString_AS_STRING(s->encoding),
+ rval = scanstring_str(pystr, idx + 1,
+ JSON_ASCII_AS_STRING(s->encoding),
PyObject_IsTrue(s->strict),
next_idx_ptr);
+ break;
case '{':
/* object */
- return _parse_object_str(s, pystr, idx + 1, next_idx_ptr);
+ if (Py_EnterRecursiveCall(" while decoding a JSON object "
+ "from a string"))
+ return NULL;
+ rval = _parse_object_str(s, pystr, idx + 1, next_idx_ptr);
+ Py_LeaveRecursiveCall();
+ break;
case '[':
/* array */
- return _parse_array_str(s, pystr, idx + 1, next_idx_ptr);
+ if (Py_EnterRecursiveCall(" while decoding a JSON array "
+ "from a string"))
+ return NULL;
+ rval = _parse_array_str(s, pystr, idx + 1, next_idx_ptr);
+ Py_LeaveRecursiveCall();
+ break;
case 'n':
/* null */
if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') {
Py_INCREF(Py_None);
*next_idx_ptr = idx + 4;
- return Py_None;
+ rval = Py_None;
}
+ else
+ fallthrough = 1;
break;
case 't':
/* true */
if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') {
Py_INCREF(Py_True);
*next_idx_ptr = idx + 4;
- return Py_True;
+ rval = Py_True;
}
+ else
+ fallthrough = 1;
break;
case 'f':
/* false */
if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') {
Py_INCREF(Py_False);
*next_idx_ptr = idx + 5;
- return Py_False;
+ rval = Py_False;
}
+ else
+ fallthrough = 1;
break;
case 'N':
/* NaN */
if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') {
- return _parse_constant(s, "NaN", idx, next_idx_ptr);
+ rval = _parse_constant(s, "NaN", idx, next_idx_ptr);
}
+ else
+ fallthrough = 1;
break;
case 'I':
/* Infinity */
if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') {
- return _parse_constant(s, "Infinity", idx, next_idx_ptr);
+ rval = _parse_constant(s, "Infinity", idx, next_idx_ptr);
}
+ else
+ fallthrough = 1;
break;
case '-':
/* -Infinity */
if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') {
- return _parse_constant(s, "-Infinity", idx, next_idx_ptr);
+ rval = _parse_constant(s, "-Infinity", idx, next_idx_ptr);
}
+ else
+ fallthrough = 1;
break;
+ default:
+ fallthrough = 1;
}
/* Didn't find a string, object, array, or named constant. Look for a number. */
- return _match_number_str(s, pystr, idx, next_idx_ptr);
+ if (fallthrough)
+ rval = _match_number_str(s, pystr, idx, next_idx_ptr);
+ return rval;
}
+#endif /* PY_MAJOR_VERSION < 3 */
+
static PyObject *
scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
@@ -1544,69 +2257,126 @@ scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_
Returns a new PyObject representation of the term.
*/
- Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
- Py_ssize_t length = PyUnicode_GET_SIZE(pystr);
- if (idx >= length) {
- PyErr_SetNone(PyExc_StopIteration);
+ PY2_UNUSED int kind = PyUnicode_KIND(pystr);
+ void *str = PyUnicode_DATA(pystr);
+ Py_ssize_t length = PyUnicode_GetLength(pystr);
+ PyObject *rval = NULL;
+ int fallthrough = 0;
+ if (idx < 0 || idx >= length) {
+ raise_errmsg(ERR_EXPECTING_VALUE, pystr, idx);
return NULL;
}
- switch (str[idx]) {
+ switch (PyUnicode_READ(kind, str, idx)) {
case '"':
/* string */
- return scanstring_unicode(pystr, idx + 1,
+ rval = scanstring_unicode(pystr, idx + 1,
PyObject_IsTrue(s->strict),
next_idx_ptr);
+ break;
case '{':
/* object */
- return _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr);
+ if (Py_EnterRecursiveCall(" while decoding a JSON object "
+ "from a unicode string"))
+ return NULL;
+ rval = _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr);
+ Py_LeaveRecursiveCall();
+ break;
case '[':
/* array */
- return _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr);
+ if (Py_EnterRecursiveCall(" while decoding a JSON array "
+ "from a unicode string"))
+ return NULL;
+ rval = _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr);
+ Py_LeaveRecursiveCall();
+ break;
case 'n':
/* null */
- if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') {
+ if ((idx + 3 < length) &&
+ PyUnicode_READ(kind, str, idx + 1) == 'u' &&
+ PyUnicode_READ(kind, str, idx + 2) == 'l' &&
+ PyUnicode_READ(kind, str, idx + 3) == 'l') {
Py_INCREF(Py_None);
*next_idx_ptr = idx + 4;
- return Py_None;
+ rval = Py_None;
}
+ else
+ fallthrough = 1;
break;
case 't':
/* true */
- if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') {
+ if ((idx + 3 < length) &&
+ PyUnicode_READ(kind, str, idx + 1) == 'r' &&
+ PyUnicode_READ(kind, str, idx + 2) == 'u' &&
+ PyUnicode_READ(kind, str, idx + 3) == 'e') {
Py_INCREF(Py_True);
*next_idx_ptr = idx + 4;
- return Py_True;
+ rval = Py_True;
}
+ else
+ fallthrough = 1;
break;
case 'f':
/* false */
- if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') {
+ if ((idx + 4 < length) &&
+ PyUnicode_READ(kind, str, idx + 1) == 'a' &&
+ PyUnicode_READ(kind, str, idx + 2) == 'l' &&
+ PyUnicode_READ(kind, str, idx + 3) == 's' &&
+ PyUnicode_READ(kind, str, idx + 4) == 'e') {
Py_INCREF(Py_False);
*next_idx_ptr = idx + 5;
- return Py_False;
+ rval = Py_False;
}
+ else
+ fallthrough = 1;
break;
case 'N':
/* NaN */
- if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') {
- return _parse_constant(s, "NaN", idx, next_idx_ptr);
+ if ((idx + 2 < length) &&
+ PyUnicode_READ(kind, str, idx + 1) == 'a' &&
+ PyUnicode_READ(kind, str, idx + 2) == 'N') {
+ rval = _parse_constant(s, "NaN", idx, next_idx_ptr);
}
+ else
+ fallthrough = 1;
break;
case 'I':
/* Infinity */
- if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') {
- return _parse_constant(s, "Infinity", idx, next_idx_ptr);
+ if ((idx + 7 < length) &&
+ PyUnicode_READ(kind, str, idx + 1) == 'n' &&
+ PyUnicode_READ(kind, str, idx + 2) == 'f' &&
+ PyUnicode_READ(kind, str, idx + 3) == 'i' &&
+ PyUnicode_READ(kind, str, idx + 4) == 'n' &&
+ PyUnicode_READ(kind, str, idx + 5) == 'i' &&
+ PyUnicode_READ(kind, str, idx + 6) == 't' &&
+ PyUnicode_READ(kind, str, idx + 7) == 'y') {
+ rval = _parse_constant(s, "Infinity", idx, next_idx_ptr);
}
+ else
+ fallthrough = 1;
break;
case '-':
/* -Infinity */
- if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') {
- return _parse_constant(s, "-Infinity", idx, next_idx_ptr);
+ if ((idx + 8 < length) &&
+ PyUnicode_READ(kind, str, idx + 1) == 'I' &&
+ PyUnicode_READ(kind, str, idx + 2) == 'n' &&
+ PyUnicode_READ(kind, str, idx + 3) == 'f' &&
+ PyUnicode_READ(kind, str, idx + 4) == 'i' &&
+ PyUnicode_READ(kind, str, idx + 5) == 'n' &&
+ PyUnicode_READ(kind, str, idx + 6) == 'i' &&
+ PyUnicode_READ(kind, str, idx + 7) == 't' &&
+ PyUnicode_READ(kind, str, idx + 8) == 'y') {
+ rval = _parse_constant(s, "-Infinity", idx, next_idx_ptr);
}
+ else
+ fallthrough = 1;
break;
+ default:
+ fallthrough = 1;
}
/* Didn't find a string, object, array, or named constant. Look for a number. */
- return _match_number_unicode(s, pystr, idx, next_idx_ptr);
+ if (fallthrough)
+ rval = _match_number_unicode(s, pystr, idx, next_idx_ptr);
+ return rval;
}
static PyObject *
@@ -1624,18 +2394,21 @@ scanner_call(PyObject *self, PyObject *args, PyObject *kwds)
if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO&:scan_once", kwlist, &pystr, _convertPyInt_AsSsize_t, &idx))
return NULL;
- if (PyString_Check(pystr)) {
- rval = scan_once_str(s, pystr, idx, &next_idx);
- }
- else if (PyUnicode_Check(pystr)) {
+ if (PyUnicode_Check(pystr)) {
rval = scan_once_unicode(s, pystr, idx, &next_idx);
}
+#if PY_MAJOR_VERSION < 3
+ else if (PyString_Check(pystr)) {
+ rval = scan_once_str(s, pystr, idx, &next_idx);
+ }
+#endif /* PY_MAJOR_VERSION < 3 */
else {
PyErr_Format(PyExc_TypeError,
"first argument must be a string, not %.80s",
Py_TYPE(pystr)->tp_name);
return NULL;
}
+ PyDict_Clear(s->memo);
return _build_rval_index_tuple(rval, next_idx);
}
@@ -1648,6 +2421,7 @@ scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
s->encoding = NULL;
s->strict = NULL;
s->object_hook = NULL;
+ s->pairs_hook = NULL;
s->parse_float = NULL;
s->parse_int = NULL;
s->parse_constant = NULL;
@@ -1655,6 +2429,25 @@ scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
return (PyObject *)s;
}
+static PyObject *
+JSON_ParseEncoding(PyObject *encoding)
+{
+ if (encoding == NULL)
+ return NULL;
+ if (encoding == Py_None)
+ return JSON_InternFromString(DEFAULT_ENCODING);
+#if PY_MAJOR_VERSION < 3
+ if (PyUnicode_Check(encoding))
+ return PyUnicode_AsEncodedString(encoding, NULL, NULL);
+#endif
+ if (JSON_ASCII_Check(encoding)) {
+ Py_INCREF(encoding);
+ return encoding;
+ }
+ PyErr_SetString(PyExc_TypeError, "encoding must be a string");
+ return NULL;
+}
+
static int
scanner_init(PyObject *self, PyObject *args, PyObject *kwds)
{
@@ -1662,6 +2455,7 @@ scanner_init(PyObject *self, PyObject *args, PyObject *kwds)
PyObject *ctx;
static char *kwlist[] = {"context", NULL};
PyScannerObject *s;
+ PyObject *encoding;
assert(PyScanner_Check(self));
s = (PyScannerObject *)self;
@@ -1669,18 +2463,17 @@ scanner_init(PyObject *self, PyObject *args, PyObject *kwds)
if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:make_scanner", kwlist, &ctx))
return -1;
- /* PyString_AS_STRING is used on encoding */
- s->encoding = PyObject_GetAttrString(ctx, "encoding");
- if (s->encoding == Py_None) {
- Py_DECREF(Py_None);
- s->encoding = PyString_InternFromString(DEFAULT_ENCODING);
+ if (s->memo == NULL) {
+ s->memo = PyDict_New();
+ if (s->memo == NULL)
+ goto bail;
}
- else if (PyUnicode_Check(s->encoding)) {
- PyObject *tmp = PyUnicode_AsEncodedString(s->encoding, NULL, NULL);
- Py_DECREF(s->encoding);
- s->encoding = tmp;
- }
- if (s->encoding == NULL || !PyString_Check(s->encoding))
+
+ /* JSON_ASCII_AS_STRING is used on encoding */
+ encoding = PyObject_GetAttrString(ctx, "encoding");
+ s->encoding = JSON_ParseEncoding(encoding);
+ Py_XDECREF(encoding);
+ if (s->encoding == NULL)
goto bail;
/* All of these will fail "gracefully" so we don't need to verify them */
@@ -1690,6 +2483,9 @@ scanner_init(PyObject *self, PyObject *args, PyObject *kwds)
s->object_hook = PyObject_GetAttrString(ctx, "object_hook");
if (s->object_hook == NULL)
goto bail;
+ s->pairs_hook = PyObject_GetAttrString(ctx, "object_pairs_hook");
+ if (s->pairs_hook == NULL)
+ goto bail;
s->parse_float = PyObject_GetAttrString(ctx, "parse_float");
if (s->parse_float == NULL)
goto bail;
@@ -1706,6 +2502,7 @@ bail:
Py_CLEAR(s->encoding);
Py_CLEAR(s->strict);
Py_CLEAR(s->object_hook);
+ Py_CLEAR(s->pairs_hook);
Py_CLEAR(s->parse_float);
Py_CLEAR(s->parse_int);
Py_CLEAR(s->parse_constant);
@@ -1716,8 +2513,7 @@ PyDoc_STRVAR(scanner_doc, "JSON scanner object");
static
PyTypeObject PyScannerType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* tp_internal */
+ PyVarObject_HEAD_INIT(NULL, 0)
"simplejson._speedups.Scanner", /* tp_name */
sizeof(PyScannerObject), /* tp_basicsize */
0, /* tp_itemsize */
@@ -1767,11 +2563,17 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
s->markers = NULL;
s->defaultfn = NULL;
s->encoder = NULL;
+ s->encoding = NULL;
s->indent = NULL;
s->key_separator = NULL;
s->item_separator = NULL;
+ s->key_memo = NULL;
s->sort_keys = NULL;
- s->skipkeys = NULL;
+ s->item_sort_key = NULL;
+ s->item_sort_kw = NULL;
+ s->Decimal = NULL;
+ s->max_long_size = NULL;
+ s->min_long_size = NULL;
}
return (PyObject *)s;
}
@@ -1780,28 +2582,138 @@ static int
encoder_init(PyObject *self, PyObject *args, PyObject *kwds)
{
/* initialize Encoder object */
- static char *kwlist[] = {"markers", "default", "encoder", "indent", "key_separator", "item_separator", "sort_keys", "skipkeys", "allow_nan", NULL};
+ static char *kwlist[] = {
+ "markers",
+ "default",
+ "encoder",
+ "indent",
+ "key_separator",
+ "item_separator",
+ "sort_keys",
+ "skipkeys",
+ "allow_nan",
+ "key_memo",
+ "use_decimal",
+ "namedtuple_as_object",
+ "tuple_as_array",
+ "int_as_string_bitcount",
+ "item_sort_key",
+ "encoding",
+ "for_json",
+ "ignore_nan",
+ "Decimal",
+ NULL};
PyEncoderObject *s;
- PyObject *allow_nan;
+ PyObject *markers, *defaultfn, *encoder, *indent, *key_separator;
+ PyObject *item_separator, *sort_keys, *skipkeys, *allow_nan, *key_memo;
+ PyObject *use_decimal, *namedtuple_as_object, *tuple_as_array;
+ PyObject *int_as_string_bitcount, *item_sort_key, *encoding, *for_json;
+ PyObject *ignore_nan, *Decimal;
assert(PyEncoder_Check(self));
s = (PyEncoderObject *)self;
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "OOOOOOOOO:make_encoder", kwlist,
- &s->markers, &s->defaultfn, &s->encoder, &s->indent, &s->key_separator, &s->item_separator, &s->sort_keys, &s->skipkeys, &allow_nan))
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "OOOOOOOOOOOOOOOOOOO:make_encoder", kwlist,
+ &markers, &defaultfn, &encoder, &indent, &key_separator, &item_separator,
+ &sort_keys, &skipkeys, &allow_nan, &key_memo, &use_decimal,
+ &namedtuple_as_object, &tuple_as_array,
+ &int_as_string_bitcount, &item_sort_key, &encoding, &for_json,
+ &ignore_nan, &Decimal))
return -1;
- Py_INCREF(s->markers);
- Py_INCREF(s->defaultfn);
- Py_INCREF(s->encoder);
- Py_INCREF(s->indent);
- Py_INCREF(s->key_separator);
- Py_INCREF(s->item_separator);
- Py_INCREF(s->sort_keys);
- Py_INCREF(s->skipkeys);
+ Py_INCREF(markers);
+ s->markers = markers;
+ Py_INCREF(defaultfn);
+ s->defaultfn = defaultfn;
+ Py_INCREF(encoder);
+ s->encoder = encoder;
+ s->encoding = JSON_ParseEncoding(encoding);
+ if (s->encoding == NULL)
+ return -1;
+ Py_INCREF(indent);
+ s->indent = indent;
+ Py_INCREF(key_separator);
+ s->key_separator = key_separator;
+ Py_INCREF(item_separator);
+ s->item_separator = item_separator;
+ Py_INCREF(skipkeys);
+ s->skipkeys_bool = skipkeys;
+ s->skipkeys = PyObject_IsTrue(skipkeys);
+ Py_INCREF(key_memo);
+ s->key_memo = key_memo;
s->fast_encode = (PyCFunction_Check(s->encoder) && PyCFunction_GetFunction(s->encoder) == (PyCFunction)py_encode_basestring_ascii);
- s->allow_nan = PyObject_IsTrue(allow_nan);
+ s->allow_or_ignore_nan = (
+ (PyObject_IsTrue(ignore_nan) ? JSON_IGNORE_NAN : 0) |
+ (PyObject_IsTrue(allow_nan) ? JSON_ALLOW_NAN : 0));
+ s->use_decimal = PyObject_IsTrue(use_decimal);
+ s->namedtuple_as_object = PyObject_IsTrue(namedtuple_as_object);
+ s->tuple_as_array = PyObject_IsTrue(tuple_as_array);
+ if (PyInt_Check(int_as_string_bitcount) || PyLong_Check(int_as_string_bitcount)) {
+ static const unsigned int long_long_bitsize = SIZEOF_LONG_LONG * 8;
+ int int_as_string_bitcount_val = (int)PyLong_AsLong(int_as_string_bitcount);
+ if (int_as_string_bitcount_val > 0 && int_as_string_bitcount_val < long_long_bitsize) {
+ s->max_long_size = PyLong_FromUnsignedLongLong(1ULL << int_as_string_bitcount_val);
+ s->min_long_size = PyLong_FromLongLong(-1LL << int_as_string_bitcount_val);
+ if (s->min_long_size == NULL || s->max_long_size == NULL) {
+ return -1;
+ }
+ }
+ else {
+ PyErr_Format(PyExc_TypeError,
+ "int_as_string_bitcount (%d) must be greater than 0 and less than the number of bits of a `long long` type (%u bits)",
+ int_as_string_bitcount_val, long_long_bitsize);
+ return -1;
+ }
+ }
+ else if (int_as_string_bitcount == Py_None) {
+ Py_INCREF(Py_None);
+ s->max_long_size = Py_None;
+ Py_INCREF(Py_None);
+ s->min_long_size = Py_None;
+ }
+ else {
+ PyErr_SetString(PyExc_TypeError, "int_as_string_bitcount must be None or an integer");
+ return -1;
+ }
+ if (item_sort_key != Py_None) {
+ if (!PyCallable_Check(item_sort_key)) {
+ PyErr_SetString(PyExc_TypeError, "item_sort_key must be None or callable");
+ return -1;
+ }
+ }
+ else if (PyObject_IsTrue(sort_keys)) {
+ static PyObject *itemgetter0 = NULL;
+ if (!itemgetter0) {
+ PyObject *operator = PyImport_ImportModule("operator");
+ if (!operator)
+ return -1;
+ itemgetter0 = PyObject_CallMethod(operator, "itemgetter", "i", 0);
+ Py_DECREF(operator);
+ }
+ item_sort_key = itemgetter0;
+ if (!item_sort_key)
+ return -1;
+ }
+ if (item_sort_key == Py_None) {
+ Py_INCREF(Py_None);
+ s->item_sort_kw = Py_None;
+ }
+ else {
+ s->item_sort_kw = PyDict_New();
+ if (s->item_sort_kw == NULL)
+ return -1;
+ if (PyDict_SetItemString(s->item_sort_kw, "key", item_sort_key))
+ return -1;
+ }
+ Py_INCREF(sort_keys);
+ s->sort_keys = sort_keys;
+ Py_INCREF(item_sort_key);
+ s->item_sort_key = item_sort_key;
+ Py_INCREF(Decimal);
+ s->Decimal = Decimal;
+ s->for_json = PyObject_IsTrue(for_json);
+
return 0;
}
@@ -1811,22 +2723,21 @@ encoder_call(PyObject *self, PyObject *args, PyObject *kwds)
/* Python callable interface to encode_listencode_obj */
static char *kwlist[] = {"obj", "_current_indent_level", NULL};
PyObject *obj;
- PyObject *rval;
Py_ssize_t indent_level;
PyEncoderObject *s;
+ JSON_Accu rval;
assert(PyEncoder_Check(self));
s = (PyEncoderObject *)self;
if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO&:_iterencode", kwlist,
&obj, _convertPyInt_AsSsize_t, &indent_level))
return NULL;
- rval = PyList_New(0);
- if (rval == NULL)
+ if (JSON_Accu_Init(&rval))
return NULL;
- if (encoder_listencode_obj(s, rval, obj, indent_level)) {
- Py_DECREF(rval);
+ if (encoder_listencode_obj(s, &rval, obj, indent_level)) {
+ JSON_Accu_Destroy(&rval);
return NULL;
}
- return rval;
+ return JSON_Accu_FinishAsList(&rval);
}
static PyObject *
@@ -1836,7 +2747,7 @@ _encoded_const(PyObject *obj)
if (obj == Py_None) {
static PyObject *s_null = NULL;
if (s_null == NULL) {
- s_null = PyString_InternFromString("null");
+ s_null = JSON_InternFromString("null");
}
Py_INCREF(s_null);
return s_null;
@@ -1844,7 +2755,7 @@ _encoded_const(PyObject *obj)
else if (obj == Py_True) {
static PyObject *s_true = NULL;
if (s_true == NULL) {
- s_true = PyString_InternFromString("true");
+ s_true = JSON_InternFromString("true");
}
Py_INCREF(s_true);
return s_true;
@@ -1852,7 +2763,7 @@ _encoded_const(PyObject *obj)
else if (obj == Py_False) {
static PyObject *s_false = NULL;
if (s_false == NULL) {
- s_false = PyString_InternFromString("false");
+ s_false = JSON_InternFromString("false");
}
Py_INCREF(s_false);
return s_false;
@@ -1869,22 +2780,54 @@ encoder_encode_float(PyEncoderObject *s, PyObject *obj)
/* Return the JSON representation of a PyFloat */
double i = PyFloat_AS_DOUBLE(obj);
if (!Py_IS_FINITE(i)) {
- if (!s->allow_nan) {
+ if (!s->allow_or_ignore_nan) {
PyErr_SetString(PyExc_ValueError, "Out of range float values are not JSON compliant");
return NULL;
}
- if (i > 0) {
- return PyString_FromString("Infinity");
+ if (s->allow_or_ignore_nan & JSON_IGNORE_NAN) {
+ return _encoded_const(Py_None);
+ }
+ /* JSON_ALLOW_NAN is set */
+ else if (i > 0) {
+ static PyObject *sInfinity = NULL;
+ if (sInfinity == NULL)
+ sInfinity = JSON_InternFromString("Infinity");
+ if (sInfinity)
+ Py_INCREF(sInfinity);
+ return sInfinity;
}
else if (i < 0) {
- return PyString_FromString("-Infinity");
+ static PyObject *sNegInfinity = NULL;
+ if (sNegInfinity == NULL)
+ sNegInfinity = JSON_InternFromString("-Infinity");
+ if (sNegInfinity)
+ Py_INCREF(sNegInfinity);
+ return sNegInfinity;
}
else {
- return PyString_FromString("NaN");
+ static PyObject *sNaN = NULL;
+ if (sNaN == NULL)
+ sNaN = JSON_InternFromString("NaN");
+ if (sNaN)
+ Py_INCREF(sNaN);
+ return sNaN;
}
}
/* Use a better float format here? */
- return PyObject_Repr(obj);
+ if (PyFloat_CheckExact(obj)) {
+ return PyObject_Repr(obj);
+ }
+ else {
+ /* See #118, do not trust custom str/repr */
+ PyObject *res;
+ PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyFloat_Type, obj, NULL);
+ if (tmp == NULL) {
+ return NULL;
+ }
+ res = PyObject_Repr(tmp);
+ Py_DECREF(tmp);
+ return res;
+ }
}
static PyObject *
@@ -1898,116 +2841,169 @@ encoder_encode_string(PyEncoderObject *s, PyObject *obj)
}
static int
-_steal_list_append(PyObject *lst, PyObject *stolen)
+_steal_accumulate(JSON_Accu *accu, PyObject *stolen)
{
/* Append stolen and then decrement its reference count */
- int rval = PyList_Append(lst, stolen);
+ int rval = JSON_Accu_Accumulate(accu, stolen);
Py_DECREF(stolen);
return rval;
}
static int
-encoder_listencode_obj(PyEncoderObject *s, PyObject *rval, PyObject *obj, Py_ssize_t indent_level)
+encoder_listencode_obj(PyEncoderObject *s, JSON_Accu *rval, PyObject *obj, Py_ssize_t indent_level)
{
/* Encode Python object obj to a JSON term, rval is a PyList */
- PyObject *newobj;
- int rv;
-
- if (obj == Py_None || obj == Py_True || obj == Py_False) {
- PyObject *cstr = _encoded_const(obj);
- if (cstr == NULL)
- return -1;
- return _steal_list_append(rval, cstr);
- }
- else if (PyString_Check(obj) || PyUnicode_Check(obj))
- {
- PyObject *encoded = encoder_encode_string(s, obj);
- if (encoded == NULL)
- return -1;
- return _steal_list_append(rval, encoded);
- }
- else if (PyInt_Check(obj) || PyLong_Check(obj)) {
- PyObject *encoded = PyObject_Str(obj);
- if (encoded == NULL)
- return -1;
- return _steal_list_append(rval, encoded);
- }
- else if (PyFloat_Check(obj)) {
- PyObject *encoded = encoder_encode_float(s, obj);
- if (encoded == NULL)
- return -1;
- return _steal_list_append(rval, encoded);
- }
- else if (PyList_Check(obj) || PyTuple_Check(obj)) {
- return encoder_listencode_list(s, rval, obj, indent_level);
- }
- else if (PyDict_Check(obj)) {
- return encoder_listencode_dict(s, rval, obj, indent_level);
- }
- else {
- PyObject *ident = NULL;
- if (s->markers != Py_None) {
- int has_key;
- ident = PyLong_FromVoidPtr(obj);
- if (ident == NULL)
- return -1;
- has_key = PyDict_Contains(s->markers, ident);
- if (has_key) {
- if (has_key != -1)
- PyErr_SetString(PyExc_ValueError, "Circular reference detected");
- Py_DECREF(ident);
- return -1;
+ int rv = -1;
+ do {
+ if (obj == Py_None || obj == Py_True || obj == Py_False) {
+ PyObject *cstr = _encoded_const(obj);
+ if (cstr != NULL)
+ rv = _steal_accumulate(rval, cstr);
+ }
+ else if (PyString_Check(obj) || PyUnicode_Check(obj))
+ {
+ PyObject *encoded = encoder_encode_string(s, obj);
+ if (encoded != NULL)
+ rv = _steal_accumulate(rval, encoded);
+ }
+ else if (PyInt_Check(obj) || PyLong_Check(obj)) {
+ PyObject *encoded;
+ if (PyInt_CheckExact(obj) || PyLong_CheckExact(obj)) {
+ encoded = PyObject_Str(obj);
}
- if (PyDict_SetItem(s->markers, ident, obj)) {
- Py_DECREF(ident);
- return -1;
+ else {
+ /* See #118, do not trust custom str/repr */
+ PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyLong_Type, obj, NULL);
+ if (tmp == NULL) {
+ encoded = NULL;
+ }
+ else {
+ encoded = PyObject_Str(tmp);
+ Py_DECREF(tmp);
+ }
+ }
+ if (encoded != NULL) {
+ encoded = maybe_quote_bigint(s, encoded, obj);
+ if (encoded == NULL)
+ break;
+ rv = _steal_accumulate(rval, encoded);
}
}
- newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL);
- if (newobj == NULL) {
- Py_XDECREF(ident);
- return -1;
+ else if (PyFloat_Check(obj)) {
+ PyObject *encoded = encoder_encode_float(s, obj);
+ if (encoded != NULL)
+ rv = _steal_accumulate(rval, encoded);
}
- rv = encoder_listencode_obj(s, rval, newobj, indent_level);
- Py_DECREF(newobj);
- if (rv) {
- Py_XDECREF(ident);
- return -1;
+ else if (s->for_json && _has_for_json_hook(obj)) {
+ PyObject *newobj;
+ if (Py_EnterRecursiveCall(" while encoding a JSON object"))
+ return rv;
+ newobj = PyObject_CallMethod(obj, "for_json", NULL);
+ if (newobj != NULL) {
+ rv = encoder_listencode_obj(s, rval, newobj, indent_level);
+ Py_DECREF(newobj);
+ }
+ Py_LeaveRecursiveCall();
}
- if (ident != NULL) {
- if (PyDict_DelItem(s->markers, ident)) {
+ else if (s->namedtuple_as_object && _is_namedtuple(obj)) {
+ PyObject *newobj;
+ if (Py_EnterRecursiveCall(" while encoding a JSON object"))
+ return rv;
+ newobj = PyObject_CallMethod(obj, "_asdict", NULL);
+ if (newobj != NULL) {
+ rv = encoder_listencode_dict(s, rval, newobj, indent_level);
+ Py_DECREF(newobj);
+ }
+ Py_LeaveRecursiveCall();
+ }
+ else if (PyList_Check(obj) || (s->tuple_as_array && PyTuple_Check(obj))) {
+ if (Py_EnterRecursiveCall(" while encoding a JSON object"))
+ return rv;
+ rv = encoder_listencode_list(s, rval, obj, indent_level);
+ Py_LeaveRecursiveCall();
+ }
+ else if (PyDict_Check(obj)) {
+ if (Py_EnterRecursiveCall(" while encoding a JSON object"))
+ return rv;
+ rv = encoder_listencode_dict(s, rval, obj, indent_level);
+ Py_LeaveRecursiveCall();
+ }
+ else if (s->use_decimal && PyObject_TypeCheck(obj, (PyTypeObject *)s->Decimal)) {
+ PyObject *encoded = PyObject_Str(obj);
+ if (encoded != NULL)
+ rv = _steal_accumulate(rval, encoded);
+ }
+ else {
+ PyObject *ident = NULL;
+ PyObject *newobj;
+ if (s->markers != Py_None) {
+ int has_key;
+ ident = PyLong_FromVoidPtr(obj);
+ if (ident == NULL)
+ break;
+ has_key = PyDict_Contains(s->markers, ident);
+ if (has_key) {
+ if (has_key != -1)
+ PyErr_SetString(PyExc_ValueError, "Circular reference detected");
+ Py_DECREF(ident);
+ break;
+ }
+ if (PyDict_SetItem(s->markers, ident, obj)) {
+ Py_DECREF(ident);
+ break;
+ }
+ }
+ if (Py_EnterRecursiveCall(" while encoding a JSON object"))
+ return rv;
+ newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL);
+ if (newobj == NULL) {
+ Py_XDECREF(ident);
+ Py_LeaveRecursiveCall();
+ break;
+ }
+ rv = encoder_listencode_obj(s, rval, newobj, indent_level);
+ Py_LeaveRecursiveCall();
+ Py_DECREF(newobj);
+ if (rv) {
+ Py_XDECREF(ident);
+ rv = -1;
+ }
+ else if (ident != NULL) {
+ if (PyDict_DelItem(s->markers, ident)) {
+ Py_XDECREF(ident);
+ rv = -1;
+ }
Py_XDECREF(ident);
- return -1;
}
- Py_XDECREF(ident);
}
- return rv;
- }
+ } while (0);
+ return rv;
}
static int
-encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ssize_t indent_level)
+encoder_listencode_dict(PyEncoderObject *s, JSON_Accu *rval, PyObject *dct, Py_ssize_t indent_level)
{
- /* Encode Python dict dct a JSON term, rval is a PyList */
+ /* Encode Python dict dct a JSON term */
static PyObject *open_dict = NULL;
static PyObject *close_dict = NULL;
static PyObject *empty_dict = NULL;
PyObject *kstr = NULL;
PyObject *ident = NULL;
- PyObject *key, *value;
- Py_ssize_t pos;
- int skipkeys;
+ PyObject *iter = NULL;
+ PyObject *item = NULL;
+ PyObject *items = NULL;
+ PyObject *encoded = NULL;
Py_ssize_t idx;
if (open_dict == NULL || close_dict == NULL || empty_dict == NULL) {
- open_dict = PyString_InternFromString("{");
- close_dict = PyString_InternFromString("}");
- empty_dict = PyString_InternFromString("{}");
+ open_dict = JSON_InternFromString("{");
+ close_dict = JSON_InternFromString("}");
+ empty_dict = JSON_InternFromString("{}");
if (open_dict == NULL || close_dict == NULL || empty_dict == NULL)
return -1;
}
if (PyDict_Size(dct) == 0)
- return PyList_Append(rval, empty_dict);
+ return JSON_Accu_Accumulate(rval, empty_dict);
if (s->markers != Py_None) {
int has_key;
@@ -2025,75 +3021,77 @@ encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ss
}
}
- if (PyList_Append(rval, open_dict))
+ if (JSON_Accu_Accumulate(rval, open_dict))
goto bail;
if (s->indent != Py_None) {
/* TODO: DOES NOT RUN */
indent_level += 1;
/*
- newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ newline_indent = '\n' + (_indent * _current_indent_level)
separator = _item_separator + newline_indent
buf += newline_indent
*/
}
- /* TODO: C speedup not implemented for sort_keys */
+ iter = encoder_dict_iteritems(s, dct);
+ if (iter == NULL)
+ goto bail;
- pos = 0;
- skipkeys = PyObject_IsTrue(s->skipkeys);
idx = 0;
- while (PyDict_Next(dct, &pos, &key, &value)) {
- PyObject *encoded;
-
- if (PyString_Check(key) || PyUnicode_Check(key)) {
- Py_INCREF(key);
- kstr = key;
- }
- else if (PyFloat_Check(key)) {
- kstr = encoder_encode_float(s, key);
- if (kstr == NULL)
- goto bail;
- }
- else if (PyInt_Check(key) || PyLong_Check(key)) {
- kstr = PyObject_Str(key);
- if (kstr == NULL)
- goto bail;
- }
- else if (key == Py_True || key == Py_False || key == Py_None) {
- kstr = _encoded_const(key);
- if (kstr == NULL)
- goto bail;
- }
- else if (skipkeys) {
- continue;
- }
- else {
- /* TODO: include repr of key */
- PyErr_SetString(PyExc_ValueError, "keys must be a string");
+ while ((item = PyIter_Next(iter))) {
+ PyObject *encoded, *key, *value;
+ if (!PyTuple_Check(item) || Py_SIZE(item) != 2) {
+ PyErr_SetString(PyExc_ValueError, "items must return 2-tuples");
goto bail;
}
+ key = PyTuple_GET_ITEM(item, 0);
+ if (key == NULL)
+ goto bail;
+ value = PyTuple_GET_ITEM(item, 1);
+ if (value == NULL)
+ goto bail;
+ encoded = PyDict_GetItem(s->key_memo, key);
+ if (encoded != NULL) {
+ Py_INCREF(encoded);
+ } else {
+ kstr = encoder_stringify_key(s, key);
+ if (kstr == NULL)
+ goto bail;
+ else if (kstr == Py_None) {
+ /* skipkeys */
+ Py_DECREF(item);
+ Py_DECREF(kstr);
+ continue;
+ }
+ }
if (idx) {
- if (PyList_Append(rval, s->item_separator))
+ if (JSON_Accu_Accumulate(rval, s->item_separator))
goto bail;
}
-
- encoded = encoder_encode_string(s, kstr);
- Py_CLEAR(kstr);
- if (encoded == NULL)
- goto bail;
- if (PyList_Append(rval, encoded)) {
- Py_DECREF(encoded);
+ if (encoded == NULL) {
+ encoded = encoder_encode_string(s, kstr);
+ Py_CLEAR(kstr);
+ if (encoded == NULL)
+ goto bail;
+ if (PyDict_SetItem(s->key_memo, key, encoded))
+ goto bail;
+ }
+ if (JSON_Accu_Accumulate(rval, encoded)) {
goto bail;
}
- Py_DECREF(encoded);
- if (PyList_Append(rval, s->key_separator))
+ Py_CLEAR(encoded);
+ if (JSON_Accu_Accumulate(rval, s->key_separator))
goto bail;
if (encoder_listencode_obj(s, rval, value, indent_level))
goto bail;
+ Py_CLEAR(item);
idx += 1;
}
+ Py_CLEAR(iter);
+ if (PyErr_Occurred())
+ goto bail;
if (ident != NULL) {
if (PyDict_DelItem(s->markers, ident))
goto bail;
@@ -2103,14 +3101,18 @@ encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ss
/* TODO: DOES NOT RUN */
indent_level -= 1;
/*
- yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield '\n' + (_indent * _current_indent_level)
*/
}
- if (PyList_Append(rval, close_dict))
+ if (JSON_Accu_Accumulate(rval, close_dict))
goto bail;
return 0;
bail:
+ Py_XDECREF(encoded);
+ Py_XDECREF(items);
+ Py_XDECREF(item);
+ Py_XDECREF(iter);
Py_XDECREF(kstr);
Py_XDECREF(ident);
return -1;
@@ -2118,34 +3120,31 @@ bail:
static int
-encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ssize_t indent_level)
+encoder_listencode_list(PyEncoderObject *s, JSON_Accu *rval, PyObject *seq, Py_ssize_t indent_level)
{
- /* Encode Python list seq to a JSON term, rval is a PyList */
+ /* Encode Python list seq to a JSON term */
static PyObject *open_array = NULL;
static PyObject *close_array = NULL;
static PyObject *empty_array = NULL;
PyObject *ident = NULL;
- PyObject *s_fast = NULL;
- Py_ssize_t num_items;
- PyObject **seq_items;
- Py_ssize_t i;
+ PyObject *iter = NULL;
+ PyObject *obj = NULL;
+ int is_true;
+ int i = 0;
if (open_array == NULL || close_array == NULL || empty_array == NULL) {
- open_array = PyString_InternFromString("[");
- close_array = PyString_InternFromString("]");
- empty_array = PyString_InternFromString("[]");
+ open_array = JSON_InternFromString("[");
+ close_array = JSON_InternFromString("]");
+ empty_array = JSON_InternFromString("[]");
if (open_array == NULL || close_array == NULL || empty_array == NULL)
return -1;
}
ident = NULL;
- s_fast = PySequence_Fast(seq, "_iterencode_list needs a sequence");
- if (s_fast == NULL)
+ is_true = PyObject_IsTrue(seq);
+ if (is_true == -1)
return -1;
- num_items = PySequence_Fast_GET_SIZE(s_fast);
- if (num_items == 0) {
- Py_DECREF(s_fast);
- return PyList_Append(rval, empty_array);
- }
+ else if (is_true == 0)
+ return JSON_Accu_Accumulate(rval, empty_array);
if (s->markers != Py_None) {
int has_key;
@@ -2163,27 +3162,34 @@ encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ss
}
}
- seq_items = PySequence_Fast_ITEMS(s_fast);
- if (PyList_Append(rval, open_array))
+ iter = PyObject_GetIter(seq);
+ if (iter == NULL)
+ goto bail;
+
+ if (JSON_Accu_Accumulate(rval, open_array))
goto bail;
if (s->indent != Py_None) {
/* TODO: DOES NOT RUN */
indent_level += 1;
/*
- newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ newline_indent = '\n' + (_indent * _current_indent_level)
separator = _item_separator + newline_indent
buf += newline_indent
*/
}
- for (i = 0; i < num_items; i++) {
- PyObject *obj = seq_items[i];
+ while ((obj = PyIter_Next(iter))) {
if (i) {
- if (PyList_Append(rval, s->item_separator))
+ if (JSON_Accu_Accumulate(rval, s->item_separator))
goto bail;
}
if (encoder_listencode_obj(s, rval, obj, indent_level))
goto bail;
+ i++;
+ Py_CLEAR(obj);
}
+ Py_CLEAR(iter);
+ if (PyErr_Occurred())
+ goto bail;
if (ident != NULL) {
if (PyDict_DelItem(s->markers, ident))
goto bail;
@@ -2193,17 +3199,17 @@ encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ss
/* TODO: DOES NOT RUN */
indent_level -= 1;
/*
- yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield '\n' + (_indent * _current_indent_level)
*/
}
- if (PyList_Append(rval, close_array))
+ if (JSON_Accu_Accumulate(rval, close_array))
goto bail;
- Py_DECREF(s_fast);
return 0;
bail:
+ Py_XDECREF(obj);
+ Py_XDECREF(iter);
Py_XDECREF(ident);
- Py_DECREF(s_fast);
return -1;
}
@@ -2224,11 +3230,17 @@ encoder_traverse(PyObject *self, visitproc visit, void *arg)
Py_VISIT(s->markers);
Py_VISIT(s->defaultfn);
Py_VISIT(s->encoder);
+ Py_VISIT(s->encoding);
Py_VISIT(s->indent);
Py_VISIT(s->key_separator);
Py_VISIT(s->item_separator);
+ Py_VISIT(s->key_memo);
Py_VISIT(s->sort_keys);
- Py_VISIT(s->skipkeys);
+ Py_VISIT(s->item_sort_kw);
+ Py_VISIT(s->item_sort_key);
+ Py_VISIT(s->max_long_size);
+ Py_VISIT(s->min_long_size);
+ Py_VISIT(s->Decimal);
return 0;
}
@@ -2242,11 +3254,18 @@ encoder_clear(PyObject *self)
Py_CLEAR(s->markers);
Py_CLEAR(s->defaultfn);
Py_CLEAR(s->encoder);
+ Py_CLEAR(s->encoding);
Py_CLEAR(s->indent);
Py_CLEAR(s->key_separator);
Py_CLEAR(s->item_separator);
+ Py_CLEAR(s->key_memo);
+ Py_CLEAR(s->skipkeys_bool);
Py_CLEAR(s->sort_keys);
- Py_CLEAR(s->skipkeys);
+ Py_CLEAR(s->item_sort_kw);
+ Py_CLEAR(s->item_sort_key);
+ Py_CLEAR(s->max_long_size);
+ Py_CLEAR(s->min_long_size);
+ Py_CLEAR(s->Decimal);
return 0;
}
@@ -2254,8 +3273,7 @@ PyDoc_STRVAR(encoder_doc, "_iterencode(obj, _current_indent_level) -> iterable")
static
PyTypeObject PyEncoderType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* tp_internal */
+ PyVarObject_HEAD_INIT(NULL, 0)
"simplejson._speedups.Encoder", /* tp_name */
sizeof(PyEncoderObject), /* tp_basicsize */
0, /* tp_itemsize */
@@ -2311,19 +3329,53 @@ static PyMethodDef speedups_methods[] = {
PyDoc_STRVAR(module_doc,
"simplejson speedups\n");
-void
-init_speedups(void)
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "_speedups", /* m_name */
+ module_doc, /* m_doc */
+ -1, /* m_size */
+ speedups_methods, /* m_methods */
+ NULL, /* m_reload */
+ NULL, /* m_traverse */
+ NULL, /* m_clear*/
+ NULL, /* m_free */
+};
+#endif
+
+static PyObject *
+moduleinit(void)
{
PyObject *m;
PyScannerType.tp_new = PyType_GenericNew;
if (PyType_Ready(&PyScannerType) < 0)
- return;
+ return NULL;
PyEncoderType.tp_new = PyType_GenericNew;
if (PyType_Ready(&PyEncoderType) < 0)
- return;
+ return NULL;
+
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&moduledef);
+#else
m = Py_InitModule3("_speedups", speedups_methods, module_doc);
+#endif
Py_INCREF((PyObject*)&PyScannerType);
PyModule_AddObject(m, "make_scanner", (PyObject*)&PyScannerType);
Py_INCREF((PyObject*)&PyEncoderType);
PyModule_AddObject(m, "make_encoder", (PyObject*)&PyEncoderType);
+ return m;
}
+
+#if PY_MAJOR_VERSION >= 3
+PyMODINIT_FUNC
+PyInit__speedups(void)
+{
+ return moduleinit();
+}
+#else
+void
+init_speedups(void)
+{
+ moduleinit();
+}
+#endif
diff --git a/lib/simplejson/compat.py b/lib/simplejson/compat.py
new file mode 100644
index 00000000..a0af4a1c
--- /dev/null
+++ b/lib/simplejson/compat.py
@@ -0,0 +1,46 @@
+"""Python 3 compatibility shims
+"""
+import sys
+if sys.version_info[0] < 3:
+ PY3 = False
+ def b(s):
+ return s
+ def u(s):
+ return unicode(s, 'unicode_escape')
+ import cStringIO as StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ text_type = unicode
+ binary_type = str
+ string_types = (basestring,)
+ integer_types = (int, long)
+ unichr = unichr
+ reload_module = reload
+ def fromhex(s):
+ return s.decode('hex')
+
+else:
+ PY3 = True
+ if sys.version_info[:2] >= (3, 4):
+ from importlib import reload as reload_module
+ else:
+ from imp import reload as reload_module
+ import codecs
+ def b(s):
+ return codecs.latin_1_encode(s)[0]
+ def u(s):
+ return s
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ text_type = str
+ binary_type = bytes
+ string_types = (str,)
+ integer_types = (int,)
+
+ def unichr(s):
+ return u(chr(s))
+
+ def fromhex(s):
+ return bytes.fromhex(s)
+
+long_type = integer_types[-1]
diff --git a/lib/simplejson/decoder.py b/lib/simplejson/decoder.py
index dd57ddee..545e6587 100644
--- a/lib/simplejson/decoder.py
+++ b/lib/simplejson/decoder.py
@@ -1,21 +1,30 @@
"""Implementation of JSONDecoder
"""
+from __future__ import absolute_import
import re
import sys
import struct
+from .compat import fromhex, b, u, text_type, binary_type, PY3, unichr
+from .scanner import make_scanner, JSONDecodeError
-from lib.simplejson.scanner import make_scanner
-try:
- from lib.simplejson._speedups import scanstring as c_scanstring
-except ImportError:
- c_scanstring = None
+def _import_c_scanstring():
+ try:
+ from ._speedups import scanstring
+ return scanstring
+ except ImportError:
+ return None
+c_scanstring = _import_c_scanstring()
+# NOTE (3.1.0): JSONDecodeError may still be imported from this module for
+# compatibility, but it was never in the __all__
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
- _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
+ _BYTES = fromhex('7FF80000000000007FF0000000000000')
+ # The struct module in Python 2.4 would get frexp() out of range here
+ # when an endian is specified in the format string. Fixed in Python 2.5+
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
@@ -23,31 +32,6 @@ def _floatconstants():
NaN, PosInf, NegInf = _floatconstants()
-
-def linecol(doc, pos):
- lineno = doc.count('\n', 0, pos) + 1
- if lineno == 1:
- colno = pos
- else:
- colno = pos - doc.rindex('\n', 0, pos)
- return lineno, colno
-
-
-def errmsg(msg, doc, pos, end=None):
- # Note that this function is called from _speedups
- lineno, colno = linecol(doc, pos)
- if end is None:
- #fmt = '{0}: line {1} column {2} (char {3})'
- #return fmt.format(msg, lineno, colno, pos)
- fmt = '%s: line %d column %d (char %d)'
- return fmt % (msg, lineno, colno, pos)
- endlineno, endcolno = linecol(doc, end)
- #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
- #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
- fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
- return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
-
-
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
@@ -56,19 +40,21 @@ _CONSTANTS = {
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
- '"': u'"', '\\': u'\\', '/': u'/',
- 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
+ '"': u('"'), '\\': u('\u005c'), '/': u('/'),
+ 'b': u('\b'), 'f': u('\f'), 'n': u('\n'), 'r': u('\r'), 't': u('\t'),
}
DEFAULT_ENCODING = "utf-8"
-def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
+def py_scanstring(s, end, encoding=None, strict=True,
+ _b=BACKSLASH, _m=STRINGCHUNK.match, _join=u('').join,
+ _PY3=PY3, _maxunicode=sys.maxunicode):
"""Scan the string s for a JSON string. End is the index of the
character in s after the quote that started the JSON string.
Unescapes all valid JSON string escape sequences and raises ValueError
on attempt to decode an invalid string. If strict is False then literal
control characters are allowed in the string.
-
+
Returns a tuple of the decoded string and the index of the character in s
after the end quote."""
if encoding is None:
@@ -79,14 +65,14 @@ def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHU
while 1:
chunk = _m(s, end)
if chunk is None:
- raise ValueError(
- errmsg("Unterminated string starting at", s, begin))
+ raise JSONDecodeError(
+ "Unterminated string starting at", s, begin)
end = chunk.end()
content, terminator = chunk.groups()
# Content is contains zero or more unescaped string characters
if content:
- if not isinstance(content, unicode):
- content = unicode(content, encoding)
+ if not _PY3 and not isinstance(content, text_type):
+ content = text_type(content, encoding)
_append(content)
# Terminator is the end of string, a literal control character,
# or a backslash denoting that an escape sequence follows
@@ -94,49 +80,57 @@ def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHU
break
elif terminator != '\\':
if strict:
- msg = "Invalid control character %r at" % (terminator,)
- #msg = "Invalid control character {0!r} at".format(terminator)
- raise ValueError(errmsg(msg, s, end))
+ msg = "Invalid control character %r at"
+ raise JSONDecodeError(msg, s, end)
else:
_append(terminator)
continue
try:
esc = s[end]
except IndexError:
- raise ValueError(
- errmsg("Unterminated string starting at", s, begin))
+ raise JSONDecodeError(
+ "Unterminated string starting at", s, begin)
# If not a unicode escape sequence, must be in the lookup table
if esc != 'u':
try:
char = _b[esc]
except KeyError:
- msg = "Invalid \\escape: " + repr(esc)
- raise ValueError(errmsg(msg, s, end))
+ msg = "Invalid \\X escape sequence %r"
+ raise JSONDecodeError(msg, s, end)
end += 1
else:
# Unicode escape sequence
+ msg = "Invalid \\uXXXX escape sequence"
esc = s[end + 1:end + 5]
- next_end = end + 5
- if len(esc) != 4:
- msg = "Invalid \\uXXXX escape"
- raise ValueError(errmsg(msg, s, end))
- uni = int(esc, 16)
+ escX = esc[1:2]
+ if len(esc) != 4 or escX == 'x' or escX == 'X':
+ raise JSONDecodeError(msg, s, end - 1)
+ try:
+ uni = int(esc, 16)
+ except ValueError:
+ raise JSONDecodeError(msg, s, end - 1)
+ end += 5
# Check for surrogate pair on UCS-4 systems
- if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
- msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
- if not s[end + 5:end + 7] == '\\u':
- raise ValueError(errmsg(msg, s, end))
- esc2 = s[end + 7:end + 11]
- if len(esc2) != 4:
- raise ValueError(errmsg(msg, s, end))
- uni2 = int(esc2, 16)
- uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
- next_end += 6
+ # Note that this will join high/low surrogate pairs
+ # but will also pass unpaired surrogates through
+ if (_maxunicode > 65535 and
+ uni & 0xfc00 == 0xd800 and
+ s[end:end + 2] == '\\u'):
+ esc2 = s[end + 2:end + 6]
+ escX = esc2[1:2]
+ if len(esc2) == 4 and not (escX == 'x' or escX == 'X'):
+ try:
+ uni2 = int(esc2, 16)
+ except ValueError:
+ raise JSONDecodeError(msg, s, end)
+ if uni2 & 0xfc00 == 0xdc00:
+ uni = 0x10000 + (((uni - 0xd800) << 10) |
+ (uni2 - 0xdc00))
+ end += 6
char = unichr(uni)
- end = next_end
# Append the unescaped character
_append(char)
- return u''.join(chunks), end
+ return _join(chunks), end
# Use speedup if available
@@ -145,8 +139,15 @@ scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
-def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
- pairs = {}
+def JSONObject(state, encoding, strict, scan_once, object_hook,
+ object_pairs_hook, memo=None,
+ _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ (s, end) = state
+ # Backwards compatibility
+ if memo is None:
+ memo = {}
+ memo_get = memo.setdefault
+ pairs = []
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
@@ -157,19 +158,28 @@ def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
+ if object_pairs_hook is not None:
+ result = object_pairs_hook(pairs)
+ return result, end + 1
+ pairs = {}
+ if object_hook is not None:
+ pairs = object_hook(pairs)
return pairs, end + 1
elif nextchar != '"':
- raise ValueError(errmsg("Expecting property name", s, end))
+ raise JSONDecodeError(
+ "Expecting property name enclosed in double quotes",
+ s, end)
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
+ key = memo_get(key, key)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
- raise ValueError(errmsg("Expecting : delimiter", s, end))
+ raise JSONDecodeError("Expecting ':' delimiter", s, end)
end += 1
@@ -181,11 +191,8 @@ def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE
except IndexError:
pass
- try:
- value, end = scan_once(s, end)
- except StopIteration:
- raise ValueError(errmsg("Expecting object", s, end))
- pairs[key] = value
+ value, end = scan_once(s, end)
+ pairs.append((key, value))
try:
nextchar = s[end]
@@ -199,7 +206,7 @@ def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE
if nextchar == '}':
break
elif nextchar != ',':
- raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
+ raise JSONDecodeError("Expecting ',' delimiter or '}'", s, end - 1)
try:
nextchar = s[end]
@@ -214,13 +221,20 @@ def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE
end += 1
if nextchar != '"':
- raise ValueError(errmsg("Expecting property name", s, end - 1))
+ raise JSONDecodeError(
+ "Expecting property name enclosed in double quotes",
+ s, end - 1)
+ if object_pairs_hook is not None:
+ result = object_pairs_hook(pairs)
+ return result, end
+ pairs = dict(pairs)
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
-def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+def JSONArray(state, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ (s, end) = state
values = []
nextchar = s[end:end + 1]
if nextchar in _ws:
@@ -229,12 +243,11 @@ def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
# Look-ahead for trivial empty array
if nextchar == ']':
return values, end + 1
+ elif nextchar == '':
+ raise JSONDecodeError("Expecting value or ']'", s, end)
_append = values.append
while True:
- try:
- value, end = scan_once(s, end)
- except StopIteration:
- raise ValueError(errmsg("Expecting object", s, end))
+ value, end = scan_once(s, end)
_append(value)
nextchar = s[end:end + 1]
if nextchar in _ws:
@@ -244,7 +257,7 @@ def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
if nextchar == ']':
break
elif nextchar != ',':
- raise ValueError(errmsg("Expecting , delimiter", s, end))
+ raise JSONDecodeError("Expecting ',' delimiter or ']'", s, end - 1)
try:
if s[end] in _ws:
@@ -268,7 +281,7 @@ class JSONDecoder(object):
+---------------+-------------------+
| array | list |
+---------------+-------------------+
- | string | unicode |
+ | string | str, unicode |
+---------------+-------------------+
| number (int) | int, long |
+---------------+-------------------+
@@ -287,37 +300,56 @@ class JSONDecoder(object):
"""
def __init__(self, encoding=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, strict=True):
- """``encoding`` determines the encoding used to interpret any ``str``
- objects decoded by this instance (utf-8 by default). It has no
- effect when decoding ``unicode`` objects.
+ parse_int=None, parse_constant=None, strict=True,
+ object_pairs_hook=None):
+ """
+ *encoding* determines the encoding used to interpret any
+ :class:`str` objects decoded by this instance (``'utf-8'`` by
+ default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
- strings of other encodings should be passed in as ``unicode``.
+ strings of other encodings should be passed in as :class:`unicode`.
- ``object_hook``, if specified, will be called with the result
- of every JSON object decoded and its return value will be used in
- place of the given ``dict``. This can be used to provide custom
+ *object_hook*, if specified, will be called with the result of every
+ JSON object decoded and its return value will be used in place of the
+ given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
- ``parse_float``, if specified, will be called with the string
- of every JSON float to be decoded. By default this is equivalent to
- float(num_str). This can be used to use another datatype or parser
- for JSON floats (e.g. decimal.Decimal).
+ *object_pairs_hook* is an optional function that will be called with
+ the result of any object literal decode with an ordered list of pairs.
+ The return value of *object_pairs_hook* will be used instead of the
+ :class:`dict`. This feature can be used to implement custom decoders
+ that rely on the order that the key and value pairs are decoded (for
+ example, :func:`collections.OrderedDict` will remember the order of
+ insertion). If *object_hook* is also defined, the *object_pairs_hook*
+ takes priority.
- ``parse_int``, if specified, will be called with the string
- of every JSON int to be decoded. By default this is equivalent to
- int(num_str). This can be used to use another datatype or parser
- for JSON integers (e.g. float).
+ *parse_float*, if specified, will be called with the string of every
+ JSON float to be decoded. By default, this is equivalent to
+ ``float(num_str)``. This can be used to use another datatype or parser
+ for JSON floats (e.g. :class:`decimal.Decimal`).
- ``parse_constant``, if specified, will be called with one of the
- following strings: -Infinity, Infinity, NaN.
- This can be used to raise an exception if invalid JSON numbers
- are encountered.
+ *parse_int*, if specified, will be called with the string of every
+ JSON int to be decoded. By default, this is equivalent to
+ ``int(num_str)``. This can be used to use another datatype or parser
+ for JSON integers (e.g. :class:`float`).
+
+ *parse_constant*, if specified, will be called with one of the
+ following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
+ can be used to raise an exception if invalid JSON numbers are
+ encountered.
+
+ *strict* controls the parser's behavior when it encounters an
+ invalid control character in a string. The default setting of
+ ``True`` means that unescaped control characters are parse errors, if
+ ``False`` then control characters will be allowed in strings.
"""
+ if encoding is None:
+ encoding = DEFAULT_ENCODING
self.encoding = encoding
self.object_hook = object_hook
+ self.object_pairs_hook = object_pairs_hook
self.parse_float = parse_float or float
self.parse_int = parse_int or int
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
@@ -325,30 +357,44 @@ class JSONDecoder(object):
self.parse_object = JSONObject
self.parse_array = JSONArray
self.parse_string = scanstring
+ self.memo = {}
self.scan_once = make_scanner(self)
- def decode(self, s, _w=WHITESPACE.match):
+ def decode(self, s, _w=WHITESPACE.match, _PY3=PY3):
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document)
"""
- obj, end = self.raw_decode(s, idx=_w(s, 0).end())
+ if _PY3 and isinstance(s, binary_type):
+ s = s.decode(self.encoding)
+ obj, end = self.raw_decode(s)
end = _w(s, end).end()
if end != len(s):
- raise ValueError(errmsg("Extra data", s, end, len(s)))
+ raise JSONDecodeError("Extra data", s, end, len(s))
return obj
- def raw_decode(self, s, idx=0):
- """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
- with a JSON document) and return a 2-tuple of the Python
+ def raw_decode(self, s, idx=0, _w=WHITESPACE.match, _PY3=PY3):
+ """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
+ beginning with a JSON document) and return a 2-tuple of the Python
representation and the index in ``s`` where the document ended.
+ Optionally, ``idx`` can be used to specify an offset in ``s`` where
+ the JSON document begins.
This can be used to decode a JSON document from a string that may
have extraneous data at the end.
"""
- try:
- obj, end = self.scan_once(s, idx)
- except StopIteration:
- raise ValueError("No JSON object could be decoded")
- return obj, end
+ if idx < 0:
+ # Ensure that raw_decode bails on negative indexes, the regex
+ # would otherwise mask this behavior. #98
+ raise JSONDecodeError('Expecting value', s, idx)
+ if _PY3 and not isinstance(s, text_type):
+ raise TypeError("Input string must be text, not bytes")
+ # strip UTF-8 bom
+ if len(s) > idx:
+ ord0 = ord(s[idx])
+ if ord0 == 0xfeff:
+ idx += 1
+ elif ord0 == 0xef and s[idx:idx + 3] == '\xef\xbb\xbf':
+ idx += 3
+ return self.scan_once(s, idx=_w(s, idx).end())
diff --git a/lib/simplejson/encoder.py b/lib/simplejson/encoder.py
index 15c35f7a..d240438e 100644
--- a/lib/simplejson/encoder.py
+++ b/lib/simplejson/encoder.py
@@ -1,17 +1,25 @@
"""Implementation of JSONEncoder
"""
+from __future__ import absolute_import
import re
+from operator import itemgetter
+# Do not import Decimal directly to avoid reload issues
+import decimal
+from .compat import u, unichr, binary_type, string_types, integer_types, PY3
+def _import_speedups():
+ try:
+ from . import _speedups
+ return _speedups.encode_basestring_ascii, _speedups.make_encoder
+ except ImportError:
+ return None, None
+c_encode_basestring_ascii, c_make_encoder = _import_speedups()
-try:
- from lib.simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
-except ImportError:
- c_encode_basestring_ascii = None
-try:
- from lib.simplejson._speedups import make_encoder as c_make_encoder
-except ImportError:
- c_make_encoder = None
+from simplejson.decoder import PosInf
-ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
+#ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
+# This is required because u() will mangle the string and ur'' isn't valid
+# python3 syntax
+ESCAPE = re.compile(u'[\\x00-\\x1f\\\\"\\b\\f\\n\\r\\t\u2028\u2029]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
@@ -26,26 +34,36 @@ ESCAPE_DCT = {
for i in range(0x20):
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
+for i in [0x2028, 0x2029]:
+ ESCAPE_DCT.setdefault(unichr(i), '\\u%04x' % (i,))
-# Assume this produces an infinity on all machines (probably not guaranteed)
-INFINITY = float('1e66666')
FLOAT_REPR = repr
-def encode_basestring(s):
+def encode_basestring(s, _PY3=PY3, _q=u('"')):
"""Return a JSON representation of a Python string
"""
+ if _PY3:
+ if isinstance(s, binary_type):
+ s = s.decode('utf-8')
+ else:
+ if isinstance(s, str) and HAS_UTF8.search(s) is not None:
+ s = s.decode('utf-8')
def replace(match):
return ESCAPE_DCT[match.group(0)]
- return '"' + ESCAPE.sub(replace, s) + '"'
+ return _q + ESCAPE.sub(replace, s) + _q
-def py_encode_basestring_ascii(s):
+def py_encode_basestring_ascii(s, _PY3=PY3):
"""Return an ASCII-only JSON representation of a Python string
"""
- if isinstance(s, str) and HAS_UTF8.search(s) is not None:
- s = s.decode('utf-8')
+ if _PY3:
+ if isinstance(s, binary_type):
+ s = s.decode('utf-8')
+ else:
+ if isinstance(s, str) and HAS_UTF8.search(s) is not None:
+ s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
@@ -65,7 +83,8 @@ def py_encode_basestring_ascii(s):
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
-encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
+encode_basestring_ascii = (
+ c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
"""Extensible JSON encoder for Python data structures.
@@ -75,7 +94,7 @@ class JSONEncoder(object):
+-------------------+---------------+
| Python | JSON |
+===================+===============+
- | dict | object |
+ | dict, namedtuple | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
@@ -98,9 +117,14 @@ class JSONEncoder(object):
"""
item_separator = ', '
key_separator = ': '
+
def __init__(self, skipkeys=False, ensure_ascii=True,
- check_circular=True, allow_nan=True, sort_keys=False,
- indent=None, separators=None, encoding='utf-8', default=None):
+ check_circular=True, allow_nan=True, sort_keys=False,
+ indent=None, separators=None, encoding='utf-8', default=None,
+ use_decimal=True, namedtuple_as_object=True,
+ tuple_as_array=True, bigint_as_string=False,
+ item_sort_key=None, for_json=False, ignore_nan=False,
+ int_as_string_bitcount=None):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
@@ -125,14 +149,17 @@ class JSONEncoder(object):
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
- If indent is a non-negative integer, then JSON array
- elements and object members will be pretty-printed with that
- indent level. An indent level of 0 will only insert newlines.
- None is the most compact representation.
+ If indent is a string, then JSON array elements and object members
+ will be pretty-printed with a newline followed by that string repeated
+ for each level of nesting. ``None`` (the default) selects the most compact
+ representation without any newlines. For backwards compatibility with
+ versions of simplejson earlier than 2.1.0, an integer is also accepted
+ and is converted to a string with that many spaces.
- If specified, separators should be a (item_separator, key_separator)
- tuple. The default is (', ', ': '). To get the most compact JSON
- representation you should specify (',', ':') to eliminate whitespace.
+ If specified, separators should be an (item_separator, key_separator)
+ tuple. The default is (', ', ': ') if *indent* is ``None`` and
+ (',', ': ') otherwise. To get the most compact JSON representation,
+ you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
@@ -142,6 +169,37 @@ class JSONEncoder(object):
transformed into unicode using that encoding prior to JSON-encoding.
The default is UTF-8.
+ If use_decimal is true (not the default), ``decimal.Decimal`` will
+ be supported directly by the encoder. For the inverse, decode JSON
+ with ``parse_float=decimal.Decimal``.
+
+ If namedtuple_as_object is true (the default), objects with
+ ``_asdict()`` methods will be encoded as JSON objects.
+
+ If tuple_as_array is true (the default), tuple (and subclasses) will
+ be encoded as JSON arrays.
+
+ If bigint_as_string is true (not the default), ints 2**53 and higher
+ or lower than -2**53 will be encoded as strings. This is to avoid the
+ rounding that happens in Javascript otherwise.
+
+ If int_as_string_bitcount is a positive number (n), then int of size
+ greater than or equal to 2**n or lower than or equal to -2**n will be
+ encoded as strings.
+
+ If specified, item_sort_key is a callable used to sort the items in
+ each dictionary. This is useful if you want to sort items other than
+ in alphabetical order by key.
+
+ If for_json is true (not the default), objects with a ``for_json()``
+ method will use the return value of that method for encoding as JSON
+ instead of the object.
+
+ If *ignore_nan* is true (default: ``False``), then out of range
+ :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized
+ as ``null`` in compliance with the ECMA-262 specification. If true,
+ this will override *allow_nan*.
+
"""
self.skipkeys = skipkeys
@@ -149,9 +207,21 @@ class JSONEncoder(object):
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
+ self.use_decimal = use_decimal
+ self.namedtuple_as_object = namedtuple_as_object
+ self.tuple_as_array = tuple_as_array
+ self.bigint_as_string = bigint_as_string
+ self.item_sort_key = item_sort_key
+ self.for_json = for_json
+ self.ignore_nan = ignore_nan
+ self.int_as_string_bitcount = int_as_string_bitcount
+ if indent is not None and not isinstance(indent, string_types):
+ indent = indent * ' '
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
+ elif indent is not None:
+ self.item_separator = ','
if default is not None:
self.default = default
self.encoding = encoding
@@ -179,17 +249,17 @@ class JSONEncoder(object):
def encode(self, o):
"""Return a JSON string representation of a Python data structure.
+ >>> from simplejson import JSONEncoder
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
"""
# This is for extremely simple cases and benchmarks.
- if isinstance(o, basestring):
- if isinstance(o, str):
- _encoding = self.encoding
- if (_encoding is not None
- and not (_encoding == 'utf-8')):
- o = o.decode(_encoding)
+ if isinstance(o, binary_type):
+ _encoding = self.encoding
+ if (_encoding is not None and not (_encoding == 'utf-8')):
+ o = o.decode(_encoding)
+ if isinstance(o, string_types):
if self.ensure_ascii:
return encode_basestring_ascii(o)
else:
@@ -200,7 +270,10 @@ class JSONEncoder(object):
chunks = self.iterencode(o, _one_shot=True)
if not isinstance(chunks, (list, tuple)):
chunks = list(chunks)
- return ''.join(chunks)
+ if self.ensure_ascii:
+ return ''.join(chunks)
+ else:
+ return u''.join(chunks)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
@@ -222,13 +295,15 @@ class JSONEncoder(object):
_encoder = encode_basestring
if self.encoding != 'utf-8':
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
- if isinstance(o, str):
+ if isinstance(o, binary_type):
o = o.decode(_encoding)
return _orig_encoder(o)
- def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
- # Check for specials. Note that this type of test is processor- and/or
- # platform-specific, so do tests which don't depend on the internals.
+ def floatstr(o, allow_nan=self.allow_nan, ignore_nan=self.ignore_nan,
+ _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
+ # Check for specials. Note that this type of test is processor
+ # and/or platform-specific, so do tests which don't depend on
+ # the internals.
if o != o:
text = 'NaN'
@@ -237,44 +312,123 @@ class JSONEncoder(object):
elif o == _neginf:
text = '-Infinity'
else:
+ if type(o) != float:
+ # See #118, do not trust custom str/repr
+ o = float(o)
return _repr(o)
- if not allow_nan:
+ if ignore_nan:
+ text = 'null'
+ elif not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
-
- if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
+ key_memo = {}
+ int_as_string_bitcount = (
+ 53 if self.bigint_as_string else self.int_as_string_bitcount)
+ if (_one_shot and c_make_encoder is not None
+ and self.indent is None):
_iterencode = c_make_encoder(
markers, self.default, _encoder, self.indent,
self.key_separator, self.item_separator, self.sort_keys,
- self.skipkeys, self.allow_nan)
+ self.skipkeys, self.allow_nan, key_memo, self.use_decimal,
+ self.namedtuple_as_object, self.tuple_as_array,
+ int_as_string_bitcount,
+ self.item_sort_key, self.encoding, self.for_json,
+ self.ignore_nan, decimal.Decimal)
else:
_iterencode = _make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
- self.skipkeys, _one_shot)
- return _iterencode(o, 0)
+ self.skipkeys, _one_shot, self.use_decimal,
+ self.namedtuple_as_object, self.tuple_as_array,
+ int_as_string_bitcount,
+ self.item_sort_key, self.encoding, self.for_json,
+ Decimal=decimal.Decimal)
+ try:
+ return _iterencode(o, 0)
+ finally:
+ key_memo.clear()
-def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
+
+class JSONEncoderForHTML(JSONEncoder):
+ """An encoder that produces JSON safe to embed in HTML.
+
+ To embed JSON content in, say, a script tag on a web page, the
+ characters &, < and > should be escaped. They cannot be escaped
+ with the usual entities (e.g. &) because they are not expanded
+ within
-
-
-""")
-
- def test_optional_path(self):
- self.assertEqual(self.fetch_json("/optional_path/foo"),
- {u("path"): u("foo")})
- self.assertEqual(self.fetch_json("/optional_path/"),
- {u("path"): None})
-
- def test_multi_header(self):
- response = self.fetch("/multi_header")
- self.assertEqual(response.headers["x-overwrite"], "2")
- self.assertEqual(response.headers.get_list("x-multi"), ["3", "4"])
-
- def test_redirect(self):
- response = self.fetch("/redirect?permanent=1", follow_redirects=False)
- self.assertEqual(response.code, 301)
- response = self.fetch("/redirect?permanent=0", follow_redirects=False)
- self.assertEqual(response.code, 302)
- response = self.fetch("/redirect?status=307", follow_redirects=False)
- self.assertEqual(response.code, 307)
-
- def test_web_redirect(self):
- response = self.fetch("/web_redirect_permanent", follow_redirects=False)
- self.assertEqual(response.code, 301)
- self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
- response = self.fetch("/web_redirect", follow_redirects=False)
- self.assertEqual(response.code, 302)
- self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
-
- def test_web_redirect_double_slash(self):
- response = self.fetch("//web_redirect_double_slash", follow_redirects=False)
- self.assertEqual(response.code, 301)
- self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
-
- def test_header_injection(self):
- response = self.fetch("/header_injection")
- self.assertEqual(response.body, b"ok")
-
- def test_get_argument(self):
- response = self.fetch("/get_argument?foo=bar")
- self.assertEqual(response.body, b"bar")
- response = self.fetch("/get_argument?foo=")
- self.assertEqual(response.body, b"")
- response = self.fetch("/get_argument")
- self.assertEqual(response.body, b"default")
-
- # Test merging of query and body arguments.
- # In singular form, body arguments take precedence over query arguments.
- body = urllib_parse.urlencode(dict(foo="hello"))
- response = self.fetch("/get_argument?foo=bar", method="POST", body=body)
- self.assertEqual(response.body, b"hello")
- # In plural methods they are merged.
- response = self.fetch("/get_arguments?foo=bar",
- method="POST", body=body)
- self.assertEqual(json_decode(response.body),
- dict(default=['bar', 'hello'],
- query=['bar'],
- body=['hello']))
-
- def test_get_query_arguments(self):
- # send as a post so we can ensure the separation between query
- # string and body arguments.
- body = urllib_parse.urlencode(dict(foo="hello"))
- response = self.fetch("/get_argument?source=query&foo=bar",
- method="POST", body=body)
- self.assertEqual(response.body, b"bar")
- response = self.fetch("/get_argument?source=query&foo=",
- method="POST", body=body)
- self.assertEqual(response.body, b"")
- response = self.fetch("/get_argument?source=query",
- method="POST", body=body)
- self.assertEqual(response.body, b"default")
-
- def test_get_body_arguments(self):
- body = urllib_parse.urlencode(dict(foo="bar"))
- response = self.fetch("/get_argument?source=body&foo=hello",
- method="POST", body=body)
- self.assertEqual(response.body, b"bar")
-
- body = urllib_parse.urlencode(dict(foo=""))
- response = self.fetch("/get_argument?source=body&foo=hello",
- method="POST", body=body)
- self.assertEqual(response.body, b"")
-
- body = urllib_parse.urlencode(dict())
- response = self.fetch("/get_argument?source=body&foo=hello",
- method="POST", body=body)
- self.assertEqual(response.body, b"default")
-
- def test_no_gzip(self):
- response = self.fetch('/get_argument')
- self.assertNotIn('Accept-Encoding', response.headers.get('Vary', ''))
- self.assertNotIn('gzip', response.headers.get('Content-Encoding', ''))
-
-
-class NonWSGIWebTests(WebTestCase):
- def get_handlers(self):
- return [("/flow_control", FlowControlHandler),
- ("/empty_flush", EmptyFlushCallbackHandler),
- ]
-
- def test_flow_control(self):
- self.assertEqual(self.fetch("/flow_control").body, b"123")
-
- def test_empty_flush(self):
- response = self.fetch("/empty_flush")
- self.assertEqual(response.body, b"ok")
-
-
-@wsgi_safe
-class ErrorResponseTest(WebTestCase):
- def get_handlers(self):
- class DefaultHandler(RequestHandler):
- def get(self):
- if self.get_argument("status", None):
- raise HTTPError(int(self.get_argument("status")))
- 1 / 0
-
- class WriteErrorHandler(RequestHandler):
- def get(self):
- if self.get_argument("status", None):
- self.send_error(int(self.get_argument("status")))
- else:
- 1 / 0
-
- def write_error(self, status_code, **kwargs):
- self.set_header("Content-Type", "text/plain")
- if "exc_info" in kwargs:
- self.write("Exception: %s" % kwargs["exc_info"][0].__name__)
- else:
- self.write("Status: %d" % status_code)
-
- class FailedWriteErrorHandler(RequestHandler):
- def get(self):
- 1 / 0
-
- def write_error(self, status_code, **kwargs):
- raise Exception("exception in write_error")
-
- return [url("/default", DefaultHandler),
- url("/write_error", WriteErrorHandler),
- url("/failed_write_error", FailedWriteErrorHandler),
- ]
-
- def test_default(self):
- with ExpectLog(app_log, "Uncaught exception"):
- response = self.fetch("/default")
- self.assertEqual(response.code, 500)
- self.assertTrue(b"500: Internal Server Error" in response.body)
-
- response = self.fetch("/default?status=503")
- self.assertEqual(response.code, 503)
- self.assertTrue(b"503: Service Unavailable" in response.body)
-
- def test_write_error(self):
- with ExpectLog(app_log, "Uncaught exception"):
- response = self.fetch("/write_error")
- self.assertEqual(response.code, 500)
- self.assertEqual(b"Exception: ZeroDivisionError", response.body)
-
- response = self.fetch("/write_error?status=503")
- self.assertEqual(response.code, 503)
- self.assertEqual(b"Status: 503", response.body)
-
- def test_failed_write_error(self):
- with ExpectLog(app_log, "Uncaught exception"):
- response = self.fetch("/failed_write_error")
- self.assertEqual(response.code, 500)
- self.assertEqual(b"", response.body)
-
-
-@wsgi_safe
-class StaticFileTest(WebTestCase):
- # The expected MD5 hash of robots.txt, used in tests that call
- # StaticFileHandler.get_version
- robots_txt_hash = b"f71d20196d4caf35b6a670db8c70b03d"
- static_dir = os.path.join(os.path.dirname(__file__), 'static')
-
- def get_handlers(self):
- class StaticUrlHandler(RequestHandler):
- def get(self, path):
- with_v = int(self.get_argument('include_version', 1))
- self.write(self.static_url(path, include_version=with_v))
-
- class AbsoluteStaticUrlHandler(StaticUrlHandler):
- include_host = True
-
- class OverrideStaticUrlHandler(RequestHandler):
- def get(self, path):
- do_include = bool(self.get_argument("include_host"))
- self.include_host = not do_include
-
- regular_url = self.static_url(path)
- override_url = self.static_url(path, include_host=do_include)
- if override_url == regular_url:
- return self.write(str(False))
-
- protocol = self.request.protocol + "://"
- protocol_length = len(protocol)
- check_regular = regular_url.find(protocol, 0, protocol_length)
- check_override = override_url.find(protocol, 0, protocol_length)
-
- if do_include:
- result = (check_override == 0 and check_regular == -1)
- else:
- result = (check_override == -1 and check_regular == 0)
- self.write(str(result))
-
- return [('/static_url/(.*)', StaticUrlHandler),
- ('/abs_static_url/(.*)', AbsoluteStaticUrlHandler),
- ('/override_static_url/(.*)', OverrideStaticUrlHandler)]
-
- def get_app_kwargs(self):
- return dict(static_path=relpath('static'))
-
- def test_static_files(self):
- response = self.fetch('/robots.txt')
- self.assertTrue(b"Disallow: /" in response.body)
-
- response = self.fetch('/static/robots.txt')
- self.assertTrue(b"Disallow: /" in response.body)
-
- def test_static_url(self):
- response = self.fetch("/static_url/robots.txt")
- self.assertEqual(response.body,
- b"/static/robots.txt?v=" + self.robots_txt_hash)
-
- def test_absolute_static_url(self):
- response = self.fetch("/abs_static_url/robots.txt")
- self.assertEqual(response.body, (
- utf8(self.get_url("/")) +
- b"static/robots.txt?v=" +
- self.robots_txt_hash
- ))
-
- def test_relative_version_exclusion(self):
- response = self.fetch("/static_url/robots.txt?include_version=0")
- self.assertEqual(response.body, b"/static/robots.txt")
-
- def test_absolute_version_exclusion(self):
- response = self.fetch("/abs_static_url/robots.txt?include_version=0")
- self.assertEqual(response.body,
- utf8(self.get_url("/") + "static/robots.txt"))
-
- def test_include_host_override(self):
- self._trigger_include_host_check(False)
- self._trigger_include_host_check(True)
-
- def _trigger_include_host_check(self, include_host):
- path = "/override_static_url/robots.txt?include_host=%s"
- response = self.fetch(path % int(include_host))
- self.assertEqual(response.body, utf8(str(True)))
-
- def get_and_head(self, *args, **kwargs):
- """Performs a GET and HEAD request and returns the GET response.
-
- Fails if any ``Content-*`` headers returned by the two requests
- differ.
- """
- head_response = self.fetch(*args, method="HEAD", **kwargs)
- get_response = self.fetch(*args, method="GET", **kwargs)
- content_headers = set()
- for h in itertools.chain(head_response.headers, get_response.headers):
- if h.startswith('Content-'):
- content_headers.add(h)
- for h in content_headers:
- self.assertEqual(head_response.headers.get(h),
- get_response.headers.get(h),
- "%s differs between GET (%s) and HEAD (%s)" %
- (h, head_response.headers.get(h),
- get_response.headers.get(h)))
- return get_response
-
- def test_static_304_if_modified_since(self):
- response1 = self.get_and_head("/static/robots.txt")
- response2 = self.get_and_head("/static/robots.txt", headers={
- 'If-Modified-Since': response1.headers['Last-Modified']})
- self.assertEqual(response2.code, 304)
- self.assertTrue('Content-Length' not in response2.headers)
- self.assertTrue('Last-Modified' not in response2.headers)
-
- def test_static_304_if_none_match(self):
- response1 = self.get_and_head("/static/robots.txt")
- response2 = self.get_and_head("/static/robots.txt", headers={
- 'If-None-Match': response1.headers['Etag']})
- self.assertEqual(response2.code, 304)
-
- def test_static_if_modified_since_pre_epoch(self):
- # On windows, the functions that work with time_t do not accept
- # negative values, and at least one client (processing.js) seems
- # to use if-modified-since 1/1/1960 as a cache-busting technique.
- response = self.get_and_head("/static/robots.txt", headers={
- 'If-Modified-Since': 'Fri, 01 Jan 1960 00:00:00 GMT'})
- self.assertEqual(response.code, 200)
-
- def test_static_if_modified_since_time_zone(self):
- # Instead of the value from Last-Modified, make requests with times
- # chosen just before and after the known modification time
- # of the file to ensure that the right time zone is being used
- # when parsing If-Modified-Since.
- stat = os.stat(relpath('static/robots.txt'))
-
- response = self.get_and_head('/static/robots.txt', headers={
- 'If-Modified-Since': format_timestamp(stat.st_mtime - 1)})
- self.assertEqual(response.code, 200)
- response = self.get_and_head('/static/robots.txt', headers={
- 'If-Modified-Since': format_timestamp(stat.st_mtime + 1)})
- self.assertEqual(response.code, 304)
-
- def test_static_etag(self):
- response = self.get_and_head('/static/robots.txt')
- self.assertEqual(utf8(response.headers.get("Etag")),
- b'"' + self.robots_txt_hash + b'"')
-
- def test_static_with_range(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=0-9'})
- self.assertEqual(response.code, 206)
- self.assertEqual(response.body, b"User-agent")
- self.assertEqual(utf8(response.headers.get("Etag")),
- b'"' + self.robots_txt_hash + b'"')
- self.assertEqual(response.headers.get("Content-Length"), "10")
- self.assertEqual(response.headers.get("Content-Range"),
- "bytes 0-9/26")
-
- def test_static_with_range_full_file(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=0-'})
- # Note: Chrome refuses to play audio if it gets an HTTP 206 in response
- # to ``Range: bytes=0-`` :(
- self.assertEqual(response.code, 200)
- robots_file_path = os.path.join(self.static_dir, "robots.txt")
- with open(robots_file_path) as f:
- self.assertEqual(response.body, utf8(f.read()))
- self.assertEqual(response.headers.get("Content-Length"), "26")
- self.assertEqual(response.headers.get("Content-Range"), None)
-
- def test_static_with_range_full_past_end(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=0-10000000'})
- self.assertEqual(response.code, 200)
- robots_file_path = os.path.join(self.static_dir, "robots.txt")
- with open(robots_file_path) as f:
- self.assertEqual(response.body, utf8(f.read()))
- self.assertEqual(response.headers.get("Content-Length"), "26")
- self.assertEqual(response.headers.get("Content-Range"), None)
-
- def test_static_with_range_partial_past_end(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=1-10000000'})
- self.assertEqual(response.code, 206)
- robots_file_path = os.path.join(self.static_dir, "robots.txt")
- with open(robots_file_path) as f:
- self.assertEqual(response.body, utf8(f.read()[1:]))
- self.assertEqual(response.headers.get("Content-Length"), "25")
- self.assertEqual(response.headers.get("Content-Range"), "bytes 1-25/26")
-
- def test_static_with_range_end_edge(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=22-'})
- self.assertEqual(response.body, b": /\n")
- self.assertEqual(response.headers.get("Content-Length"), "4")
- self.assertEqual(response.headers.get("Content-Range"),
- "bytes 22-25/26")
-
- def test_static_with_range_neg_end(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=-4'})
- self.assertEqual(response.body, b": /\n")
- self.assertEqual(response.headers.get("Content-Length"), "4")
- self.assertEqual(response.headers.get("Content-Range"),
- "bytes 22-25/26")
-
- def test_static_invalid_range(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'asdf'})
- self.assertEqual(response.code, 200)
-
- def test_static_unsatisfiable_range_zero_suffix(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=-0'})
- self.assertEqual(response.headers.get("Content-Range"),
- "bytes */26")
- self.assertEqual(response.code, 416)
-
- def test_static_unsatisfiable_range_invalid_start(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=26'})
- self.assertEqual(response.code, 416)
- self.assertEqual(response.headers.get("Content-Range"),
- "bytes */26")
-
- def test_static_head(self):
- response = self.fetch('/static/robots.txt', method='HEAD')
- self.assertEqual(response.code, 200)
- # No body was returned, but we did get the right content length.
- self.assertEqual(response.body, b'')
- self.assertEqual(response.headers['Content-Length'], '26')
- self.assertEqual(utf8(response.headers['Etag']),
- b'"' + self.robots_txt_hash + b'"')
-
- def test_static_head_range(self):
- response = self.fetch('/static/robots.txt', method='HEAD',
- headers={'Range': 'bytes=1-4'})
- self.assertEqual(response.code, 206)
- self.assertEqual(response.body, b'')
- self.assertEqual(response.headers['Content-Length'], '4')
- self.assertEqual(utf8(response.headers['Etag']),
- b'"' + self.robots_txt_hash + b'"')
-
- def test_static_range_if_none_match(self):
- response = self.get_and_head('/static/robots.txt', headers={
- 'Range': 'bytes=1-4',
- 'If-None-Match': b'"' + self.robots_txt_hash + b'"'})
- self.assertEqual(response.code, 304)
- self.assertEqual(response.body, b'')
- self.assertTrue('Content-Length' not in response.headers)
- self.assertEqual(utf8(response.headers['Etag']),
- b'"' + self.robots_txt_hash + b'"')
-
- def test_static_404(self):
- response = self.get_and_head('/static/blarg')
- self.assertEqual(response.code, 404)
-
-
-@wsgi_safe
-class StaticDefaultFilenameTest(WebTestCase):
- def get_app_kwargs(self):
- return dict(static_path=relpath('static'),
- static_handler_args=dict(default_filename='index.html'))
-
- def get_handlers(self):
- return []
-
- def test_static_default_filename(self):
- response = self.fetch('/static/dir/', follow_redirects=False)
- self.assertEqual(response.code, 200)
- self.assertEqual(b'this is the index\n', response.body)
-
- def test_static_default_redirect(self):
- response = self.fetch('/static/dir', follow_redirects=False)
- self.assertEqual(response.code, 301)
- self.assertTrue(response.headers['Location'].endswith('/static/dir/'))
-
-
-@wsgi_safe
-class StaticFileWithPathTest(WebTestCase):
- def get_app_kwargs(self):
- return dict(static_path=relpath('static'),
- static_handler_args=dict(default_filename='index.html'))
-
- def get_handlers(self):
- return [("/foo/(.*)", StaticFileHandler, {
- "path": relpath("templates/"),
- })]
-
- def test_serve(self):
- response = self.fetch("/foo/utf8.html")
- self.assertEqual(response.body, b"H\xc3\xa9llo\n")
-
-
-@wsgi_safe
-class CustomStaticFileTest(WebTestCase):
- def get_handlers(self):
- class MyStaticFileHandler(StaticFileHandler):
- @classmethod
- def make_static_url(cls, settings, path):
- version_hash = cls.get_version(settings, path)
- extension_index = path.rindex('.')
- before_version = path[:extension_index]
- after_version = path[(extension_index + 1):]
- return '/static/%s.%s.%s' % (before_version, version_hash,
- after_version)
-
- def parse_url_path(self, url_path):
- extension_index = url_path.rindex('.')
- version_index = url_path.rindex('.', 0, extension_index)
- return '%s%s' % (url_path[:version_index],
- url_path[extension_index:])
-
- @classmethod
- def get_absolute_path(cls, settings, path):
- return 'CustomStaticFileTest:' + path
-
- def validate_absolute_path(self, root, absolute_path):
- return absolute_path
-
- @classmethod
- def get_content(self, path, start=None, end=None):
- assert start is None and end is None
- if path == 'CustomStaticFileTest:foo.txt':
- return b'bar'
- raise Exception("unexpected path %r" % path)
-
- def get_content_size(self):
- if self.absolute_path == 'CustomStaticFileTest:foo.txt':
- return 3
- raise Exception("unexpected path %r" % self.absolute_path)
-
- def get_modified_time(self):
- return None
-
- @classmethod
- def get_version(cls, settings, path):
- return "42"
-
- class StaticUrlHandler(RequestHandler):
- def get(self, path):
- self.write(self.static_url(path))
-
- self.static_handler_class = MyStaticFileHandler
-
- return [("/static_url/(.*)", StaticUrlHandler)]
-
- def get_app_kwargs(self):
- return dict(static_path="dummy",
- static_handler_class=self.static_handler_class)
-
- def test_serve(self):
- response = self.fetch("/static/foo.42.txt")
- self.assertEqual(response.body, b"bar")
-
- def test_static_url(self):
- with ExpectLog(gen_log, "Could not open static file", required=False):
- response = self.fetch("/static_url/foo.txt")
- self.assertEqual(response.body, b"/static/foo.42.txt")
-
-
-@wsgi_safe
-class HostMatchingTest(WebTestCase):
- class Handler(RequestHandler):
- def initialize(self, reply):
- self.reply = reply
-
- def get(self):
- self.write(self.reply)
-
- def get_handlers(self):
- return [("/foo", HostMatchingTest.Handler, {"reply": "wildcard"})]
-
- def test_host_matching(self):
- self.app.add_handlers("www.example.com",
- [("/foo", HostMatchingTest.Handler, {"reply": "[0]"})])
- self.app.add_handlers(r"www\.example\.com",
- [("/bar", HostMatchingTest.Handler, {"reply": "[1]"})])
- self.app.add_handlers("www.example.com",
- [("/baz", HostMatchingTest.Handler, {"reply": "[2]"})])
-
- response = self.fetch("/foo")
- self.assertEqual(response.body, b"wildcard")
- response = self.fetch("/bar")
- self.assertEqual(response.code, 404)
- response = self.fetch("/baz")
- self.assertEqual(response.code, 404)
-
- response = self.fetch("/foo", headers={'Host': 'www.example.com'})
- self.assertEqual(response.body, b"[0]")
- response = self.fetch("/bar", headers={'Host': 'www.example.com'})
- self.assertEqual(response.body, b"[1]")
- response = self.fetch("/baz", headers={'Host': 'www.example.com'})
- self.assertEqual(response.body, b"[2]")
-
-
-@wsgi_safe
-class NamedURLSpecGroupsTest(WebTestCase):
- def get_handlers(self):
- class EchoHandler(RequestHandler):
- def get(self, path):
- self.write(path)
-
- return [("/str/(?P.*)", EchoHandler),
- (u("/unicode/(?P.*)"), EchoHandler)]
-
- def test_named_urlspec_groups(self):
- response = self.fetch("/str/foo")
- self.assertEqual(response.body, b"foo")
-
- response = self.fetch("/unicode/bar")
- self.assertEqual(response.body, b"bar")
-
-
-@wsgi_safe
-class ClearHeaderTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- self.set_header("h1", "foo")
- self.set_header("h2", "bar")
- self.clear_header("h1")
- self.clear_header("nonexistent")
-
- def test_clear_header(self):
- response = self.fetch("/")
- self.assertTrue("h1" not in response.headers)
- self.assertEqual(response.headers["h2"], "bar")
-
-
-@wsgi_safe
-class Header304Test(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- self.set_header("Content-Language", "en_US")
- self.write("hello")
-
- def test_304_headers(self):
- response1 = self.fetch('/')
- self.assertEqual(response1.headers["Content-Length"], "5")
- self.assertEqual(response1.headers["Content-Language"], "en_US")
-
- response2 = self.fetch('/', headers={
- 'If-None-Match': response1.headers["Etag"]})
- self.assertEqual(response2.code, 304)
- self.assertTrue("Content-Length" not in response2.headers)
- self.assertTrue("Content-Language" not in response2.headers)
- # Not an entity header, but should not be added to 304s by chunking
- self.assertTrue("Transfer-Encoding" not in response2.headers)
-
-
-@wsgi_safe
-class StatusReasonTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- reason = self.request.arguments.get('reason', [])
- self.set_status(int(self.get_argument('code')),
- reason=reason[0] if reason else None)
-
- def get_http_client(self):
- # simple_httpclient only: curl doesn't expose the reason string
- return SimpleAsyncHTTPClient(io_loop=self.io_loop)
-
- def test_status(self):
- response = self.fetch("/?code=304")
- self.assertEqual(response.code, 304)
- self.assertEqual(response.reason, "Not Modified")
- response = self.fetch("/?code=304&reason=Foo")
- self.assertEqual(response.code, 304)
- self.assertEqual(response.reason, "Foo")
- response = self.fetch("/?code=682&reason=Bar")
- self.assertEqual(response.code, 682)
- self.assertEqual(response.reason, "Bar")
- with ExpectLog(app_log, 'Uncaught exception'):
- response = self.fetch("/?code=682")
- self.assertEqual(response.code, 500)
-
-
-@wsgi_safe
-class DateHeaderTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- self.write("hello")
-
- def test_date_header(self):
- response = self.fetch('/')
- header_date = datetime.datetime(
- *email.utils.parsedate(response.headers['Date'])[:6])
- self.assertTrue(header_date - datetime.datetime.utcnow() <
- datetime.timedelta(seconds=2))
-
-
-@wsgi_safe
-class RaiseWithReasonTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- raise HTTPError(682, reason="Foo")
-
- def get_http_client(self):
- # simple_httpclient only: curl doesn't expose the reason string
- return SimpleAsyncHTTPClient(io_loop=self.io_loop)
-
- def test_raise_with_reason(self):
- response = self.fetch("/")
- self.assertEqual(response.code, 682)
- self.assertEqual(response.reason, "Foo")
- self.assertIn(b'682: Foo', response.body)
-
- def test_httperror_str(self):
- self.assertEqual(str(HTTPError(682, reason="Foo")), "HTTP 682: Foo")
-
-
-@wsgi_safe
-class ErrorHandlerXSRFTest(WebTestCase):
- def get_handlers(self):
- # note that if the handlers list is empty we get the default_host
- # redirect fallback instead of a 404, so test with both an
- # explicitly defined error handler and an implicit 404.
- return [('/error', ErrorHandler, dict(status_code=417))]
-
- def get_app_kwargs(self):
- return dict(xsrf_cookies=True)
-
- def test_error_xsrf(self):
- response = self.fetch('/error', method='POST', body='')
- self.assertEqual(response.code, 417)
-
- def test_404_xsrf(self):
- response = self.fetch('/404', method='POST', body='')
- self.assertEqual(response.code, 404)
-
-
-@wsgi_safe
-class GzipTestCase(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- if self.get_argument('vary', None):
- self.set_header('Vary', self.get_argument('vary'))
- self.write('hello world')
-
- def get_app_kwargs(self):
- return dict(
- gzip=True,
- static_path=os.path.join(os.path.dirname(__file__), 'static'))
-
- def test_gzip(self):
- response = self.fetch('/')
- # simple_httpclient renames the content-encoding header;
- # curl_httpclient doesn't.
- self.assertEqual(
- response.headers.get(
- 'Content-Encoding',
- response.headers.get('X-Consumed-Content-Encoding')),
- 'gzip')
- self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
-
- def test_gzip_static(self):
- # The streaming responses in StaticFileHandler have subtle
- # interactions with the gzip output so test this case separately.
- response = self.fetch('/robots.txt')
- self.assertEqual(
- response.headers.get(
- 'Content-Encoding',
- response.headers.get('X-Consumed-Content-Encoding')),
- 'gzip')
- self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
-
- def test_gzip_not_requested(self):
- response = self.fetch('/', use_gzip=False)
- self.assertNotIn('Content-Encoding', response.headers)
- self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
-
- def test_vary_already_present(self):
- response = self.fetch('/?vary=Accept-Language')
- self.assertEqual(response.headers['Vary'],
- 'Accept-Language, Accept-Encoding')
-
-
-@wsgi_safe
-class PathArgsInPrepareTest(WebTestCase):
- class Handler(RequestHandler):
- def prepare(self):
- self.write(dict(args=self.path_args, kwargs=self.path_kwargs))
-
- def get(self, path):
- assert path == 'foo'
- self.finish()
-
- def get_handlers(self):
- return [('/pos/(.*)', self.Handler),
- ('/kw/(?P.*)', self.Handler)]
-
- def test_pos(self):
- response = self.fetch('/pos/foo')
- response.rethrow()
- data = json_decode(response.body)
- self.assertEqual(data, {'args': ['foo'], 'kwargs': {}})
-
- def test_kw(self):
- response = self.fetch('/kw/foo')
- response.rethrow()
- data = json_decode(response.body)
- self.assertEqual(data, {'args': [], 'kwargs': {'path': 'foo'}})
-
-
-@wsgi_safe
-class ClearAllCookiesTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- self.clear_all_cookies()
- self.write('ok')
-
- def test_clear_all_cookies(self):
- response = self.fetch('/', headers={'Cookie': 'foo=bar; baz=xyzzy'})
- set_cookies = sorted(response.headers.get_list('Set-Cookie'))
- self.assertTrue(set_cookies[0].startswith('baz=;'))
- self.assertTrue(set_cookies[1].startswith('foo=;'))
-
-
-class PermissionError(Exception):
- pass
-
-
-@wsgi_safe
-class ExceptionHandlerTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- exc = self.get_argument('exc')
- if exc == 'http':
- raise HTTPError(410, "no longer here")
- elif exc == 'zero':
- 1 / 0
- elif exc == 'permission':
- raise PermissionError('not allowed')
-
- def write_error(self, status_code, **kwargs):
- if 'exc_info' in kwargs:
- typ, value, tb = kwargs['exc_info']
- if isinstance(value, PermissionError):
- self.set_status(403)
- self.write('PermissionError')
- return
- RequestHandler.write_error(self, status_code, **kwargs)
-
- def log_exception(self, typ, value, tb):
- if isinstance(value, PermissionError):
- app_log.warning('custom logging for PermissionError: %s',
- value.args[0])
- else:
- RequestHandler.log_exception(self, typ, value, tb)
-
- def test_http_error(self):
- # HTTPErrors are logged as warnings with no stack trace.
- # TODO: extend ExpectLog to test this more precisely
- with ExpectLog(gen_log, '.*no longer here'):
- response = self.fetch('/?exc=http')
- self.assertEqual(response.code, 410)
-
- def test_unknown_error(self):
- # Unknown errors are logged as errors with a stack trace.
- with ExpectLog(app_log, 'Uncaught exception'):
- response = self.fetch('/?exc=zero')
- self.assertEqual(response.code, 500)
-
- def test_known_error(self):
- # log_exception can override logging behavior, and write_error
- # can override the response.
- with ExpectLog(app_log,
- 'custom logging for PermissionError: not allowed'):
- response = self.fetch('/?exc=permission')
- self.assertEqual(response.code, 403)
-
-
-@wsgi_safe
-class BuggyLoggingTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- 1/0
-
- def log_exception(self, typ, value, tb):
- 1/0
-
- def test_buggy_log_exception(self):
- # Something gets logged even though the application's
- # logger is broken.
- with ExpectLog(app_log, '.*'):
- self.fetch('/')
-
-
-@wsgi_safe
-class UIMethodUIModuleTest(SimpleHandlerTestCase):
- """Test that UI methods and modules are created correctly and
- associated with the handler.
- """
- class Handler(RequestHandler):
- def get(self):
- self.render('foo.html')
-
- def value(self):
- return self.get_argument("value")
-
- def get_app_kwargs(self):
- def my_ui_method(handler, x):
- return "In my_ui_method(%s) with handler value %s." % (
- x, handler.value())
-
- class MyModule(UIModule):
- def render(self, x):
- return "In MyModule(%s) with handler value %s." % (
- x, self.handler.value())
-
- loader = DictLoader({
- 'foo.html': '{{ my_ui_method(42) }} {% module MyModule(123) %}',
- })
- return dict(template_loader=loader,
- ui_methods={'my_ui_method': my_ui_method},
- ui_modules={'MyModule': MyModule})
-
- def tearDown(self):
- super(UIMethodUIModuleTest, self).tearDown()
- # TODO: fix template loader caching so this isn't necessary.
- RequestHandler._template_loaders.clear()
-
- def test_ui_method(self):
- response = self.fetch('/?value=asdf')
- self.assertEqual(response.body,
- b'In my_ui_method(42) with handler value asdf. '
- b'In MyModule(123) with handler value asdf.')
-
-
-@wsgi_safe
-class GetArgumentErrorTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- try:
- self.get_argument('foo')
- self.write({})
- except MissingArgumentError as e:
- self.write({'arg_name': e.arg_name,
- 'log_message': e.log_message})
-
- def test_catch_error(self):
- response = self.fetch('/')
- self.assertEqual(json_decode(response.body),
- {'arg_name': 'foo',
- 'log_message': 'Missing argument foo'})
-
-
-class MultipleExceptionTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- exc_count = 0
-
- @asynchronous
- def get(self):
- from tornado.ioloop import IOLoop
- IOLoop.current().add_callback(lambda: 1 / 0)
- IOLoop.current().add_callback(lambda: 1 / 0)
-
- def log_exception(self, typ, value, tb):
- MultipleExceptionTest.Handler.exc_count += 1
-
- def test_multi_exception(self):
- # This test verifies that multiple exceptions raised into the same
- # ExceptionStackContext do not generate extraneous log entries
- # due to "Cannot send error response after headers written".
- # log_exception is called, but it does not proceed to send_error.
- response = self.fetch('/')
- self.assertEqual(response.code, 500)
- response = self.fetch('/')
- self.assertEqual(response.code, 500)
- # Each of our two requests generated two exceptions, we should have
- # seen at least three of them by now (the fourth may still be
- # in the queue).
- self.assertGreater(MultipleExceptionTest.Handler.exc_count, 2)
-
-
-@wsgi_safe
-class SetLazyPropertiesTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def prepare(self):
- self.current_user = 'Ben'
- self.locale = locale.get('en_US')
-
- def get_user_locale(self):
- raise NotImplementedError()
-
- def get_current_user(self):
- raise NotImplementedError()
-
- def get(self):
- self.write('Hello %s (%s)' % (self.current_user, self.locale.code))
-
- def test_set_properties(self):
- # Ensure that current_user can be assigned to normally for apps
- # that want to forgo the lazy get_current_user property
- response = self.fetch('/')
- self.assertEqual(response.body, b'Hello Ben (en_US)')
-
-
-@wsgi_safe
-class GetCurrentUserTest(WebTestCase):
- def get_app_kwargs(self):
- class WithoutUserModule(UIModule):
- def render(self):
- return ''
-
- class WithUserModule(UIModule):
- def render(self):
- return str(self.current_user)
-
- loader = DictLoader({
- 'without_user.html': '',
- 'with_user.html': '{{ current_user }}',
- 'without_user_module.html': '{% module WithoutUserModule() %}',
- 'with_user_module.html': '{% module WithUserModule() %}',
- })
- return dict(template_loader=loader,
- ui_modules={'WithUserModule': WithUserModule,
- 'WithoutUserModule': WithoutUserModule})
-
- def tearDown(self):
- super(GetCurrentUserTest, self).tearDown()
- RequestHandler._template_loaders.clear()
-
- def get_handlers(self):
- class CurrentUserHandler(RequestHandler):
- def prepare(self):
- self.has_loaded_current_user = False
-
- def get_current_user(self):
- self.has_loaded_current_user = True
- return ''
-
- class WithoutUserHandler(CurrentUserHandler):
- def get(self):
- self.render_string('without_user.html')
- self.finish(str(self.has_loaded_current_user))
-
- class WithUserHandler(CurrentUserHandler):
- def get(self):
- self.render_string('with_user.html')
- self.finish(str(self.has_loaded_current_user))
-
- class CurrentUserModuleHandler(CurrentUserHandler):
- def get_template_namespace(self):
- # If RequestHandler.get_template_namespace is called, then
- # get_current_user is evaluated. Until #820 is fixed, this
- # is a small hack to circumvent the issue.
- return self.ui
-
- class WithoutUserModuleHandler(CurrentUserModuleHandler):
- def get(self):
- self.render_string('without_user_module.html')
- self.finish(str(self.has_loaded_current_user))
-
- class WithUserModuleHandler(CurrentUserModuleHandler):
- def get(self):
- self.render_string('with_user_module.html')
- self.finish(str(self.has_loaded_current_user))
-
- return [('/without_user', WithoutUserHandler),
- ('/with_user', WithUserHandler),
- ('/without_user_module', WithoutUserModuleHandler),
- ('/with_user_module', WithUserModuleHandler)]
-
- @unittest.skip('needs fix')
- def test_get_current_user_is_lazy(self):
- # TODO: Make this test pass. See #820.
- response = self.fetch('/without_user')
- self.assertEqual(response.body, b'False')
-
- def test_get_current_user_works(self):
- response = self.fetch('/with_user')
- self.assertEqual(response.body, b'True')
-
- def test_get_current_user_from_ui_module_is_lazy(self):
- response = self.fetch('/without_user_module')
- self.assertEqual(response.body, b'False')
-
- def test_get_current_user_from_ui_module_works(self):
- response = self.fetch('/with_user_module')
- self.assertEqual(response.body, b'True')
-
-
-@wsgi_safe
-class UnimplementedHTTPMethodsTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- pass
-
- def test_unimplemented_standard_methods(self):
- for method in ['HEAD', 'GET', 'DELETE', 'OPTIONS']:
- response = self.fetch('/', method=method)
- self.assertEqual(response.code, 405)
- for method in ['POST', 'PUT']:
- response = self.fetch('/', method=method, body=b'')
- self.assertEqual(response.code, 405)
-
-
-class UnimplementedNonStandardMethodsTest(SimpleHandlerTestCase):
- # wsgiref.validate complains about unknown methods in a way that makes
- # this test not wsgi_safe.
- class Handler(RequestHandler):
- def other(self):
- # Even though this method exists, it won't get called automatically
- # because it is not in SUPPORTED_METHODS.
- self.write('other')
-
- def test_unimplemented_patch(self):
- # PATCH is recently standardized; Tornado supports it by default
- # but wsgiref.validate doesn't like it.
- response = self.fetch('/', method='PATCH', body=b'')
- self.assertEqual(response.code, 405)
-
- def test_unimplemented_other(self):
- response = self.fetch('/', method='OTHER',
- allow_nonstandard_methods=True)
- self.assertEqual(response.code, 405)
-
-
-@wsgi_safe
-class AllHTTPMethodsTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def method(self):
- self.write(self.request.method)
-
- get = delete = options = post = put = method
-
- def test_standard_methods(self):
- response = self.fetch('/', method='HEAD')
- self.assertEqual(response.body, b'')
- for method in ['GET', 'DELETE', 'OPTIONS']:
- response = self.fetch('/', method=method)
- self.assertEqual(response.body, utf8(method))
- for method in ['POST', 'PUT']:
- response = self.fetch('/', method=method, body=b'')
- self.assertEqual(response.body, utf8(method))
-
-
-class PatchMethodTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',)
-
- def patch(self):
- self.write('patch')
-
- def other(self):
- self.write('other')
-
- def test_patch(self):
- response = self.fetch('/', method='PATCH', body=b'')
- self.assertEqual(response.body, b'patch')
-
- def test_other(self):
- response = self.fetch('/', method='OTHER',
- allow_nonstandard_methods=True)
- self.assertEqual(response.body, b'other')
-
-
-@wsgi_safe
-class FinishInPrepareTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def prepare(self):
- self.finish('done')
-
- def get(self):
- # It's difficult to assert for certain that a method did not
- # or will not be called in an asynchronous context, but this
- # will be logged noisily if it is reached.
- raise Exception('should not reach this method')
-
- def test_finish_in_prepare(self):
- response = self.fetch('/')
- self.assertEqual(response.body, b'done')
-
-
-@wsgi_safe
-class Default404Test(WebTestCase):
- def get_handlers(self):
- # If there are no handlers at all a default redirect handler gets added.
- return [('/foo', RequestHandler)]
-
- def test_404(self):
- response = self.fetch('/')
- self.assertEqual(response.code, 404)
- self.assertEqual(response.body,
- b'404: Not Found '
- b'404: Not Found')
-
-
-@wsgi_safe
-class Custom404Test(WebTestCase):
- def get_handlers(self):
- return [('/foo', RequestHandler)]
-
- def get_app_kwargs(self):
- class Custom404Handler(RequestHandler):
- def get(self):
- self.set_status(404)
- self.write('custom 404 response')
-
- return dict(default_handler_class=Custom404Handler)
-
- def test_404(self):
- response = self.fetch('/')
- self.assertEqual(response.code, 404)
- self.assertEqual(response.body, b'custom 404 response')
-
-
-@wsgi_safe
-class DefaultHandlerArgumentsTest(WebTestCase):
- def get_handlers(self):
- return [('/foo', RequestHandler)]
-
- def get_app_kwargs(self):
- return dict(default_handler_class=ErrorHandler,
- default_handler_args=dict(status_code=403))
-
- def test_403(self):
- response = self.fetch('/')
- self.assertEqual(response.code, 403)
-
-
-@wsgi_safe
-class HandlerByNameTest(WebTestCase):
- def get_handlers(self):
- # All three are equivalent.
- return [('/hello1', HelloHandler),
- ('/hello2', 'tornado.test.web_test.HelloHandler'),
- url('/hello3', 'tornado.test.web_test.HelloHandler'),
- ]
-
- def test_handler_by_name(self):
- resp = self.fetch('/hello1')
- self.assertEqual(resp.body, b'hello')
- resp = self.fetch('/hello2')
- self.assertEqual(resp.body, b'hello')
- resp = self.fetch('/hello3')
- self.assertEqual(resp.body, b'hello')
-
-
-class StreamingRequestBodyTest(WebTestCase):
- def get_handlers(self):
- @stream_request_body
- class StreamingBodyHandler(RequestHandler):
- def initialize(self, test):
- self.test = test
-
- def prepare(self):
- self.test.prepared.set_result(None)
-
- def data_received(self, data):
- self.test.data.set_result(data)
-
- def get(self):
- self.test.finished.set_result(None)
- self.write({})
-
- @stream_request_body
- class EarlyReturnHandler(RequestHandler):
- def prepare(self):
- # If we finish the response in prepare, it won't continue to
- # the (non-existent) data_received.
- raise HTTPError(401)
-
- @stream_request_body
- class CloseDetectionHandler(RequestHandler):
- def initialize(self, test):
- self.test = test
-
- def on_connection_close(self):
- super(CloseDetectionHandler, self).on_connection_close()
- self.test.close_future.set_result(None)
-
- return [('/stream_body', StreamingBodyHandler, dict(test=self)),
- ('/early_return', EarlyReturnHandler),
- ('/close_detection', CloseDetectionHandler, dict(test=self))]
-
- def connect(self, url, connection_close):
- # Use a raw connection so we can control the sending of data.
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
- s.connect(("127.0.0.1", self.get_http_port()))
- stream = IOStream(s, io_loop=self.io_loop)
- stream.write(b"GET " + url + b" HTTP/1.1\r\n")
- if connection_close:
- stream.write(b"Connection: close\r\n")
- stream.write(b"Transfer-Encoding: chunked\r\n\r\n")
- return stream
-
- @gen_test
- def test_streaming_body(self):
- self.prepared = Future()
- self.data = Future()
- self.finished = Future()
-
- stream = self.connect(b"/stream_body", connection_close=True)
- yield self.prepared
- stream.write(b"4\r\nasdf\r\n")
- # Ensure the first chunk is received before we send the second.
- data = yield self.data
- self.assertEqual(data, b"asdf")
- self.data = Future()
- stream.write(b"4\r\nqwer\r\n")
- data = yield self.data
- self.assertEquals(data, b"qwer")
- stream.write(b"0\r\n")
- yield self.finished
- data = yield gen.Task(stream.read_until_close)
- # This would ideally use an HTTP1Connection to read the response.
- self.assertTrue(data.endswith(b"{}"))
- stream.close()
-
- @gen_test
- def test_early_return(self):
- stream = self.connect(b"/early_return", connection_close=False)
- data = yield gen.Task(stream.read_until_close)
- self.assertTrue(data.startswith(b"HTTP/1.1 401"))
-
- @gen_test
- def test_early_return_with_data(self):
- stream = self.connect(b"/early_return", connection_close=False)
- stream.write(b"4\r\nasdf\r\n")
- data = yield gen.Task(stream.read_until_close)
- self.assertTrue(data.startswith(b"HTTP/1.1 401"))
-
- @gen_test
- def test_close_during_upload(self):
- self.close_future = Future()
- stream = self.connect(b"/close_detection", connection_close=False)
- stream.close()
- yield self.close_future
-
-
-class StreamingRequestFlowControlTest(WebTestCase):
- def get_handlers(self):
- from tornado.ioloop import IOLoop
-
- # Each method in this handler returns a Future and yields to the
- # IOLoop so the future is not immediately ready. Ensure that the
- # Futures are respected and no method is called before the previous
- # one has completed.
- @stream_request_body
- class FlowControlHandler(RequestHandler):
- def initialize(self, test):
- self.test = test
- self.method = None
- self.methods = []
-
- @contextlib.contextmanager
- def in_method(self, method):
- if self.method is not None:
- self.test.fail("entered method %s while in %s" %
- (method, self.method))
- self.method = method
- self.methods.append(method)
- try:
- yield
- finally:
- self.method = None
-
- @gen.coroutine
- def prepare(self):
- # Note that asynchronous prepare() does not block data_received,
- # so we don't use in_method here.
- self.methods.append('prepare')
- yield gen.Task(IOLoop.current().add_callback)
-
- @gen.coroutine
- def data_received(self, data):
- with self.in_method('data_received'):
- yield gen.Task(IOLoop.current().add_callback)
-
- @gen.coroutine
- def post(self):
- with self.in_method('post'):
- yield gen.Task(IOLoop.current().add_callback)
- self.write(dict(methods=self.methods))
-
- return [('/', FlowControlHandler, dict(test=self))]
-
- def get_httpserver_options(self):
- # Use a small chunk size so flow control is relevant even though
- # all the data arrives at once.
- return dict(chunk_size=10)
-
- def test_flow_control(self):
- response = self.fetch('/', body='abcdefghijklmnopqrstuvwxyz',
- method='POST')
- response.rethrow()
- self.assertEqual(json_decode(response.body),
- dict(methods=['prepare', 'data_received',
- 'data_received', 'data_received',
- 'post']))
-
-
-@wsgi_safe
-class IncorrectContentLengthTest(SimpleHandlerTestCase):
- def get_handlers(self):
- test = self
- self.server_error = None
-
- # Manually set a content-length that doesn't match the actual content.
- class TooHigh(RequestHandler):
- def get(self):
- self.set_header("Content-Length", "42")
- try:
- self.finish("ok")
- except Exception as e:
- test.server_error = e
- raise
-
- class TooLow(RequestHandler):
- def get(self):
- self.set_header("Content-Length", "2")
- try:
- self.finish("hello")
- except Exception as e:
- test.server_error = e
- raise
-
- return [('/high', TooHigh),
- ('/low', TooLow)]
-
- def test_content_length_too_high(self):
- # When the content-length is too high, the connection is simply
- # closed without completing the response. An error is logged on
- # the server.
- with ExpectLog(app_log, "(Uncaught exception|Exception in callback)"):
- with ExpectLog(gen_log,
- "(Cannot send error response after headers written"
- "|Failed to flush partial response)"):
- response = self.fetch("/high")
- self.assertEqual(response.code, 599)
- self.assertEqual(str(self.server_error),
- "Tried to write 40 bytes less than Content-Length")
-
- def test_content_length_too_low(self):
- # When the content-length is too low, the connection is closed
- # without writing the last chunk, so the client never sees the request
- # complete (which would be a framing error).
- with ExpectLog(app_log, "(Uncaught exception|Exception in callback)"):
- with ExpectLog(gen_log,
- "(Cannot send error response after headers written"
- "|Failed to flush partial response)"):
- response = self.fetch("/low")
- self.assertEqual(response.code, 599)
- self.assertEqual(str(self.server_error),
- "Tried to write more data than Content-Length")
-
-
-class ClientCloseTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- if self.request.version.startswith('HTTP/1'):
- # Simulate a connection closed by the client during
- # request processing. The client will see an error, but the
- # server should respond gracefully (without logging errors
- # because we were unable to write out as many bytes as
- # Content-Length said we would)
- self.request.connection.stream.close()
- self.write('hello')
- else:
- # TODO: add a HTTP2-compatible version of this test.
- self.write('requires HTTP/1.x')
-
- def test_client_close(self):
- response = self.fetch('/')
- if response.body == b'requires HTTP/1.x':
- self.skipTest('requires HTTP/1.x')
- self.assertEqual(response.code, 599)
-
-
-class SignedValueTest(unittest.TestCase):
- SECRET = "It's a secret to everybody"
- SECRET_DICT = {0: "asdfbasdf", 1: "12312312", 2: "2342342"}
-
- def past(self):
- return self.present() - 86400 * 32
-
- def present(self):
- return 1300000000
-
- def test_known_values(self):
- signed_v1 = create_signed_value(SignedValueTest.SECRET, "key", "value",
- version=1, clock=self.present)
- self.assertEqual(
- signed_v1,
- b"dmFsdWU=|1300000000|31c934969f53e48164c50768b40cbd7e2daaaa4f")
-
- signed_v2 = create_signed_value(SignedValueTest.SECRET, "key", "value",
- version=2, clock=self.present)
- self.assertEqual(
- signed_v2,
- b"2|1:0|10:1300000000|3:key|8:dmFsdWU=|"
- b"3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152")
-
- signed_default = create_signed_value(SignedValueTest.SECRET,
- "key", "value", clock=self.present)
- self.assertEqual(signed_default, signed_v2)
-
- decoded_v1 = decode_signed_value(SignedValueTest.SECRET, "key",
- signed_v1, min_version=1,
- clock=self.present)
- self.assertEqual(decoded_v1, b"value")
-
- decoded_v2 = decode_signed_value(SignedValueTest.SECRET, "key",
- signed_v2, min_version=2,
- clock=self.present)
- self.assertEqual(decoded_v2, b"value")
-
- def test_name_swap(self):
- signed1 = create_signed_value(SignedValueTest.SECRET, "key1", "value",
- clock=self.present)
- signed2 = create_signed_value(SignedValueTest.SECRET, "key2", "value",
- clock=self.present)
- # Try decoding each string with the other's "name"
- decoded1 = decode_signed_value(SignedValueTest.SECRET, "key2", signed1,
- clock=self.present)
- self.assertIs(decoded1, None)
- decoded2 = decode_signed_value(SignedValueTest.SECRET, "key1", signed2,
- clock=self.present)
- self.assertIs(decoded2, None)
-
- def test_expired(self):
- signed = create_signed_value(SignedValueTest.SECRET, "key1", "value",
- clock=self.past)
- decoded_past = decode_signed_value(SignedValueTest.SECRET, "key1",
- signed, clock=self.past)
- self.assertEqual(decoded_past, b"value")
- decoded_present = decode_signed_value(SignedValueTest.SECRET, "key1",
- signed, clock=self.present)
- self.assertIs(decoded_present, None)
-
- def test_payload_tampering(self):
- # These cookies are variants of the one in test_known_values.
- sig = "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"
-
- def validate(prefix):
- return (b'value' ==
- decode_signed_value(SignedValueTest.SECRET, "key",
- prefix + sig, clock=self.present))
- self.assertTrue(validate("2|1:0|10:1300000000|3:key|8:dmFsdWU=|"))
- # Change key version
- self.assertFalse(validate("2|1:1|10:1300000000|3:key|8:dmFsdWU=|"))
- # length mismatch (field too short)
- self.assertFalse(validate("2|1:0|10:130000000|3:key|8:dmFsdWU=|"))
- # length mismatch (field too long)
- self.assertFalse(validate("2|1:0|10:1300000000|3:keey|8:dmFsdWU=|"))
-
- def test_signature_tampering(self):
- prefix = "2|1:0|10:1300000000|3:key|8:dmFsdWU=|"
-
- def validate(sig):
- return (b'value' ==
- decode_signed_value(SignedValueTest.SECRET, "key",
- prefix + sig, clock=self.present))
- self.assertTrue(validate(
- "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"))
- # All zeros
- self.assertFalse(validate("0" * 32))
- # Change one character
- self.assertFalse(validate(
- "4d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"))
- # Change another character
- self.assertFalse(validate(
- "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e153"))
- # Truncate
- self.assertFalse(validate(
- "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e15"))
- # Lengthen
- self.assertFalse(validate(
- "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e1538"))
-
- def test_non_ascii(self):
- value = b"\xe9"
- signed = create_signed_value(SignedValueTest.SECRET, "key", value,
- clock=self.present)
- decoded = decode_signed_value(SignedValueTest.SECRET, "key", signed,
- clock=self.present)
- self.assertEqual(value, decoded)
-
- def test_key_versioning_read_write_default_key(self):
- value = b"\xe9"
- signed = create_signed_value(SignedValueTest.SECRET_DICT,
- "key", value, clock=self.present,
- key_version=0)
- decoded = decode_signed_value(SignedValueTest.SECRET_DICT,
- "key", signed, clock=self.present)
- self.assertEqual(value, decoded)
-
- def test_key_versioning_read_write_non_default_key(self):
- value = b"\xe9"
- signed = create_signed_value(SignedValueTest.SECRET_DICT,
- "key", value, clock=self.present,
- key_version=1)
- decoded = decode_signed_value(SignedValueTest.SECRET_DICT,
- "key", signed, clock=self.present)
- self.assertEqual(value, decoded)
-
- def test_key_versioning_invalid_key(self):
- value = b"\xe9"
- signed = create_signed_value(SignedValueTest.SECRET_DICT,
- "key", value, clock=self.present,
- key_version=0)
- newkeys = SignedValueTest.SECRET_DICT.copy()
- newkeys.pop(0)
- decoded = decode_signed_value(newkeys,
- "key", signed, clock=self.present)
- self.assertEqual(None, decoded)
-
- def test_key_version_retrieval(self):
- value = b"\xe9"
- signed = create_signed_value(SignedValueTest.SECRET_DICT,
- "key", value, clock=self.present,
- key_version=1)
- key_version = get_signature_key_version(signed)
- self.assertEqual(1, key_version)
-
-
-@wsgi_safe
-class XSRFTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- version = int(self.get_argument("version", "2"))
- # This would be a bad idea in a real app, but in this test
- # it's fine.
- self.settings["xsrf_cookie_version"] = version
- self.write(self.xsrf_token)
-
- def post(self):
- self.write("ok")
-
- def get_app_kwargs(self):
- return dict(xsrf_cookies=True)
-
- def setUp(self):
- super(XSRFTest, self).setUp()
- self.xsrf_token = self.get_token()
-
- def get_token(self, old_token=None, version=None):
- if old_token is not None:
- headers = self.cookie_headers(old_token)
- else:
- headers = None
- response = self.fetch(
- "/" if version is None else ("/?version=%d" % version),
- headers=headers)
- response.rethrow()
- return native_str(response.body)
-
- def cookie_headers(self, token=None):
- if token is None:
- token = self.xsrf_token
- return {"Cookie": "_xsrf=" + token}
-
- def test_xsrf_fail_no_token(self):
- with ExpectLog(gen_log, ".*'_xsrf' argument missing"):
- response = self.fetch("/", method="POST", body=b"")
- self.assertEqual(response.code, 403)
-
- def test_xsrf_fail_body_no_cookie(self):
- with ExpectLog(gen_log, ".*XSRF cookie does not match POST"):
- response = self.fetch(
- "/", method="POST",
- body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)))
- self.assertEqual(response.code, 403)
-
- def test_xsrf_fail_cookie_no_body(self):
- with ExpectLog(gen_log, ".*'_xsrf' argument missing"):
- response = self.fetch(
- "/", method="POST", body=b"",
- headers=self.cookie_headers())
- self.assertEqual(response.code, 403)
-
- def test_xsrf_success_short_token(self):
- response = self.fetch(
- "/", method="POST",
- body=urllib_parse.urlencode(dict(_xsrf='deadbeef')),
- headers=self.cookie_headers(token='deadbeef'))
- self.assertEqual(response.code, 200)
-
- def test_xsrf_success_non_hex_token(self):
- response = self.fetch(
- "/", method="POST",
- body=urllib_parse.urlencode(dict(_xsrf='xoxo')),
- headers=self.cookie_headers(token='xoxo'))
- self.assertEqual(response.code, 200)
-
- def test_xsrf_success_post_body(self):
- response = self.fetch(
- "/", method="POST",
- body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
- headers=self.cookie_headers())
- self.assertEqual(response.code, 200)
-
- def test_xsrf_success_query_string(self):
- response = self.fetch(
- "/?" + urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
- method="POST", body=b"",
- headers=self.cookie_headers())
- self.assertEqual(response.code, 200)
-
- def test_xsrf_success_header(self):
- response = self.fetch("/", method="POST", body=b"",
- headers=dict({"X-Xsrftoken": self.xsrf_token},
- **self.cookie_headers()))
- self.assertEqual(response.code, 200)
-
- def test_distinct_tokens(self):
- # Every request gets a distinct token.
- NUM_TOKENS = 10
- tokens = set()
- for i in range(NUM_TOKENS):
- tokens.add(self.get_token())
- self.assertEqual(len(tokens), NUM_TOKENS)
-
- def test_cross_user(self):
- token2 = self.get_token()
- # Each token can be used to authenticate its own request.
- for token in (self.xsrf_token, token2):
- response = self.fetch(
- "/", method="POST",
- body=urllib_parse.urlencode(dict(_xsrf=token)),
- headers=self.cookie_headers(token))
- self.assertEqual(response.code, 200)
- # Sending one in the cookie and the other in the body is not allowed.
- for cookie_token, body_token in ((self.xsrf_token, token2),
- (token2, self.xsrf_token)):
- with ExpectLog(gen_log, '.*XSRF cookie does not match POST'):
- response = self.fetch(
- "/", method="POST",
- body=urllib_parse.urlencode(dict(_xsrf=body_token)),
- headers=self.cookie_headers(cookie_token))
- self.assertEqual(response.code, 403)
-
- def test_refresh_token(self):
- token = self.xsrf_token
- tokens_seen = set([token])
- # A user's token is stable over time. Refreshing the page in one tab
- # might update the cookie while an older tab still has the old cookie
- # in its DOM. Simulate this scenario by passing a constant token
- # in the body and re-querying for the token.
- for i in range(5):
- token = self.get_token(token)
- # Tokens are encoded uniquely each time
- tokens_seen.add(token)
- response = self.fetch(
- "/", method="POST",
- body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
- headers=self.cookie_headers(token))
- self.assertEqual(response.code, 200)
- self.assertEqual(len(tokens_seen), 6)
-
- def test_versioning(self):
- # Version 1 still produces distinct tokens per request.
- self.assertNotEqual(self.get_token(version=1),
- self.get_token(version=1))
-
- # Refreshed v1 tokens are all identical.
- v1_token = self.get_token(version=1)
- for i in range(5):
- self.assertEqual(self.get_token(v1_token, version=1), v1_token)
-
- # Upgrade to a v2 version of the same token
- v2_token = self.get_token(v1_token)
- self.assertNotEqual(v1_token, v2_token)
- # Each v1 token can map to many v2 tokens.
- self.assertNotEqual(v2_token, self.get_token(v1_token))
-
- # The tokens are cross-compatible.
- for cookie_token, body_token in ((v1_token, v2_token),
- (v2_token, v1_token)):
- response = self.fetch(
- "/", method="POST",
- body=urllib_parse.urlencode(dict(_xsrf=body_token)),
- headers=self.cookie_headers(cookie_token))
- self.assertEqual(response.code, 200)
-
-
-@wsgi_safe
-class FinishExceptionTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- self.set_status(401)
- self.set_header('WWW-Authenticate', 'Basic realm="something"')
- self.write('authentication required')
- raise Finish()
-
- def test_finish_exception(self):
- response = self.fetch('/')
- self.assertEqual(response.code, 401)
- self.assertEqual('Basic realm="something"',
- response.headers.get('WWW-Authenticate'))
- self.assertEqual(b'authentication required', response.body)
-
-
-@wsgi_safe
-class DecoratorTest(WebTestCase):
- def get_handlers(self):
- class RemoveSlashHandler(RequestHandler):
- @removeslash
- def get(self):
- pass
-
- class AddSlashHandler(RequestHandler):
- @addslash
- def get(self):
- pass
-
- return [("/removeslash/", RemoveSlashHandler),
- ("/addslash", AddSlashHandler),
- ]
-
- def test_removeslash(self):
- response = self.fetch("/removeslash/", follow_redirects=False)
- self.assertEqual(response.code, 301)
- self.assertEqual(response.headers['Location'], "/removeslash")
-
- response = self.fetch("/removeslash/?foo=bar", follow_redirects=False)
- self.assertEqual(response.code, 301)
- self.assertEqual(response.headers['Location'], "/removeslash?foo=bar")
-
- def test_addslash(self):
- response = self.fetch("/addslash", follow_redirects=False)
- self.assertEqual(response.code, 301)
- self.assertEqual(response.headers['Location'], "/addslash/")
-
- response = self.fetch("/addslash?foo=bar", follow_redirects=False)
- self.assertEqual(response.code, 301)
- self.assertEqual(response.headers['Location'], "/addslash/?foo=bar")
-
-
-@wsgi_safe
-class CacheTest(WebTestCase):
- def get_handlers(self):
- class EtagHandler(RequestHandler):
- def get(self, computed_etag):
- self.write(computed_etag)
-
- def compute_etag(self):
- return self._write_buffer[0]
-
- return [
- ('/etag/(.*)', EtagHandler)
- ]
-
- def test_wildcard_etag(self):
- computed_etag = '"xyzzy"'
- etags = '*'
- self._test_etag(computed_etag, etags, 304)
-
- def test_strong_etag_match(self):
- computed_etag = '"xyzzy"'
- etags = '"xyzzy"'
- self._test_etag(computed_etag, etags, 304)
-
- def test_multiple_strong_etag_match(self):
- computed_etag = '"xyzzy1"'
- etags = '"xyzzy1", "xyzzy2"'
- self._test_etag(computed_etag, etags, 304)
-
- def test_strong_etag_not_match(self):
- computed_etag = '"xyzzy"'
- etags = '"xyzzy1"'
- self._test_etag(computed_etag, etags, 200)
-
- def test_multiple_strong_etag_not_match(self):
- computed_etag = '"xyzzy"'
- etags = '"xyzzy1", "xyzzy2"'
- self._test_etag(computed_etag, etags, 200)
-
- def test_weak_etag_match(self):
- computed_etag = '"xyzzy1"'
- etags = 'W/"xyzzy1"'
- self._test_etag(computed_etag, etags, 304)
-
- def test_multiple_weak_etag_match(self):
- computed_etag = '"xyzzy2"'
- etags = 'W/"xyzzy1", W/"xyzzy2"'
- self._test_etag(computed_etag, etags, 304)
-
- def test_weak_etag_not_match(self):
- computed_etag = '"xyzzy2"'
- etags = 'W/"xyzzy1"'
- self._test_etag(computed_etag, etags, 200)
-
- def test_multiple_weak_etag_not_match(self):
- computed_etag = '"xyzzy3"'
- etags = 'W/"xyzzy1", W/"xyzzy2"'
- self._test_etag(computed_etag, etags, 200)
-
- def _test_etag(self, computed_etag, etags, status_code):
- response = self.fetch(
- '/etag/' + computed_etag,
- headers={'If-None-Match': etags}
- )
- self.assertEqual(response.code, status_code)
-
-
-@wsgi_safe
-class RequestSummaryTest(SimpleHandlerTestCase):
- class Handler(RequestHandler):
- def get(self):
- # remote_ip is optional, although it's set by
- # both HTTPServer and WSGIAdapter.
- # Clobber it to make sure it doesn't break logging.
- self.request.remote_ip = None
- self.finish(self._request_summary())
-
- def test_missing_remote_ip(self):
- resp = self.fetch("/")
- self.assertEqual(resp.body, b"GET / (None)")
diff --git a/tornado/test/websocket_test.py b/tornado/test/websocket_test.py
deleted file mode 100644
index 6b182d07..00000000
--- a/tornado/test/websocket_test.py
+++ /dev/null
@@ -1,415 +0,0 @@
-from __future__ import absolute_import, division, print_function, with_statement
-
-import traceback
-
-from tornado.concurrent import Future
-from tornado import gen
-from tornado.httpclient import HTTPError, HTTPRequest
-from tornado.log import gen_log, app_log
-from tornado.testing import AsyncHTTPTestCase, gen_test, bind_unused_port, ExpectLog
-from tornado.test.util import unittest
-from tornado.web import Application, RequestHandler
-from tornado.util import u
-
-try:
- import tornado.websocket # noqa
- from tornado.util import _websocket_mask_python
-except ImportError:
- # The unittest module presents misleading errors on ImportError
- # (it acts as if websocket_test could not be found, hiding the underlying
- # error). If we get an ImportError here (which could happen due to
- # TORNADO_EXTENSION=1), print some extra information before failing.
- traceback.print_exc()
- raise
-
-from tornado.websocket import WebSocketHandler, websocket_connect, WebSocketError
-
-try:
- from tornado import speedups
-except ImportError:
- speedups = None
-
-
-class TestWebSocketHandler(WebSocketHandler):
- """Base class for testing handlers that exposes the on_close event.
-
- This allows for deterministic cleanup of the associated socket.
- """
- def initialize(self, close_future, compression_options=None):
- self.close_future = close_future
- self.compression_options = compression_options
-
- def get_compression_options(self):
- return self.compression_options
-
- def on_close(self):
- self.close_future.set_result((self.close_code, self.close_reason))
-
-
-class EchoHandler(TestWebSocketHandler):
- def on_message(self, message):
- self.write_message(message, isinstance(message, bytes))
-
-
-class ErrorInOnMessageHandler(TestWebSocketHandler):
- def on_message(self, message):
- 1 / 0
-
-
-class HeaderHandler(TestWebSocketHandler):
- def open(self):
- try:
- # In a websocket context, many RequestHandler methods
- # raise RuntimeErrors.
- self.set_status(503)
- raise Exception("did not get expected exception")
- except RuntimeError:
- pass
- self.write_message(self.request.headers.get('X-Test', ''))
-
-
-class NonWebSocketHandler(RequestHandler):
- def get(self):
- self.write('ok')
-
-
-class CloseReasonHandler(TestWebSocketHandler):
- def open(self):
- self.on_close_called = False
- self.close(1001, "goodbye")
-
-
-class AsyncPrepareHandler(TestWebSocketHandler):
- @gen.coroutine
- def prepare(self):
- yield gen.moment
-
- def on_message(self, message):
- self.write_message(message)
-
-
-class WebSocketBaseTestCase(AsyncHTTPTestCase):
- @gen.coroutine
- def ws_connect(self, path, compression_options=None):
- ws = yield websocket_connect(
- 'ws://127.0.0.1:%d%s' % (self.get_http_port(), path),
- compression_options=compression_options)
- raise gen.Return(ws)
-
- @gen.coroutine
- def close(self, ws):
- """Close a websocket connection and wait for the server side.
-
- If we don't wait here, there are sometimes leak warnings in the
- tests.
- """
- ws.close()
- yield self.close_future
-
-
-class WebSocketTest(WebSocketBaseTestCase):
- def get_app(self):
- self.close_future = Future()
- return Application([
- ('/echo', EchoHandler, dict(close_future=self.close_future)),
- ('/non_ws', NonWebSocketHandler),
- ('/header', HeaderHandler, dict(close_future=self.close_future)),
- ('/close_reason', CloseReasonHandler,
- dict(close_future=self.close_future)),
- ('/error_in_on_message', ErrorInOnMessageHandler,
- dict(close_future=self.close_future)),
- ('/async_prepare', AsyncPrepareHandler,
- dict(close_future=self.close_future)),
- ])
-
- def test_http_request(self):
- # WS server, HTTP client.
- response = self.fetch('/echo')
- self.assertEqual(response.code, 400)
-
- @gen_test
- def test_websocket_gen(self):
- ws = yield self.ws_connect('/echo')
- ws.write_message('hello')
- response = yield ws.read_message()
- self.assertEqual(response, 'hello')
- yield self.close(ws)
-
- def test_websocket_callbacks(self):
- websocket_connect(
- 'ws://127.0.0.1:%d/echo' % self.get_http_port(),
- io_loop=self.io_loop, callback=self.stop)
- ws = self.wait().result()
- ws.write_message('hello')
- ws.read_message(self.stop)
- response = self.wait().result()
- self.assertEqual(response, 'hello')
- self.close_future.add_done_callback(lambda f: self.stop())
- ws.close()
- self.wait()
-
- @gen_test
- def test_binary_message(self):
- ws = yield self.ws_connect('/echo')
- ws.write_message(b'hello \xe9', binary=True)
- response = yield ws.read_message()
- self.assertEqual(response, b'hello \xe9')
- yield self.close(ws)
-
- @gen_test
- def test_unicode_message(self):
- ws = yield self.ws_connect('/echo')
- ws.write_message(u('hello \u00e9'))
- response = yield ws.read_message()
- self.assertEqual(response, u('hello \u00e9'))
- yield self.close(ws)
-
- @gen_test
- def test_error_in_on_message(self):
- ws = yield self.ws_connect('/error_in_on_message')
- ws.write_message('hello')
- with ExpectLog(app_log, "Uncaught exception"):
- response = yield ws.read_message()
- self.assertIs(response, None)
- yield self.close(ws)
-
- @gen_test
- def test_websocket_http_fail(self):
- with self.assertRaises(HTTPError) as cm:
- yield self.ws_connect('/notfound')
- self.assertEqual(cm.exception.code, 404)
-
- @gen_test
- def test_websocket_http_success(self):
- with self.assertRaises(WebSocketError):
- yield self.ws_connect('/non_ws')
-
- @gen_test
- def test_websocket_network_fail(self):
- sock, port = bind_unused_port()
- sock.close()
- with self.assertRaises(IOError):
- with ExpectLog(gen_log, ".*"):
- yield websocket_connect(
- 'ws://127.0.0.1:%d/' % port,
- io_loop=self.io_loop,
- connect_timeout=3600)
-
- @gen_test
- def test_websocket_close_buffered_data(self):
- ws = yield websocket_connect(
- 'ws://127.0.0.1:%d/echo' % self.get_http_port())
- ws.write_message('hello')
- ws.write_message('world')
- # Close the underlying stream.
- ws.stream.close()
- yield self.close_future
-
- @gen_test
- def test_websocket_headers(self):
- # Ensure that arbitrary headers can be passed through websocket_connect.
- ws = yield websocket_connect(
- HTTPRequest('ws://127.0.0.1:%d/header' % self.get_http_port(),
- headers={'X-Test': 'hello'}))
- response = yield ws.read_message()
- self.assertEqual(response, 'hello')
- yield self.close(ws)
-
- @gen_test
- def test_server_close_reason(self):
- ws = yield self.ws_connect('/close_reason')
- msg = yield ws.read_message()
- # A message of None means the other side closed the connection.
- self.assertIs(msg, None)
- self.assertEqual(ws.close_code, 1001)
- self.assertEqual(ws.close_reason, "goodbye")
- # The on_close callback is called no matter which side closed.
- yield self.close_future
-
- @gen_test
- def test_client_close_reason(self):
- ws = yield self.ws_connect('/echo')
- ws.close(1001, 'goodbye')
- code, reason = yield self.close_future
- self.assertEqual(code, 1001)
- self.assertEqual(reason, 'goodbye')
-
- @gen_test
- def test_async_prepare(self):
- # Previously, an async prepare method triggered a bug that would
- # result in a timeout on test shutdown (and a memory leak).
- ws = yield self.ws_connect('/async_prepare')
- ws.write_message('hello')
- res = yield ws.read_message()
- self.assertEqual(res, 'hello')
-
- @gen_test
- def test_check_origin_valid_no_path(self):
- port = self.get_http_port()
-
- url = 'ws://127.0.0.1:%d/echo' % port
- headers = {'Origin': 'http://127.0.0.1:%d' % port}
-
- ws = yield websocket_connect(HTTPRequest(url, headers=headers),
- io_loop=self.io_loop)
- ws.write_message('hello')
- response = yield ws.read_message()
- self.assertEqual(response, 'hello')
- yield self.close(ws)
-
- @gen_test
- def test_check_origin_valid_with_path(self):
- port = self.get_http_port()
-
- url = 'ws://127.0.0.1:%d/echo' % port
- headers = {'Origin': 'http://127.0.0.1:%d/something' % port}
-
- ws = yield websocket_connect(HTTPRequest(url, headers=headers),
- io_loop=self.io_loop)
- ws.write_message('hello')
- response = yield ws.read_message()
- self.assertEqual(response, 'hello')
- yield self.close(ws)
-
- @gen_test
- def test_check_origin_invalid_partial_url(self):
- port = self.get_http_port()
-
- url = 'ws://127.0.0.1:%d/echo' % port
- headers = {'Origin': '127.0.0.1:%d' % port}
-
- with self.assertRaises(HTTPError) as cm:
- yield websocket_connect(HTTPRequest(url, headers=headers),
- io_loop=self.io_loop)
- self.assertEqual(cm.exception.code, 403)
-
- @gen_test
- def test_check_origin_invalid(self):
- port = self.get_http_port()
-
- url = 'ws://127.0.0.1:%d/echo' % port
- # Host is 127.0.0.1, which should not be accessible from some other
- # domain
- headers = {'Origin': 'http://somewhereelse.com'}
-
- with self.assertRaises(HTTPError) as cm:
- yield websocket_connect(HTTPRequest(url, headers=headers),
- io_loop=self.io_loop)
-
- self.assertEqual(cm.exception.code, 403)
-
- @gen_test
- def test_check_origin_invalid_subdomains(self):
- port = self.get_http_port()
-
- url = 'ws://localhost:%d/echo' % port
- # Subdomains should be disallowed by default. If we could pass a
- # resolver to websocket_connect we could test sibling domains as well.
- headers = {'Origin': 'http://subtenant.localhost'}
-
- with self.assertRaises(HTTPError) as cm:
- yield websocket_connect(HTTPRequest(url, headers=headers),
- io_loop=self.io_loop)
-
- self.assertEqual(cm.exception.code, 403)
-
-
-class CompressionTestMixin(object):
- MESSAGE = 'Hello world. Testing 123 123'
-
- def get_app(self):
- self.close_future = Future()
- return Application([
- ('/echo', EchoHandler, dict(
- close_future=self.close_future,
- compression_options=self.get_server_compression_options())),
- ])
-
- def get_server_compression_options(self):
- return None
-
- def get_client_compression_options(self):
- return None
-
- @gen_test
- def test_message_sizes(self):
- ws = yield self.ws_connect(
- '/echo',
- compression_options=self.get_client_compression_options())
- # Send the same message three times so we can measure the
- # effect of the context_takeover options.
- for i in range(3):
- ws.write_message(self.MESSAGE)
- response = yield ws.read_message()
- self.assertEqual(response, self.MESSAGE)
- self.assertEqual(ws.protocol._message_bytes_out, len(self.MESSAGE) * 3)
- self.assertEqual(ws.protocol._message_bytes_in, len(self.MESSAGE) * 3)
- self.verify_wire_bytes(ws.protocol._wire_bytes_in,
- ws.protocol._wire_bytes_out)
- yield self.close(ws)
-
-
-class UncompressedTestMixin(CompressionTestMixin):
- """Specialization of CompressionTestMixin when we expect no compression."""
- def verify_wire_bytes(self, bytes_in, bytes_out):
- # Bytes out includes the 4-byte mask key per message.
- self.assertEqual(bytes_out, 3 * (len(self.MESSAGE) + 6))
- self.assertEqual(bytes_in, 3 * (len(self.MESSAGE) + 2))
-
-
-class NoCompressionTest(UncompressedTestMixin, WebSocketBaseTestCase):
- pass
-
-
-# If only one side tries to compress, the extension is not negotiated.
-class ServerOnlyCompressionTest(UncompressedTestMixin, WebSocketBaseTestCase):
- def get_server_compression_options(self):
- return {}
-
-
-class ClientOnlyCompressionTest(UncompressedTestMixin, WebSocketBaseTestCase):
- def get_client_compression_options(self):
- return {}
-
-
-class DefaultCompressionTest(CompressionTestMixin, WebSocketBaseTestCase):
- def get_server_compression_options(self):
- return {}
-
- def get_client_compression_options(self):
- return {}
-
- def verify_wire_bytes(self, bytes_in, bytes_out):
- self.assertLess(bytes_out, 3 * (len(self.MESSAGE) + 6))
- self.assertLess(bytes_in, 3 * (len(self.MESSAGE) + 2))
- # Bytes out includes the 4 bytes mask key per message.
- self.assertEqual(bytes_out, bytes_in + 12)
-
-
-class MaskFunctionMixin(object):
- # Subclasses should define self.mask(mask, data)
- def test_mask(self):
- self.assertEqual(self.mask(b'abcd', b''), b'')
- self.assertEqual(self.mask(b'abcd', b'b'), b'\x03')
- self.assertEqual(self.mask(b'abcd', b'54321'), b'TVPVP')
- self.assertEqual(self.mask(b'ZXCV', b'98765432'), b'c`t`olpd')
- # Include test cases with \x00 bytes (to ensure that the C
- # extension isn't depending on null-terminated strings) and
- # bytes with the high bit set (to smoke out signedness issues).
- self.assertEqual(self.mask(b'\x00\x01\x02\x03',
- b'\xff\xfb\xfd\xfc\xfe\xfa'),
- b'\xff\xfa\xff\xff\xfe\xfb')
- self.assertEqual(self.mask(b'\xff\xfb\xfd\xfc',
- b'\x00\x01\x02\x03\x04\x05'),
- b'\xff\xfa\xff\xff\xfb\xfe')
-
-
-class PythonMaskFunctionTest(MaskFunctionMixin, unittest.TestCase):
- def mask(self, mask, data):
- return _websocket_mask_python(mask, data)
-
-
-@unittest.skipIf(speedups is None, "tornado.speedups module not present")
-class CythonMaskFunctionTest(MaskFunctionMixin, unittest.TestCase):
- def mask(self, mask, data):
- return speedups.websocket_mask(mask, data)
diff --git a/tornado/test/wsgi_test.py b/tornado/test/wsgi_test.py
deleted file mode 100644
index 42d74b88..00000000
--- a/tornado/test/wsgi_test.py
+++ /dev/null
@@ -1,100 +0,0 @@
-from __future__ import absolute_import, division, print_function, with_statement
-from wsgiref.validate import validator
-
-from tornado.escape import json_decode
-from tornado.test.httpserver_test import TypeCheckHandler
-from tornado.testing import AsyncHTTPTestCase
-from tornado.util import u
-from tornado.web import RequestHandler, Application
-from tornado.wsgi import WSGIApplication, WSGIContainer, WSGIAdapter
-
-
-class WSGIContainerTest(AsyncHTTPTestCase):
- def wsgi_app(self, environ, start_response):
- status = "200 OK"
- response_headers = [("Content-Type", "text/plain")]
- start_response(status, response_headers)
- return [b"Hello world!"]
-
- def get_app(self):
- return WSGIContainer(validator(self.wsgi_app))
-
- def test_simple(self):
- response = self.fetch("/")
- self.assertEqual(response.body, b"Hello world!")
-
-
-class WSGIApplicationTest(AsyncHTTPTestCase):
- def get_app(self):
- class HelloHandler(RequestHandler):
- def get(self):
- self.write("Hello world!")
-
- class PathQuotingHandler(RequestHandler):
- def get(self, path):
- self.write(path)
-
- # It would be better to run the wsgiref server implementation in
- # another thread instead of using our own WSGIContainer, but this
- # fits better in our async testing framework and the wsgiref
- # validator should keep us honest
- return WSGIContainer(validator(WSGIApplication([
- ("/", HelloHandler),
- ("/path/(.*)", PathQuotingHandler),
- ("/typecheck", TypeCheckHandler),
- ])))
-
- def test_simple(self):
- response = self.fetch("/")
- self.assertEqual(response.body, b"Hello world!")
-
- def test_path_quoting(self):
- response = self.fetch("/path/foo%20bar%C3%A9")
- self.assertEqual(response.body, u("foo bar\u00e9").encode("utf-8"))
-
- def test_types(self):
- headers = {"Cookie": "foo=bar"}
- response = self.fetch("/typecheck?foo=bar", headers=headers)
- data = json_decode(response.body)
- self.assertEqual(data, {})
-
- response = self.fetch("/typecheck", method="POST", body="foo=bar", headers=headers)
- data = json_decode(response.body)
- self.assertEqual(data, {})
-
-# This is kind of hacky, but run some of the HTTPServer tests through
-# WSGIContainer and WSGIApplication to make sure everything survives
-# repeated disassembly and reassembly.
-from tornado.test import httpserver_test
-from tornado.test import web_test
-
-
-class WSGIConnectionTest(httpserver_test.HTTPConnectionTest):
- def get_app(self):
- return WSGIContainer(validator(WSGIApplication(self.get_handlers())))
-
-
-def wrap_web_tests_application():
- result = {}
- for cls in web_test.wsgi_safe_tests:
- class WSGIApplicationWrappedTest(cls):
- def get_app(self):
- self.app = WSGIApplication(self.get_handlers(),
- **self.get_app_kwargs())
- return WSGIContainer(validator(self.app))
- result["WSGIApplication_" + cls.__name__] = WSGIApplicationWrappedTest
- return result
-globals().update(wrap_web_tests_application())
-
-
-def wrap_web_tests_adapter():
- result = {}
- for cls in web_test.wsgi_safe_tests:
- class WSGIAdapterWrappedTest(cls):
- def get_app(self):
- self.app = Application(self.get_handlers(),
- **self.get_app_kwargs())
- return WSGIContainer(validator(WSGIAdapter(self.app)))
- result["WSGIAdapter_" + cls.__name__] = WSGIAdapterWrappedTest
- return result
-globals().update(wrap_web_tests_adapter())
diff --git a/tornado/web.py b/tornado/web.py
index d8afbe8a..aa5d02e1 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -144,12 +144,12 @@ May be overridden by passing a ``min_version`` keyword argument.
.. versionadded:: 3.2.1
"""
-class RequestHandler(object):
- """Subclass this class and define `get()` or `post()` to make a handler.
- If you want to support more methods than the standard GET/HEAD/POST, you
- should override the class variable ``SUPPORTED_METHODS`` in your
- `RequestHandler` subclass.
+class RequestHandler(object):
+ """Base class for HTTP request handlers.
+
+ Subclasses must define at least one of the methods defined in the
+ "Entry points" section below.
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
"OPTIONS")
diff --git a/tornado/websocket.py b/tornado/websocket.py
index adf238be..2f57b990 100644
--- a/tornado/websocket.py
+++ b/tornado/websocket.py
@@ -835,7 +835,8 @@ class WebSocketProtocol13(WebSocketProtocol):
self.handler.close_code = struct.unpack('>H', data[:2])[0]
if len(data) > 2:
self.handler.close_reason = to_unicode(data[2:])
- self.close()
+ # Echo the received close code, if any (RFC 6455 section 5.5.1).
+ self.close(self.handler.close_code)
elif opcode == 0x9:
# Ping
self._write_frame(True, 0xA, data)
@@ -886,6 +887,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
self.read_queue = collections.deque()
self.key = base64.b64encode(os.urandom(16))
self._on_message_callback = on_message_callback
+ self.close_code = self.close_reason = None
scheme, sep, rest = request.url.partition(':')
scheme = {'ws': 'http', 'wss': 'https'}[scheme]