remove test files from included packages

Alan Hamlett 2016-04-18 23:39:01 +02:00
parent 9bc81a341a
commit a8653a3294
32 changed files with 0 additions and 3228 deletions
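
The diff below only deletes the bundled test modules. For context, a common way to keep such test packages out of a built distribution going forward is to exclude them at packaging time. The sketch that follows is purely illustrative and is not part of this commit; the project name, version, and exclude patterns are assumptions, not taken from this repository.

# Hypothetical setup.py excerpt (illustrative only, not from this commit):
# exclude bundled test packages such as pytz/tests and simplejson/tests
# from the distribution so they are not shipped with the package.
from setuptools import setup, find_packages

setup(
    name='example-package',  # placeholder, not the real project name
    version='0.0.1',
    packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']),
)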


@@ -1,34 +0,0 @@
# -*- coding: ascii -*-
from doctest import DocFileSuite
import unittest, os.path, sys
THIS_DIR = os.path.dirname(__file__)
README = os.path.join(THIS_DIR, os.pardir, os.pardir, 'README.txt')
class DocumentationTestCase(unittest.TestCase):
def test_readme_encoding(self):
'''Confirm the README.txt is pure ASCII.'''
f = open(README, 'rb')
try:
f.read().decode('US-ASCII')
finally:
f.close()
def test_suite():
"For the Z3 test runner"
return unittest.TestSuite((
DocumentationTestCase('test_readme_encoding'),
DocFileSuite(os.path.join(os.pardir, os.pardir, 'README.txt'))))
if __name__ == '__main__':
sys.path.insert(0, os.path.abspath(os.path.join(
THIS_DIR, os.pardir, os.pardir
)))
unittest.main(defaultTest='test_suite')


@@ -1,313 +0,0 @@
from operator import *
import os.path
import sys
import unittest
import warnings
if __name__ == '__main__':
# Only munge path if invoked as a script. Test runners should have set up
# the paths already.
sys.path.insert(0, os.path.abspath(os.path.join(os.pardir, os.pardir)))
from pytz.lazy import LazyList, LazySet
class LazyListTestCase(unittest.TestCase):
initial_data = [3,2,1]
def setUp(self):
self.base = [3, 2, 1]
self.lesser = [2, 1, 0]
self.greater = [4, 3, 2]
self.lazy = LazyList(iter(list(self.base)))
def test_unary_ops(self):
unary_ops = [str, repr, len, bool, not_]
try:
unary_ops.append(unicode)
except NameError:
pass # unicode no longer exists in Python 3.
for op in unary_ops:
self.assertEqual(
op(self.lazy),
op(self.base), str(op))
def test_binary_ops(self):
binary_ops = [eq, ge, gt, le, lt, ne, add, concat]
try:
binary_ops.append(cmp)
except NameError:
pass # cmp no longer exists in Python 3.
for op in binary_ops:
self.assertEqual(
op(self.lazy, self.lazy),
op(self.base, self.base), str(op))
for other in [self.base, self.lesser, self.greater]:
self.assertEqual(
op(self.lazy, other),
op(self.base, other), '%s %s' % (op, other))
self.assertEqual(
op(other, self.lazy),
op(other, self.base), '%s %s' % (op, other))
# Multiplication
self.assertEqual(self.lazy * 3, self.base * 3)
self.assertEqual(3 * self.lazy, 3 * self.base)
# Contains
self.assertTrue(2 in self.lazy)
self.assertFalse(42 in self.lazy)
def test_iadd(self):
self.lazy += [1]
self.base += [1]
self.assertEqual(self.lazy, self.base)
def test_bool(self):
self.assertTrue(bool(self.lazy))
self.assertFalse(bool(LazyList()))
self.assertFalse(bool(LazyList(iter([]))))
def test_hash(self):
self.assertRaises(TypeError, hash, self.lazy)
def test_isinstance(self):
self.assertTrue(isinstance(self.lazy, list))
self.assertFalse(isinstance(self.lazy, tuple))
def test_callable(self):
try:
callable
except NameError:
return # No longer exists with Python 3.
self.assertFalse(callable(self.lazy))
def test_append(self):
self.base.append('extra')
self.lazy.append('extra')
self.assertEqual(self.lazy, self.base)
def test_count(self):
self.assertEqual(self.lazy.count(2), 1)
def test_index(self):
self.assertEqual(self.lazy.index(2), 1)
def test_extend(self):
self.base.extend([6, 7])
self.lazy.extend([6, 7])
self.assertEqual(self.lazy, self.base)
def test_insert(self):
self.base.insert(0, 'ping')
self.lazy.insert(0, 'ping')
self.assertEqual(self.lazy, self.base)
def test_pop(self):
self.assertEqual(self.lazy.pop(), self.base.pop())
self.assertEqual(self.lazy, self.base)
def test_remove(self):
self.base.remove(2)
self.lazy.remove(2)
self.assertEqual(self.lazy, self.base)
def test_reverse(self):
self.base.reverse()
self.lazy.reverse()
self.assertEqual(self.lazy, self.base)
def test_reversed(self):
self.assertEqual(list(reversed(self.lazy)), list(reversed(self.base)))
def test_sort(self):
self.base.sort()
self.assertNotEqual(self.lazy, self.base, 'Test data already sorted')
self.lazy.sort()
self.assertEqual(self.lazy, self.base)
def test_sorted(self):
self.assertEqual(sorted(self.lazy), sorted(self.base))
def test_getitem(self):
for idx in range(-len(self.base), len(self.base)):
self.assertEqual(self.lazy[idx], self.base[idx])
def test_setitem(self):
for idx in range(-len(self.base), len(self.base)):
self.base[idx] = idx + 1000
self.assertNotEqual(self.lazy, self.base)
self.lazy[idx] = idx + 1000
self.assertEqual(self.lazy, self.base)
def test_delitem(self):
del self.base[0]
self.assertNotEqual(self.lazy, self.base)
del self.lazy[0]
self.assertEqual(self.lazy, self.base)
del self.base[-2]
self.assertNotEqual(self.lazy, self.base)
del self.lazy[-2]
self.assertEqual(self.lazy, self.base)
def test_iter(self):
self.assertEqual(list(iter(self.lazy)), list(iter(self.base)))
def test_getslice(self):
for i in range(-len(self.base), len(self.base)):
for j in range(-len(self.base), len(self.base)):
for step in [-1, 1]:
self.assertEqual(self.lazy[i:j:step], self.base[i:j:step])
def test_setslice(self):
for i in range(-len(self.base), len(self.base)):
for j in range(-len(self.base), len(self.base)):
for step in [-1, 1]:
replacement = range(0, len(self.base[i:j:step]))
self.base[i:j:step] = replacement
self.lazy[i:j:step] = replacement
self.assertEqual(self.lazy, self.base)
def test_delslice(self):
del self.base[0:1]
del self.lazy[0:1]
self.assertEqual(self.lazy, self.base)
del self.base[-1:1:-1]
del self.lazy[-1:1:-1]
self.assertEqual(self.lazy, self.base)
class LazySetTestCase(unittest.TestCase):
initial_data = set([3,2,1])
def setUp(self):
self.base = set([3, 2, 1])
self.lazy = LazySet(iter(set(self.base)))
def test_unary_ops(self):
# These ops just need to work.
unary_ops = [str, repr]
try:
unary_ops.append(unicode)
except NameError:
pass # unicode no longer exists in Python 3.
for op in unary_ops:
op(self.lazy) # These ops just need to work.
# These ops should return identical values as a real set.
unary_ops = [len, bool, not_]
for op in unary_ops:
self.assertEqual(
op(self.lazy),
op(self.base), '%s(lazy) == %r' % (op, op(self.lazy)))
def test_binary_ops(self):
binary_ops = [eq, ge, gt, le, lt, ne, sub, and_, or_, xor]
try:
binary_ops.append(cmp)
except NameError:
pass # cmp no longer exists in Python 3.
for op in binary_ops:
self.assertEqual(
op(self.lazy, self.lazy),
op(self.base, self.base), str(op))
self.assertEqual(
op(self.lazy, self.base),
op(self.base, self.base), str(op))
self.assertEqual(
op(self.base, self.lazy),
op(self.base, self.base), str(op))
# Contains
self.assertTrue(2 in self.lazy)
self.assertFalse(42 in self.lazy)
def test_iops(self):
try:
iops = [isub, iand, ior, ixor]
except NameError:
return # Don't exist in older Python versions.
for op in iops:
# Mutating operators, so make fresh copies.
lazy = LazySet(self.base)
base = self.base.copy()
op(lazy, set([1]))
op(base, set([1]))
self.assertEqual(lazy, base, str(op))
def test_bool(self):
self.assertTrue(bool(self.lazy))
self.assertFalse(bool(LazySet()))
self.assertFalse(bool(LazySet(iter([]))))
def test_hash(self):
self.assertRaises(TypeError, hash, self.lazy)
def test_isinstance(self):
self.assertTrue(isinstance(self.lazy, set))
def test_callable(self):
try:
callable
except NameError:
return # No longer exists with Python 3.
self.assertFalse(callable(self.lazy))
def test_add(self):
self.base.add('extra')
self.lazy.add('extra')
self.assertEqual(self.lazy, self.base)
def test_copy(self):
self.assertEqual(self.lazy.copy(), self.base)
def test_method_ops(self):
ops = [
'difference', 'intersection', 'isdisjoint',
'issubset', 'issuperset', 'symmetric_difference', 'union',
'difference_update', 'intersection_update',
'symmetric_difference_update', 'update']
for op in ops:
if not hasattr(set, op):
continue # Not in this version of Python.
# Make a copy, as some of the ops are mutating.
lazy = LazySet(set(self.base))
base = set(self.base)
self.assertEqual(
getattr(self.lazy, op)(set([1])),
getattr(self.base, op)(set([1])), op)
self.assertEqual(self.lazy, self.base, op)
def test_discard(self):
self.base.discard(1)
self.assertNotEqual(self.lazy, self.base)
self.lazy.discard(1)
self.assertEqual(self.lazy, self.base)
def test_pop(self):
self.assertEqual(self.lazy.pop(), self.base.pop())
self.assertEqual(self.lazy, self.base)
def test_remove(self):
self.base.remove(2)
self.lazy.remove(2)
self.assertEqual(self.lazy, self.base)
def test_clear(self):
self.lazy.clear()
self.assertEqual(self.lazy, set())
if __name__ == '__main__':
warnings.simplefilter("error") # Warnings should be fatal in tests.
unittest.main()


@@ -1,837 +0,0 @@
# -*- coding: ascii -*-
import sys, os, os.path
import unittest, doctest
try:
import cPickle as pickle
except ImportError:
import pickle
from datetime import datetime, time, timedelta, tzinfo
import warnings
if __name__ == '__main__':
# Only munge path if invoked as a script. Test runners should have set up
# the paths already.
sys.path.insert(0, os.path.abspath(os.path.join(os.pardir, os.pardir)))
import pytz
from pytz import reference
from pytz.tzfile import _byte_string
from pytz.tzinfo import DstTzInfo, StaticTzInfo
# I test for expected version to ensure the correct version of pytz is
# actually being tested.
EXPECTED_VERSION='2016.3'
EXPECTED_OLSON_VERSION='2016c'
fmt = '%Y-%m-%d %H:%M:%S %Z%z'
NOTIME = timedelta(0)
# GMT is a tzinfo.StaticTzInfo--the class we primarily want to test--while
# UTC is the reference implementation. They both have the same timezone meaning.
UTC = pytz.timezone('UTC')
GMT = pytz.timezone('GMT')
assert isinstance(GMT, StaticTzInfo), 'GMT is no longer a StaticTzInfo'
def prettydt(dt):
"""datetime as a string using a known format.
We don't use strftime as it doesn't handle years earlier than 1900
per http://bugs.python.org/issue1777412
"""
if dt.utcoffset() >= timedelta(0):
offset = '+%s' % (dt.utcoffset(),)
else:
offset = '-%s' % (-1 * dt.utcoffset(),)
return '%04d-%02d-%02d %02d:%02d:%02d %s %s' % (
dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.tzname(), offset)
try:
unicode
except NameError:
# Python 3.x doesn't have unicode(), making writing code
# for Python 2.3 and Python 3.x a pain.
unicode = str
class BasicTest(unittest.TestCase):
def testVersion(self):
# Ensuring the correct version of pytz has been loaded
self.assertEqual(EXPECTED_VERSION, pytz.__version__,
'Incorrect pytz version loaded. Import path is stuffed '
'or this test needs updating. (Wanted %s, got %s)'
% (EXPECTED_VERSION, pytz.__version__))
self.assertEqual(EXPECTED_OLSON_VERSION, pytz.OLSON_VERSION,
'Incorrect pytz version loaded. Import path is stuffed '
'or this test needs updating. (Wanted %s, got %s)'
% (EXPECTED_OLSON_VERSION, pytz.OLSON_VERSION))
def testGMT(self):
now = datetime.now(tz=GMT)
self.assertTrue(now.utcoffset() == NOTIME)
self.assertTrue(now.dst() == NOTIME)
self.assertTrue(now.timetuple() == now.utctimetuple())
self.assertTrue(now==now.replace(tzinfo=UTC))
def testReferenceUTC(self):
now = datetime.now(tz=UTC)
self.assertTrue(now.utcoffset() == NOTIME)
self.assertTrue(now.dst() == NOTIME)
self.assertTrue(now.timetuple() == now.utctimetuple())
def testUnknownOffsets(self):
# This tzinfo behavior is required to make
# datetime.time.{utcoffset, dst, tzname} work as documented.
dst_tz = pytz.timezone('US/Eastern')
# This information is not known when we don't have a date,
# so return None per API.
self.assertTrue(dst_tz.utcoffset(None) is None)
self.assertTrue(dst_tz.dst(None) is None)
# We don't know the abbreviation, but this is still a valid
# tzname per the Python documentation.
self.assertEqual(dst_tz.tzname(None), 'US/Eastern')
def clearCache(self):
pytz._tzinfo_cache.clear()
def testUnicodeTimezone(self):
# We need to ensure that cold lookups work for both Unicode
# and traditional strings, and that the desired singleton is
# returned.
self.clearCache()
eastern = pytz.timezone(unicode('US/Eastern'))
self.assertTrue(eastern is pytz.timezone('US/Eastern'))
self.clearCache()
eastern = pytz.timezone('US/Eastern')
self.assertTrue(eastern is pytz.timezone(unicode('US/Eastern')))
class PicklingTest(unittest.TestCase):
def _roundtrip_tzinfo(self, tz):
p = pickle.dumps(tz)
unpickled_tz = pickle.loads(p)
self.assertTrue(tz is unpickled_tz, '%s did not roundtrip' % tz.zone)
def _roundtrip_datetime(self, dt):
# Ensure that the tzinfo attached to a datetime instance
# is identical to the one returned. This is important for
# DST timezones, as some state is stored in the tzinfo.
tz = dt.tzinfo
p = pickle.dumps(dt)
unpickled_dt = pickle.loads(p)
unpickled_tz = unpickled_dt.tzinfo
self.assertTrue(tz is unpickled_tz, '%s did not roundtrip' % tz.zone)
def testDst(self):
tz = pytz.timezone('Europe/Amsterdam')
dt = datetime(2004, 2, 1, 0, 0, 0)
for localized_tz in tz._tzinfos.values():
self._roundtrip_tzinfo(localized_tz)
self._roundtrip_datetime(dt.replace(tzinfo=localized_tz))
def testRoundtrip(self):
dt = datetime(2004, 2, 1, 0, 0, 0)
for zone in pytz.all_timezones:
tz = pytz.timezone(zone)
self._roundtrip_tzinfo(tz)
def testDatabaseFixes(self):
# Hack the pickle to make it refer to a timezone abbreviation
# that does not match anything. The unpickler should be able
# to repair this case
tz = pytz.timezone('Australia/Melbourne')
p = pickle.dumps(tz)
tzname = tz._tzname
hacked_p = p.replace(_byte_string(tzname),
_byte_string('?'*len(tzname)))
self.assertNotEqual(p, hacked_p)
unpickled_tz = pickle.loads(hacked_p)
self.assertTrue(tz is unpickled_tz)
# Simulate a database correction. In this case, the incorrect
# data will continue to be used.
p = pickle.dumps(tz)
new_utcoffset = tz._utcoffset.seconds + 42
# Python 3 introduced a new pickle protocol where numbers are stored in
# hexadecimal representation. Here we extract the pickle
# representation of the number for the current Python version.
old_pickle_pattern = pickle.dumps(tz._utcoffset.seconds)[3:-1]
new_pickle_pattern = pickle.dumps(new_utcoffset)[3:-1]
hacked_p = p.replace(old_pickle_pattern, new_pickle_pattern)
self.assertNotEqual(p, hacked_p)
unpickled_tz = pickle.loads(hacked_p)
self.assertEqual(unpickled_tz._utcoffset.seconds, new_utcoffset)
self.assertTrue(tz is not unpickled_tz)
def testOldPickles(self):
# Ensure that applications serializing pytz instances as pickles
# have no trouble upgrading to a new pytz release. These pickles
# were created with pytz2006j.
east1 = pickle.loads(_byte_string(
"cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
"I0\nS'EST'\np3\ntRp4\n."
))
east2 = pytz.timezone('US/Eastern').localize(
datetime(2006, 1, 1)).tzinfo
self.assertTrue(east1 is east2)
# Confirm changes in name munging between 2006j and 2007c cause
# no problems.
pap1 = pickle.loads(_byte_string(
"cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
"\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
pap2 = pytz.timezone('America/Port-au-Prince').localize(
datetime(1910, 1, 1)).tzinfo
self.assertTrue(pap1 is pap2)
gmt1 = pickle.loads(_byte_string(
"cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
gmt2 = pytz.timezone('Etc/GMT+10')
self.assertTrue(gmt1 is gmt2)
class USEasternDSTStartTestCase(unittest.TestCase):
tzinfo = pytz.timezone('US/Eastern')
# The instant of the DST changeover
transition_time = datetime(2002, 4, 7, 7, 0, 0, tzinfo=UTC)
# Increase for 'flexible' DST transitions due to 1 minute granularity
# of Python's datetime library
instant = timedelta(seconds=1)
# before transition
before = {
'tzname': 'EST',
'utcoffset': timedelta(hours = -5),
'dst': timedelta(hours = 0),
}
# after transition
after = {
'tzname': 'EDT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
def _test_tzname(self, utc_dt, wanted):
tzname = wanted['tzname']
dt = utc_dt.astimezone(self.tzinfo)
self.assertEqual(dt.tzname(), tzname,
'Expected %s as tzname for %s. Got %s' % (
tzname, str(utc_dt), dt.tzname()
)
)
def _test_utcoffset(self, utc_dt, wanted):
utcoffset = wanted['utcoffset']
dt = utc_dt.astimezone(self.tzinfo)
self.assertEqual(
dt.utcoffset(), wanted['utcoffset'],
'Expected %s as utcoffset for %s. Got %s' % (
utcoffset, utc_dt, dt.utcoffset()
)
)
def _test_dst(self, utc_dt, wanted):
dst = wanted['dst']
dt = utc_dt.astimezone(self.tzinfo)
self.assertEqual(dt.dst(),dst,
'Expected %s as dst for %s. Got %s' % (
dst, utc_dt, dt.dst()
)
)
def test_arithmetic(self):
utc_dt = self.transition_time
for days in range(-420, 720, 20):
delta = timedelta(days=days)
# Make sure we can get back where we started
dt = utc_dt.astimezone(self.tzinfo)
dt2 = dt + delta
dt2 = dt2 - delta
self.assertEqual(dt, dt2)
# Make sure arithmetic crossing DST boundaries ends
# up in the correct timezone after normalization
utc_plus_delta = (utc_dt + delta).astimezone(self.tzinfo)
local_plus_delta = self.tzinfo.normalize(dt + delta)
self.assertEqual(
prettydt(utc_plus_delta),
prettydt(local_plus_delta),
'Incorrect result for delta==%d days. Wanted %r. Got %r'%(
days,
prettydt(utc_plus_delta),
prettydt(local_plus_delta),
)
)
def _test_all(self, utc_dt, wanted):
self._test_utcoffset(utc_dt, wanted)
self._test_tzname(utc_dt, wanted)
self._test_dst(utc_dt, wanted)
def testDayBefore(self):
self._test_all(
self.transition_time - timedelta(days=1), self.before
)
def testTwoHoursBefore(self):
self._test_all(
self.transition_time - timedelta(hours=2), self.before
)
def testHourBefore(self):
self._test_all(
self.transition_time - timedelta(hours=1), self.before
)
def testInstantBefore(self):
self._test_all(
self.transition_time - self.instant, self.before
)
def testTransition(self):
self._test_all(
self.transition_time, self.after
)
def testInstantAfter(self):
self._test_all(
self.transition_time + self.instant, self.after
)
def testHourAfter(self):
self._test_all(
self.transition_time + timedelta(hours=1), self.after
)
def testTwoHoursAfter(self):
self._test_all(
self.transition_time + timedelta(hours=2), self.after
)
def testDayAfter(self):
self._test_all(
self.transition_time + timedelta(days=1), self.after
)
class USEasternDSTEndTestCase(USEasternDSTStartTestCase):
tzinfo = pytz.timezone('US/Eastern')
transition_time = datetime(2002, 10, 27, 6, 0, 0, tzinfo=UTC)
before = {
'tzname': 'EDT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
after = {
'tzname': 'EST',
'utcoffset': timedelta(hours = -5),
'dst': timedelta(hours = 0),
}
class USEasternEPTStartTestCase(USEasternDSTStartTestCase):
transition_time = datetime(1945, 8, 14, 23, 0, 0, tzinfo=UTC)
before = {
'tzname': 'EWT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
after = {
'tzname': 'EPT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
class USEasternEPTEndTestCase(USEasternDSTStartTestCase):
transition_time = datetime(1945, 9, 30, 6, 0, 0, tzinfo=UTC)
before = {
'tzname': 'EPT',
'utcoffset': timedelta(hours = -4),
'dst': timedelta(hours = 1),
}
after = {
'tzname': 'EST',
'utcoffset': timedelta(hours = -5),
'dst': timedelta(hours = 0),
}
class WarsawWMTEndTestCase(USEasternDSTStartTestCase):
# In 1915, Warsaw changed from Warsaw time to Central European time.
# This involved the clocks being set backwards, causing an end-of-DST-like
# situation without DST being involved.
tzinfo = pytz.timezone('Europe/Warsaw')
transition_time = datetime(1915, 8, 4, 22, 36, 0, tzinfo=UTC)
before = {
'tzname': 'WMT',
'utcoffset': timedelta(hours=1, minutes=24),
'dst': timedelta(0),
}
after = {
'tzname': 'CET',
'utcoffset': timedelta(hours=1),
'dst': timedelta(0),
}
class VilniusWMTEndTestCase(USEasternDSTStartTestCase):
# At the end of 1916, Vilnius changed timezones putting its clock
# forward by 11 minutes 35 seconds. Neither timezone was in DST mode.
tzinfo = pytz.timezone('Europe/Vilnius')
instant = timedelta(seconds=31)
transition_time = datetime(1916, 12, 31, 22, 36, 00, tzinfo=UTC)
before = {
'tzname': 'WMT',
'utcoffset': timedelta(hours=1, minutes=24),
'dst': timedelta(0),
}
after = {
'tzname': 'KMT',
'utcoffset': timedelta(hours=1, minutes=36), # Really 1:35:36
'dst': timedelta(0),
}
class VilniusCESTStartTestCase(USEasternDSTStartTestCase):
# In 1941, Vilnius changed from MSK to CEST, switching to summer
# time while simultaneously reducing its UTC offset by two hours,
# causing the clocks to go backwards for this summer time
# switchover.
tzinfo = pytz.timezone('Europe/Vilnius')
transition_time = datetime(1941, 6, 23, 21, 00, 00, tzinfo=UTC)
before = {
'tzname': 'MSK',
'utcoffset': timedelta(hours=3),
'dst': timedelta(0),
}
after = {
'tzname': 'CEST',
'utcoffset': timedelta(hours=2),
'dst': timedelta(hours=1),
}
class LondonHistoryStartTestCase(USEasternDSTStartTestCase):
# The first known timezone transition in London was in 1847 when
# clocks were synchronized to GMT. However, we currently only
# understand v1 format tzfile(5) files, which do not handle years
# this far in the past, so our earliest known transition is in
# 1916.
tzinfo = pytz.timezone('Europe/London')
# transition_time = datetime(1847, 12, 1, 1, 15, 00, tzinfo=UTC)
# before = {
# 'tzname': 'LMT',
# 'utcoffset': timedelta(minutes=-75),
# 'dst': timedelta(0),
# }
# after = {
# 'tzname': 'GMT',
# 'utcoffset': timedelta(0),
# 'dst': timedelta(0),
# }
transition_time = datetime(1916, 5, 21, 2, 00, 00, tzinfo=UTC)
before = {
'tzname': 'GMT',
'utcoffset': timedelta(0),
'dst': timedelta(0),
}
after = {
'tzname': 'BST',
'utcoffset': timedelta(hours=1),
'dst': timedelta(hours=1),
}
class LondonHistoryEndTestCase(USEasternDSTStartTestCase):
# Timezone switchovers are projected into the future, even
# though no official statements exist or could be believed even
# if they did exist. We currently only check the last known
# transition in 2037, as we are still using v1 format tzfile(5)
# files.
tzinfo = pytz.timezone('Europe/London')
# transition_time = datetime(2499, 10, 25, 1, 0, 0, tzinfo=UTC)
transition_time = datetime(2037, 10, 25, 1, 0, 0, tzinfo=UTC)
before = {
'tzname': 'BST',
'utcoffset': timedelta(hours=1),
'dst': timedelta(hours=1),
}
after = {
'tzname': 'GMT',
'utcoffset': timedelta(0),
'dst': timedelta(0),
}
class NoumeaHistoryStartTestCase(USEasternDSTStartTestCase):
# Noumea adopted a whole hour offset in 1912. Previously
# it was 11 hours, 5 minutes and 48 seconds off UTC. However,
# due to limitations of the Python datetime library, we need
# to round that to 11 hours 6 minutes.
tzinfo = pytz.timezone('Pacific/Noumea')
transition_time = datetime(1912, 1, 12, 12, 54, 12, tzinfo=UTC)
before = {
'tzname': 'LMT',
'utcoffset': timedelta(hours=11, minutes=6),
'dst': timedelta(0),
}
after = {
'tzname': 'NCT',
'utcoffset': timedelta(hours=11),
'dst': timedelta(0),
}
class NoumeaDSTEndTestCase(USEasternDSTStartTestCase):
# Noumea dropped DST in 1997.
tzinfo = pytz.timezone('Pacific/Noumea')
transition_time = datetime(1997, 3, 1, 15, 00, 00, tzinfo=UTC)
before = {
'tzname': 'NCST',
'utcoffset': timedelta(hours=12),
'dst': timedelta(hours=1),
}
after = {
'tzname': 'NCT',
'utcoffset': timedelta(hours=11),
'dst': timedelta(0),
}
class NoumeaNoMoreDSTTestCase(NoumeaDSTEndTestCase):
# Noumea dropped DST in 1997. Here we test that it stops occurring.
transition_time = (
NoumeaDSTEndTestCase.transition_time + timedelta(days=365*10))
before = NoumeaDSTEndTestCase.after
after = NoumeaDSTEndTestCase.after
class TahitiTestCase(USEasternDSTStartTestCase):
# Tahiti has had a single transition in its history.
tzinfo = pytz.timezone('Pacific/Tahiti')
transition_time = datetime(1912, 10, 1, 9, 58, 16, tzinfo=UTC)
before = {
'tzname': 'LMT',
'utcoffset': timedelta(hours=-9, minutes=-58),
'dst': timedelta(0),
}
after = {
'tzname': 'TAHT',
'utcoffset': timedelta(hours=-10),
'dst': timedelta(0),
}
class SamoaInternationalDateLineChange(USEasternDSTStartTestCase):
# At the end of 2011, Samoa will switch from being east of the
# international dateline to the west. There will be no Dec 30th
# 2011 and it will switch from UTC-10 to UTC+14.
tzinfo = pytz.timezone('Pacific/Apia')
transition_time = datetime(2011, 12, 30, 10, 0, 0, tzinfo=UTC)
before = {
'tzname': 'SDT',
'utcoffset': timedelta(hours=-10),
'dst': timedelta(hours=1),
}
after = {
'tzname': 'WSDT',
'utcoffset': timedelta(hours=14),
'dst': timedelta(hours=1),
}
class ReferenceUSEasternDSTStartTestCase(USEasternDSTStartTestCase):
tzinfo = reference.Eastern
def test_arithmetic(self):
# Reference implementation cannot handle this
pass
class ReferenceUSEasternDSTEndTestCase(USEasternDSTEndTestCase):
tzinfo = reference.Eastern
def testHourBefore(self):
# Python's datetime library has a bug, where the hour before
# a daylight saving transition is one hour out. For example,
# at the end of US/Eastern daylight saving time, 01:00 EST
# occurs twice (once at 05:00 UTC and once at 06:00 UTC),
# whereas the first should actually be 01:00 EDT.
# Note that this bug is by design - by accepting this ambiguity
# for one hour per year, an is_dst flag on datetime.time
# became unnecessary.
self._test_all(
self.transition_time - timedelta(hours=1), self.after
)
def testInstantBefore(self):
self._test_all(
self.transition_time - timedelta(seconds=1), self.after
)
def test_arithmetic(self):
# Reference implementation cannot handle this
pass
class LocalTestCase(unittest.TestCase):
def testLocalize(self):
loc_tz = pytz.timezone('Europe/Amsterdam')
loc_time = loc_tz.localize(datetime(1930, 5, 10, 0, 0, 0))
# Actually +00:19:32, but Python datetime rounds this
self.assertEqual(loc_time.strftime('%Z%z'), 'AMT+0020')
loc_time = loc_tz.localize(datetime(1930, 5, 20, 0, 0, 0))
# Actually +01:19:32, but Python datetime rounds this
self.assertEqual(loc_time.strftime('%Z%z'), 'NST+0120')
loc_time = loc_tz.localize(datetime(1940, 5, 10, 0, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'NET+0020')
loc_time = loc_tz.localize(datetime(1940, 5, 20, 0, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'CEST+0200')
loc_time = loc_tz.localize(datetime(2004, 2, 1, 0, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'CET+0100')
loc_time = loc_tz.localize(datetime(2004, 4, 1, 0, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'CEST+0200')
tz = pytz.timezone('Europe/Amsterdam')
loc_time = loc_tz.localize(datetime(1943, 3, 29, 1, 59, 59))
self.assertEqual(loc_time.strftime('%Z%z'), 'CET+0100')
# Switch to US
loc_tz = pytz.timezone('US/Eastern')
# End of DST ambiguity check
loc_time = loc_tz.localize(datetime(1918, 10, 27, 1, 59, 59), is_dst=1)
self.assertEqual(loc_time.strftime('%Z%z'), 'EDT-0400')
loc_time = loc_tz.localize(datetime(1918, 10, 27, 1, 59, 59), is_dst=0)
self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')
self.assertRaises(pytz.AmbiguousTimeError,
loc_tz.localize, datetime(1918, 10, 27, 1, 59, 59), is_dst=None
)
# Start of DST non-existent times
loc_time = loc_tz.localize(datetime(1918, 3, 31, 2, 0, 0), is_dst=0)
self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')
loc_time = loc_tz.localize(datetime(1918, 3, 31, 2, 0, 0), is_dst=1)
self.assertEqual(loc_time.strftime('%Z%z'), 'EDT-0400')
self.assertRaises(pytz.NonExistentTimeError,
loc_tz.localize, datetime(1918, 3, 31, 2, 0, 0), is_dst=None
)
# Weird changes - war time and peace time both is_dst==True
loc_time = loc_tz.localize(datetime(1942, 2, 9, 3, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'EWT-0400')
loc_time = loc_tz.localize(datetime(1945, 8, 14, 19, 0, 0))
self.assertEqual(loc_time.strftime('%Z%z'), 'EPT-0400')
loc_time = loc_tz.localize(datetime(1945, 9, 30, 1, 0, 0), is_dst=1)
self.assertEqual(loc_time.strftime('%Z%z'), 'EPT-0400')
loc_time = loc_tz.localize(datetime(1945, 9, 30, 1, 0, 0), is_dst=0)
self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')
# Weird changes - ambiguous time (end-of-DST like) but is_dst==False
for zonename, ambiguous_naive, expected in [
('Europe/Warsaw', datetime(1915, 8, 4, 23, 59, 59),
['1915-08-04 23:59:59 WMT+0124',
'1915-08-04 23:59:59 CET+0100']),
('Europe/Moscow', datetime(2014, 10, 26, 1, 30),
['2014-10-26 01:30:00 MSK+0400',
'2014-10-26 01:30:00 MSK+0300'])]:
loc_tz = pytz.timezone(zonename)
self.assertRaises(pytz.AmbiguousTimeError,
loc_tz.localize, ambiguous_naive, is_dst=None
)
# Also test non-boolean is_dst in the weird case
for dst in [True, timedelta(1), False, timedelta(0)]:
loc_time = loc_tz.localize(ambiguous_naive, is_dst=dst)
self.assertEqual(loc_time.strftime(fmt), expected[not dst])
def testNormalize(self):
tz = pytz.timezone('US/Eastern')
dt = datetime(2004, 4, 4, 7, 0, 0, tzinfo=UTC).astimezone(tz)
dt2 = dt - timedelta(minutes=10)
self.assertEqual(
dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
'2004-04-04 02:50:00 EDT-0400'
)
dt2 = tz.normalize(dt2)
self.assertEqual(
dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
'2004-04-04 01:50:00 EST-0500'
)
def testPartialMinuteOffsets(self):
# utcoffset in Amsterdam was not a whole minute until 1937.
# However, we fudge this by rounding them, as the Python
# datetime library does not support sub-minute offsets.
tz = pytz.timezone('Europe/Amsterdam')
utc_dt = datetime(1914, 1, 1, 13, 40, 28, tzinfo=UTC) # correct
utc_dt = utc_dt.replace(second=0) # But we need to fudge it
loc_dt = utc_dt.astimezone(tz)
self.assertEqual(
loc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
'1914-01-01 14:00:00 AMT+0020'
)
# And get back...
utc_dt = loc_dt.astimezone(UTC)
self.assertEqual(
utc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
'1914-01-01 13:40:00 UTC+0000'
)
def no_testCreateLocaltime(self):
# It would be nice if this worked, but it doesn't.
tz = pytz.timezone('Europe/Amsterdam')
dt = datetime(2004, 10, 31, 2, 0, 0, tzinfo=tz)
self.assertEqual(
dt.strftime(fmt),
'2004-10-31 02:00:00 CET+0100'
)
class CommonTimezonesTestCase(unittest.TestCase):
def test_bratislava(self):
# Bratislava is the default timezone for Slovakia, but our
# heuristics were not adding it to common_timezones. Ideally,
# common_timezones should be populated from zone.tab at runtime,
# but I'm hesitant to pay the startup cost as loading the list
# on demand whilst remaining backwards compatible seems
# difficult.
self.assertTrue('Europe/Bratislava' in pytz.common_timezones)
self.assertTrue('Europe/Bratislava' in pytz.common_timezones_set)
def test_us_eastern(self):
self.assertTrue('US/Eastern' in pytz.common_timezones)
self.assertTrue('US/Eastern' in pytz.common_timezones_set)
def test_belfast(self):
# Belfast uses London time.
self.assertTrue('Europe/Belfast' in pytz.all_timezones_set)
self.assertFalse('Europe/Belfast' in pytz.common_timezones)
self.assertFalse('Europe/Belfast' in pytz.common_timezones_set)
class BaseTzInfoTestCase:
'''Ensure UTC, StaticTzInfo and DstTzInfo work consistently.
These tests are run for each type of tzinfo.
'''
tz = None # override
tz_class = None # override
def test_expectedclass(self):
self.assertTrue(isinstance(self.tz, self.tz_class))
def test_fromutc(self):
# naive datetime.
dt1 = datetime(2011, 10, 31)
# localized datetime, same timezone.
dt2 = self.tz.localize(dt1)
# Both should give the same results. Note that the standard
# Python tzinfo.fromutc() only supports the second.
for dt in [dt1, dt2]:
loc_dt = self.tz.fromutc(dt)
loc_dt2 = pytz.utc.localize(dt1).astimezone(self.tz)
self.assertEqual(loc_dt, loc_dt2)
# localized datetime, different timezone.
new_tz = pytz.timezone('Europe/Paris')
self.assertTrue(self.tz is not new_tz)
dt3 = new_tz.localize(dt1)
self.assertRaises(ValueError, self.tz.fromutc, dt3)
def test_normalize(self):
other_tz = pytz.timezone('Europe/Paris')
self.assertTrue(self.tz is not other_tz)
dt = datetime(2012, 3, 26, 12, 0)
other_dt = other_tz.localize(dt)
local_dt = self.tz.normalize(other_dt)
self.assertTrue(local_dt.tzinfo is not other_dt.tzinfo)
self.assertNotEqual(
local_dt.replace(tzinfo=None), other_dt.replace(tzinfo=None))
def test_astimezone(self):
other_tz = pytz.timezone('Europe/Paris')
self.assertTrue(self.tz is not other_tz)
dt = datetime(2012, 3, 26, 12, 0)
other_dt = other_tz.localize(dt)
local_dt = other_dt.astimezone(self.tz)
self.assertTrue(local_dt.tzinfo is not other_dt.tzinfo)
self.assertNotEqual(
local_dt.replace(tzinfo=None), other_dt.replace(tzinfo=None))
class OptimizedUTCTestCase(unittest.TestCase, BaseTzInfoTestCase):
tz = pytz.utc
tz_class = tz.__class__
class LegacyUTCTestCase(unittest.TestCase, BaseTzInfoTestCase):
# Deprecated timezone, but useful for comparison tests.
tz = pytz.timezone('Etc/UTC')
tz_class = StaticTzInfo
class StaticTzInfoTestCase(unittest.TestCase, BaseTzInfoTestCase):
tz = pytz.timezone('GMT')
tz_class = StaticTzInfo
class DstTzInfoTestCase(unittest.TestCase, BaseTzInfoTestCase):
tz = pytz.timezone('Australia/Melbourne')
tz_class = DstTzInfo
def test_suite():
suite = unittest.TestSuite()
suite.addTest(doctest.DocTestSuite('pytz'))
suite.addTest(doctest.DocTestSuite('pytz.tzinfo'))
import test_tzinfo
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_tzinfo))
return suite
if __name__ == '__main__':
warnings.simplefilter("error") # Warnings should be fatal in tests.
unittest.main(defaultTest='test_suite')


@@ -1,89 +0,0 @@
from __future__ import absolute_import
import unittest
import doctest
import sys
class NoExtensionTestSuite(unittest.TestSuite):
def run(self, result):
import simplejson
simplejson._toggle_speedups(False)
result = unittest.TestSuite.run(self, result)
simplejson._toggle_speedups(True)
return result
class TestMissingSpeedups(unittest.TestCase):
def runTest(self):
if hasattr(sys, 'pypy_translation_info'):
"PyPy doesn't need speedups! :)"
elif hasattr(self, 'skipTest'):
self.skipTest('_speedups.so is missing!')
def additional_tests(suite=None):
import simplejson
import simplejson.encoder
import simplejson.decoder
if suite is None:
suite = unittest.TestSuite()
for mod in (simplejson, simplejson.encoder, simplejson.decoder):
suite.addTest(doctest.DocTestSuite(mod))
suite.addTest(doctest.DocFileSuite('../../index.rst'))
return suite
def all_tests_suite():
def get_suite():
return additional_tests(
unittest.TestLoader().loadTestsFromNames([
'simplejson.tests.test_bitsize_int_as_string',
'simplejson.tests.test_bigint_as_string',
'simplejson.tests.test_check_circular',
'simplejson.tests.test_decode',
'simplejson.tests.test_default',
'simplejson.tests.test_dump',
'simplejson.tests.test_encode_basestring_ascii',
'simplejson.tests.test_encode_for_html',
'simplejson.tests.test_errors',
'simplejson.tests.test_fail',
'simplejson.tests.test_float',
'simplejson.tests.test_indent',
'simplejson.tests.test_pass1',
'simplejson.tests.test_pass2',
'simplejson.tests.test_pass3',
'simplejson.tests.test_recursion',
'simplejson.tests.test_scanstring',
'simplejson.tests.test_separators',
'simplejson.tests.test_speedups',
'simplejson.tests.test_unicode',
'simplejson.tests.test_decimal',
'simplejson.tests.test_tuple',
'simplejson.tests.test_namedtuple',
'simplejson.tests.test_tool',
'simplejson.tests.test_for_json',
'simplejson.tests.test_subclass',
]))
suite = get_suite()
import simplejson
if simplejson._import_c_make_encoder() is None:
suite.addTest(TestMissingSpeedups())
else:
suite = unittest.TestSuite([
suite,
NoExtensionTestSuite([get_suite()]),
])
return suite
def main():
runner = unittest.TextTestRunner(verbosity=1 + sys.argv.count('-v'))
suite = all_tests_suite()
raise SystemExit(not runner.run(suite).wasSuccessful())
if __name__ == '__main__':
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
main()


@@ -1,67 +0,0 @@
from unittest import TestCase
import simplejson as json
class TestBigintAsString(TestCase):
# Python 2.5, at least the one that ships on Mac OS X, calculates
# 2 ** 53 as 0! It manages to calculate 1 << 53 correctly.
values = [(200, 200),
((1 << 53) - 1, 9007199254740991),
((1 << 53), '9007199254740992'),
((1 << 53) + 1, '9007199254740993'),
(-100, -100),
((-1 << 53), '-9007199254740992'),
((-1 << 53) - 1, '-9007199254740993'),
((-1 << 53) + 1, -9007199254740991)]
options = (
{"bigint_as_string": True},
{"int_as_string_bitcount": 53}
)
def test_ints(self):
for opts in self.options:
for val, expect in self.values:
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, **opts)))
def test_lists(self):
for opts in self.options:
for val, expect in self.values:
val = [val, val]
expect = [expect, expect]
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, **opts)))
def test_dicts(self):
for opts in self.options:
for val, expect in self.values:
val = {'k': val}
expect = {'k': expect}
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, **opts)))
def test_dict_keys(self):
for opts in self.options:
for val, _ in self.values:
expect = {str(val): 'value'}
val = {val: 'value'}
self.assertEqual(
expect,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, **opts)))


@@ -1,73 +0,0 @@
from unittest import TestCase
import simplejson as json
class TestBitSizeIntAsString(TestCase):
# Python 2.5, at least the one that ships on Mac OS X, calculates
# 2 ** 31 as 0! It manages to calculate 1 << 31 correctly.
values = [
(200, 200),
((1 << 31) - 1, (1 << 31) - 1),
((1 << 31), str(1 << 31)),
((1 << 31) + 1, str((1 << 31) + 1)),
(-100, -100),
((-1 << 31), str(-1 << 31)),
((-1 << 31) - 1, str((-1 << 31) - 1)),
((-1 << 31) + 1, (-1 << 31) + 1),
]
def test_invalid_counts(self):
for n in ['foo', -1, 0, 1.0]:
self.assertRaises(
TypeError,
json.dumps, 0, int_as_string_bitcount=n)
def test_ints_outside_range_fails(self):
self.assertNotEqual(
str(1 << 15),
json.loads(json.dumps(1 << 15, int_as_string_bitcount=16)),
)
def test_ints(self):
for val, expect in self.values:
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, int_as_string_bitcount=31)),
)
def test_lists(self):
for val, expect in self.values:
val = [val, val]
expect = [expect, expect]
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, int_as_string_bitcount=31)))
def test_dicts(self):
for val, expect in self.values:
val = {'k': val}
expect = {'k': expect}
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, int_as_string_bitcount=31)))
def test_dict_keys(self):
for val, _ in self.values:
expect = {str(val): 'value'}
val = {val: 'value'}
self.assertEqual(
expect,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, int_as_string_bitcount=31)))


@@ -1,30 +0,0 @@
from unittest import TestCase
import simplejson as json
def default_iterable(obj):
return list(obj)
class TestCheckCircular(TestCase):
def test_circular_dict(self):
dct = {}
dct['a'] = dct
self.assertRaises(ValueError, json.dumps, dct)
def test_circular_list(self):
lst = []
lst.append(lst)
self.assertRaises(ValueError, json.dumps, lst)
def test_circular_composite(self):
dct2 = {}
dct2['a'] = []
dct2['a'].append(dct2)
self.assertRaises(ValueError, json.dumps, dct2)
def test_circular_default(self):
json.dumps([set()], default=default_iterable)
self.assertRaises(TypeError, json.dumps, [set()])
def test_circular_off_default(self):
json.dumps([set()], default=default_iterable, check_circular=False)
self.assertRaises(TypeError, json.dumps, [set()], check_circular=False)


@@ -1,71 +0,0 @@
import decimal
from decimal import Decimal
from unittest import TestCase
from simplejson.compat import StringIO, reload_module
import simplejson as json
class TestDecimal(TestCase):
NUMS = "1.0", "10.00", "1.1", "1234567890.1234567890", "500"
def dumps(self, obj, **kw):
sio = StringIO()
json.dump(obj, sio, **kw)
res = json.dumps(obj, **kw)
self.assertEqual(res, sio.getvalue())
return res
def loads(self, s, **kw):
sio = StringIO(s)
res = json.loads(s, **kw)
self.assertEqual(res, json.load(sio, **kw))
return res
def test_decimal_encode(self):
for d in map(Decimal, self.NUMS):
self.assertEqual(self.dumps(d, use_decimal=True), str(d))
def test_decimal_decode(self):
for s in self.NUMS:
self.assertEqual(self.loads(s, parse_float=Decimal), Decimal(s))
def test_stringify_key(self):
for d in map(Decimal, self.NUMS):
v = {d: d}
self.assertEqual(
self.loads(
self.dumps(v, use_decimal=True), parse_float=Decimal),
{str(d): d})
def test_decimal_roundtrip(self):
for d in map(Decimal, self.NUMS):
# The type might not be the same (int and Decimal) but they
# should still compare equal.
for v in [d, [d], {'': d}]:
self.assertEqual(
self.loads(
self.dumps(v, use_decimal=True), parse_float=Decimal),
v)
def test_decimal_defaults(self):
d = Decimal('1.1')
# use_decimal=True is the default
self.assertRaises(TypeError, json.dumps, d, use_decimal=False)
self.assertEqual('1.1', json.dumps(d))
self.assertEqual('1.1', json.dumps(d, use_decimal=True))
self.assertRaises(TypeError, json.dump, d, StringIO(),
use_decimal=False)
sio = StringIO()
json.dump(d, sio)
self.assertEqual('1.1', sio.getvalue())
sio = StringIO()
json.dump(d, sio, use_decimal=True)
self.assertEqual('1.1', sio.getvalue())
def test_decimal_reload(self):
# Simulate a subinterpreter that reloads the Python modules but not
# the C code https://github.com/simplejson/simplejson/issues/34
global Decimal
Decimal = reload_module(decimal).Decimal
import simplejson.encoder
simplejson.encoder.Decimal = Decimal
self.test_decimal_roundtrip()


@@ -1,99 +0,0 @@
from __future__ import absolute_import
import decimal
from unittest import TestCase
import simplejson as json
from simplejson.compat import StringIO
from simplejson import OrderedDict
class TestDecode(TestCase):
if not hasattr(TestCase, 'assertIs'):
def assertIs(self, a, b):
self.assertTrue(a is b, '%r is %r' % (a, b))
def test_decimal(self):
rval = json.loads('1.1', parse_float=decimal.Decimal)
self.assertTrue(isinstance(rval, decimal.Decimal))
self.assertEqual(rval, decimal.Decimal('1.1'))
def test_float(self):
rval = json.loads('1', parse_int=float)
self.assertTrue(isinstance(rval, float))
self.assertEqual(rval, 1.0)
def test_decoder_optimizations(self):
# Several optimizations were made that skip over calls to
# the whitespace regex, so this test is designed to try and
# exercise the uncommon cases. The array cases are already covered.
rval = json.loads('{ "key" : "value" , "k":"v" }')
self.assertEqual(rval, {"key":"value", "k":"v"})
def test_empty_objects(self):
s = '{}'
self.assertEqual(json.loads(s), eval(s))
s = '[]'
self.assertEqual(json.loads(s), eval(s))
s = '""'
self.assertEqual(json.loads(s), eval(s))
def test_object_pairs_hook(self):
s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4),
("qrt", 5), ("pad", 6), ("hoy", 7)]
self.assertEqual(json.loads(s), eval(s))
self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
self.assertEqual(json.load(StringIO(s),
object_pairs_hook=lambda x: x), p)
od = json.loads(s, object_pairs_hook=OrderedDict)
self.assertEqual(od, OrderedDict(p))
self.assertEqual(type(od), OrderedDict)
# the object_pairs_hook takes priority over the object_hook
self.assertEqual(json.loads(s,
object_pairs_hook=OrderedDict,
object_hook=lambda x: None),
OrderedDict(p))
def check_keys_reuse(self, source, loads):
rval = loads(source)
(a, b), (c, d) = sorted(rval[0]), sorted(rval[1])
self.assertIs(a, c)
self.assertIs(b, d)
def test_keys_reuse_str(self):
s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'.encode('utf8')
self.check_keys_reuse(s, json.loads)
def test_keys_reuse_unicode(self):
s = u'[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'
self.check_keys_reuse(s, json.loads)
def test_empty_strings(self):
self.assertEqual(json.loads('""'), "")
self.assertEqual(json.loads(u'""'), u"")
self.assertEqual(json.loads('[""]'), [""])
self.assertEqual(json.loads(u'[""]'), [u""])
def test_raw_decode(self):
cls = json.decoder.JSONDecoder
self.assertEqual(
({'a': {}}, 9),
cls().raw_decode("{\"a\": {}}"))
# http://code.google.com/p/simplejson/issues/detail?id=85
self.assertEqual(
({'a': {}}, 9),
cls(object_pairs_hook=dict).raw_decode("{\"a\": {}}"))
# https://github.com/simplejson/simplejson/pull/38
self.assertEqual(
({'a': {}}, 11),
cls().raw_decode(" \n{\"a\": {}}"))
def test_bounds_checking(self):
# https://github.com/simplejson/simplejson/issues/98
j = json.decoder.JSONDecoder()
for i in [4, 5, 6, -1, -2, -3, -4, -5, -6]:
self.assertRaises(ValueError, j.scan_once, '1234', i)
self.assertRaises(ValueError, j.raw_decode, '1234', i)
x, y = sorted(['128931233', '472389423'], key=id)
diff = id(x) - id(y)
self.assertRaises(ValueError, j.scan_once, y, diff)
self.assertRaises(ValueError, j.raw_decode, y, i)


@@ -1,9 +0,0 @@
from unittest import TestCase
import simplejson as json
class TestDefault(TestCase):
def test_default(self):
self.assertEqual(
json.dumps(type, default=repr),
json.dumps(repr(type)))


@@ -1,130 +0,0 @@
from unittest import TestCase
from simplejson.compat import StringIO, long_type, b, binary_type, PY3
import simplejson as json
def as_text_type(s):
if PY3 and isinstance(s, binary_type):
return s.decode('ascii')
return s
class TestDump(TestCase):
def test_dump(self):
sio = StringIO()
json.dump({}, sio)
self.assertEqual(sio.getvalue(), '{}')
def test_constants(self):
for c in [None, True, False]:
self.assertTrue(json.loads(json.dumps(c)) is c)
self.assertTrue(json.loads(json.dumps([c]))[0] is c)
self.assertTrue(json.loads(json.dumps({'a': c}))['a'] is c)
def test_stringify_key(self):
items = [(b('bytes'), 'bytes'),
(1.0, '1.0'),
(10, '10'),
(True, 'true'),
(False, 'false'),
(None, 'null'),
(long_type(100), '100')]
for k, expect in items:
self.assertEqual(
json.loads(json.dumps({k: expect})),
{expect: expect})
self.assertEqual(
json.loads(json.dumps({k: expect}, sort_keys=True)),
{expect: expect})
self.assertRaises(TypeError, json.dumps, {json: 1})
for v in [{}, {'other': 1}, {b('derp'): 1, 'herp': 2}]:
for sort_keys in [False, True]:
v0 = dict(v)
v0[json] = 1
v1 = dict((as_text_type(key), val) for (key, val) in v.items())
self.assertEqual(
json.loads(json.dumps(v0, skipkeys=True, sort_keys=sort_keys)),
v1)
self.assertEqual(
json.loads(json.dumps({'': v0}, skipkeys=True, sort_keys=sort_keys)),
{'': v1})
self.assertEqual(
json.loads(json.dumps([v0], skipkeys=True, sort_keys=sort_keys)),
[v1])
def test_dumps(self):
self.assertEqual(json.dumps({}), '{}')
def test_encode_truefalse(self):
self.assertEqual(json.dumps(
{True: False, False: True}, sort_keys=True),
'{"false": true, "true": false}')
self.assertEqual(
json.dumps(
{2: 3.0,
4.0: long_type(5),
False: 1,
long_type(6): True,
"7": 0},
sort_keys=True),
'{"2": 3.0, "4.0": 5, "6": true, "7": 0, "false": 1}')
def test_ordered_dict(self):
# http://bugs.python.org/issue6105
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
s = json.dumps(json.OrderedDict(items))
self.assertEqual(
s,
'{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}')
def test_indent_unknown_type_acceptance(self):
"""
A test against the regression mentioned at `github issue 29`_.
The indent parameter should accept any type which pretends to be
an instance of int or long when it comes to being multiplied by
strings, even if it is not actually an int or long, for
backwards compatibility.
.. _github issue 29:
http://github.com/simplejson/simplejson/issue/29
"""
class AwesomeInt(object):
"""An awesome reimplementation of integers"""
def __init__(self, *args, **kwargs):
if len(args) > 0:
# [construct from literals, objects, etc.]
# ...
# Finally, if args[0] is an integer, store it
if isinstance(args[0], int):
self._int = args[0]
# [various methods]
def __mul__(self, other):
# [various ways to multiply AwesomeInt objects]
# ... finally, if the right-hand operand is not awesome enough,
# try to do a normal integer multiplication
if hasattr(self, '_int'):
return self._int * other
else:
raise NotImplementedError("To do non-awesome things with"
" this object, please construct it from an integer!")
s = json.dumps([0, 1, 2], indent=AwesomeInt(3))
self.assertEqual(s, '[\n 0,\n 1,\n 2\n]')
def test_accumulator(self):
# the C API uses an accumulator that collects after 100,000 appends
lst = [0] * 100000
self.assertEqual(json.loads(json.dumps(lst)), lst)
def test_sort_keys(self):
# https://github.com/simplejson/simplejson/issues/106
for num_keys in range(2, 32):
p = dict((str(x), x) for x in range(num_keys))
sio = StringIO()
json.dump(p, sio, sort_keys=True)
self.assertEqual(sio.getvalue(), json.dumps(p, sort_keys=True))
self.assertEqual(json.loads(sio.getvalue()), p)


@@ -1,47 +0,0 @@
from unittest import TestCase
import simplejson.encoder
from simplejson.compat import b
CASES = [
(u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
(u'controls', '"controls"'),
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
(u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
(u' s p a c e d ', '" s p a c e d "'),
(u'\U0001d120', '"\\ud834\\udd20"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
(b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
(b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
(u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'),
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
]
class TestEncodeBaseStringAscii(TestCase):
def test_py_encode_basestring_ascii(self):
self._test_encode_basestring_ascii(simplejson.encoder.py_encode_basestring_ascii)
def test_c_encode_basestring_ascii(self):
if not simplejson.encoder.c_encode_basestring_ascii:
return
self._test_encode_basestring_ascii(simplejson.encoder.c_encode_basestring_ascii)
def _test_encode_basestring_ascii(self, encode_basestring_ascii):
fname = encode_basestring_ascii.__name__
for input_string, expect in CASES:
result = encode_basestring_ascii(input_string)
#self.assertEqual(result, expect,
# '{0!r} != {1!r} for {2}({3!r})'.format(
# result, expect, fname, input_string))
self.assertEqual(result, expect,
'%r != %r for %s(%r)' % (result, expect, fname, input_string))
def test_sorted_dict(self):
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
s = simplejson.dumps(dict(items), sort_keys=True)
self.assertEqual(s, '{"five": 5, "four": 4, "one": 1, "three": 3, "two": 2}')


@@ -1,30 +0,0 @@
import unittest
import simplejson as json
class TestEncodeForHTML(unittest.TestCase):
def setUp(self):
self.decoder = json.JSONDecoder()
self.encoder = json.JSONEncoderForHTML()
def test_basic_encode(self):
self.assertEqual(r'"\u0026"', self.encoder.encode('&'))
self.assertEqual(r'"\u003c"', self.encoder.encode('<'))
self.assertEqual(r'"\u003e"', self.encoder.encode('>'))
def test_basic_roundtrip(self):
for char in '&<>':
self.assertEqual(
char, self.decoder.decode(
self.encoder.encode(char)))
def test_prevent_script_breakout(self):
bad_string = '</script><script>alert("gotcha")</script>'
self.assertEqual(
r'"\u003c/script\u003e\u003cscript\u003e'
r'alert(\"gotcha\")\u003c/script\u003e"',
self.encoder.encode(bad_string))
self.assertEqual(
bad_string, self.decoder.decode(
self.encoder.encode(bad_string)))


@@ -1,51 +0,0 @@
import sys, pickle
from unittest import TestCase
import simplejson as json
from simplejson.compat import u, b
class TestErrors(TestCase):
def test_string_keys_error(self):
data = [{'a': 'A', 'b': (2, 4), 'c': 3.0, ('d',): 'D tuple'}]
self.assertRaises(TypeError, json.dumps, data)
def test_decode_error(self):
err = None
try:
json.loads('{}\na\nb')
except json.JSONDecodeError:
err = sys.exc_info()[1]
else:
self.fail('Expected JSONDecodeError')
self.assertEqual(err.lineno, 2)
self.assertEqual(err.colno, 1)
self.assertEqual(err.endlineno, 3)
self.assertEqual(err.endcolno, 2)
def test_scan_error(self):
err = None
for t in (u, b):
try:
json.loads(t('{"asdf": "'))
except json.JSONDecodeError:
err = sys.exc_info()[1]
else:
self.fail('Expected JSONDecodeError')
self.assertEqual(err.lineno, 1)
self.assertEqual(err.colno, 10)
def test_error_is_pickable(self):
err = None
try:
json.loads('{}\na\nb')
except json.JSONDecodeError:
err = sys.exc_info()[1]
else:
self.fail('Expected JSONDecodeError')
s = pickle.dumps(err)
e = pickle.loads(s)
self.assertEqual(err.msg, e.msg)
self.assertEqual(err.doc, e.doc)
self.assertEqual(err.pos, e.pos)
self.assertEqual(err.end, e.end)


@@ -1,176 +0,0 @@
import sys
from unittest import TestCase
import simplejson as json
# 2007-10-05
JSONDOCS = [
# http://json.org/JSON_checker/test/fail1.json
'"A JSON payload should be an object or array, not a string."',
# http://json.org/JSON_checker/test/fail2.json
'["Unclosed array"',
# http://json.org/JSON_checker/test/fail3.json
'{unquoted_key: "keys must be quoted"}',
# http://json.org/JSON_checker/test/fail4.json
'["extra comma",]',
# http://json.org/JSON_checker/test/fail5.json
'["double extra comma",,]',
# http://json.org/JSON_checker/test/fail6.json
'[ , "<-- missing value"]',
# http://json.org/JSON_checker/test/fail7.json
'["Comma after the close"],',
# http://json.org/JSON_checker/test/fail8.json
'["Extra close"]]',
# http://json.org/JSON_checker/test/fail9.json
'{"Extra comma": true,}',
# http://json.org/JSON_checker/test/fail10.json
'{"Extra value after close": true} "misplaced quoted value"',
# http://json.org/JSON_checker/test/fail11.json
'{"Illegal expression": 1 + 2}',
# http://json.org/JSON_checker/test/fail12.json
'{"Illegal invocation": alert()}',
# http://json.org/JSON_checker/test/fail13.json
'{"Numbers cannot have leading zeroes": 013}',
# http://json.org/JSON_checker/test/fail14.json
'{"Numbers cannot be hex": 0x14}',
# http://json.org/JSON_checker/test/fail15.json
'["Illegal backslash escape: \\x15"]',
# http://json.org/JSON_checker/test/fail16.json
'[\\naked]',
# http://json.org/JSON_checker/test/fail17.json
'["Illegal backslash escape: \\017"]',
# http://json.org/JSON_checker/test/fail18.json
'[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
# http://json.org/JSON_checker/test/fail19.json
'{"Missing colon" null}',
# http://json.org/JSON_checker/test/fail20.json
'{"Double colon":: null}',
# http://json.org/JSON_checker/test/fail21.json
'{"Comma instead of colon", null}',
# http://json.org/JSON_checker/test/fail22.json
'["Colon instead of comma": false]',
# http://json.org/JSON_checker/test/fail23.json
'["Bad value", truth]',
# http://json.org/JSON_checker/test/fail24.json
"['single quote']",
# http://json.org/JSON_checker/test/fail25.json
'["\ttab\tcharacter\tin\tstring\t"]',
# http://json.org/JSON_checker/test/fail26.json
'["tab\\ character\\ in\\ string\\ "]',
# http://json.org/JSON_checker/test/fail27.json
'["line\nbreak"]',
# http://json.org/JSON_checker/test/fail28.json
'["line\\\nbreak"]',
# http://json.org/JSON_checker/test/fail29.json
'[0e]',
# http://json.org/JSON_checker/test/fail30.json
'[0e+]',
# http://json.org/JSON_checker/test/fail31.json
'[0e+-1]',
# http://json.org/JSON_checker/test/fail32.json
'{"Comma instead if closing brace": true,',
# http://json.org/JSON_checker/test/fail33.json
'["mismatch"}',
# http://code.google.com/p/simplejson/issues/detail?id=3
u'["A\u001FZ control characters in string"]',
# misc based on coverage
'{',
'{]',
'{"foo": "bar"]',
'{"foo": "bar"',
'nul',
'nulx',
'-',
'-x',
'-e',
'-e0',
'-Infinite',
'-Inf',
'Infinit',
'Infinite',
'NaM',
'NuN',
'falsy',
'fal',
'trug',
'tru',
'1e',
'1ex',
'1e-',
'1e-x',
]
SKIPS = {
1: "why not have a string payload?",
18: "spec doesn't specify any nesting limitations",
}
class TestFail(TestCase):
def test_failures(self):
for idx, doc in enumerate(JSONDOCS):
idx = idx + 1
if idx in SKIPS:
json.loads(doc)
continue
try:
json.loads(doc)
except json.JSONDecodeError:
pass
else:
self.fail("Expected failure for fail%d.json: %r" % (idx, doc))
def test_array_decoder_issue46(self):
# http://code.google.com/p/simplejson/issues/detail?id=46
for doc in [u'[,]', '[,]']:
try:
json.loads(doc)
except json.JSONDecodeError:
e = sys.exc_info()[1]
self.assertEqual(e.pos, 1)
self.assertEqual(e.lineno, 1)
self.assertEqual(e.colno, 2)
except Exception:
e = sys.exc_info()[1]
self.fail("Unexpected exception raised %r %s" % (e, e))
else:
self.fail("Unexpected success parsing '[,]'")
def test_truncated_input(self):
test_cases = [
('', 'Expecting value', 0),
('[', "Expecting value or ']'", 1),
('[42', "Expecting ',' delimiter", 3),
('[42,', 'Expecting value', 4),
('["', 'Unterminated string starting at', 1),
('["spam', 'Unterminated string starting at', 1),
('["spam"', "Expecting ',' delimiter", 7),
('["spam",', 'Expecting value', 8),
('{', 'Expecting property name enclosed in double quotes', 1),
('{"', 'Unterminated string starting at', 1),
('{"spam', 'Unterminated string starting at', 1),
('{"spam"', "Expecting ':' delimiter", 7),
('{"spam":', 'Expecting value', 8),
('{"spam":42', "Expecting ',' delimiter", 10),
('{"spam":42,', 'Expecting property name enclosed in double quotes',
11),
('"', 'Unterminated string starting at', 0),
('"spam', 'Unterminated string starting at', 0),
('[,', "Expecting value", 1),
]
for data, msg, idx in test_cases:
try:
json.loads(data)
except json.JSONDecodeError:
e = sys.exc_info()[1]
self.assertEqual(
e.msg[:len(msg)],
msg,
"%r doesn't start with %r for %r" % (e.msg, msg, data))
self.assertEqual(
e.pos, idx,
"pos %r != %r for %r" % (e.pos, idx, data))
except Exception:
e = sys.exc_info()[1]
self.fail("Unexpected exception raised %r %s" % (e, e))
else:
self.fail("Unexpected success parsing '%r'" % (data,))
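
For reference, a minimal sketch (not part of the removed suite) of the decode-error attributes that the truncation table above checks; it assumes the Python 2.6+/3 "except ... as" syntax rather than the sys.exc_info() form used in the tests:

import simplejson as json

try:
    json.loads('{"spam":42,')  # truncated document from the table above
except json.JSONDecodeError as e:
    # e.msg -> 'Expecting property name enclosed in double quotes'
    # e.pos -> 11 (character offset); e.lineno -> 1; e.colno -> 12
    pass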

View file

@@ -1,35 +0,0 @@
import math
from unittest import TestCase
from simplejson.compat import long_type, text_type
import simplejson as json
from simplejson.decoder import NaN, PosInf, NegInf
class TestFloat(TestCase):
def test_degenerates_allow(self):
for inf in (PosInf, NegInf):
self.assertEqual(json.loads(json.dumps(inf)), inf)
# Python 2.5 doesn't have math.isnan
nan = json.loads(json.dumps(NaN))
self.assertTrue((0 + nan) != nan)
def test_degenerates_ignore(self):
for f in (PosInf, NegInf, NaN):
self.assertEqual(json.loads(json.dumps(f, ignore_nan=True)), None)
def test_degenerates_deny(self):
for f in (PosInf, NegInf, NaN):
self.assertRaises(ValueError, json.dumps, f, allow_nan=False)
def test_floats(self):
for num in [1617161771.7650001, math.pi, math.pi**100,
math.pi**-100, 3.1]:
self.assertEqual(float(json.dumps(num)), num)
self.assertEqual(json.loads(json.dumps(num)), num)
self.assertEqual(json.loads(text_type(json.dumps(num))), num)
def test_ints(self):
for num in [1, long_type(1), 1<<32, 1<<64]:
self.assertEqual(json.dumps(num), str(num))
self.assertEqual(int(json.dumps(num)), num)
self.assertEqual(json.loads(json.dumps(num)), num)
self.assertEqual(json.loads(text_type(json.dumps(num))), num)
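
A short illustration of the three NaN/Infinity modes exercised above, sketched from only the flags these tests use:

import simplejson as json
from simplejson.decoder import NaN, PosInf

json.dumps(PosInf)                    # 'Infinity' -- non-standard JSON, allowed by default
json.dumps(NaN, ignore_nan=True)      # 'null'     -- degenerate floats collapse to null
try:
    json.dumps(NaN, allow_nan=False)  # strict mode rejects them
except ValueError:
    pass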

View file

@@ -1,97 +0,0 @@
import unittest
import simplejson as json
class ForJson(object):
def for_json(self):
return {'for_json': 1}
class NestedForJson(object):
def for_json(self):
return {'nested': ForJson()}
class ForJsonList(object):
def for_json(self):
return ['list']
class DictForJson(dict):
def for_json(self):
return {'alpha': 1}
class ListForJson(list):
def for_json(self):
return ['list']
class TestForJson(unittest.TestCase):
def assertRoundTrip(self, obj, other, for_json=True):
if for_json is None:
# None will use the default
s = json.dumps(obj)
else:
s = json.dumps(obj, for_json=for_json)
self.assertEqual(
json.loads(s),
other)
def test_for_json_encodes_stand_alone_object(self):
self.assertRoundTrip(
ForJson(),
ForJson().for_json())
def test_for_json_encodes_object_nested_in_dict(self):
self.assertRoundTrip(
{'hooray': ForJson()},
{'hooray': ForJson().for_json()})
def test_for_json_encodes_object_nested_in_list_within_dict(self):
self.assertRoundTrip(
{'list': [0, ForJson(), 2, 3]},
{'list': [0, ForJson().for_json(), 2, 3]})
def test_for_json_encodes_object_nested_within_object(self):
self.assertRoundTrip(
NestedForJson(),
{'nested': {'for_json': 1}})
def test_for_json_encodes_list(self):
self.assertRoundTrip(
ForJsonList(),
ForJsonList().for_json())
def test_for_json_encodes_list_within_object(self):
self.assertRoundTrip(
{'nested': ForJsonList()},
{'nested': ForJsonList().for_json()})
def test_for_json_encodes_dict_subclass(self):
self.assertRoundTrip(
DictForJson(a=1),
DictForJson(a=1).for_json())
def test_for_json_encodes_list_subclass(self):
self.assertRoundTrip(
ListForJson(['l']),
ListForJson(['l']).for_json())
def test_for_json_ignored_if_not_true_with_dict_subclass(self):
for for_json in (None, False):
self.assertRoundTrip(
DictForJson(a=1),
{'a': 1},
for_json=for_json)
def test_for_json_ignored_if_not_true_with_list_subclass(self):
for for_json in (None, False):
self.assertRoundTrip(
ListForJson(['l']),
['l'],
for_json=for_json)
def test_raises_typeerror_if_for_json_not_true_with_object(self):
self.assertRaises(TypeError, json.dumps, ForJson())
self.assertRaises(TypeError, json.dumps, ForJson(), for_json=False)
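
To make the hook concrete, a hedged sketch of for_json in user code; Money is a hypothetical example type, not part of simplejson:

import simplejson as json

class Money(object):
    # hypothetical type: any object exposing for_json() works the same way
    def __init__(self, amount):
        self.amount = amount
    def for_json(self):
        return {'amount': self.amount}

json.dumps({'price': Money(10)}, for_json=True)  # '{"price": {"amount": 10}}'
# without for_json=True the encoder raises TypeError for Money instances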

View file

@@ -1,86 +0,0 @@
from unittest import TestCase
import textwrap
import simplejson as json
from simplejson.compat import StringIO
class TestIndent(TestCase):
def test_indent(self):
h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh',
'i-vhbjkhnth',
{'nifty': 87}, {'field': 'yes', 'morefield': False} ]
expect = textwrap.dedent("""\
[
\t[
\t\t"blorpie"
\t],
\t[
\t\t"whoops"
\t],
\t[],
\t"d-shtaeou",
\t"d-nthiouh",
\t"i-vhbjkhnth",
\t{
\t\t"nifty": 87
\t},
\t{
\t\t"field": "yes",
\t\t"morefield": false
\t}
]""")
d1 = json.dumps(h)
d2 = json.dumps(h, indent='\t', sort_keys=True, separators=(',', ': '))
        d3 = json.dumps(h, indent='  ', sort_keys=True, separators=(',', ': '))
d4 = json.dumps(h, indent=2, sort_keys=True, separators=(',', ': '))
h1 = json.loads(d1)
h2 = json.loads(d2)
h3 = json.loads(d3)
h4 = json.loads(d4)
self.assertEqual(h1, h)
self.assertEqual(h2, h)
self.assertEqual(h3, h)
self.assertEqual(h4, h)
        self.assertEqual(d3, expect.replace('\t', '  '))
        self.assertEqual(d4, expect.replace('\t', '  '))
# NOTE: Python 2.4 textwrap.dedent converts tabs to spaces,
# so the following is expected to fail. Python 2.4 is not a
# supported platform in simplejson 2.1.0+.
self.assertEqual(d2, expect)
def test_indent0(self):
h = {3: 1}
def check(indent, expected):
d1 = json.dumps(h, indent=indent)
self.assertEqual(d1, expected)
sio = StringIO()
json.dump(h, sio, indent=indent)
self.assertEqual(sio.getvalue(), expected)
# indent=0 should emit newlines
check(0, '{\n"3": 1\n}')
# indent=None is more compact
check(None, '{"3": 1}')
def test_separators(self):
lst = [1,2,3,4]
expect = '[\n1,\n2,\n3,\n4\n]'
expect_spaces = '[\n1, \n2, \n3, \n4\n]'
# Ensure that separators still works
self.assertEqual(
expect_spaces,
json.dumps(lst, indent=0, separators=(', ', ': ')))
# Force the new defaults
self.assertEqual(
expect,
json.dumps(lst, indent=0, separators=(',', ': ')))
# Added in 2.1.4
self.assertEqual(
expect,
json.dumps(lst, indent=0))
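
The indent and separator defaults asserted above, restated as a minimal sketch:

import simplejson as json

json.dumps({3: 1})                  # '{"3": 1}'            (indent=None is compact)
json.dumps({3: 1}, indent=0)        # '{\n"3": 1\n}'        (newlines, no indentation)
json.dumps([1, 2, 3, 4], indent=0)  # '[\n1,\n2,\n3,\n4\n]' (item separator defaults to ',')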

View file

@@ -1,20 +0,0 @@
from unittest import TestCase
import simplejson as json
from operator import itemgetter
class TestItemSortKey(TestCase):
def test_simple_first(self):
a = {'a': 1, 'c': 5, 'jack': 'jill', 'pick': 'axe', 'array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'}
self.assertEqual(
'{"a": 1, "c": 5, "crate": "dog", "jack": "jill", "pick": "axe", "zeak": "oh", "array": [1, 5, 6, 9], "tuple": [83, 12, 3]}',
json.dumps(a, item_sort_key=json.simple_first))
def test_case(self):
a = {'a': 1, 'c': 5, 'Jack': 'jill', 'pick': 'axe', 'Array': [1, 5, 6, 9], 'tuple': (83, 12, 3), 'crate': 'dog', 'zeak': 'oh'}
self.assertEqual(
'{"Array": [1, 5, 6, 9], "Jack": "jill", "a": 1, "c": 5, "crate": "dog", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}',
json.dumps(a, item_sort_key=itemgetter(0)))
self.assertEqual(
'{"a": 1, "Array": [1, 5, 6, 9], "c": 5, "crate": "dog", "Jack": "jill", "pick": "axe", "tuple": [83, 12, 3], "zeak": "oh"}',
json.dumps(a, item_sort_key=lambda kv: kv[0].lower()))
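
A small sketch of the item_sort_key hook used above; the dict literal is illustrative only:

import simplejson as json
from operator import itemgetter

a = {'a': 1, 'Jack': 'jill', 'crate': 'dog', 'array': [1, 5]}
json.dumps(a, item_sort_key=itemgetter(0))             # case-sensitive key order
json.dumps(a, item_sort_key=lambda kv: kv[0].lower())  # case-insensitive key order
json.dumps(a, item_sort_key=json.simple_first)         # scalar values before containers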

View file

@@ -1,31 +0,0 @@
import unittest
from simplejson.compat import StringIO
import simplejson as json
def iter_dumps(obj, **kw):
return ''.join(json.JSONEncoder(**kw).iterencode(obj))
def sio_dump(obj, **kw):
sio = StringIO()
    json.dump(obj, sio, **kw)
return sio.getvalue()
class TestIterable(unittest.TestCase):
def test_iterable(self):
l = [1, 2, 3]
for dumps in (json.dumps, iter_dumps, sio_dump):
expect = dumps(l)
default_expect = dumps(sum(l))
# Default is False
self.assertRaises(TypeError, dumps, iter(l))
self.assertRaises(TypeError, dumps, iter(l), iterable_as_array=False)
self.assertEqual(expect, dumps(iter(l), iterable_as_array=True))
# Ensure that the "default" gets called
self.assertEqual(default_expect, dumps(iter(l), default=sum))
self.assertEqual(default_expect, dumps(iter(l), iterable_as_array=False, default=sum))
# Ensure that the "default" does not get called
self.assertEqual(
default_expect,
dumps(iter(l), iterable_as_array=True, default=sum))
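
For context, a sketch of iterable_as_array with a generator; any iterable works, the generator here is just an example:

import simplejson as json

gen = (n * n for n in range(3))
json.dumps(gen, iterable_as_array=True)  # '[0, 1, 4]'
# without the flag a bare iterator is handed to "default", or raises TypeError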

View file

@@ -1,122 +0,0 @@
from __future__ import absolute_import
import unittest
import simplejson as json
from simplejson.compat import StringIO
try:
from collections import namedtuple
except ImportError:
class Value(tuple):
def __new__(cls, *args):
return tuple.__new__(cls, args)
def _asdict(self):
return {'value': self[0]}
class Point(tuple):
def __new__(cls, *args):
return tuple.__new__(cls, args)
def _asdict(self):
return {'x': self[0], 'y': self[1]}
else:
Value = namedtuple('Value', ['value'])
Point = namedtuple('Point', ['x', 'y'])
class DuckValue(object):
def __init__(self, *args):
self.value = Value(*args)
def _asdict(self):
return self.value._asdict()
class DuckPoint(object):
def __init__(self, *args):
self.point = Point(*args)
def _asdict(self):
return self.point._asdict()
class DeadDuck(object):
_asdict = None
class DeadDict(dict):
_asdict = None
CONSTRUCTORS = [
lambda v: v,
lambda v: [v],
lambda v: [{'key': v}],
]
class TestNamedTuple(unittest.TestCase):
def test_namedtuple_dumps(self):
for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]:
d = v._asdict()
self.assertEqual(d, json.loads(json.dumps(v)))
self.assertEqual(
d,
json.loads(json.dumps(v, namedtuple_as_object=True)))
self.assertEqual(d, json.loads(json.dumps(v, tuple_as_array=False)))
self.assertEqual(
d,
json.loads(json.dumps(v, namedtuple_as_object=True,
tuple_as_array=False)))
def test_namedtuple_dumps_false(self):
for v in [Value(1), Point(1, 2)]:
l = list(v)
self.assertEqual(
l,
json.loads(json.dumps(v, namedtuple_as_object=False)))
self.assertRaises(TypeError, json.dumps, v,
tuple_as_array=False, namedtuple_as_object=False)
def test_namedtuple_dump(self):
for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]:
d = v._asdict()
sio = StringIO()
json.dump(v, sio)
self.assertEqual(d, json.loads(sio.getvalue()))
sio = StringIO()
json.dump(v, sio, namedtuple_as_object=True)
self.assertEqual(
d,
json.loads(sio.getvalue()))
sio = StringIO()
json.dump(v, sio, tuple_as_array=False)
self.assertEqual(d, json.loads(sio.getvalue()))
sio = StringIO()
json.dump(v, sio, namedtuple_as_object=True,
tuple_as_array=False)
self.assertEqual(
d,
json.loads(sio.getvalue()))
def test_namedtuple_dump_false(self):
for v in [Value(1), Point(1, 2)]:
l = list(v)
sio = StringIO()
json.dump(v, sio, namedtuple_as_object=False)
self.assertEqual(
l,
json.loads(sio.getvalue()))
self.assertRaises(TypeError, json.dump, v, StringIO(),
tuple_as_array=False, namedtuple_as_object=False)
def test_asdict_not_callable_dump(self):
for f in CONSTRUCTORS:
self.assertRaises(TypeError,
json.dump, f(DeadDuck()), StringIO(), namedtuple_as_object=True)
sio = StringIO()
json.dump(f(DeadDict()), sio, namedtuple_as_object=True)
self.assertEqual(
json.dumps(f({})),
sio.getvalue())
def test_asdict_not_callable_dumps(self):
for f in CONSTRUCTORS:
self.assertRaises(TypeError,
json.dumps, f(DeadDuck()), namedtuple_as_object=True)
self.assertEqual(
json.dumps(f({})),
json.dumps(f(DeadDict()), namedtuple_as_object=True))
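
The namedtuple behaviour tested above, as a two-line sketch:

import simplejson as json
from collections import namedtuple

Point = namedtuple('Point', ['x', 'y'])
json.dumps(Point(1, 2))                              # '{"x": 1, "y": 2}' (namedtuple_as_object defaults to True)
json.dumps(Point(1, 2), namedtuple_as_object=False)  # '[1, 2]'           (falls back to plain tuple handling)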

View file

@@ -1,71 +0,0 @@
from unittest import TestCase
import simplejson as json
# from http://json.org/JSON_checker/test/pass1.json
JSON = r'''
[
"JSON Test Pattern pass1",
{"object with 1 member":["array with 1 element"]},
{},
[],
-42,
true,
false,
null,
{
"integer": 1234567890,
"real": -9876.543210,
"e": 0.123456789e-12,
"E": 1.234567890E+34,
"": 23456789012E66,
"zero": 0,
"one": 1,
"space": " ",
"quote": "\"",
"backslash": "\\",
"controls": "\b\f\n\r\t",
"slash": "/ & \/",
"alpha": "abcdefghijklmnopqrstuvwyz",
"ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ",
"digit": "0123456789",
"special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?",
"hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A",
"true": true,
"false": false,
"null": null,
"array":[ ],
"object":{ },
"address": "50 St. James Street",
"url": "http://www.JSON.org/",
"comment": "// /* <!-- --",
"# -- --> */": " ",
" s p a c e d " :[1,2 , 3
,
4 , 5 , 6 ,7 ],"compact": [1,2,3,4,5,6,7],
"jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}",
"quotes": "&#34; \u0022 %22 0x22 034 &#x22;",
"\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"
: "A key can be any string"
},
0.5 ,98.6
,
99.44
,
1066,
1e1,
0.1e1,
1e-1,
1e00,2e+00,2e-00
,"rosebud"]
'''
class TestPass1(TestCase):
def test_parse(self):
# test in/out equivalence and parsing
res = json.loads(JSON)
out = json.dumps(res)
self.assertEqual(res, json.loads(out))

View file

@@ -1,14 +0,0 @@
from unittest import TestCase
import simplejson as json
# from http://json.org/JSON_checker/test/pass2.json
JSON = r'''
[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]]
'''
class TestPass2(TestCase):
def test_parse(self):
# test in/out equivalence and parsing
res = json.loads(JSON)
out = json.dumps(res)
self.assertEqual(res, json.loads(out))

View file

@@ -1,20 +0,0 @@
from unittest import TestCase
import simplejson as json
# from http://json.org/JSON_checker/test/pass3.json
JSON = r'''
{
"JSON Test Pattern pass3": {
"The outermost value": "must be an object or array.",
"In this test": "It is an object."
}
}
'''
class TestPass3(TestCase):
def test_parse(self):
# test in/out equivalence and parsing
res = json.loads(JSON)
out = json.dumps(res)
self.assertEqual(res, json.loads(out))

View file

@@ -1,67 +0,0 @@
from unittest import TestCase
import simplejson as json
class JSONTestObject:
pass
class RecursiveJSONEncoder(json.JSONEncoder):
recurse = False
def default(self, o):
if o is JSONTestObject:
if self.recurse:
return [JSONTestObject]
else:
return 'JSONTestObject'
        return json.JSONEncoder.default(self, o)
class TestRecursion(TestCase):
def test_listrecursion(self):
x = []
x.append(x)
try:
json.dumps(x)
except ValueError:
pass
else:
self.fail("didn't raise ValueError on list recursion")
x = []
y = [x]
x.append(y)
try:
json.dumps(x)
except ValueError:
pass
else:
self.fail("didn't raise ValueError on alternating list recursion")
y = []
x = [y, y]
# ensure that the marker is cleared
json.dumps(x)
def test_dictrecursion(self):
x = {}
x["test"] = x
try:
json.dumps(x)
except ValueError:
pass
else:
self.fail("didn't raise ValueError on dict recursion")
x = {}
y = {"a": x, "b": x}
# ensure that the marker is cleared
json.dumps(y)
def test_defaultrecursion(self):
enc = RecursiveJSONEncoder()
self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"')
enc.recurse = True
try:
enc.encode(JSONTestObject)
except ValueError:
pass
else:
self.fail("didn't raise ValueError on default recursion")
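
A hedged sketch of the default-hook pattern that RecursiveJSONEncoder above relies on; NameEncoder is a hypothetical example, and circular containers are still rejected:

import simplejson as json

class NameEncoder(json.JSONEncoder):
    # hypothetical encoder: represent unknown objects by their class name
    def default(self, o):
        return type(o).__name__

NameEncoder().encode(object())  # '"object"'

x = []
x.append(x)
try:
    json.dumps(x)   # self-referencing list
except ValueError:
    pass            # circular reference detected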

View file

@@ -1,194 +0,0 @@
import sys
from unittest import TestCase
import simplejson as json
import simplejson.decoder
from simplejson.compat import b, PY3
class TestScanString(TestCase):
# The bytes type is intentionally not used in most of these tests
# under Python 3 because the decoder immediately coerces to str before
# calling scanstring. In Python 2 we are testing the code paths
# for both unicode and str.
#
# The reason this is done is because Python 3 would require
# entirely different code paths for parsing bytes and str.
#
def test_py_scanstring(self):
self._test_scanstring(simplejson.decoder.py_scanstring)
def test_c_scanstring(self):
if not simplejson.decoder.c_scanstring:
return
self._test_scanstring(simplejson.decoder.c_scanstring)
def _test_scanstring(self, scanstring):
if sys.maxunicode == 65535:
self.assertEqual(
scanstring(u'"z\U0001d120x"', 1, None, True),
(u'z\U0001d120x', 6))
else:
self.assertEqual(
scanstring(u'"z\U0001d120x"', 1, None, True),
(u'z\U0001d120x', 5))
self.assertEqual(
scanstring('"\\u007b"', 1, None, True),
(u'{', 8))
self.assertEqual(
scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True),
(u'A JSON payload should be an object or array, not a string.', 60))
self.assertEqual(
scanstring('["Unclosed array"', 2, None, True),
(u'Unclosed array', 17))
self.assertEqual(
scanstring('["extra comma",]', 2, None, True),
(u'extra comma', 14))
self.assertEqual(
scanstring('["double extra comma",,]', 2, None, True),
(u'double extra comma', 21))
self.assertEqual(
scanstring('["Comma after the close"],', 2, None, True),
(u'Comma after the close', 24))
self.assertEqual(
scanstring('["Extra close"]]', 2, None, True),
(u'Extra close', 14))
self.assertEqual(
scanstring('{"Extra comma": true,}', 2, None, True),
(u'Extra comma', 14))
self.assertEqual(
scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True),
(u'Extra value after close', 26))
self.assertEqual(
scanstring('{"Illegal expression": 1 + 2}', 2, None, True),
(u'Illegal expression', 21))
self.assertEqual(
scanstring('{"Illegal invocation": alert()}', 2, None, True),
(u'Illegal invocation', 21))
self.assertEqual(
scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True),
(u'Numbers cannot have leading zeroes', 37))
self.assertEqual(
scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True),
(u'Numbers cannot be hex', 24))
self.assertEqual(
scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True),
(u'Too deep', 30))
self.assertEqual(
scanstring('{"Missing colon" null}', 2, None, True),
(u'Missing colon', 16))
self.assertEqual(
scanstring('{"Double colon":: null}', 2, None, True),
(u'Double colon', 15))
self.assertEqual(
scanstring('{"Comma instead of colon", null}', 2, None, True),
(u'Comma instead of colon', 25))
self.assertEqual(
scanstring('["Colon instead of comma": false]', 2, None, True),
(u'Colon instead of comma', 25))
self.assertEqual(
scanstring('["Bad value", truth]', 2, None, True),
(u'Bad value', 12))
for c in map(chr, range(0x00, 0x1f)):
self.assertEqual(
scanstring(c + '"', 0, None, False),
(c, 2))
self.assertRaises(
ValueError,
scanstring, c + '"', 0, None, True)
self.assertRaises(ValueError, scanstring, '', 0, None, True)
self.assertRaises(ValueError, scanstring, 'a', 0, None, True)
self.assertRaises(ValueError, scanstring, '\\', 0, None, True)
self.assertRaises(ValueError, scanstring, '\\u', 0, None, True)
self.assertRaises(ValueError, scanstring, '\\u0', 0, None, True)
self.assertRaises(ValueError, scanstring, '\\u01', 0, None, True)
self.assertRaises(ValueError, scanstring, '\\u012', 0, None, True)
self.assertRaises(ValueError, scanstring, '\\u0123', 0, None, True)
if sys.maxunicode > 65535:
self.assertRaises(ValueError,
scanstring, '\\ud834\\u"', 0, None, True)
self.assertRaises(ValueError,
scanstring, '\\ud834\\x0123"', 0, None, True)
def test_issue3623(self):
self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1,
"xxx")
self.assertRaises(UnicodeDecodeError,
json.encoder.encode_basestring_ascii, b("xx\xff"))
def test_overflow(self):
# Python 2.5 does not have maxsize, Python 3 does not have maxint
maxsize = getattr(sys, 'maxsize', getattr(sys, 'maxint', None))
assert maxsize is not None
self.assertRaises(OverflowError, json.decoder.scanstring, "xxx",
maxsize + 1)
def test_surrogates(self):
scanstring = json.decoder.scanstring
def assertScan(given, expect, test_utf8=True):
givens = [given]
if not PY3 and test_utf8:
givens.append(given.encode('utf8'))
for given in givens:
(res, count) = scanstring(given, 1, None, True)
self.assertEqual(len(given), count)
self.assertEqual(res, expect)
assertScan(
u'"z\\ud834\\u0079x"',
u'z\ud834yx')
assertScan(
u'"z\\ud834\\udd20x"',
u'z\U0001d120x')
assertScan(
u'"z\\ud834\\ud834\\udd20x"',
u'z\ud834\U0001d120x')
assertScan(
u'"z\\ud834x"',
u'z\ud834x')
assertScan(
u'"z\\udd20x"',
u'z\udd20x')
assertScan(
u'"z\ud834x"',
u'z\ud834x')
# It may look strange to join strings together, but Python is drunk.
# https://gist.github.com/etrepum/5538443
assertScan(
u'"z\\ud834\udd20x12345"',
u''.join([u'z\ud834', u'\udd20x12345']))
assertScan(
u'"z\ud834\\udd20x"',
u''.join([u'z\ud834', u'\udd20x']))
# these have different behavior given UTF8 input, because the surrogate
# pair may be joined (in maxunicode > 65535 builds)
assertScan(
u''.join([u'"z\ud834', u'\udd20x"']),
u''.join([u'z\ud834', u'\udd20x']),
test_utf8=False)
self.assertRaises(ValueError,
scanstring, u'"z\\ud83x"', 1, None, True)
self.assertRaises(ValueError,
scanstring, u'"z\\ud834\\udd2x"', 1, None, True)
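
Since the positional calls above can be hard to read, a brief restatement as code: scanstring(s, end, encoding, strict) starts scanning just past the opening quote and returns the decoded string together with the index one past the closing quote:

import simplejson.decoder

simplejson.decoder.scanstring('"\\u007b"', 1, None, True)  # (u'{', 8)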

View file

@@ -1,42 +0,0 @@
import textwrap
from unittest import TestCase
import simplejson as json
class TestSeparators(TestCase):
def test_separators(self):
h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth',
{'nifty': 87}, {'field': 'yes', 'morefield': False} ]
expect = textwrap.dedent("""\
[
[
"blorpie"
] ,
[
"whoops"
] ,
[] ,
"d-shtaeou" ,
"d-nthiouh" ,
"i-vhbjkhnth" ,
{
"nifty" : 87
} ,
{
"field" : "yes" ,
"morefield" : false
}
]""")
d1 = json.dumps(h)
d2 = json.dumps(h, indent=' ', sort_keys=True, separators=(' ,', ' : '))
h1 = json.loads(d1)
h2 = json.loads(d2)
self.assertEqual(h1, h)
self.assertEqual(h2, h)
self.assertEqual(d2, expect)

View file

@@ -1,39 +0,0 @@
import sys
import unittest
from unittest import TestCase
from simplejson import encoder, scanner
def has_speedups():
return encoder.c_make_encoder is not None
def skip_if_speedups_missing(func):
def wrapper(*args, **kwargs):
if not has_speedups():
if hasattr(unittest, 'SkipTest'):
raise unittest.SkipTest("C Extension not available")
else:
sys.stdout.write("C Extension not available")
return
return func(*args, **kwargs)
return wrapper
class TestDecode(TestCase):
@skip_if_speedups_missing
def test_make_scanner(self):
self.assertRaises(AttributeError, scanner.c_make_scanner, 1)
@skip_if_speedups_missing
def test_make_encoder(self):
self.assertRaises(
TypeError,
encoder.c_make_encoder,
None,
("\xCD\x7D\x3D\x4E\x12\x4C\xF9\x79\xD7"
"\x52\xBA\x82\xF2\x27\x4A\x7D\xA0\xCA\x75"),
None
)

View file

@@ -1,37 +0,0 @@
from unittest import TestCase
import simplejson as json
from decimal import Decimal
class AlternateInt(int):
def __repr__(self):
return 'invalid json'
__str__ = __repr__
class AlternateFloat(float):
def __repr__(self):
return 'invalid json'
__str__ = __repr__
# class AlternateDecimal(Decimal):
# def __repr__(self):
# return 'invalid json'
class TestSubclass(TestCase):
def test_int(self):
self.assertEqual(json.dumps(AlternateInt(1)), '1')
self.assertEqual(json.dumps(AlternateInt(-1)), '-1')
self.assertEqual(json.loads(json.dumps({AlternateInt(1): 1})), {'1': 1})
def test_float(self):
self.assertEqual(json.dumps(AlternateFloat(1.0)), '1.0')
self.assertEqual(json.dumps(AlternateFloat(-1.0)), '-1.0')
self.assertEqual(json.loads(json.dumps({AlternateFloat(1.0): 1})), {'1.0': 1})
# NOTE: Decimal subclasses are not supported as-is
# def test_decimal(self):
# self.assertEqual(json.dumps(AlternateDecimal('1.0')), '1.0')
# self.assertEqual(json.dumps(AlternateDecimal('-1.0')), '-1.0')

View file

@@ -1,97 +0,0 @@
from __future__ import with_statement
import os
import sys
import textwrap
import unittest
import subprocess
import tempfile
try:
# Python 3.x
from test.support import strip_python_stderr
except ImportError:
# Python 2.6+
try:
from test.test_support import strip_python_stderr
except ImportError:
# Python 2.5
import re
def strip_python_stderr(stderr):
return re.sub(
r"\[\d+ refs\]\r?\n?$".encode(),
"".encode(),
stderr).strip()
class TestTool(unittest.TestCase):
data = """
[["blorpie"],[ "whoops" ] , [
],\t"d-shtaeou",\r"d-nthiouh",
"i-vhbjkhnth", {"nifty":87}, {"morefield" :\tfalse,"field"
:"yes"} ]
"""
expect = textwrap.dedent("""\
[
[
"blorpie"
],
[
"whoops"
],
[],
"d-shtaeou",
"d-nthiouh",
"i-vhbjkhnth",
{
"nifty": 87
},
{
"field": "yes",
"morefield": false
}
]
""")
def runTool(self, args=None, data=None):
argv = [sys.executable, '-m', 'simplejson.tool']
if args:
argv.extend(args)
proc = subprocess.Popen(argv,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE)
out, err = proc.communicate(data)
self.assertEqual(strip_python_stderr(err), ''.encode())
self.assertEqual(proc.returncode, 0)
return out
def test_stdin_stdout(self):
self.assertEqual(
self.runTool(data=self.data.encode()),
self.expect.encode())
def test_infile_stdout(self):
with tempfile.NamedTemporaryFile() as infile:
infile.write(self.data.encode())
infile.flush()
self.assertEqual(
self.runTool(args=[infile.name]),
self.expect.encode())
def test_infile_outfile(self):
with tempfile.NamedTemporaryFile() as infile:
infile.write(self.data.encode())
infile.flush()
# outfile will get overwritten by tool, so the delete
# may not work on some platforms. Do it manually.
outfile = tempfile.NamedTemporaryFile()
try:
self.assertEqual(
self.runTool(args=[infile.name, outfile.name]),
''.encode())
with open(outfile.name, 'rb') as f:
self.assertEqual(f.read(), self.expect.encode())
finally:
outfile.close()
if os.path.exists(outfile.name):
os.unlink(outfile.name)
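
A minimal sketch of driving simplejson.tool the same way runTool does (it reads JSON on stdin and writes it back pretty-printed):

import subprocess
import sys

proc = subprocess.Popen([sys.executable, '-m', 'simplejson.tool'],
                        stdin=subprocess.PIPE, stdout=subprocess.PIPE)
out, _ = proc.communicate(b'{"json": "obj"}\n')
# out holds the document reformatted with 4-space indentation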

View file

@@ -1,47 +0,0 @@
import unittest
from simplejson.compat import StringIO
import simplejson as json
class TestTuples(unittest.TestCase):
def test_tuple_array_dumps(self):
t = (1, 2, 3)
expect = json.dumps(list(t))
# Default is True
self.assertEqual(expect, json.dumps(t))
self.assertEqual(expect, json.dumps(t, tuple_as_array=True))
self.assertRaises(TypeError, json.dumps, t, tuple_as_array=False)
# Ensure that the "default" does not get called
self.assertEqual(expect, json.dumps(t, default=repr))
self.assertEqual(expect, json.dumps(t, tuple_as_array=True,
default=repr))
# Ensure that the "default" gets called
self.assertEqual(
json.dumps(repr(t)),
json.dumps(t, tuple_as_array=False, default=repr))
def test_tuple_array_dump(self):
t = (1, 2, 3)
expect = json.dumps(list(t))
# Default is True
sio = StringIO()
json.dump(t, sio)
self.assertEqual(expect, sio.getvalue())
sio = StringIO()
json.dump(t, sio, tuple_as_array=True)
self.assertEqual(expect, sio.getvalue())
self.assertRaises(TypeError, json.dump, t, StringIO(),
tuple_as_array=False)
# Ensure that the "default" does not get called
sio = StringIO()
json.dump(t, sio, default=repr)
self.assertEqual(expect, sio.getvalue())
sio = StringIO()
json.dump(t, sio, tuple_as_array=True, default=repr)
self.assertEqual(expect, sio.getvalue())
# Ensure that the "default" gets called
sio = StringIO()
json.dump(t, sio, tuple_as_array=False, default=repr)
self.assertEqual(
json.dumps(repr(t)),
sio.getvalue())

View file

@@ -1,153 +0,0 @@
import sys
import codecs
from unittest import TestCase
import simplejson as json
from simplejson.compat import unichr, text_type, b, u, BytesIO
class TestUnicode(TestCase):
def test_encoding1(self):
encoder = json.JSONEncoder(encoding='utf-8')
u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
s = u.encode('utf-8')
ju = encoder.encode(u)
js = encoder.encode(s)
self.assertEqual(ju, js)
def test_encoding2(self):
u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
s = u.encode('utf-8')
ju = json.dumps(u, encoding='utf-8')
js = json.dumps(s, encoding='utf-8')
self.assertEqual(ju, js)
def test_encoding3(self):
u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = json.dumps(u)
self.assertEqual(j, '"\\u03b1\\u03a9"')
def test_encoding4(self):
u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = json.dumps([u])
self.assertEqual(j, '["\\u03b1\\u03a9"]')
def test_encoding5(self):
u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = json.dumps(u, ensure_ascii=False)
self.assertEqual(j, u'"' + u + u'"')
def test_encoding6(self):
u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = json.dumps([u], ensure_ascii=False)
self.assertEqual(j, u'["' + u + u'"]')
def test_big_unicode_encode(self):
u = u'\U0001d120'
self.assertEqual(json.dumps(u), '"\\ud834\\udd20"')
self.assertEqual(json.dumps(u, ensure_ascii=False), u'"\U0001d120"')
def test_big_unicode_decode(self):
u = u'z\U0001d120x'
self.assertEqual(json.loads('"' + u + '"'), u)
self.assertEqual(json.loads('"z\\ud834\\udd20x"'), u)
def test_unicode_decode(self):
for i in range(0, 0xd7ff):
u = unichr(i)
#s = '"\\u{0:04x}"'.format(i)
s = '"\\u%04x"' % (i,)
self.assertEqual(json.loads(s), u)
def test_object_pairs_hook_with_unicode(self):
s = u'{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
p = [(u"xkd", 1), (u"kcw", 2), (u"art", 3), (u"hxm", 4),
(u"qrt", 5), (u"pad", 6), (u"hoy", 7)]
self.assertEqual(json.loads(s), eval(s))
self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p)
od = json.loads(s, object_pairs_hook=json.OrderedDict)
self.assertEqual(od, json.OrderedDict(p))
self.assertEqual(type(od), json.OrderedDict)
# the object_pairs_hook takes priority over the object_hook
self.assertEqual(json.loads(s,
object_pairs_hook=json.OrderedDict,
object_hook=lambda x: None),
json.OrderedDict(p))
def test_default_encoding(self):
self.assertEqual(json.loads(u'{"a": "\xe9"}'.encode('utf-8')),
{'a': u'\xe9'})
def test_unicode_preservation(self):
self.assertEqual(type(json.loads(u'""')), text_type)
self.assertEqual(type(json.loads(u'"a"')), text_type)
self.assertEqual(type(json.loads(u'["a"]')[0]), text_type)
def test_ensure_ascii_false_returns_unicode(self):
# http://code.google.com/p/simplejson/issues/detail?id=48
self.assertEqual(type(json.dumps([], ensure_ascii=False)), text_type)
self.assertEqual(type(json.dumps(0, ensure_ascii=False)), text_type)
self.assertEqual(type(json.dumps({}, ensure_ascii=False)), text_type)
self.assertEqual(type(json.dumps("", ensure_ascii=False)), text_type)
def test_ensure_ascii_false_bytestring_encoding(self):
# http://code.google.com/p/simplejson/issues/detail?id=48
doc1 = {u'quux': b('Arr\xc3\xaat sur images')}
doc2 = {u'quux': u('Arr\xeat sur images')}
doc_ascii = '{"quux": "Arr\\u00eat sur images"}'
doc_unicode = u'{"quux": "Arr\xeat sur images"}'
self.assertEqual(json.dumps(doc1), doc_ascii)
self.assertEqual(json.dumps(doc2), doc_ascii)
self.assertEqual(json.dumps(doc1, ensure_ascii=False), doc_unicode)
self.assertEqual(json.dumps(doc2, ensure_ascii=False), doc_unicode)
def test_ensure_ascii_linebreak_encoding(self):
# http://timelessrepo.com/json-isnt-a-javascript-subset
s1 = u'\u2029\u2028'
s2 = s1.encode('utf8')
expect = '"\\u2029\\u2028"'
self.assertEqual(json.dumps(s1), expect)
self.assertEqual(json.dumps(s2), expect)
self.assertEqual(json.dumps(s1, ensure_ascii=False), expect)
self.assertEqual(json.dumps(s2, ensure_ascii=False), expect)
def test_invalid_escape_sequences(self):
# incomplete escape sequence
self.assertRaises(json.JSONDecodeError, json.loads, '"\\u')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1234')
# invalid escape sequence
self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123x"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12x4"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1x34"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ux234"')
if sys.maxunicode > 65535:
# invalid escape sequence for low surrogate
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000x"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00x0"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0x00"')
self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\ux000"')
def test_ensure_ascii_still_works(self):
# in the ascii range, ensure that everything is the same
for c in map(unichr, range(0, 127)):
self.assertEqual(
json.dumps(c, ensure_ascii=False),
json.dumps(c))
        snowman = u'\N{SNOWMAN}'
        self.assertEqual(
            json.dumps(snowman, ensure_ascii=False),
            u'"' + snowman + u'"')
def test_strip_bom(self):
content = u"\u3053\u3093\u306b\u3061\u308f"
json_doc = codecs.BOM_UTF8 + b(json.dumps(content))
self.assertEqual(json.load(BytesIO(json_doc)), content)
for doc in json_doc, json_doc.decode('utf8'):
self.assertEqual(json.loads(doc), content)