test_chunk_writer.py :  » Development » Bazaar » bzr-2.2b3 » bzrlib » tests » Python Open Source

Home
Python Open Source
1.3.1.2 Python
2.Ajax
3.Aspect Oriented
4.Blog
5.Build
6.Business Application
7.Chart Report
8.Content Management Systems
9.Cryptographic
10.Database
11.Development
12.Editor
13.Email
14.ERP
15.Game 2D 3D
16.GIS
17.GUI
18.IDE
19.Installer
20.IRC
21.Issue Tracker
22.Language Interface
23.Log
24.Math
25.Media Sound Audio
26.Mobile
27.Network
28.Parser
29.PDF
30.Project Management
31.RSS
32.Search
33.Security
34.Template Engines
35.Test
36.UML
37.USB Serial
38.Web Frameworks
39.Web Server
40.Web Services
41.Web Unit
42.Wiki
43.Windows
44.XML
Python Open Source » Development » Bazaar 
Bazaar » bzr 2.2b3 » bzrlib » tests » test_chunk_writer.py
# Copyright (C) 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#

"""Tests for writing fixed size chunks with compression."""

import zlib

from bzrlib import chunk_writer
from bzrlib.tests import TestCaseWithTransport


class TestWriter(TestCaseWithTransport):
    """Tests for chunk_writer.ChunkWriter: fixed-size zlib-compressed chunks."""

    def check_chunk(self, bytes_list, size):
        """Join a chunk's byte strings, assert the total is exactly `size`
        bytes, and return the decompressed payload."""
        data = ''.join(bytes_list)
        self.assertEqual(size, len(data))
        return zlib.decompress(data)

    def _overflow_lines(self):
        """Return lines totalling well over 4096 bytes once compressed.

        Each line is the concatenation of 50 consecutive integers, which is
        incompressible enough that a 4096-byte chunk overflows part-way
        through the list.  Shared by the two overflow tests below.
        """
        lines = []
        for group in range(48):
            offset = group * 50
            numbers = range(offset, offset + 50)
            # Create a line with this group
            lines.append(''.join(map(str, numbers)) + '\n')
        return lines

    def test_chunk_writer_empty(self):
        """An empty writer still emits a full-size, padded chunk."""
        writer = chunk_writer.ChunkWriter(4096)
        bytes_list, unused, padding = writer.finish()
        node_bytes = self.check_chunk(bytes_list, 4096)
        self.assertEqual("", node_bytes)
        self.assertEqual(None, unused)
        # Only a zlib header.
        self.assertEqual(4088, padding)

    def test_optimize_for_speed(self):
        """Both set_optimize(for_size=False) and the constructor flag select
        the speed-oriented repack options."""
        writer = chunk_writer.ChunkWriter(4096)
        writer.set_optimize(for_size=False)
        self.assertEqual(chunk_writer.ChunkWriter._repack_opts_for_speed,
                         (writer._max_repack, writer._max_zsync))
        writer = chunk_writer.ChunkWriter(4096, optimize_for_size=False)
        self.assertEqual(chunk_writer.ChunkWriter._repack_opts_for_speed,
                         (writer._max_repack, writer._max_zsync))

    def test_optimize_for_size(self):
        """Both set_optimize(for_size=True) and the constructor flag select
        the size-oriented repack options."""
        writer = chunk_writer.ChunkWriter(4096)
        writer.set_optimize(for_size=True)
        self.assertEqual(chunk_writer.ChunkWriter._repack_opts_for_size,
                         (writer._max_repack, writer._max_zsync))
        writer = chunk_writer.ChunkWriter(4096, optimize_for_size=True)
        self.assertEqual(chunk_writer.ChunkWriter._repack_opts_for_size,
                         (writer._max_repack, writer._max_zsync))

    def test_some_data(self):
        """A small write round-trips through compression and padding."""
        writer = chunk_writer.ChunkWriter(4096)
        writer.write("foo bar baz quux\n")
        bytes_list, unused, padding = writer.finish()
        node_bytes = self.check_chunk(bytes_list, 4096)
        self.assertEqual("foo bar baz quux\n", node_bytes)
        self.assertEqual(None, unused)
        # More than just the header..
        self.assertEqual(4073, padding)

    def test_too_much_data_does_not_exceed_size(self):
        """Writing past capacity stops at the chunk boundary and returns the
        first rejected line as unused."""
        # Generate enough data to exceed 4K
        lines = self._overflow_lines()
        writer = chunk_writer.ChunkWriter(4096)
        for idx, line in enumerate(lines):
            if writer.write(line):
                self.assertEqual(46, idx)
                break
        else:
            # Guard against the loop never overflowing; without this the
            # test would fail later with a confusing diff instead.
            self.fail('We were able to write all lines')
        bytes_list, unused, _ = writer.finish()
        node_bytes = self.check_chunk(bytes_list, 4096)
        # the first 46 lines should have been added
        expected_bytes = ''.join(lines[:46])
        self.assertEqualDiff(expected_bytes, node_bytes)
        # And the line that failed should have been saved for us
        self.assertEqual(lines[46], unused)

    def test_too_much_data_preserves_reserve_space(self):
        """A reserved byte budget is refused to normal writes but accepted
        for reserved=True writes after overflow."""
        # Generate enough data to exceed 4K
        lines = self._overflow_lines()
        writer = chunk_writer.ChunkWriter(4096, 256)
        for idx, line in enumerate(lines):
            if writer.write(line):
                self.assertEqual(44, idx)
                break
        else:
            self.fail('We were able to write all lines')
        self.assertFalse(writer.write("A"*256, reserved=True))
        bytes_list, unused, _ = writer.finish()
        node_bytes = self.check_chunk(bytes_list, 4096)
        # the first 44 lines should have been added
        expected_bytes = ''.join(lines[:44]) + "A"*256
        self.assertEqualDiff(expected_bytes, node_bytes)
        # And the line that failed should have been saved for us
        self.assertEqual(lines[44], unused)
www.java2java.com | Contact Us
Copyright 2009–2012 Demo Source and Support. All rights reserved.
All other trademarks are property of their respective owners.