Commit 537be6a3 authored by dpranke@chromium.org

2011-01-30 Dirk Pranke <dpranke@chromium.org>

        Reviewed by Mihai Parparita.

        Add more unit tests for rebaseline-chromium-webkit-tests. This
        change also involves restructuring a fair amount of r-c-w-t code to
        make it more testable. We also add wrapper classes for fetching URLs
        and handling zip files so that both can be mocked out in tests.

        https://bugs.webkit.org/show_bug.cgi?id=53040

        * Scripts/webkitpy/common/system/urlfetcher.py:
        * Scripts/webkitpy/common/system/urlfetcher_mock.py:
        * Scripts/webkitpy/common/system/zipfileset_mock.py:
        * Scripts/webkitpy/layout_tests/rebaseline_chromium_webkit_tests.py:
        * Scripts/webkitpy/layout_tests/rebaseline_chromium_webkit_tests_unittest.py:
        * Scripts/webkitpy/tool/mocktool.py:

git-svn-id: http://svn.webkit.org/repository/webkit/trunk@77095 268f45cc-cd09-0410-ab3c-d52691b4dbfc
parent 7ddf61bc
@@ -41,4 +41,4 @@ sys.path.append(os.path.join(webkitpy_directory, "thirdparty"))
import rebaseline_chromium_webkit_tests
if __name__ == '__main__':
rebaseline_chromium_webkit_tests.main()
rebaseline_chromium_webkit_tests.main(sys.argv[1:])
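To make the effect of this one-line change clearer, here is a minimal sketch of the resulting entry point. It is an illustration rather than the actual wrapper script, whose path-setup lines are omitted here:

# Illustration only: the wrapper now passes its arguments explicitly instead
# of letting main() read sys.argv itself.
import sys

import rebaseline_chromium_webkit_tests

if __name__ == '__main__':
    rebaseline_chromium_webkit_tests.main(sys.argv[1:])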
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Wrapper module for fetching URLs."""
import urllib
class UrlFetcher(object):
"""Class with restricted interface to fetch URLs (makes testing easier)"""
def __init__(self, filesystem):
self._filesystem = filesystem
def fetch(self, url):
"""Fetches the contents of the URL as a string."""
file_object = urllib.urlopen(url)
content = file_object.read()
file_object.close()
return content
def fetch_into_file(self, url):
"""Fetches the contents of the URL into a temporary file and return the filename.
This is the equivalent of urllib.retrieve() except that we don't return any headers.
"""
file_object, filename = self._filesystem.open_binary_tempfile('-fetched')
contents = self.fetch(url)
file_object.write(contents)
file_object.close()
return filename
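As an aside, a minimal usage sketch of the new fetcher (not part of the commit); it assumes webkitpy's FileSystem wrapper lives at webkitpy.common.system.filesystem, and the URLs are placeholders:

# Illustration only: feeding UrlFetcher a real filesystem object.
from webkitpy.common.system.filesystem import FileSystem
from webkitpy.common.system import urlfetcher

fetcher = urlfetcher.UrlFetcher(FileSystem())
page = fetcher.fetch('http://example.com/results.html')            # body returned as a string
local_path = fetcher.fetch_into_file('http://example.com/a.zip')   # saved to a temp file; path returned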
def make_fetcher_cls(urls):
    """UrlFetcher factory routine that simulates network access
    using a dict of URLs -> contents."""

    class MockFetcher(object):
        def __init__(self, filesystem):
            self._filesystem = filesystem

        def fetch(self, url):
            return urls[url]

        def fetch_into_file(self, url):
            f, fn = self._filesystem.open_binary_tempfile('mockfetcher')
            f.write(self.fetch(url))
            f.close()
            return fn

    return MockFetcher
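A short sketch (again illustrative rather than part of the change) of how a test might use this factory together with webkitpy's MockFileSystem; the URL and its contents are placeholders:

# Illustration only: simulated network access backed by a dict.
from webkitpy.common.system import filesystem_mock, urlfetcher_mock

urls = {'http://example.com/results/': '<a href="1/">'}
MockFetcher = urlfetcher_mock.make_fetcher_cls(urls)
fetcher = MockFetcher(filesystem_mock.MockFileSystem())
assert fetcher.fetch('http://example.com/results/') == '<a href="1/">'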
def make_factory(ziphashes):
    """ZipFileSet factory routine that looks up zipfiles in a dict;
    each zipfile should also be a dict of member names -> contents."""

    class MockZipFileSet(object):
        def __init__(self, url):
            self._url = url
            self._ziphash = ziphashes[url]

        def namelist(self):
            return self._ziphash.keys()

        def read(self, member):
            return self._ziphash[member]

        def close(self):
            pass

    def maker(url):
        # We return None because there's no tempfile to delete.
        return (None, MockZipFileSet(url))

    return maker
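And a comparable sketch of the zip-file factory (illustrative; the URL and member names are placeholders). The factory returns a (tempfile, zip-like object) pair, with None signalling that there is no tempfile to delete:

# Illustration only: a fake zip archive defined as a dict of member names -> contents.
from webkitpy.common.system import zipfileset_mock

ziphashes = {
    'http://example.com/layout-test-results.zip': {
        'layout-test-results/foo-actual.txt': 'new-foo-txt',
    },
}
factory = zipfileset_mock.make_factory(ziphashes)
temp_file, zip_set = factory('http://example.com/layout-test-results.zip')  # temp_file is None
assert zip_set.read('layout-test-results/foo-actual.txt') == 'new-foo-txt'
zip_set.close()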
@@ -47,18 +47,17 @@ import optparse
import re
import sys
import time
import urllib
import zipfile
from webkitpy.common.checkout import scm
from webkitpy.common.system import zipfileset
from webkitpy.common.system import path
from webkitpy.common.system import urlfetcher
from webkitpy.common.system.executive import ScriptError
import port
from layout_package import test_expectations
from webkitpy.layout_tests import port
from webkitpy.layout_tests.layout_package import test_expectations
_log = logging.getLogger("webkitpy.layout_tests."
"rebaseline_chromium_webkit_tests")
_log = logging.getLogger(__name__)
BASELINE_SUFFIXES = ['.txt', '.png', '.checksum']
REBASELINE_PLATFORM_ORDER = ['mac', 'win', 'win-xp', 'win-vista', 'linux']
@@ -142,7 +141,7 @@ class Rebaseliner(object):
REVISION_REGEX = r'<a href=\"(\d+)/\">'
def __init__(self, running_port, target_port, platform, options):
def __init__(self, running_port, target_port, platform, options, url_fetcher, zip_factory, scm):
"""
Args:
running_port: the Port the script is running on.
@@ -150,14 +149,19 @@ class Rebaseliner(object):
configuration information like the test_expectations.txt
file location and the list of test platforms.
platform: the test platform to rebaseline
options: the command-line options object."""
options: the command-line options object.
url_fetcher: object that can fetch the contents of URLs
zip_factory: optional object that can fetch zip files from URLs
scm: scm object for adding new baselines
"""
self._platform = platform
self._options = options
self._port = running_port
self._filesystem = running_port._filesystem
self._target_port = target_port
self._rebaseline_port = port.get(
self._target_port.test_platform_name_to_name(platform), options)
self._target_port.test_platform_name_to_name(platform), options,
filesystem=self._filesystem)
self._rebaselining_tests = []
self._rebaselined_tests = []
@@ -173,7 +177,9 @@ class Rebaseliner(object):
self._platform,
False,
False)
self._scm = scm.default_scm()
self._url_fetcher = url_fetcher
self._zip_factory = zip_factory
self._scm = scm
def run(self, backup):
"""Run rebaseline process."""
@@ -192,8 +198,11 @@ class Rebaseliner(object):
log_dashed_string('Extracting and adding new baselines',
self._platform)
if not self._extract_and_add_new_baselines(archive_file):
archive_file.close()
return False
archive_file.close()
log_dashed_string('Updating rebaselined tests in file',
self._platform)
self._update_rebaselined_tests_in_file(backup)
@@ -254,9 +263,7 @@ class Rebaseliner(object):
_log.debug('Url to retrieve revision: "%s"', url)
f = urllib.urlopen(url)
content = f.read()
f.close()
content = self._url_fetcher.fetch(url)
revisions = re.findall(self.REVISION_REGEX, content)
if not revisions:
@@ -313,33 +320,24 @@ class Rebaseliner(object):
return archive_url
def _download_buildbot_archive(self):
"""Download layout test archive file from buildbot.
Returns:
True if download succeeded or
False otherwise.
"""
"""Download layout test archive file from buildbot and return a handle to it."""
url = self._get_archive_url()
if url is None:
return None
fn = urllib.urlretrieve(url)[0]
_log.info('Archive downloaded and saved to file: "%s"', fn)
return fn
def _extract_and_add_new_baselines(self, archive_file):
"""Extract new baselines from archive and add them to SVN repository.
archive_file = zipfileset.ZipFileSet(url, filesystem=self._filesystem,
zip_factory=self._zip_factory)
_log.info('Archive downloaded')
return archive_file
Args:
archive_file: full path to the archive file.
def _extract_and_add_new_baselines(self, zip_file):
"""Extract new baselines from the zip file and add them to SVN repository.
Returns:
List of tests that have been rebaselined or
None on failure.
"""
zip_file = zipfile.ZipFile(archive_file, 'r')
zip_namelist = zip_file.namelist()
_log.debug('zip file namelist:')
@@ -419,7 +417,6 @@ class Rebaseliner(object):
test_no += 1
zip_file.close()
self._filesystem.remove(archive_file)
return self._rebaselined_tests
@@ -857,18 +854,9 @@ def parse_options(args):
return (options, target_options)
def main():
"""Main function to produce new baselines."""
(options, target_options) = parse_options(sys.argv[1:])
# We need to create three different Port objects over the life of this
# script. |target_port_obj| is used to determine configuration information:
# location of the expectations file, names of ports to rebaseline, etc.
# |port_obj| is used for runtime functionality like actually diffing
# Then we create a rebaselining port to actual find and manage the
# baselines.
target_port_obj = port.get(None, target_options)
def main(args):
"""Bootstrap function that sets up the object references we need and calls real_main()."""
options, target_options = parse_options(args)
# Set up our logging format.
log_level = logging.INFO
@@ -879,20 +867,53 @@ def main():
'%(levelname)s %(message)s'),
datefmt='%y%m%d %H:%M:%S')
target_port_obj = port.get(None, target_options)
host_port_obj = get_host_port_object(options)
if not host_port_obj:
sys.exit(1)
if not host_port_obj or not target_port_obj:
return 1
url_fetcher = urlfetcher.UrlFetcher(host_port_obj._filesystem)
scm_obj = scm.default_scm()
# We use the default zip factory method.
zip_factory = None
return real_main(options, target_options, host_port_obj, target_port_obj, url_fetcher,
zip_factory, scm_obj)
def real_main(options, target_options, host_port_obj, target_port_obj, url_fetcher,
zip_factory, scm_obj):
"""Main function to produce new baselines. The Rebaseliner object uses two
different Port objects - one to represent the machine the object is running
on, and one to represent the port whose expectations are being updated.
E.g., you can run the script on a mac and rebaseline the 'win' port.
Args:
options: command-line argument used for the host_port_obj (see below)
target_options: command-line argument used for the target_port_obj.
This object may have slightly different values than |options|.
host_port_obj: a Port object for the platform the script is running
on. This is used to produce image and text diffs, mostly, and
is usually acquired from get_host_port_object().
target_port_obj: a Port obj representing the port getting rebaselined.
This is used to find the expectations file, the baseline paths,
etc.
url_fetcher: object used to download the build archives from the bots
zip_factory: factory function used to create zip file objects for
the archives.
scm_obj: object used to add new baselines to the source control system.
"""
# Verify 'platforms' option is valid.
if not options.platforms:
_log.error('Invalid "platforms" option. --platforms must be '
'specified in order to rebaseline.')
sys.exit(1)
return 1
platforms = [p.strip().lower() for p in options.platforms.split(',')]
for platform in platforms:
if not platform in REBASELINE_PLATFORM_ORDER:
_log.error('Invalid platform: "%s"' % (platform))
sys.exit(1)
return 1
# Adjust the platform order so rebaseline tool is running at the order of
# 'mac', 'win' and 'linux'. This is in same order with layout test baseline
@@ -909,7 +930,8 @@ def main():
backup = options.backup
for platform in rebaseline_platforms:
rebaseliner = Rebaseliner(host_port_obj, target_port_obj,
platform, options)
platform, options, url_fetcher, zip_factory,
scm_obj)
_log.info('')
log_dashed_string('Rebaseline started', platform)
@@ -934,7 +956,8 @@ def main():
html_generator.show_html()
log_dashed_string('Rebaselining result comparison done', None)
sys.exit(0)
return 0
if '__main__' == __name__:
main()
sys.exit(main(sys.argv[1:]))
@@ -32,10 +32,14 @@
import unittest
from webkitpy.tool import mocktool
from webkitpy.common.system import urlfetcher_mock
from webkitpy.common.system import filesystem_mock
from webkitpy.common.system import zipfileset_mock
from webkitpy.common.system import outputcapture
from webkitpy.common.system.executive import Executive, ScriptError
import port
import rebaseline_chromium_webkit_tests
from webkitpy.layout_tests import port
from webkitpy.layout_tests import rebaseline_chromium_webkit_tests
class MockPort(object):
@@ -52,6 +56,57 @@ def get_mock_get(config_expectations):
return mock_get
ARCHIVE_URL = 'http://localhost/layout_test_results'
def test_options():
return mocktool.MockOptions(configuration=None,
backup=False,
html_directory='/tmp',
archive_url=ARCHIVE_URL,
force_archive_url=None,
webkit_canary=True,
use_drt=False,
target_platform='chromium',
verbose=False,
quiet=False,
platforms='mac,win')
def test_host_port_and_filesystem(options, expectations):
filesystem = port.unit_test_filesystem()
host_port_obj = port.get('test', options, filesystem=filesystem,
user=mocktool.MockUser())
expectations_path = host_port_obj.path_to_test_expectations_file()
filesystem.write_text_file(expectations_path, expectations)
return (host_port_obj, filesystem)
def test_url_fetcher(filesystem):
urls = {
ARCHIVE_URL + '/Webkit_Mac10_5/': '<a href="1/"><a href="2/">',
ARCHIVE_URL + '/Webkit_Win/': '<a href="1/">',
}
return urlfetcher_mock.make_fetcher_cls(urls)(filesystem)
def test_zip_factory():
ziphashes = {
ARCHIVE_URL + '/Webkit_Mac10_5/2/layout-test-results.zip': {
'layout-test-results/failures/expected/image-actual.txt': 'new-image-txt',
'layout-test-results/failures/expected/image-actual.checksum': 'new-image-checksum',
'layout-test-results/failures/expected/image-actual.png': 'new-image-png',
},
ARCHIVE_URL + '/Webkit_Win/1/layout-test-results.zip': {
'layout-test-results/failures/expected/image-actual.txt': 'win-image-txt',
'layout-test-results/failures/expected/image-actual.checksum': 'win-image-checksum',
'layout-test-results/failures/expected/image-actual.png': 'win-image-png',
},
}
return zipfileset_mock.make_factory(ziphashes)
class TestGetHostPortObject(unittest.TestCase):
def assert_result(self, release_present, debug_present, valid_port_obj):
# Tests whether we get a valid port object returned when we claim
@@ -59,9 +114,8 @@ class TestGetHostPortObject(unittest.TestCase):
port.get = get_mock_get({'Release': release_present,
'Debug': debug_present})
options = mocktool.MockOptions(configuration=None,
html_directory=None)
port_obj = rebaseline_chromium_webkit_tests.get_host_port_object(
options)
html_directory='/tmp')
port_obj = rebaseline_chromium_webkit_tests.get_host_port_object(options)
if valid_port_obj:
self.assertNotEqual(port_obj, None)
else:
@@ -83,18 +137,7 @@ class TestGetHostPortObject(unittest.TestCase):
port.get = old_get
class TestRebaseliner(unittest.TestCase):
def make_rebaseliner(self):
options = mocktool.MockOptions(configuration=None,
html_directory=None)
filesystem = port.unit_test_filesystem()
host_port_obj = port.get('test', options, filesystem=filesystem)
target_options = options
target_port_obj = port.get('test', target_options, filesystem=filesystem)
platform = target_port_obj.test_platform_name()
return rebaseline_chromium_webkit_tests.Rebaseliner(
host_port_obj, target_port_obj, platform, options)
class TestOptions(unittest.TestCase):
def test_parse_options(self):
(options, target_options) = rebaseline_chromium_webkit_tests.parse_options([])
self.assertTrue(target_options.chromium)
@@ -104,39 +147,106 @@ class TestRebaseliner(unittest.TestCase):
self.assertFalse(hasattr(target_options, 'chromium'))
self.assertEqual(options.tolerance, 0)
class TestRebaseliner(unittest.TestCase):
def make_rebaseliner(self, expectations):
options = test_options()
host_port_obj, filesystem = test_host_port_and_filesystem(options, expectations)
target_options = options
target_port_obj = port.get('test', target_options,
filesystem=filesystem)
target_port_obj._expectations = expectations
platform = target_port_obj.test_platform_name()
url_fetcher = test_url_fetcher(filesystem)
zip_factory = test_zip_factory()
mock_scm = mocktool.MockSCM()
rebaseliner = rebaseline_chromium_webkit_tests.Rebaseliner(host_port_obj,
target_port_obj, platform, options, url_fetcher, zip_factory, mock_scm)
return rebaseliner, filesystem
def test_noop(self):
# this method tests that we can at least instantiate an object, even
# if there is nothing to do.
rebaseliner = self.make_rebaseliner()
self.assertNotEqual(rebaseliner, None)
rebaseliner, filesystem = self.make_rebaseliner("")
rebaseliner.run(False)
self.assertEqual(len(filesystem.written_files), 1)
def test_one_platform(self):
rebaseliner, filesystem = self.make_rebaseliner(
"BUGX REBASELINE MAC : failures/expected/image.html = IMAGE")
rebaseliner.run(False)
# We expect to have written 12 files over the course of this rebaseline:
# *) 3 files in /__im_tmp for the extracted archive members
# *) 3 new baselines under '/test.checkout/LayoutTests'
# *) 1 updated test_expectations file
# *) 4 files in /tmp for the new and old baselines in the result file
# *) 1 text diff in /tmp for the result file
self.assertEqual(len(filesystem.written_files), 12)
self.assertEqual(filesystem.files['/test.checkout/LayoutTests/platform/test/test_expectations.txt'], '')
self.assertEqual(filesystem.files['/test.checkout/LayoutTests/platform/test-mac/failures/expected/image-expected.checksum'], 'new-image-checksum')
self.assertEqual(filesystem.files['/test.checkout/LayoutTests/platform/test-mac/failures/expected/image-expected.png'], 'new-image-png')
self.assertEqual(filesystem.files['/test.checkout/LayoutTests/platform/test-mac/failures/expected/image-expected.txt'], 'new-image-txt')
def test_all_platforms(self):
rebaseliner, filesystem = self.make_rebaseliner(
"BUGX REBASELINE : failures/expected/image.html = IMAGE")
rebaseliner.run(False)
# See comment in test_one_platform for an explanation of the 12 written files.
# Note that even though the rebaseline is marked for all platforms, each
# rebaseliner only ever does one.
self.assertEqual(len(filesystem.written_files), 12)
self.assertEqual(filesystem.files['/test.checkout/LayoutTests/platform/test/test_expectations.txt'], 'BUGX REBASELINE WIN : failures/expected/image.html = IMAGE\n')
self.assertEqual(filesystem.files['/test.checkout/LayoutTests/platform/test-mac/failures/expected/image-expected.checksum'], 'new-image-checksum')
self.assertEqual(filesystem.files['/test.checkout/LayoutTests/platform/test-mac/failures/expected/image-expected.png'], 'new-image-png')
self.assertEqual(filesystem.files['/test.checkout/LayoutTests/platform/test-mac/failures/expected/image-expected.txt'], 'new-image-txt')
def test_diff_baselines_txt(self):
rebaseliner = self.make_rebaseliner()
output = rebaseliner._port.expected_text(
rebaseliner._port._filesystem.join(rebaseliner._port.layout_tests_dir(),
'passes/text.html'))
rebaseliner, filesystem = self.make_rebaseliner("")
port = rebaseliner._port
output = port.expected_text(
port._filesystem.join(port.layout_tests_dir(), 'passes/text.html'))
self.assertFalse(rebaseliner._diff_baselines(output, output,
is_image=False))
def test_diff_baselines_png(self):
rebaseliner = self.make_rebaseliner()
image = rebaseliner._port.expected_image(
rebaseliner._port._filesystem.join(rebaseliner._port.layout_tests_dir(),
'passes/image.html'))
rebaseliner, filesystem = self.make_rebaseliner('')
port = rebaseliner._port
image = port.expected_image(
port._filesystem.join(port.layout_tests_dir(), 'passes/image.html'))
self.assertFalse(rebaseliner._diff_baselines(image, image,
is_image=True))